Add a bunch of doc comments

Mark Mansi 2018-01-19 16:39:54 -06:00
parent c4befe1710
commit 49431d4966


@ -18,6 +18,8 @@ use tokenstream;
use std::rc::Rc;
/// Contains the sub-token-trees of a "delimited" token tree, such as the contents of `(`. Note
/// that the delimiter itself might be `NoDelim`.
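///
/// For example, for the delimited token tree `(a, b)`, `delim` would be `token::Paren` and the
/// sub-token-trees would be the tokens `a`, `,`, and `b`.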
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Delimited {
pub delim: token::DelimToken,
@ -25,14 +27,17 @@ pub struct Delimited {
}
impl Delimited {
/// Return the opening delimiter (possibly `NoDelim`).
pub fn open_token(&self) -> token::Token {
token::OpenDelim(self.delim)
}
/// Return the closing delimiter (possibly `NoDelim`).
pub fn close_token(&self) -> token::Token {
token::CloseDelim(self.delim)
}
/// Return a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
pub fn open_tt(&self, span: Span) -> TokenTree {
let open_span = if span == DUMMY_SP {
DUMMY_SP
@ -42,6 +47,7 @@ impl Delimited {
TokenTree::Token(open_span, self.open_token())
}
/// Return a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
pub fn close_tt(&self, span: Span) -> TokenTree {
let close_span = if span == DUMMY_SP {
DUMMY_SP
@ -75,7 +81,7 @@ pub enum KleeneOp {
}
/// Similar to `tokenstream::TokenTree`, except that `$i`, `$i:ident`, and `$(...)`
/// are "first-class" token trees.
/// are "first-class" token trees. Useful for parsing macros.
#[derive(Debug, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub enum TokenTree {
Token(Span, token::Token),
@ -93,6 +99,7 @@ pub enum TokenTree {
}
impl TokenTree {
/// Return the number of tokens in the tree.
pub fn len(&self) -> usize {
match *self {
TokenTree::Delimited(_, ref delimed) => match delimed.delim {
@ -104,6 +111,8 @@ impl TokenTree {
}
}
/// Returns `true` if the given token tree contains no other tokens. This is vacuously true for
/// single tokens and metavariables (including metavariable declarations), but may be false for
/// delimited trees or sequences.
pub fn is_empty(&self) -> bool {
match *self {
TokenTree::Delimited(_, ref delimed) => match delimed.delim {
@ -115,6 +124,7 @@ impl TokenTree {
}
}
/// Get the `index`-th sub-token-tree. This only makes sense for delimited trees and sequences.
pub fn get_tt(&self, index: usize) -> TokenTree {
match (self, index) {
(&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
@ -146,15 +156,39 @@ impl TokenTree {
}
}
/// Takes a `tokenstream::TokenStream` and returns a `Vec<self::TokenTree>`. Specifically, this
/// takes a generic `TokenStream`, such as is used in the rest of the compiler, and returns a
/// collection of `TokenTree` for use in parsing a macro.
///
/// # Parameters
///
/// - `input`: a token stream whose contents we are parsing.
/// - `expect_matchers`: `parse` can be used to parse either the "patterns" or the "body" of a
/// macro. Both take roughly the same form _except_ that in a pattern, metavars are declared with
/// their "matcher" type, as in `$var:expr` or `$id:ident`; here, `expr` and `ident` are the
/// "matchers". Matchers appear only in the pattern of a macro rule, not in its body, so this
/// parameter indicates whether to expect them.
/// - `sess`: the parsing session. Any errors will be emitted to this session.
///
/// # Returns
///
/// A collection of `self::TokenTree`. There may also be some errors emitted to `sess`.
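///
/// # Example
///
/// A rough sketch of a call site (not compiled here); `lhs_stream`, `rhs_stream`, and `sess`
/// are placeholder names for a rule's pattern and body token streams and the current parsing
/// session:
///
/// ```ignore
/// // The pattern (LHS) of a rule declares matchers such as `$e:expr`, so expect them.
/// let lhs_tts = parse(lhs_stream, true, sess);
/// // The body (RHS) only uses bare metavars such as `$e`, so matchers are not expected.
/// let rhs_tts = parse(rhs_stream, false, sess);
/// ```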
pub fn parse(
input: tokenstream::TokenStream,
expect_matchers: bool,
sess: &ParseSess,
) -> Vec<TokenTree> {
// Will contain the final collection of `self::TokenTree`
let mut result = Vec::new();
// For each token tree in `input`, parse the token into a `self::TokenTree`, consuming
// additional trees if need be.
let mut trees = input.trees();
while let Some(tree) = trees.next() {
let tree = parse_tree(tree, &mut trees, expect_matchers, sess);
// Given the parsed tree, if there is a metavar and we are expecting matchers, actually
// parse out the matcher (i.e. in `$id:ident` this would parse the `:` and `ident`).
match tree {
TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
let span = match trees.next() {
@ -182,12 +216,27 @@ pub fn parse(
keywords::Invalid.ident(),
));
}
// Not a metavar, or matchers are not expected; just keep the tree as-is
_ => result.push(tree),
}
}
result
}
/// Takes a `tokenstream::TokenTree` and returns a `self::TokenTree`. Specifically, this takes a
/// generic `TokenTree`, such as is used in the rest of the compiler, and returns a `TokenTree`
/// for use in parsing a macro.
///
/// Converting the given tree may involve reading more tokens.
///
/// # Parameters
///
/// - `tree`: the tree we wish to convert.
/// - `trees`: an iterator over trees. We may need to read more tokens from it in order to finish
/// converting `tree`.
/// - `expect_matchers`: same as for `parse` (see above).
/// - `sess`: the parsing session. Any errors will be emitted to this session.
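///
/// For example, a `$` token followed by the identifier `x` is converted into a single
/// `TokenTree::MetaVar` spanning both tokens, and a `$` followed by a parenthesized group
/// (as in `$(a),*`) becomes a `TokenTree::Sequence`.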
fn parse_tree<I>(
tree: tokenstream::TokenTree,
trees: &mut I,
@ -197,16 +246,24 @@ fn parse_tree<I>(
where
I: Iterator<Item = tokenstream::TokenTree>,
{
// Depending on what `tree` is, we could be parsing different parts of a macro
match tree {
// `tree` is a `$` token. Look at the next token in `trees`.
tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() {
// `tree` is followed by a delimited set of token trees. This indicates the beginning
// of a repetition sequence in the macro (e.g. `$(pat)*`).
Some(tokenstream::TokenTree::Delimited(span, delimited)) => {
// Must have `(`, not `{` or `[`.
if delimited.delim != token::Paren {
let tok = pprust::token_to_string(&token::OpenDelim(delimited.delim));
let msg = format!("expected `(`, found `{}`", tok);
sess.span_diagnostic.span_err(span, &msg);
}
// Parse the contents of the sequence itself
let sequence = parse(delimited.tts.into(), expect_matchers, sess);
// Get the Kleene operator and optional separator
let (separator, op) = parse_sep_and_kleene_op(trees, span, sess);
// Count the number of captured "names" (i.e. named metavars)
let name_captures = macro_parser::count_names(&sequence);
TokenTree::Sequence(
span,
@ -218,6 +275,9 @@ where
}),
)
}
// `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special
// metavariable that names the crate of the invocation.
Some(tokenstream::TokenTree::Token(ident_span, ref token)) if token.is_ident() => {
let ident = token.ident().unwrap();
let span = ident_span.with_lo(span.lo());
@ -231,6 +291,8 @@ where
TokenTree::MetaVar(span, ident)
}
}
// `tree` is followed by some other (non-identifier) token. This is an error.
Some(tokenstream::TokenTree::Token(span, tok)) => {
let msg = format!(
"expected identifier, found `{}`",
@ -239,9 +301,16 @@ where
sess.span_diagnostic.span_err(span, &msg);
TokenTree::MetaVar(span, keywords::Invalid.ident())
}
// There are no more tokens. Just return the `$` we already have.
None => TokenTree::Token(span, token::Dollar),
},
// `tree` is an arbitrary token. Keep it.
tokenstream::TokenTree::Token(span, tok) => TokenTree::Token(span, tok),
// `tree` is the beginning of a delimited set of tokens (e.g. `(` or `{`). We need to
// descend into the delimited set and further parse it.
tokenstream::TokenTree::Delimited(span, delimited) => TokenTree::Delimited(
span,
Rc::new(Delimited {
@ -257,8 +326,8 @@ where
/// For example, in a pattern such as `$(a),*`, `a` is the pattern to be repeated, `,` is the
/// separator, and `*` is the Kleene operator. This function is specifically concerned with parsing
/// the last two tokens of such a pattern: namely, the optional separator and the Kleene operator
/// itself. Note that here we are parsing the _pattern_ itself, rather than trying to match some
/// stream of tokens against the pattern.
/// itself. Note that here we are parsing the _macro_ itself, rather than trying to match some
/// stream of tokens in an invocation of a macro.
///
/// This function will take some input iterator `input` corresponding to `span` and a parsing
/// session `sess`. If the next one (or possibly two) tokens in `input` correspond to a Kleene