// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use abi::{self, Abi};
use ast::{AngleBracketedParameterData, ParenthesizedParameterData, AttrStyle, BareFnTy};
use ast::{RegionTyParamBound, TraitTyParamBound, TraitBoundModifier};
use ast::Unsafety;
use ast::{Mod, Arg, Arm, Attribute, BindingMode, TraitItemKind};
use ast::Block;
use ast::{BlockCheckMode, CaptureBy, Movability};
use ast::{Constness, Crate};
use ast::Defaultness;
use ast::EnumDef;
use ast::{Expr, ExprKind, RangeLimits};
use ast::{Field, FnDecl};
use ast::{ForeignItem, ForeignItemKind, FunctionRetTy};
use ast::GenericParam;
use ast::{Ident, ImplItem, IsAuto, Item, ItemKind};
use ast::{Label, Lifetime, LifetimeDef, Lit, LitKind, UintTy};
use ast::Local;
use ast::MacStmtStyle;
use ast::Mac_;
use ast::{MutTy, Mutability};
use ast::{Pat, PatKind, PathSegment};
use ast::{PolyTraitRef, QSelf};
use ast::{Stmt, StmtKind};
use ast::{VariantData, StructField};
use ast::StrStyle;
use ast::SelfKind;
use ast::{TraitItem, TraitRef, TraitObjectSyntax};
use ast::{Ty, TyKind, TypeBinding, TyParam, TyParamBounds};
use ast::{Visibility, WhereClause, CrateSugar};
use ast::{UseTree, UseTreeKind};
use ast::{BinOpKind, UnOp};
use ast::{RangeEnd, RangeSyntax};
use {ast, attr};
use codemap::{self, CodeMap, Spanned, respan};
use syntax_pos::{self, Span, MultiSpan, BytePos, FileName, DUMMY_SP};
use errors::{self, DiagnosticBuilder};
use parse::{self, classify, token};
use parse::common::SeqSep;
use parse::lexer::TokenAndSpan;
use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
use parse::obsolete::ObsoleteSyntax;
use parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
use util::parser::{AssocOp, Fixity};
use print::pprust;
use ptr::P;
use parse::PResult;
use tokenstream::{self, Delimited, ThinTokenStream, TokenTree, TokenStream};
use symbol::{Symbol, keywords};
use util::ThinVec;

use std::cmp;
use std::collections::HashSet;
use std::mem;
use std::path::{self, Path, PathBuf};
use std::slice;

bitflags! {
    pub struct Restrictions: u8 {
        const STMT_EXPR         = 1 << 0;
        const NO_STRUCT_LITERAL = 1 << 1;
    }
}

type ItemInfo = (Ident, ItemKind, Option<Vec<Attribute>>);

/// How to parse a path.
#[derive(Copy, Clone, PartialEq)]
pub enum PathStyle {
    /// In some contexts, notably in expressions, paths with generic arguments are ambiguous
    /// with something else. For example, in expressions `segment < ....` can be interpreted
    /// as a comparison and `segment ( ....` can be interpreted as a function call.
    /// In all such contexts the non-path interpretation is preferred by default for practical
    /// reasons, but the path interpretation can be forced by the disambiguator `::`, e.g.
    /// `x<y>` - comparisons, `x::<y>` - unambiguously a path.
    Expr,
    /// In other contexts, notably in types, no ambiguity exists and paths can be written
    /// without the disambiguator, e.g. `x<y>` - unambiguously a path.
    /// Paths with disambiguators are still accepted, `x::<y>` - unambiguously a path too.
    Type,
    /// A path with generic arguments disallowed, e.g. `foo::bar::Baz`, used in imports,
    /// visibilities or attributes.
    /// Technically, this variant is unnecessary and e.g. `Expr` can be used instead
    /// (paths in "mod" contexts have to be checked later for absence of generic arguments
    /// anyway, due to macros), but it is used to avoid weird suggestions about expected
    /// tokens when something goes wrong.
    Mod,
}
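// Illustrative sketch, not part of the original file: the ambiguity that
// `PathStyle::Expr` describes, in plain surface syntax. In expression
// position, `<` after a path segment parses as a comparison unless the
// `::<` disambiguator ("turbofish") forces the path interpretation:
//
//     fn turbofish_demo() {
//         let (a, b) = (1, 2);
//         let _comparison = a < b;           // `a <` is a comparison here
//         let _path = Vec::<u8>::new();      // `::<` forces a generic path
//         let _method = "42".parse::<u32>(); // same rule in method position
//     }
//
// In type position (`PathStyle::Type`) no such ambiguity exists, so `Vec<u8>`
// needs no disambiguator.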
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SemiColonMode {
    Break,
    Ignore,
}

#[derive(Clone, Copy, Debug, PartialEq)]
pub enum BlockMode {
    Break,
    Ignore,
}

/// Possibly accept a `token::Interpolated` expression (a pre-parsed expression
/// dropped into the token stream, which happens while parsing the result of
/// macro expansion). Placement of these is not as complex as I feared it would
/// be. The important thing is to make sure that lookahead doesn't balk at
/// `token::Interpolated` tokens.
macro_rules! maybe_whole_expr {
    ($p:expr) => {
        if let token::Interpolated(nt) = $p.token.clone() {
            match nt.0 {
                token::NtExpr(ref e) => {
                    $p.bump();
                    return Ok((*e).clone());
                }
                token::NtPath(ref path) => {
                    $p.bump();
                    let span = $p.span;
                    let kind = ExprKind::Path(None, (*path).clone());
                    return Ok($p.mk_expr(span, kind, ThinVec::new()));
                }
                token::NtBlock(ref block) => {
                    $p.bump();
                    let span = $p.span;
                    let kind = ExprKind::Block((*block).clone());
                    return Ok($p.mk_expr(span, kind, ThinVec::new()));
                }
                _ => {},
            };
        }
    }
}

/// As `maybe_whole_expr`, but for things other than expressions
macro_rules! maybe_whole {
    ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
        if let token::Interpolated(nt) = $p.token.clone() {
            if let token::$constructor($x) = nt.0.clone() {
                $p.bump();
                return Ok($e);
            }
        }
    };
}

fn maybe_append(mut lhs: Vec<Attribute>, mut rhs: Option<Vec<Attribute>>) -> Vec<Attribute> {
    if let Some(ref mut rhs) = rhs {
        lhs.append(rhs);
    }
    lhs
}

#[derive(Debug, Clone, Copy, PartialEq)]
enum PrevTokenKind {
    DocComment,
    Comma,
    Plus,
    Interpolated,
    Eof,
    Ident,
    Other,
}

trait RecoverQPath: Sized {
    const PATH_STYLE: PathStyle = PathStyle::Expr;
    fn to_ty(&self) -> Option<P<Ty>>;
    fn to_recovered(&self, qself: Option<QSelf>, path: ast::Path) -> Self;
    fn to_string(&self) -> String;
}

impl RecoverQPath for Ty {
    const PATH_STYLE: PathStyle = PathStyle::Type;
    fn to_ty(&self) -> Option<P<Ty>> {
        Some(P(self.clone()))
    }
    fn to_recovered(&self, qself: Option<QSelf>, path: ast::Path) -> Self {
        Self { span: path.span, node: TyKind::Path(qself, path), id: self.id }
    }
    fn to_string(&self) -> String {
        pprust::ty_to_string(self)
    }
}

impl RecoverQPath for Pat {
    fn to_ty(&self) -> Option<P<Ty>> {
        self.to_ty()
    }
    fn to_recovered(&self, qself: Option<QSelf>, path: ast::Path) -> Self {
        Self { span: path.span, node: PatKind::Path(qself, path), id: self.id }
    }
    fn to_string(&self) -> String {
        pprust::pat_to_string(self)
    }
}

impl RecoverQPath for Expr {
    fn to_ty(&self) -> Option<P<Ty>> {
        self.to_ty()
    }
    fn to_recovered(&self, qself: Option<QSelf>, path: ast::Path) -> Self {
        Self { span: path.span, node: ExprKind::Path(qself, path),
               id: self.id, attrs: self.attrs.clone() }
    }
    fn to_string(&self) -> String {
        pprust::expr_to_string(self)
    }
}

/* ident is handled by common.rs */
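// Illustrative sketch, not part of the original file: what the `RecoverQPath`
// impls above enable. When a type, pattern, or expression is followed by
// `::segment`, the parser can rebuild what it just parsed into a qualified
// path and suggest the angle-bracketed form, e.g. for input like
//
//     let _ = [u8]::clone(&[1, 2]);
//
// suggesting
//
//     let _ = <[u8]>::clone(&[1, 2]);
//
// `to_ty` extracts the would-be self type, and `to_recovered` rebuilds the
// node as `TyKind::Path`/`PatKind::Path`/`ExprKind::Path` with the `QSelf`.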
#[derive(Clone)]
pub struct Parser<'a> {
    pub sess: &'a ParseSess,
    /// the current token:
    pub token: token::Token,
    /// the span of the current token:
    pub span: Span,
    /// the span of the previous token:
    pub meta_var_span: Option<Span>,
    pub prev_span: Span,
    /// the previous token kind
    prev_token_kind: PrevTokenKind,
    pub restrictions: Restrictions,
    /// The set of seen errors about obsolete syntax. Used to suppress
    /// extra detail when the same error is seen twice
    pub obsolete_set: HashSet<ObsoleteSyntax>,
    /// Used to determine the path to externally loaded source files
    pub directory: Directory,
    /// Whether to parse sub-modules in other files.
    pub recurse_into_file_modules: bool,
    /// Name of the root module this parser originated from. If `None`, then the
    /// name is not known. This does not change while the parser is descending
    /// into modules, and sub-parsers have new values for this name.
    pub root_module_name: Option<String>,
    pub expected_tokens: Vec<TokenType>,
    token_cursor: TokenCursor,
    pub desugar_doc_comments: bool,
    /// Whether we should configure out of line modules as we parse.
    pub cfg_mods: bool,
}

#[derive(Clone)]
struct TokenCursor {
    frame: TokenCursorFrame,
    stack: Vec<TokenCursorFrame>,
}

#[derive(Clone)]
struct TokenCursorFrame {
    delim: token::DelimToken,
    span: Span,
    open_delim: bool,
    tree_cursor: tokenstream::Cursor,
    close_delim: bool,
    last_token: LastToken,
}
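// Illustrative trace, not part of the original file: how the cursor walks a
// nested token stream (see `TokenCursor::next` below). For the tokens
// `a ( b ) c`, successive `next()` calls yield
//
//     a   (   b   )   c   <eof>
//
// where `(` pushes the current `TokenCursorFrame` onto `stack` and enters a
// new frame for the delimited subtree, and `)` is the synthetic close
// delimiter emitted once that frame's `tree_cursor` is exhausted.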
/// This is used in `TokenCursorFrame` above to track tokens that are consumed
/// by the parser, and then that's transitively used to record the tokens that
/// each parse AST item is created with.
///
/// Right now this has two states, either collecting tokens or not collecting
/// tokens. If we're collecting tokens we just save everything off into a local
/// `Vec`. This should eventually though likely save tokens from the original
/// token stream and just use slicing of token streams to avoid creation of a
/// whole new vector.
///
/// The second state is where we're passively not recording tokens, but the last
/// token is still tracked for when we want to start recording tokens. This
/// "last token" means that when we start recording tokens we'll want to ensure
/// that this, the first token, is included in the output.
///
/// You can find some more example usage of this in the `collect_tokens` method
/// on the parser.
#[derive(Clone)]
enum LastToken {
    Collecting(Vec<TokenTree>),
    Was(Option<TokenTree>),
}

impl TokenCursorFrame {
    fn new(sp: Span, delimited: &Delimited) -> Self {
        TokenCursorFrame {
            delim: delimited.delim,
            span: sp,
            open_delim: delimited.delim == token::NoDelim,
            tree_cursor: delimited.stream().into_trees(),
            close_delim: delimited.delim == token::NoDelim,
            last_token: LastToken::Was(None),
        }
    }
}

impl TokenCursor {
    fn next(&mut self) -> TokenAndSpan {
        loop {
            let tree = if !self.frame.open_delim {
                self.frame.open_delim = true;
                Delimited { delim: self.frame.delim, tts: TokenStream::empty().into() }
                    .open_tt(self.frame.span)
            } else if let Some(tree) = self.frame.tree_cursor.next() {
                tree
            } else if !self.frame.close_delim {
                self.frame.close_delim = true;
                Delimited { delim: self.frame.delim, tts: TokenStream::empty().into() }
                    .close_tt(self.frame.span)
            } else if let Some(frame) = self.stack.pop() {
                self.frame = frame;
                continue
            } else {
                return TokenAndSpan { tok: token::Eof, sp: syntax_pos::DUMMY_SP }
            };

            match self.frame.last_token {
                LastToken::Collecting(ref mut v) => v.push(tree.clone()),
                LastToken::Was(ref mut t) => *t = Some(tree.clone()),
            }

            match tree {
                TokenTree::Token(sp, tok) => return TokenAndSpan { tok: tok, sp: sp },
                TokenTree::Delimited(sp, ref delimited) => {
                    let frame = TokenCursorFrame::new(sp, delimited);
                    self.stack.push(mem::replace(&mut self.frame, frame));
                }
            }
        }
    }

    fn next_desugared(&mut self) -> TokenAndSpan {
        let (sp, name) = match self.next() {
            TokenAndSpan { sp, tok: token::DocComment(name) } => (sp, name),
            tok => return tok,
        };

        let stripped = strip_doc_comment_decoration(&name.as_str());

        // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
        // required to wrap the text.
        let mut num_of_hashes = 0;
        let mut count = 0;
        for ch in stripped.chars() {
            count = match ch {
                '"' => 1,
                '#' if count > 0 => count + 1,
                _ => 0,
            };
            num_of_hashes = cmp::max(num_of_hashes, count);
        }

        let body = TokenTree::Delimited(sp, Delimited {
            delim: token::Bracket,
            tts: [TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"))),
                  TokenTree::Token(sp, token::Eq),
                  TokenTree::Token(sp, token::Literal(
                      token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))]
                .iter().cloned().collect::<TokenStream>().into(),
        });

        self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new(sp, &Delimited {
            delim: token::NoDelim,
            tts: if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
                [TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body]
                    .iter().cloned().collect::<TokenStream>().into()
            } else {
                [TokenTree::Token(sp, token::Pound), body]
                    .iter().cloned().collect::<TokenStream>().into()
            },
        })));

        self.next()
    }
}

#[derive(PartialEq, Eq, Clone)]
pub enum TokenType {
    Token(token::Token),
    Keyword(keywords::Keyword),
    Operator,
    Lifetime,
    Ident,
    Path,
    Type,
}

impl TokenType {
    fn to_string(&self) -> String {
        match *self {
            TokenType::Token(ref t) => format!("`{}`", Parser::token_to_string(t)),
            TokenType::Keyword(kw) => format!("`{}`", kw.name()),
            TokenType::Operator => "an operator".to_string(),
            TokenType::Lifetime => "lifetime".to_string(),
            TokenType::Ident => "identifier".to_string(),
            TokenType::Path => "path".to_string(),
            TokenType::Type => "type".to_string(),
        }
    }
}

// Returns true if `IDENT t` can start a type - `IDENT::a::b`, `IDENT<u8, u8>`,
// `IDENT<<u8 as Trait>::AssocTy>`, `IDENT(u8, u8) -> u8`.
fn can_continue_type_after_ident(t: &token::Token) -> bool {
    t == &token::ModSep || t == &token::Lt ||
    t == &token::BinOp(token::Shl) || t == &token::OpenDelim(token::Paren)
}
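// Illustrative sketch, not part of the original file: what `next_desugared`
// above emits for a doc comment. An outer comment
//
//     /// Adds two numbers.
//
// is replayed into the token stream as the equivalent attribute (modulo
// exact whitespace)
//
//     #[doc = r" Adds two numbers."]
//
// using a raw string whose `#` count is the minimum needed to wrap the text
// (the `num_of_hashes` scan above); inner comments (`//!`) get the
// `#![doc = ...]` form instead.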
/// Information about the path to a module.
pub struct ModulePath {
    pub name: String,
    pub path_exists: bool,
    pub result: Result<ModulePathSuccess, Error>,
}

pub struct ModulePathSuccess {
    pub path: PathBuf,
    pub directory_ownership: DirectoryOwnership,
    warn: bool,
}

pub struct ModulePathError {
    pub err_msg: String,
    pub help_msg: String,
}

pub enum Error {
    FileNotFoundForModule {
        mod_name: String,
        default_path: String,
        secondary_path: String,
        dir_path: String,
    },
    DuplicatePaths {
        mod_name: String,
        default_path: String,
        secondary_path: String,
    },
    UselessDocComment,
    InclusiveRangeWithNoEnd,
}

impl Error {
    pub fn span_err<S: Into<MultiSpan>>(self, sp: S, handler: &errors::Handler) -> DiagnosticBuilder {
        match self {
            Error::FileNotFoundForModule { ref mod_name,
                                           ref default_path,
                                           ref secondary_path,
                                           ref dir_path } => {
                let mut err = struct_span_err!(handler, sp, E0583,
                                               "file not found for module `{}`", mod_name);
                err.help(&format!("name the file either {} or {} inside the directory {:?}",
                                  default_path,
                                  secondary_path,
                                  dir_path));
                err
            }
            Error::DuplicatePaths { ref mod_name, ref default_path, ref secondary_path } => {
                let mut err = struct_span_err!(handler, sp, E0584,
                                               "file for module `{}` found at both {} and {}",
                                               mod_name,
                                               default_path,
                                               secondary_path);
                err.help("delete or rename one of them to remove the ambiguity");
                err
            }
            Error::UselessDocComment => {
                let mut err = struct_span_err!(handler, sp, E0585,
                                  "found a documentation comment that doesn't document anything");
                err.help("doc comments must come before what they document, maybe a comment was \
                          intended with `//`?");
                err
            }
            Error::InclusiveRangeWithNoEnd => {
                let mut err = struct_span_err!(handler, sp, E0586,
                                               "inclusive range with no end");
                err.help("inclusive ranges must be bounded at the end (`..=b` or `a..=b`)");
                err
            }
        }
    }
}

#[derive(Debug)]
pub enum LhsExpr {
    NotYetParsed,
    AttributesParsed(ThinVec<Attribute>),
    AlreadyParsed(P<Expr>),
}

impl From<Option<ThinVec<Attribute>>> for LhsExpr {
    fn from(o: Option<ThinVec<Attribute>>) -> Self {
        if let Some(attrs) = o {
            LhsExpr::AttributesParsed(attrs)
        } else {
            LhsExpr::NotYetParsed
        }
    }
}

impl From<P<Expr>> for LhsExpr {
    fn from(expr: P<Expr>) -> Self {
        LhsExpr::AlreadyParsed(expr)
    }
}
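// Illustrative sketch, not part of the original file: source that produces
// `Error::UselessDocComment` (E0585) above, a doc comment with nothing after
// it to document:
//
//     fn f() {
//         /// this comment documents nothing
//     }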
/// Create a placeholder argument.
fn dummy_arg(span: Span) -> Arg {
    let spanned = Spanned { span, node: keywords::Invalid.ident() };
    let pat = P(Pat {
        id: ast::DUMMY_NODE_ID,
        node: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), spanned, None),
        span,
    });
    let ty = Ty {
        node: TyKind::Err,
        span,
        id: ast::DUMMY_NODE_ID
    };
    Arg { ty: P(ty), pat: pat, id: ast::DUMMY_NODE_ID }
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum TokenExpectType {
    Expect,
    NoExpect,
}

impl<'a> Parser<'a> {
    pub fn new(sess: &'a ParseSess,
               tokens: TokenStream,
               directory: Option<Directory>,
               recurse_into_file_modules: bool,
               desugar_doc_comments: bool)
               -> Self {
        let mut parser = Parser {
            sess,
            token: token::Underscore,
            span: syntax_pos::DUMMY_SP,
            prev_span: syntax_pos::DUMMY_SP,
            meta_var_span: None,
            prev_token_kind: PrevTokenKind::Other,
            restrictions: Restrictions::empty(),
            obsolete_set: HashSet::new(),
            recurse_into_file_modules,
            directory: Directory {
                path: PathBuf::new(),
                ownership: DirectoryOwnership::Owned { relative: None }
            },
            root_module_name: None,
            expected_tokens: Vec::new(),
            token_cursor: TokenCursor {
                frame: TokenCursorFrame::new(syntax_pos::DUMMY_SP, &Delimited {
                    delim: token::NoDelim,
                    tts: tokens.into(),
                }),
                stack: Vec::new(),
            },
            desugar_doc_comments,
            cfg_mods: true,
        };

        let tok = parser.next_tok();
        parser.token = tok.tok;
        parser.span = tok.sp;

        if let Some(directory) = directory {
            parser.directory = directory;
        } else if !parser.span.source_equal(&DUMMY_SP) {
            if let FileName::Real(path) = sess.codemap().span_to_unmapped_path(parser.span) {
                parser.directory.path = path;
                parser.directory.path.pop();
            }
        }

        parser.process_potential_macro_variable();
        parser
    }

    fn next_tok(&mut self) -> TokenAndSpan {
        let mut next = if self.desugar_doc_comments {
            self.token_cursor.next_desugared()
        } else {
            self.token_cursor.next()
        };
        if next.sp == syntax_pos::DUMMY_SP {
            next.sp = self.prev_span;
        }
        next
    }

    /// Convert a token to a string using self's reader
    pub fn token_to_string(token: &token::Token) -> String {
        pprust::token_to_string(token)
    }

    /// Convert the current token to a string using self's reader
    pub fn this_token_to_string(&self) -> String {
        Parser::token_to_string(&self.token)
    }

    pub fn token_descr(&self) -> Option<&'static str> {
        Some(match &self.token {
            t if t.is_special_ident() => "reserved identifier",
            t if t.is_used_keyword() => "keyword",
            t if t.is_unused_keyword() => "reserved keyword",
            _ => return None,
        })
    }

    pub fn this_token_descr(&self) -> String {
        if let Some(prefix) = self.token_descr() {
            format!("{} `{}`", prefix, self.this_token_to_string())
        } else {
            format!("`{}`", self.this_token_to_string())
        }
    }

    pub fn unexpected_last<T>(&self, t: &token::Token) -> PResult<'a, T> {
        let token_str = Parser::token_to_string(t);
        Err(self.span_fatal(self.prev_span, &format!("unexpected token: `{}`", token_str)))
    }

    pub fn unexpected<T>(&mut self) -> PResult<'a, T> {
        match self.expect_one_of(&[], &[]) {
            Err(e) => Err(e),
            Ok(_) => unreachable!(),
        }
    }

    /// Expect and consume the token t. Signal an error if
    /// the next token is not t.
    pub fn expect(&mut self, t: &token::Token) -> PResult<'a, ()> {
        if self.expected_tokens.is_empty() {
            if self.token == *t {
                self.bump();
                Ok(())
            } else {
                let token_str = Parser::token_to_string(t);
                let this_token_str = self.this_token_to_string();
                Err(self.fatal(&format!("expected `{}`, found `{}`",
                                        token_str,
                                        this_token_str)))
            }
        } else {
            self.expect_one_of(unsafe { slice::from_raw_parts(t, 1) }, &[])
        }
    }
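    // Illustrative usage sketch (an assumption about typical call sites, not
    // original text): after parsing an initializer, a statement parser can
    // insist on the terminating semicolon with
    //
    //     self.expect(&token::Semi)?;
    //
    // which either bumps past `;` or reports an "expected `;`, found ..."
    // error via the machinery above and below.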
    /// Expect next token to be edible or inedible token. If edible,
    /// then consume it; if inedible, then return without consuming
    /// anything. Signal a fatal error if next token is unexpected.
    pub fn expect_one_of(&mut self,
                         edible: &[token::Token],
                         inedible: &[token::Token]) -> PResult<'a, ()> {
        fn tokens_to_string(tokens: &[TokenType]) -> String {
            let mut i = tokens.iter();
            // This might be a sign we need a connect method on Iterator.
            let b = i.next()
                     .map_or("".to_string(), |t| t.to_string());
            i.enumerate().fold(b, |mut b, (i, a)| {
                if tokens.len() > 2 && i == tokens.len() - 2 {
                    b.push_str(", or ");
                } else if tokens.len() == 2 && i == tokens.len() - 2 {
                    b.push_str(" or ");
                } else {
                    b.push_str(", ");
                }
                b.push_str(&a.to_string());
                b
            })
        }
        if edible.contains(&self.token) {
            self.bump();
            Ok(())
        } else if inedible.contains(&self.token) {
            // leave it in the input
            Ok(())
        } else {
            let mut expected = edible.iter()
                .map(|x| TokenType::Token(x.clone()))
                .chain(inedible.iter().map(|x| TokenType::Token(x.clone())))
                .chain(self.expected_tokens.iter().cloned())
                .collect::<Vec<_>>();
            expected.sort_by(|a, b| a.to_string().cmp(&b.to_string()));
            expected.dedup();
            let expect = tokens_to_string(&expected[..]);
            let actual = self.this_token_to_string();
            let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 {
                let short_expect = if expected.len() > 6 {
                    format!("{} possible tokens", expected.len())
                } else {
                    expect.clone()
                };
                (format!("expected one of {}, found `{}`", expect, actual),
                 (self.sess.codemap().next_point(self.prev_span),
                  format!("expected one of {} here", short_expect)))
            } else if expected.is_empty() {
                (format!("unexpected token: `{}`", actual),
                 (self.prev_span, "unexpected token after this".to_string()))
            } else {
                (format!("expected {}, found `{}`", expect, actual),
                 (self.sess.codemap().next_point(self.prev_span),
                  format!("expected {} here", expect)))
            };
            let mut err = self.fatal(&msg_exp);
            let sp = if self.token == token::Token::Eof {
                // This is EOF, don't want to point at the following char, but rather the last token
                self.prev_span
            } else {
                label_sp
            };

            let cm = self.sess.codemap();
            match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
                (Ok(ref a), Ok(ref b)) if a.line == b.line => {
                    // When the spans are in the same line, it means that the only content between
                    // them is whitespace, point at the found token in that case:
                    //
                    // X |     () => { syntax error };
                    //   |                    ^^^^^ expected one of 8 possible tokens here
                    //
                    // instead of having:
                    //
                    // X |     () => { syntax error };
                    //   |                   -^^^^^ unexpected token
                    //   |                   |
                    //   |                   expected one of 8 possible tokens here
                    err.span_label(self.span, label_exp);
                }
                _ => {
                    err.span_label(sp, label_exp);
                    err.span_label(self.span, "unexpected token");
                }
            }
            Err(err)
        }
    }
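    // Worked example, not part of the original file: how `tokens_to_string`
    // above joins the expected-token list for diagnostics:
    //
    //     [`;`]            -> "`;`"
    //     [`;`, `}`]       -> "`;` or `}`"
    //     [`,`, `;`, `}`]  -> "`,`, `;`, or `}`"
    //
    // and lists longer than 6 entries collapse to "N possible tokens" in the
    // span label.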
    /// Returns the span of the expression if it was not interpolated,
    /// or the span of the interpolated token.
    fn interpolated_or_expr_span(&self,
                                 expr: PResult<'a, P<Expr>>)
                                 -> PResult<'a, (Span, P<Expr>)> {
        expr.map(|e| {
            if self.prev_token_kind == PrevTokenKind::Interpolated {
                (self.prev_span, e)
            } else {
                (e.span, e)
            }
        })
    }

    fn expected_ident_found(&self) -> DiagnosticBuilder<'a> {
        let mut err = self.struct_span_err(self.span,
                                           &format!("expected identifier, found {}",
                                                    self.this_token_descr()));
        if let Some(token_descr) = self.token_descr() {
            err.span_label(self.span, format!("expected identifier, found {}", token_descr));
        } else {
            err.span_label(self.span, "expected identifier");
        }
        err
    }

    pub fn parse_ident(&mut self) -> PResult<'a, ast::Ident> {
        self.parse_ident_common(true)
    }

    fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> {
        match self.token {
            token::Ident(i) => {
                if self.token.is_reserved_ident() {
                    let mut err = self.expected_ident_found();
                    if recover {
                        err.emit();
                    } else {
                        return Err(err);
                    }
                }
                self.bump();
                Ok(i)
            }
            _ => {
                Err(if self.prev_token_kind == PrevTokenKind::DocComment {
                        self.span_fatal_err(self.prev_span, Error::UselessDocComment)
                    } else {
                        let mut err = self.expected_ident_found();
                        if self.token == token::Underscore {
                            err.note("`_` is a wildcard pattern, not an identifier");
                        }
                        err
                    })
            }
        }
    }

    /// Check if the next token is `tok`, and return `true` if so.
    ///
    /// This method will automatically add `tok` to `expected_tokens` if `tok` is not
    /// encountered.
    pub fn check(&mut self, tok: &token::Token) -> bool {
        let is_present = self.token == *tok;
        if !is_present { self.expected_tokens.push(TokenType::Token(tok.clone())); }
        is_present
    }

    /// Consume token 'tok' if it exists. Returns true if the given
    /// token was present, false otherwise.
    pub fn eat(&mut self, tok: &token::Token) -> bool {
        let is_present = self.check(tok);
        if is_present { self.bump() }
        is_present
    }

    pub fn check_keyword(&mut self, kw: keywords::Keyword) -> bool {
        self.expected_tokens.push(TokenType::Keyword(kw));
        self.token.is_keyword(kw)
    }

    /// If the next token is the given keyword, eat it and return
    /// true. Otherwise, return false.
    pub fn eat_keyword(&mut self, kw: keywords::Keyword) -> bool {
        if self.check_keyword(kw) {
            self.bump();
            true
        } else {
            false
        }
    }

    pub fn eat_keyword_noexpect(&mut self, kw: keywords::Keyword) -> bool {
        if self.token.is_keyword(kw) {
            self.bump();
            true
        } else {
            false
        }
    }

    /// If the given word is not a keyword, signal an error.
    /// If the next token is not the given word, signal an error.
    /// Otherwise, eat it.
    pub fn expect_keyword(&mut self, kw: keywords::Keyword) -> PResult<'a, ()> {
        if !self.eat_keyword(kw) {
            self.unexpected()
        } else {
            Ok(())
        }
    }

    fn check_ident(&mut self) -> bool {
        if self.token.is_ident() {
            true
        } else {
            self.expected_tokens.push(TokenType::Ident);
            false
        }
    }

    fn check_path(&mut self) -> bool {
        if self.token.is_path_start() {
            true
        } else {
            self.expected_tokens.push(TokenType::Path);
            false
        }
    }

    fn check_type(&mut self) -> bool {
        if self.token.can_begin_type() {
            true
        } else {
            self.expected_tokens.push(TokenType::Type);
            false
        }
    }

    /// Expect and consume an `&`. If `&&` is seen, replace it with a single
    /// `&` and continue. If an `&` is not seen, signal an error.
    fn expect_and(&mut self) -> PResult<'a, ()> {
        self.expected_tokens.push(TokenType::Token(token::BinOp(token::And)));
        match self.token {
            token::BinOp(token::And) => {
                self.bump();
                Ok(())
            }
            token::AndAnd => {
                let span = self.span.with_lo(self.span.lo() + BytePos(1));
                Ok(self.bump_with(token::BinOp(token::And), span))
            }
            _ => self.unexpected()
        }
    }

    /// Expect and consume an `|`. If `||` is seen, replace it with a single
    /// `|` and continue. If an `|` is not seen, signal an error.
    fn expect_or(&mut self) -> PResult<'a, ()> {
        self.expected_tokens.push(TokenType::Token(token::BinOp(token::Or)));
        match self.token {
            token::BinOp(token::Or) => {
                self.bump();
                Ok(())
            }
            token::OrOr => {
                let span = self.span.with_lo(self.span.lo() + BytePos(1));
                Ok(self.bump_with(token::BinOp(token::Or), span))
            }
            _ => self.unexpected()
        }
    }

    pub fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<ast::Name>) {
        match suffix {
            None => {/* everything ok */}
            Some(suf) => {
                let text = suf.as_str();
                if text.is_empty() {
                    self.span_bug(sp, "found empty literal suffix in Some")
                }
                self.span_err(sp, &format!("{} with a suffix is invalid", kind));
            }
        }
    }
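    // Illustrative sketch, not part of the original file: why `expect_and`,
    // `expect_or`, and the `eat_lt`/`expect_gt` pair below re-inject half
    // tokens. In
    //
    //     fn f(x: &&u8) {}
    //
    // the lexer emits `&&` as a single `AndAnd` token, so the type parser
    // consumes one `&` and calls `bump_with` with a plain `&` whose span
    // covers the second byte, letting `&&u8` parse exactly like `& &u8`.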
    /// Attempt to consume a `<`. If `<<` is seen, replace it with a single
    /// `<` and continue. If a `<` is not seen, return false.
    ///
    /// This is meant to be used when parsing generics on a path to get the
    /// starting token.
    fn eat_lt(&mut self) -> bool {
        self.expected_tokens.push(TokenType::Token(token::Lt));
        match self.token {
            token::Lt => {
                self.bump();
                true
            }
            token::BinOp(token::Shl) => {
                let span = self.span.with_lo(self.span.lo() + BytePos(1));
                self.bump_with(token::Lt, span);
                true
            }
            _ => false,
        }
    }

    fn expect_lt(&mut self) -> PResult<'a, ()> {
        if !self.eat_lt() {
            self.unexpected()
        } else {
            Ok(())
        }
    }

    /// Expect and consume a GT. If a `>>` is seen, replace it
    /// with a single `>` and continue. If a GT is not seen,
    /// signal an error.
    pub fn expect_gt(&mut self) -> PResult<'a, ()> {
        self.expected_tokens.push(TokenType::Token(token::Gt));
        match self.token {
            token::Gt => {
                self.bump();
                Ok(())
            }
            token::BinOp(token::Shr) => {
                let span = self.span.with_lo(self.span.lo() + BytePos(1));
                Ok(self.bump_with(token::Gt, span))
            }
            token::BinOpEq(token::Shr) => {
                let span = self.span.with_lo(self.span.lo() + BytePos(1));
                Ok(self.bump_with(token::Ge, span))
            }
            token::Ge => {
                let span = self.span.with_lo(self.span.lo() + BytePos(1));
                Ok(self.bump_with(token::Eq, span))
            }
            _ => self.unexpected()
        }
    }

    pub fn parse_seq_to_before_gt_or_return<T, F>(&mut self,
                                                  sep: Option<token::Token>,
                                                  mut f: F)
                                                  -> PResult<'a, (Vec<T>, bool)>
        where F: FnMut(&mut Parser<'a>) -> PResult<'a, Option<T>>,
    {
        let mut v = Vec::new();
        // This loop works by alternating back and forth between parsing types
        // and commas. For example, given a string `A, B,>`, the parser would
        // first parse `A`, then a comma, then `B`, then a comma. After that it
        // would encounter a `>` and stop. This lets the parser handle trailing
        // commas in generic parameters, because it can stop either after
        // parsing a type or after parsing a comma.
        for i in 0.. {
            if self.check(&token::Gt)
                || self.token == token::BinOp(token::Shr)
                || self.token == token::Ge
                || self.token == token::BinOpEq(token::Shr) {
                break;
            }

            if i % 2 == 0 {
                match f(self)? {
                    Some(result) => v.push(result),
                    None => return Ok((v, true))
                }
            } else {
                if let Some(t) = sep.as_ref() {
                    self.expect(t)?;
                }
            }
        }
        return Ok((v, false));
    }

    /// Parse a sequence bracketed by '<' and '>', stopping
    /// before the '>'.
    pub fn parse_seq_to_before_gt<T, F>(&mut self,
                                        sep: Option<token::Token>,
                                        mut f: F)
                                        -> PResult<'a, Vec<T>>
        where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    {
        let (result, returned) = self.parse_seq_to_before_gt_or_return(sep,
                                                                       |p| Ok(Some(f(p)?)))?;
        assert!(!returned);
        return Ok(result);
    }

    pub fn parse_seq_to_gt<T, F>(&mut self,
                                 sep: Option<token::Token>,
                                 f: F)
                                 -> PResult<'a, Vec<T>>
        where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    {
        let v = self.parse_seq_to_before_gt(sep, f)?;
        self.expect_gt()?;
        return Ok(v);
    }

    pub fn parse_seq_to_gt_or_return<T, F>(&mut self,
                                           sep: Option<token::Token>,
                                           f: F)
                                           -> PResult<'a, (Vec<T>, bool)>
        where F: FnMut(&mut Parser<'a>) -> PResult<'a, Option<T>>,
    {
        let (v, returned) = self.parse_seq_to_before_gt_or_return(sep, f)?;
        if !returned {
            self.expect_gt()?;
        }
        return Ok((v, returned));
    }

    /// Eat and discard tokens until one of `kets` is encountered. Respects token trees,
    /// passes through any errors encountered. Used for error recovery.
    pub fn eat_to_tokens(&mut self, kets: &[&token::Token]) {
        let handler = self.diagnostic();

        if let Err(ref mut err) = self.parse_seq_to_before_tokens(kets,
                                                                  SeqSep::none(),
                                                                  TokenExpectType::Expect,
                                                                  |p| Ok(p.parse_token_tree())) {
            handler.cancel(err);
        }
    }
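    // Illustrative sketch, not part of the original file: the same token
    // splitting in the closing direction. In
    //
    //     let v: Vec<Vec<u8>> = Vec::new();
    //
    // the closing `>>` is lexed as a single shift token; `expect_gt` above
    // consumes the first `>` and re-injects a plain `>` for the outer generic
    // list, and the `parse_seq_to_*gt*` helpers stop on any of `>`, `>>`,
    // `>=`, `>>=` for the same reason.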
    /// Parse a sequence, including the closing delimiter. The function
    /// f must consume tokens until reaching the next separator or
    /// closing bracket.
    pub fn parse_seq_to_end<T, F>(&mut self,
                                  ket: &token::Token,
                                  sep: SeqSep,
                                  f: F)
                                  -> PResult<'a, Vec<T>> where
        F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    {
        let val = self.parse_seq_to_before_end(ket, sep, f)?;
        self.bump();
        Ok(val)
    }

    /// Parse a sequence, not including the closing delimiter. The function
    /// f must consume tokens until reaching the next separator or
    /// closing bracket.
    pub fn parse_seq_to_before_end<T, F>(&mut self,
                                         ket: &token::Token,
                                         sep: SeqSep,
                                         f: F)
                                         -> PResult<'a, Vec<T>>
        where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>
    {
        self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
    }

    fn parse_seq_to_before_tokens<T, F>(&mut self,
                                        kets: &[&token::Token],
                                        sep: SeqSep,
                                        expect: TokenExpectType,
                                        mut f: F)
                                        -> PResult<'a, Vec<T>>
        where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>
    {
        let mut first: bool = true;
        let mut v = vec![];
        while !kets.contains(&&self.token) {
            match self.token {
                token::CloseDelim(..) | token::Eof => break,
                _ => {}
            };
            if let Some(ref t) = sep.sep {
                if first {
                    first = false;
                } else {
                    if let Err(mut e) = self.expect(t) {
                        // Attempt to keep parsing if it was a similar separator
                        if let Some(ref tokens) = t.similar_tokens() {
                            if tokens.contains(&self.token) {
                                self.bump();
                            }
                        }
                        e.emit();
                        // Attempt to keep parsing if it was an omitted separator
                        match f(self) {
                            Ok(t) => {
                                v.push(t);
                                continue;
                            },
                            Err(mut e) => {
                                e.cancel();
                                break;
                            }
                        }
                    }
                }
            }
            if sep.trailing_sep_allowed && kets.iter().any(|k| {
                match expect {
                    TokenExpectType::Expect => self.check(k),
                    TokenExpectType::NoExpect => self.token == **k,
                }
            }) {
                break;
            }

            let t = f(self)?;
            v.push(t);
        }

        Ok(v)
    }

    /// Parse a sequence, including the closing delimiter. The function
    /// f must consume tokens until reaching the next separator or
    /// closing bracket.
    pub fn parse_unspanned_seq<T, F>(&mut self,
                                     bra: &token::Token,
                                     ket: &token::Token,
                                     sep: SeqSep,
                                     f: F)
                                     -> PResult<'a, Vec<T>> where
        F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    {
        self.expect(bra)?;
        let result = self.parse_seq_to_before_end(ket, sep, f)?;
        if self.token == *ket {
            self.bump();
        }
        Ok(result)
    }

    // NB: Do not use this function unless you actually plan to place the
    // spanned list in the AST.
    pub fn parse_seq<T, F>(&mut self,
                           bra: &token::Token,
                           ket: &token::Token,
                           sep: SeqSep,
                           f: F)
                           -> PResult<'a, Spanned<Vec<T>>> where
        F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    {
        let lo = self.span;
        self.expect(bra)?;
        let result = self.parse_seq_to_before_end(ket, sep, f)?;
        let hi = self.span;
        self.bump();
        Ok(respan(lo.to(hi), result))
    }

    /// Advance the parser by one token
    pub fn bump(&mut self) {
        if self.prev_token_kind == PrevTokenKind::Eof {
            // Bumping after EOF is a bad sign, usually an infinite loop.
            self.bug("attempted to bump the parser past EOF (may be stuck in a loop)");
        }

        self.prev_span = self.meta_var_span.take().unwrap_or(self.span);

        // Record last token kind for possible error recovery.
        self.prev_token_kind = match self.token {
            token::DocComment(..) => PrevTokenKind::DocComment,
            token::Comma => PrevTokenKind::Comma,
            token::BinOp(token::Plus) => PrevTokenKind::Plus,
            token::Interpolated(..) => PrevTokenKind::Interpolated,
            token::Eof => PrevTokenKind::Eof,
            token::Ident(..) => PrevTokenKind::Ident,
            _ => PrevTokenKind::Other,
        };

        let next = self.next_tok();
        self.span = next.sp;
        self.token = next.tok;
        self.expected_tokens.clear();
        // check after each token
        self.process_potential_macro_variable();
    }
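    // Illustrative usage sketch (an assumption about a typical call site, not
    // original text): parsing a parenthesized, comma-separated list with the
    // sequence helpers above:
    //
    //     let args = self.parse_unspanned_seq(
    //         &token::OpenDelim(token::Paren),
    //         &token::CloseDelim(token::Paren),
    //         SeqSep::trailing_allowed(token::Comma),
    //         |p| p.parse_expr(),
    //     )?;
    //
    // `SeqSep::trailing_allowed` permits a trailing comma before the closing
    // parenthesis, matching how argument lists are parsed elsewhere in this
    // file.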
    /// Advance the parser using provided token as a next one. Use this when
    /// consuming a part of a token. For example a single `<` from `<<`.
    pub fn bump_with(&mut self, next: token::Token, span: Span) {
        self.prev_span = self.span.with_hi(span.lo());
        // It would be incorrect to record the kind of the current token, but
        // fortunately for tokens currently using `bump_with`, the
        // prev_token_kind will be of no use anyway.
        self.prev_token_kind = PrevTokenKind::Other;
        self.span = span;
        self.token = next;
        self.expected_tokens.clear();
    }

    pub fn look_ahead<R, F>(&self, dist: usize, f: F) -> R where
        F: FnOnce(&token::Token) -> R,
    {
        if dist == 0 {
            return f(&self.token)
        }

        f(&match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
            Some(tree) => match tree {
                TokenTree::Token(_, tok) => tok,
                TokenTree::Delimited(_, delimited) => token::OpenDelim(delimited.delim),
            },
            None => token::CloseDelim(self.token_cursor.frame.delim),
        })
    }

    fn look_ahead_span(&self, dist: usize) -> Span {
        if dist == 0 {
            return self.span
        }

        match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
            Some(TokenTree::Token(span, _)) | Some(TokenTree::Delimited(span, _)) => span,
            None => self.look_ahead_span(dist - 1),
        }
    }

    pub fn fatal(&self, m: &str) -> DiagnosticBuilder<'a> {
        self.sess.span_diagnostic.struct_span_fatal(self.span, m)
    }
    pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
        self.sess.span_diagnostic.struct_span_fatal(sp, m)
    }
    pub fn span_fatal_err<S: Into<MultiSpan>>(&self, sp: S, err: Error) -> DiagnosticBuilder<'a> {
        err.span_err(sp, self.diagnostic())
    }
    pub fn span_fatal_help<S: Into<MultiSpan>>(&self,
                                               sp: S,
                                               m: &str,
                                               help: &str) -> DiagnosticBuilder<'a> {
        let mut err = self.sess.span_diagnostic.struct_span_fatal(sp, m);
        err.help(help);
        err
    }
    pub fn bug(&self, m: &str) -> ! {
        self.sess.span_diagnostic.span_bug(self.span, m)
    }
    pub fn warn(&self, m: &str) {
        self.sess.span_diagnostic.span_warn(self.span, m)
    }
    pub fn span_warn<S: Into<MultiSpan>>(&self, sp: S, m: &str) {
        self.sess.span_diagnostic.span_warn(sp, m)
    }
    pub fn span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) {
        self.sess.span_diagnostic.span_err(sp, m)
    }
    pub fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
        self.sess.span_diagnostic.struct_span_err(sp, m)
    }
    pub fn span_err_help<S: Into<MultiSpan>>(&self, sp: S, m: &str, h: &str) {
        let mut err = self.sess.span_diagnostic.mut_span_err(sp, m);
        err.help(h);
        err.emit();
    }
    pub fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> ! {
        self.sess.span_diagnostic.span_bug(sp, m)
    }
    pub fn abort_if_errors(&self) {
        self.sess.span_diagnostic.abort_if_errors();
    }

    fn cancel(&self, err: &mut DiagnosticBuilder) {
        self.sess.span_diagnostic.cancel(err)
    }

    pub fn diagnostic(&self) -> &'a errors::Handler {
        &self.sess.span_diagnostic
    }

    /// Is the current token one of the keywords that signals a bare function
    /// type?
    pub fn token_is_bare_fn_keyword(&mut self) -> bool {
        self.check_keyword(keywords::Fn) ||
            self.check_keyword(keywords::Unsafe) ||
            self.check_keyword(keywords::Extern)
    }

    fn eat_label(&mut self) -> Option<Label>