// rust/src/libsyntax/parse/parser.rs
// ignore-tidy-filelength
use crate::ast::{AngleBracketedArgs, ParenthesizedArgs, AttrStyle, BareFnTy};
use crate::ast::{GenericBound, TraitBoundModifier};
use crate::ast::Unsafety;
use crate::ast::{Mod, AnonConst, Arg, Arm, Attribute, BindingMode, TraitItemKind};
use crate::ast::Block;
use crate::ast::{BlockCheckMode, CaptureBy, Movability};
use crate::ast::{Constness, Crate};
use crate::ast::Defaultness;
use crate::ast::EnumDef;
use crate::ast::{Expr, ExprKind, RangeLimits};
use crate::ast::{Field, FnDecl, FnHeader};
use crate::ast::{ForeignItem, ForeignItemKind, FunctionRetTy};
use crate::ast::{GenericParam, GenericParamKind};
use crate::ast::GenericArg;
use crate::ast::{Ident, ImplItem, IsAsync, IsAuto, Item, ItemKind};
use crate::ast::{Label, Lifetime};
use crate::ast::Local;
use crate::ast::MacStmtStyle;
use crate::ast::{Mac, Mac_, MacDelimiter};
use crate::ast::{MutTy, Mutability};
use crate::ast::{Pat, PatKind, PathSegment};
use crate::ast::{PolyTraitRef, QSelf};
use crate::ast::{Stmt, StmtKind};
use crate::ast::{VariantData, StructField};
use crate::ast::StrStyle;
use crate::ast::SelfKind;
use crate::ast::{TraitItem, TraitRef, TraitObjectSyntax};
use crate::ast::{Ty, TyKind, AssocTyConstraint, AssocTyConstraintKind, GenericBounds};
use crate::ast::{Visibility, VisibilityKind, WhereClause, CrateSugar};
use crate::ast::{UseTree, UseTreeKind};
use crate::ast::{BinOpKind, UnOp};
use crate::ast::{RangeEnd, RangeSyntax};
use crate::{ast, attr};
use crate::ext::base::DummyResult;
use crate::ext::hygiene::SyntaxContext;
use crate::source_map::{self, SourceMap, Spanned, respan};
use crate::parse::{SeqSep, classify, literal, token};
use crate::parse::lexer::UnmatchedBrace;
use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
use crate::parse::token::{Token, TokenKind, DelimToken};
use crate::parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
use crate::util::parser::{AssocOp, Fixity, prec_let_scrutinee_needs_par};
use crate::print::pprust;
use crate::ptr::P;
use crate::parse::PResult;
use crate::ThinVec;
use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
use crate::symbol::{kw, sym, Symbol};
use crate::parse::diagnostics::{Error, dummy_arg};
use errors::{Applicability, DiagnosticBuilder, DiagnosticId, FatalError};
use rustc_target::spec::abi::{self, Abi};
use syntax_pos::{Span, BytePos, DUMMY_SP, FileName};
use log::debug;
use std::borrow::Cow;
use std::cmp;
use std::mem;
use std::path::{self, Path, PathBuf};
use std::slice;
#[derive(Debug)]
/// Whether the type alias or associated type is a concrete type or an opaque type.
pub enum AliasKind {
/// Just a new name for the same type
Weak(P<Ty>),
/// Only trait impls of the type will be usable, not the actual type itself
OpaqueTy(GenericBounds),
}
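// Illustrative sketch (not in the original source): the two alias kinds roughly
// correspond to these surface forms (the opaque form assumes the then-unstable
// `type_alias_impl_trait`-style syntax):
//
//     type Meters = u64;                         // AliasKind::Weak -- just a new name
//     type Counter = impl Iterator<Item = u64>;  // AliasKind::OpaqueTy -- only the
//                                                // trait bounds are usable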
bitflags::bitflags! {
struct Restrictions: u8 {
const STMT_EXPR = 1 << 0;
const NO_STRUCT_LITERAL = 1 << 1;
}
}
type ItemInfo = (Ident, ItemKind, Option<Vec<Attribute>>);
/// Specifies how to parse a path.
#[derive(Copy, Clone, PartialEq)]
pub enum PathStyle {
/// In some contexts, notably in expressions, paths with generic arguments are ambiguous
/// with something else. For example, in expressions `segment < ....` can be interpreted
/// as a comparison and `segment ( ....` can be interpreted as a function call.
/// In all such contexts the non-path interpretation is preferred by default for practical
/// reasons, but the path interpretation can be forced by the disambiguator `::`, e.g.
/// `x<y>` - comparisons, `x::<y>` - unambiguously a path.
Expr,
/// In other contexts, notably in types, no ambiguity exists and paths can be written
/// without the disambiguator, e.g., `x<y>` - unambiguously a path.
/// Paths with disambiguators are still accepted, `x::<Y>` - unambiguously a path too.
Type,
/// A path with generic arguments disallowed, e.g., `foo::bar::Baz`, used in imports,
/// visibilities or attributes.
/// Technically, this variant is unnecessary and e.g., `Expr` can be used instead
/// (paths in "mod" contexts have to be checked later for absence of generic arguments
/// anyway, due to macros), but it is used to avoid weird suggestions about expected
/// tokens when something goes wrong.
Mod,
}
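// Illustrative sketch (not in the original source): the same `<` token parses
// differently depending on the `PathStyle` in force.
//
//     let c = a < b;              // PathStyle::Expr: `<` is a comparison
//     let v = Vec::<u8>::new();   // PathStyle::Expr: `::<` forces the path reading
//     let t: Vec<u8> = v;         // PathStyle::Type: `<` is unambiguously generics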
#[derive(Clone, Copy, PartialEq, Debug)]
crate enum SemiColonMode {
Break,
Ignore,
Comma,
}
#[derive(Clone, Copy, PartialEq, Debug)]
crate enum BlockMode {
Break,
Ignore,
}
/// Possibly accepts a `token::Interpolated` expression (a pre-parsed expression
/// dropped into the token stream, which happens while parsing the result of
/// macro expansion). Placement of these is not as complex as I feared it would
/// be. The important thing is to make sure that lookahead doesn't balk at
/// `token::Interpolated` tokens.
macro_rules! maybe_whole_expr {
($p:expr) => {
if let token::Interpolated(nt) = &$p.token.kind {
match &**nt {
token::NtExpr(e) | token::NtLiteral(e) => {
let e = e.clone();
$p.bump();
return Ok(e);
}
token::NtPath(path) => {
let path = path.clone();
$p.bump();
return Ok($p.mk_expr(
$p.token.span, ExprKind::Path(None, path), ThinVec::new()
));
}
token::NtBlock(block) => {
let block = block.clone();
$p.bump();
return Ok($p.mk_expr(
$p.token.span, ExprKind::Block(block, None), ThinVec::new()
));
}
// N.B: `NtIdent(ident)` is normalized to `Ident` in `fn bump`.
_ => {},
};
}
}
}
/// Like `maybe_whole_expr!`, but for things other than expressions.
macro_rules! maybe_whole {
($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
if let token::Interpolated(nt) = &$p.token.kind {
if let token::$constructor(x) = &**nt {
let $x = x.clone();
$p.bump();
return Ok($e);
}
}
};
}
/// If the next tokens are ill-formed `$ty::` recover them as `<$ty>::`.
macro_rules! maybe_recover_from_interpolated_ty_qpath {
($self: expr, $allow_qpath_recovery: expr) => {
if $allow_qpath_recovery && $self.look_ahead(1, |t| t == &token::ModSep) {
if let token::Interpolated(nt) = &$self.token.kind {
if let token::NtTy(ty) = &**nt {
let ty = ty.clone();
$self.bump();
return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_span, ty);
}
}
}
}
}
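// Illustrative sketch (not in the original source): the recovery above fires for
// macro expansions where `$ty::...` is ill-formed and the qualified form
// `<$ty>::...` is what the user meant:
//
//     macro_rules! default_of {
//         ($ty:ty) => { <$ty>::default() };  // `$ty::default()` would not parse
//     }
//     let v: Vec<u8> = default_of!(Vec<u8>);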
fn maybe_append(mut lhs: Vec<Attribute>, mut rhs: Option<Vec<Attribute>>) -> Vec<Attribute> {
if let Some(ref mut rhs) = rhs {
lhs.append(rhs);
}
lhs
}
#[derive(Debug, Clone, Copy, PartialEq)]
enum PrevTokenKind {
DocComment,
Comma,
Plus,
Interpolated,
Eof,
Ident,
BitOr,
Other,
}
// NOTE: `Ident`s are handled by `common.rs`.
#[derive(Clone)]
pub struct Parser<'a> {
pub sess: &'a ParseSess,
/// The current normalized token.
/// "Normalized" means that some interpolated tokens
/// (`$i: ident` and `$l: lifetime` meta-variables) are replaced
/// with non-interpolated identifier and lifetime tokens they refer to.
/// Perhaps the normalized / non-normalized setup can be simplified somehow.
pub token: Token,
/// Span of the current non-normalized token.
meta_var_span: Option<Span>,
/// Span of the previous non-normalized token.
pub prev_span: Span,
/// Kind of the previous normalized token (in simplified form).
prev_token_kind: PrevTokenKind,
restrictions: Restrictions,
/// Used to determine the path to externally loaded source files.
crate directory: Directory<'a>,
/// `true` to parse sub-modules in other files.
pub recurse_into_file_modules: bool,
/// Name of the root module this parser originated from. If `None`, then the
/// name is not known. This does not change while the parser is descending
/// into modules, and sub-parsers have new values for this name.
pub root_module_name: Option<String>,
crate expected_tokens: Vec<TokenType>,
crate token_cursor: TokenCursor,
desugar_doc_comments: bool,
/// `true` if we should configure out-of-line modules as we parse.
pub cfg_mods: bool,
/// This field is used to keep track of how many left angle brackets we have seen. This is
/// required in order to detect extra leading left angle brackets (`<` characters) and error
/// appropriately.
///
/// See the comments in the `parse_path_segment` function for more details.
crate unmatched_angle_bracket_count: u32,
crate max_angle_bracket_count: u32,
/// List of all unclosed delimiters found by the lexer. If an entry is used for error recovery
/// it gets removed from here. Every entry left at the end gets emitted as an independent
/// error.
crate unclosed_delims: Vec<UnmatchedBrace>,
crate last_unexpected_token_span: Option<Span>,
crate last_type_ascription: Option<(Span, bool /* likely path typo */)>,
/// If present, this `Parser` is not parsing Rust code but rather a macro call.
crate subparser_name: Option<&'static str>,
}
impl<'a> Drop for Parser<'a> {
fn drop(&mut self) {
let diag = self.diagnostic();
emit_unclosed_delims(&mut self.unclosed_delims, diag);
}
}
#[derive(Clone)]
crate struct TokenCursor {
crate frame: TokenCursorFrame,
crate stack: Vec<TokenCursorFrame>,
}
#[derive(Clone)]
crate struct TokenCursorFrame {
crate delim: token::DelimToken,
crate span: DelimSpan,
crate open_delim: bool,
crate tree_cursor: tokenstream::Cursor,
crate close_delim: bool,
crate last_token: LastToken,
}
/// This is used in `TokenCursorFrame` above to track tokens that are consumed
/// by the parser, and then that's transitively used to record the tokens that
/// each parsed AST item is created with.
///
/// Right now this has two states, either collecting tokens or not collecting
/// tokens. If we're collecting tokens we just save everything off into a local
/// `Vec`. Eventually, though, this should likely save tokens from the original
/// token stream and just use slicing of token streams to avoid creating a
/// whole new vector.
///
/// The second state is where we're passively not recording tokens, but the last
/// token is still tracked for when we want to start recording tokens. This
/// "last token" means that when we start recording tokens we'll want to ensure
/// that this, the first token, is included in the output.
///
/// You can find some more example usage of this in the `collect_tokens` method
/// on the parser.
#[derive(Clone)]
crate enum LastToken {
Collecting(Vec<TreeAndJoint>),
Was(Option<TreeAndJoint>),
}
impl TokenCursorFrame {
fn new(span: DelimSpan, delim: DelimToken, tts: &TokenStream) -> Self {
TokenCursorFrame {
delim,
span,
open_delim: delim == token::NoDelim,
tree_cursor: tts.clone().into_trees(),
close_delim: delim == token::NoDelim,
last_token: LastToken::Was(None),
}
}
}
impl TokenCursor {
fn next(&mut self) -> Token {
loop {
let tree = if !self.frame.open_delim {
self.frame.open_delim = true;
TokenTree::open_tt(self.frame.span.open, self.frame.delim)
} else if let Some(tree) = self.frame.tree_cursor.next() {
tree
} else if !self.frame.close_delim {
self.frame.close_delim = true;
TokenTree::close_tt(self.frame.span.close, self.frame.delim)
} else if let Some(frame) = self.stack.pop() {
self.frame = frame;
continue
} else {
return Token::new(token::Eof, DUMMY_SP);
};
match self.frame.last_token {
LastToken::Collecting(ref mut v) => v.push(tree.clone().into()),
LastToken::Was(ref mut t) => *t = Some(tree.clone().into()),
}
match tree {
TokenTree::Token(token) => return token,
TokenTree::Delimited(sp, delim, tts) => {
let frame = TokenCursorFrame::new(sp, delim, &tts);
self.stack.push(mem::replace(&mut self.frame, frame));
}
}
}
}
fn next_desugared(&mut self) -> Token {
let (name, sp) = match self.next() {
Token { kind: token::DocComment(name), span } => (name, span),
tok => return tok,
};
let stripped = strip_doc_comment_decoration(&name.as_str());
// Searches for the occurrences of `"#*` and returns the minimum number of `#`s
// required to wrap the text.
let mut num_of_hashes = 0;
let mut count = 0;
for ch in stripped.chars() {
count = match ch {
'"' => 1,
'#' if count > 0 => count + 1,
_ => 0,
};
num_of_hashes = cmp::max(num_of_hashes, count);
}
let delim_span = DelimSpan::from_single(sp);
let body = TokenTree::Delimited(
delim_span,
token::Bracket,
[
TokenTree::token(token::Ident(sym::doc, false), sp),
TokenTree::token(token::Eq, sp),
TokenTree::token(TokenKind::lit(
token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None
), sp),
]
.iter().cloned().collect::<TokenStream>().into(),
);
self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new(
delim_span,
token::NoDelim,
&if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
[TokenTree::token(token::Pound, sp), TokenTree::token(token::Not, sp), body]
.iter().cloned().collect::<TokenStream>().into()
} else {
[TokenTree::token(token::Pound, sp), body]
.iter().cloned().collect::<TokenStream>().into()
},
)));
self.next()
}
}
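// Illustrative sketch (not in the original source): `next_desugared` rewrites a
// doc comment into the equivalent `#[doc = ...]` attribute, picking enough `#`s
// for the raw string to contain any `"#` sequences in the comment text:
//
//     /// said "#1" once
//     struct S;
//     // is seen by the parser roughly as:
//     #[doc = r##" said "#1" once"##]
//     struct S;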
#[derive(Clone, PartialEq)]
crate enum TokenType {
Token(TokenKind),
Keyword(Symbol),
Operator,
Lifetime,
Ident,
Path,
Type,
Const,
}
impl TokenType {
crate fn to_string(&self) -> String {
match *self {
TokenType::Token(ref t) => format!("`{}`", pprust::token_kind_to_string(t)),
TokenType::Keyword(kw) => format!("`{}`", kw),
TokenType::Operator => "an operator".to_string(),
TokenType::Lifetime => "lifetime".to_string(),
TokenType::Ident => "identifier".to_string(),
TokenType::Path => "path".to_string(),
TokenType::Type => "type".to_string(),
TokenType::Const => "const".to_string(),
}
}
}
/// Returns `true` if `IDENT t` can start a type -- `IDENT::a::b`, `IDENT<u8, u8>`,
/// `IDENT<<u8 as Trait>::AssocTy>`.
///
/// Types can also be of the form `IDENT(u8, u8) -> u8`; however, this assumes
/// that `IDENT` is not the ident of a fn trait.
fn can_continue_type_after_non_fn_ident(t: &Token) -> bool {
t == &token::ModSep || t == &token::Lt ||
t == &token::BinOp(token::Shl)
}
/// Information about the path to a module.
pub struct ModulePath {
name: String,
path_exists: bool,
pub result: Result<ModulePathSuccess, Error>,
}
pub struct ModulePathSuccess {
pub path: PathBuf,
pub directory_ownership: DirectoryOwnership,
warn: bool,
}
#[derive(Debug)]
enum LhsExpr {
NotYetParsed,
AttributesParsed(ThinVec<Attribute>),
AlreadyParsed(P<Expr>),
}
impl From<Option<ThinVec<Attribute>>> for LhsExpr {
fn from(o: Option<ThinVec<Attribute>>) -> Self {
if let Some(attrs) = o {
LhsExpr::AttributesParsed(attrs)
} else {
LhsExpr::NotYetParsed
}
}
}
impl From<P<Expr>> for LhsExpr {
fn from(expr: P<Expr>) -> Self {
LhsExpr::AlreadyParsed(expr)
}
}
#[derive(Copy, Clone, Debug)]
crate enum TokenExpectType {
Expect,
NoExpect,
}
impl<'a> Parser<'a> {
pub fn new(
sess: &'a ParseSess,
tokens: TokenStream,
directory: Option<Directory<'a>>,
recurse_into_file_modules: bool,
desugar_doc_comments: bool,
subparser_name: Option<&'static str>,
) -> Self {
let mut parser = Parser {
sess,
token: Token::dummy(),
prev_span: DUMMY_SP,
meta_var_span: None,
prev_token_kind: PrevTokenKind::Other,
restrictions: Restrictions::empty(),
recurse_into_file_modules,
directory: Directory {
path: Cow::from(PathBuf::new()),
ownership: DirectoryOwnership::Owned { relative: None }
},
root_module_name: None,
expected_tokens: Vec::new(),
token_cursor: TokenCursor {
frame: TokenCursorFrame::new(
DelimSpan::dummy(),
token::NoDelim,
&tokens.into(),
),
stack: Vec::new(),
},
desugar_doc_comments,
cfg_mods: true,
unmatched_angle_bracket_count: 0,
max_angle_bracket_count: 0,
unclosed_delims: Vec::new(),
last_unexpected_token_span: None,
last_type_ascription: None,
subparser_name,
};
parser.token = parser.next_tok();
if let Some(directory) = directory {
parser.directory = directory;
} else if !parser.token.span.is_dummy() {
if let FileName::Real(mut path) =
sess.source_map().span_to_unmapped_path(parser.token.span) {
path.pop();
parser.directory.path = Cow::from(path);
}
}
parser.process_potential_macro_variable();
parser
}
fn next_tok(&mut self) -> Token {
let mut next = if self.desugar_doc_comments {
self.token_cursor.next_desugared()
} else {
self.token_cursor.next()
};
if next.span.is_dummy() {
// Tweak the location for better diagnostics, but keep syntactic context intact.
next.span = self.prev_span.with_ctxt(next.span.ctxt());
}
next
}
/// Converts the current token to a string using `self`'s reader.
pub fn this_token_to_string(&self) -> String {
pprust::token_to_string(&self.token)
}
crate fn token_descr(&self) -> Option<&'static str> {
Some(match &self.token.kind {
_ if self.token.is_special_ident() => "reserved identifier",
_ if self.token.is_used_keyword() => "keyword",
_ if self.token.is_unused_keyword() => "reserved keyword",
token::DocComment(..) => "doc comment",
_ => return None,
})
}
crate fn this_token_descr(&self) -> String {
if let Some(prefix) = self.token_descr() {
format!("{} `{}`", prefix, self.this_token_to_string())
} else {
format!("`{}`", self.this_token_to_string())
}
}
crate fn unexpected<T>(&mut self) -> PResult<'a, T> {
match self.expect_one_of(&[], &[]) {
Err(e) => Err(e),
Ok(_) => unreachable!(),
}
}
/// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, bool /* recovered */> {
if self.expected_tokens.is_empty() {
if self.token == *t {
self.bump();
Ok(false)
} else {
self.unexpected_try_recover(t)
}
} else {
self.expect_one_of(slice::from_ref(t), &[])
}
}
/// Expects the next token to be an edible or inedible token. If edible,
/// consumes it; if inedible, returns without consuming anything.
/// Signals a fatal error if the next token is unexpected.
pub fn expect_one_of(
&mut self,
edible: &[TokenKind],
inedible: &[TokenKind],
) -> PResult<'a, bool /* recovered */> {
if edible.contains(&self.token.kind) {
self.bump();
Ok(false)
} else if inedible.contains(&self.token.kind) {
// leave it in the input
Ok(false)
} else if self.last_unexpected_token_span == Some(self.token.span) {
FatalError.raise();
} else {
self.expected_one_of_not_found(edible, inedible)
}
}
/// Returns the span of `expr` if it was not interpolated, or the span of the
/// interpolated token.
fn interpolated_or_expr_span(
&self,
expr: PResult<'a, P<Expr>>,
) -> PResult<'a, (Span, P<Expr>)> {
expr.map(|e| {
if self.prev_token_kind == PrevTokenKind::Interpolated {
(self.prev_span, e)
} else {
(e.span, e)
}
})
}
pub fn parse_ident(&mut self) -> PResult<'a, ast::Ident> {
self.parse_ident_common(true)
}
fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> {
match self.token.kind {
token::Ident(name, _) => {
if self.token.is_reserved_ident() {
let mut err = self.expected_ident_found();
if recover {
err.emit();
} else {
return Err(err);
}
}
let span = self.token.span;
self.bump();
Ok(Ident::new(name, span))
}
_ => {
Err(if self.prev_token_kind == PrevTokenKind::DocComment {
self.span_fatal_err(self.prev_span, Error::UselessDocComment)
} else {
self.expected_ident_found()
})
}
}
}
/// Checks if the next token is `tok`, and returns `true` if so.
///
/// This method will automatically add `tok` to `expected_tokens` if `tok` is not
/// encountered.
crate fn check(&mut self, tok: &TokenKind) -> bool {
let is_present = self.token == *tok;
if !is_present { self.expected_tokens.push(TokenType::Token(tok.clone())); }
is_present
}
/// Consumes the token `tok` if it exists. Returns whether the given token was present.
pub fn eat(&mut self, tok: &TokenKind) -> bool {
let is_present = self.check(tok);
if is_present { self.bump() }
is_present
}
fn check_keyword(&mut self, kw: Symbol) -> bool {
self.expected_tokens.push(TokenType::Keyword(kw));
self.token.is_keyword(kw)
}
/// If the next token is the given keyword, eats it and returns
/// `true`. Otherwise, returns `false`.
pub fn eat_keyword(&mut self, kw: Symbol) -> bool {
if self.check_keyword(kw) {
self.bump();
true
} else {
false
}
}
fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
if self.token.is_keyword(kw) {
self.bump();
true
} else {
false
}
}
/// If the next token is the given keyword, eats it and returns `Ok(())`.
/// Otherwise, signals an error.
fn expect_keyword(&mut self, kw: Symbol) -> PResult<'a, ()> {
if !self.eat_keyword(kw) {
self.unexpected()
} else {
Ok(())
}
}
crate fn check_ident(&mut self) -> bool {
if self.token.is_ident() {
true
} else {
self.expected_tokens.push(TokenType::Ident);
false
}
}
fn check_path(&mut self) -> bool {
if self.token.is_path_start() {
true
} else {
self.expected_tokens.push(TokenType::Path);
false
}
}
fn check_type(&mut self) -> bool {
if self.token.can_begin_type() {
true
} else {
self.expected_tokens.push(TokenType::Type);
false
}
}
fn check_const_arg(&mut self) -> bool {
if self.token.can_begin_const_arg() {
true
} else {
self.expected_tokens.push(TokenType::Const);
false
}
}
/// Expects and consumes a `+`. If `+=` is seen, replaces it with a `=`
/// and continues. If a `+` is not seen, returns `false`.
///
/// This is used when token-splitting `+=` into `+`.
/// See issue #47856 for an example of when this may occur.
fn eat_plus(&mut self) -> bool {
self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus)));
match self.token.kind {
token::BinOp(token::Plus) => {
self.bump();
true
}
token::BinOpEq(token::Plus) => {
let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1));
self.bump_with(token::Eq, span);
true
}
_ => false,
}
}
/// Checks to see if the next token is either `+` or `+=`;
/// otherwise returns `false`.
fn check_plus(&mut self) -> bool {
if self.token.is_like_plus() {
true
}
else {
self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus)));
false
}
}
/// Expects and consumes an `&`. If `&&` is seen, replaces it with a single
/// `&` and continues. If an `&` is not seen, signals an error.
fn expect_and(&mut self) -> PResult<'a, ()> {
self.expected_tokens.push(TokenType::Token(token::BinOp(token::And)));
match self.token.kind {
token::BinOp(token::And) => {
self.bump();
Ok(())
}
token::AndAnd => {
let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1));
Ok(self.bump_with(token::BinOp(token::And), span))
}
_ => self.unexpected()
}
}
/// Expects and consumes a `|`. If `||` is seen, replaces it with a single
/// `|` and continues. If a `|` is not seen, signals an error.
fn expect_or(&mut self) -> PResult<'a, ()> {
self.expected_tokens.push(TokenType::Token(token::BinOp(token::Or)));
match self.token.kind {
token::BinOp(token::Or) => {
self.bump();
Ok(())
}
token::OrOr => {
let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1));
Ok(self.bump_with(token::BinOp(token::Or), span))
}
_ => self.unexpected()
}
}
fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<ast::Name>) {
literal::expect_no_suffix(&self.sess.span_diagnostic, sp, kind, suffix)
}
/// Attempts to consume a `<`. If `<<` is seen, replaces it with a single
/// `<` and continues. If `<-` is seen, replaces it with a single `<`
/// and continues. If a `<` is not seen, returns `false`.
///
/// This is meant to be used when parsing generics on a path to get the
/// starting token.
fn eat_lt(&mut self) -> bool {
self.expected_tokens.push(TokenType::Token(token::Lt));
let ate = match self.token.kind {
token::Lt => {
self.bump();
true
}
token::BinOp(token::Shl) => {
let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1));
self.bump_with(token::Lt, span);
true
}
token::LArrow => {
let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1));
self.bump_with(token::BinOp(token::Minus), span);
true
}
_ => false,
};
if ate {
// See doc comment for `unmatched_angle_bracket_count`.
self.unmatched_angle_bracket_count += 1;
self.max_angle_bracket_count += 1;
debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
}
ate
}
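// Illustrative sketch (not in the original source): splitting `<<` matters for
// types whose generic argument itself starts with a qualified path:
//
//     let item: Vec<<Vec<u8> as IntoIterator>::Item> = vec![1u8];
//     //            ^^ lexed as one `<<` (Shl) token; `eat_lt` consumes the first
//     //               `<` and leaves a plain `<` for the qualified path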
fn expect_lt(&mut self) -> PResult<'a, ()> {
if !self.eat_lt() {
self.unexpected()
} else {
Ok(())
}
}
/// Expects and consumes a single `>` token. If a `>>` is seen, replaces it
/// with a single `>` and continues. If a `>` is not seen, signals an error.
fn expect_gt(&mut self) -> PResult<'a, ()> {
self.expected_tokens.push(TokenType::Token(token::Gt));
let ate = match self.token.kind {
token::Gt => {
self.bump();
Some(())
}
token::BinOp(token::Shr) => {
let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1));
Some(self.bump_with(token::Gt, span))
}
token::BinOpEq(token::Shr) => {
let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1));
Some(self.bump_with(token::Ge, span))
}
token::Ge => {
let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1));
Some(self.bump_with(token::Eq, span))
}
_ => None,
};
match ate {
Some(_) => {
// See doc comment for `unmatched_angle_bracket_count`.
if self.unmatched_angle_bracket_count > 0 {
self.unmatched_angle_bracket_count -= 1;
debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
}
Ok(())
},
None => self.unexpected(),
}
}
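// Illustrative sketch (not in the original source): nested generics end with a
// `>>` token, which the lexer emits as a single `Shr`; `expect_gt` splits it so
// both argument lists can be closed:
//
//     let grid: Vec<Vec<u8>> = Vec::new();
//     //                   ^^ one `>>` token, consumed as two `>`s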
/// Parses a sequence, including the closing delimiter. The function
/// `f` must consume tokens until reaching the next separator or
/// closing bracket.
pub fn parse_seq_to_end<T>(
&mut self,
ket: &TokenKind,
sep: SeqSep,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, Vec<T>> {
let (val, _, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
if !recovered {
self.bump();
}
Ok(val)
}
/// Parses a sequence, not including the closing delimiter. The function
/// `f` must consume tokens until reaching the next separator or
/// closing bracket.
pub fn parse_seq_to_before_end<T>(
&mut self,
ket: &TokenKind,
sep: SeqSep,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (Vec<T>, bool, bool)> {
self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
}
fn expect_any_with_type(&mut self, kets: &[&TokenKind], expect: TokenExpectType) -> bool {
kets.iter().any(|k| {
match expect {
TokenExpectType::Expect => self.check(k),
TokenExpectType::NoExpect => self.token == **k,
}
})
}
crate fn parse_seq_to_before_tokens<T>(
&mut self,
kets: &[&TokenKind],
sep: SeqSep,
expect: TokenExpectType,
mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (Vec<T>, bool /* trailing */, bool /* recovered */)> {
let mut first = true;
let mut recovered = false;
let mut trailing = false;
let mut v = vec![];
while !self.expect_any_with_type(kets, expect) {
if let token::CloseDelim(..) | token::Eof = self.token.kind {
break
}
if let Some(ref t) = sep.sep {
if first {
first = false;
} else {
match self.expect(t) {
Ok(false) => {}
Ok(true) => {
recovered = true;
break;
}
Err(mut e) => {
// Attempt to keep parsing if it was a similar separator
if let Some(ref tokens) = t.similar_tokens() {
if tokens.contains(&self.token.kind) {
self.bump();
}
}
e.emit();
// Attempt to keep parsing if it was an omitted separator
match f(self) {
Ok(t) => {
v.push(t);
continue;
},
Err(mut e) => {
e.cancel();
break;
}
}
}
}
}
}
if sep.trailing_sep_allowed && self.expect_any_with_type(kets, expect) {
trailing = true;
break;
}
let t = f(self)?;
v.push(t);
}
Ok((v, trailing, recovered))
}
/// Parses a sequence, including the closing delimiter. The function
/// `f` must consume tokens until reaching the next separator or
/// closing bracket.
fn parse_unspanned_seq<T>(
&mut self,
bra: &TokenKind,
ket: &TokenKind,
sep: SeqSep,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (Vec<T>, bool)> {
self.expect(bra)?;
let (result, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
if !recovered {
self.eat(ket);
}
Ok((result, trailing))
}
fn parse_delim_comma_seq<T>(
&mut self,
delim: DelimToken,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (Vec<T>, bool)> {
self.parse_unspanned_seq(
&token::OpenDelim(delim),
&token::CloseDelim(delim),
SeqSep::trailing_allowed(token::Comma),
f,
)
}
fn parse_paren_comma_seq<T>(
&mut self,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (Vec<T>, bool)> {
self.parse_delim_comma_seq(token::Paren, f)
}
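// Illustrative usage sketch (assuming `self` is a `&mut Parser`, as in the rest
// of this impl): the parenthesized-argument form of a path, `Fn(T, U) -> R`, is
// read with the helper above, mirroring the call in `parse_path_segment` below:
//
//     let (inputs, _trailing) = self.parse_paren_comma_seq(|p| p.parse_ty())?;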
/// Advances the parser by one token.
pub fn bump(&mut self) {
if self.prev_token_kind == PrevTokenKind::Eof {
// Bumping after EOF is a bad sign, usually an infinite loop.
self.bug("attempted to bump the parser past EOF (may be stuck in a loop)");
}
self.prev_span = self.meta_var_span.take().unwrap_or(self.token.span);
// Record last token kind for possible error recovery.
self.prev_token_kind = match self.token.kind {
token::DocComment(..) => PrevTokenKind::DocComment,
token::Comma => PrevTokenKind::Comma,
token::BinOp(token::Plus) => PrevTokenKind::Plus,
token::BinOp(token::Or) => PrevTokenKind::BitOr,
token::Interpolated(..) => PrevTokenKind::Interpolated,
token::Eof => PrevTokenKind::Eof,
token::Ident(..) => PrevTokenKind::Ident,
_ => PrevTokenKind::Other,
};
self.token = self.next_tok();
self.expected_tokens.clear();
// check after each token
self.process_potential_macro_variable();
}
/// Advances the parser using the provided token as the next one. Use this when
/// consuming a part of a token, e.g., a single `<` from `<<`.
fn bump_with(&mut self, next: TokenKind, span: Span) {
self.prev_span = self.token.span.with_hi(span.lo());
// It would be incorrect to record the kind of the current token, but
// fortunately for tokens currently using `bump_with`, the
// prev_token_kind will be of no use anyway.
self.prev_token_kind = PrevTokenKind::Other;
self.token = Token::new(next, span);
self.expected_tokens.clear();
}
pub fn look_ahead<R, F>(&self, dist: usize, f: F) -> R where
F: FnOnce(&Token) -> R,
{
if dist == 0 {
return f(&self.token);
}
let frame = &self.token_cursor.frame;
f(&match frame.tree_cursor.look_ahead(dist - 1) {
Some(tree) => match tree {
TokenTree::Token(token) => token,
TokenTree::Delimited(dspan, delim, _) =>
Token::new(token::OpenDelim(delim), dspan.open),
}
None => Token::new(token::CloseDelim(frame.delim), frame.span.close)
})
}
/// Returns whether any of the given keywords are `dist` tokens ahead of the current one.
fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw)))
}
/// Is the current token one of the keywords that signal a bare function type?
fn token_is_bare_fn_keyword(&mut self) -> bool {
self.check_keyword(kw::Fn) ||
self.check_keyword(kw::Unsafe) ||
self.check_keyword(kw::Extern)
}
/// Parses a `TyKind::BareFn` type.
fn parse_ty_bare_fn(&mut self, generic_params: Vec<GenericParam>) -> PResult<'a, TyKind> {
/*
[unsafe] [extern "ABI"] fn (S) -> T
^~~~^ ^~~~^ ^~^ ^
| | | |
| | | Return type
| | Argument types
| |
| ABI
Function Style
*/
let unsafety = self.parse_unsafety();
let abi = if self.eat_keyword(kw::Extern) {
self.parse_opt_abi()?.unwrap_or(Abi::C)
} else {
Abi::Rust
};
self.expect_keyword(kw::Fn)?;
let (inputs, c_variadic) = self.parse_fn_args(false, true)?;
let ret_ty = self.parse_ret_ty(false)?;
let decl = P(FnDecl {
inputs,
output: ret_ty,
c_variadic,
});
Ok(TyKind::BareFn(P(BareFnTy {
abi,
unsafety,
generic_params,
decl,
})))
}
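// Illustrative sketch (not in the original source): bare function types this
// method accepts (`some_ffi_fn` is a hypothetical `extern "C"` function):
//
//     let trim: for<'a> fn(&'a str) -> &'a str = str::trim;
//     let callback: unsafe extern "C" fn(i32) -> i32 = some_ffi_fn;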
/// Parses asyncness: `async` or nothing.
fn parse_asyncness(&mut self) -> IsAsync {
if self.eat_keyword(kw::Async) {
IsAsync::Async {
closure_id: ast::DUMMY_NODE_ID,
return_impl_trait_id: ast::DUMMY_NODE_ID,
}
} else {
IsAsync::NotAsync
}
}
/// Parses unsafety: `unsafe` or nothing.
fn parse_unsafety(&mut self) -> Unsafety {
if self.eat_keyword(kw::Unsafe) {
Unsafety::Unsafe
} else {
Unsafety::Normal
}
}
/// Parses the items in a trait declaration.
pub fn parse_trait_item(&mut self, at_end: &mut bool) -> PResult<'a, TraitItem> {
maybe_whole!(self, NtTraitItem, |x| x);
let attrs = self.parse_outer_attributes()?;
let mut unclosed_delims = vec![];
let (mut item, tokens) = self.collect_tokens(|this| {
let item = this.parse_trait_item_(at_end, attrs);
unclosed_delims.append(&mut this.unclosed_delims);
item
})?;
self.unclosed_delims.append(&mut unclosed_delims);
// See `parse_item` for why this clause is here.
if !item.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) {
item.tokens = Some(tokens);
}
Ok(item)
}
fn parse_trait_item_(&mut self,
at_end: &mut bool,
mut attrs: Vec<Attribute>) -> PResult<'a, TraitItem> {
let lo = self.token.span;
self.eat_bad_pub();
let (name, node, generics) = if self.eat_keyword(kw::Type) {
self.parse_trait_item_assoc_ty()?
} else if self.is_const_item() {
self.expect_keyword(kw::Const)?;
let ident = self.parse_ident()?;
self.expect(&token::Colon)?;
let ty = self.parse_ty()?;
let default = if self.eat(&token::Eq) {
let expr = self.parse_expr()?;
self.expect(&token::Semi)?;
Some(expr)
} else {
self.expect(&token::Semi)?;
None
};
(ident, TraitItemKind::Const(ty, default), ast::Generics::default())
} else if let Some(mac) = self.parse_assoc_macro_invoc("trait", None, &mut false)? {
// trait item macro.
(Ident::invalid(), ast::TraitItemKind::Macro(mac), ast::Generics::default())
} else {
let (constness, unsafety, asyncness, abi) = self.parse_fn_front_matter()?;
let ident = self.parse_ident()?;
let mut generics = self.parse_generics()?;
let decl = self.parse_fn_decl_with_self(|p: &mut Parser<'a>| {
// This is somewhat dubious; we don't want to allow
// argument names to be left off if there is a
// definition...
// We don't allow argument names to be left off in edition 2018.
let is_name_required = p.token.span.rust_2018();
p.parse_arg_general(true, false, |_| is_name_required)
})?;
generics.where_clause = self.parse_where_clause()?;
let sig = ast::MethodSig {
header: FnHeader {
unsafety,
constness,
abi,
asyncness,
},
decl,
};
let body = match self.token.kind {
token::Semi => {
self.bump();
*at_end = true;
debug!("parse_trait_methods(): parsing required method");
None
}
token::OpenDelim(token::Brace) => {
debug!("parse_trait_methods(): parsing provided method");
*at_end = true;
let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
attrs.extend(inner_attrs.iter().cloned());
Some(body)
}
token::Interpolated(ref nt) => {
match **nt {
token::NtBlock(..) => {
*at_end = true;
let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
attrs.extend(inner_attrs.iter().cloned());
Some(body)
}
_ => {
return self.expected_semi_or_open_brace();
}
}
}
_ => {
return self.expected_semi_or_open_brace();
}
};
(ident, ast::TraitItemKind::Method(sig, body), generics)
};
Ok(TraitItem {
id: ast::DUMMY_NODE_ID,
ident: name,
attrs,
generics,
node,
span: lo.to(self.prev_span),
tokens: None,
})
}
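// Illustrative sketch (not in the original source): the kinds of trait items
// handled above, in source form:
//
//     trait Animal {
//         const LEGS: u32 = 4;            // associated const, with a default
//         type Habitat;                   // associated type
//         fn name(&self) -> String;       // required method (ends in `;`)
//         fn greet(&self) -> String {     // provided method (has a block body)
//             format!("a {}-legged animal", Self::LEGS)
//         }
//     }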
/// Parses an optional return type `[ -> TY ]` in a function declaration.
fn parse_ret_ty(&mut self, allow_plus: bool) -> PResult<'a, FunctionRetTy> {
if self.eat(&token::RArrow) {
Ok(FunctionRetTy::Ty(self.parse_ty_common(allow_plus, true, false)?))
} else {
Ok(FunctionRetTy::Default(self.token.span.shrink_to_lo()))
}
}
/// Parses a type.
pub fn parse_ty(&mut self) -> PResult<'a, P<Ty>> {
self.parse_ty_common(true, true, false)
}
/// Parses a type in restricted contexts where `+` is not permitted.
///
/// Example 1: `&'a TYPE`
/// `+` is prohibited to maintain operator priority (P(+) < P(&)).
/// Example 2: `value1 as TYPE + value2`
/// `+` is prohibited to avoid interactions with expression grammar.
fn parse_ty_no_plus(&mut self) -> PResult<'a, P<Ty>> {
self.parse_ty_common(false, true, false)
}
fn parse_ty_common(&mut self, allow_plus: bool, allow_qpath_recovery: bool,
allow_c_variadic: bool) -> PResult<'a, P<Ty>> {
maybe_recover_from_interpolated_ty_qpath!(self, allow_qpath_recovery);
maybe_whole!(self, NtTy, |x| x);
let lo = self.token.span;
let mut impl_dyn_multi = false;
let node = if self.eat(&token::OpenDelim(token::Paren)) {
// `(TYPE)` is a parenthesized type.
// `(TYPE,)` is a tuple with a single field of type TYPE.
let mut ts = vec![];
let mut last_comma = false;
while self.token != token::CloseDelim(token::Paren) {
ts.push(self.parse_ty()?);
if self.eat(&token::Comma) {
last_comma = true;
} else {
last_comma = false;
break;
}
}
let trailing_plus = self.prev_token_kind == PrevTokenKind::Plus;
self.expect(&token::CloseDelim(token::Paren))?;
if ts.len() == 1 && !last_comma {
let ty = ts.into_iter().nth(0).unwrap().into_inner();
let maybe_bounds = allow_plus && self.token.is_like_plus();
match ty.node {
// `(TY_BOUND_NOPAREN) + BOUND + ...`.
TyKind::Path(None, ref path) if maybe_bounds => {
self.parse_remaining_bounds(Vec::new(), path.clone(), lo, true)?
}
TyKind::TraitObject(ref bounds, TraitObjectSyntax::None)
if maybe_bounds && bounds.len() == 1 && !trailing_plus => {
let path = match bounds[0] {
GenericBound::Trait(ref pt, ..) => pt.trait_ref.path.clone(),
GenericBound::Outlives(..) => self.bug("unexpected lifetime bound"),
};
self.parse_remaining_bounds(Vec::new(), path, lo, true)?
}
// `(TYPE)`
_ => TyKind::Paren(P(ty))
}
} else {
TyKind::Tup(ts)
}
} else if self.eat(&token::Not) {
// Never type `!`
TyKind::Never
} else if self.eat(&token::BinOp(token::Star)) {
// Raw pointer
TyKind::Ptr(self.parse_ptr()?)
} else if self.eat(&token::OpenDelim(token::Bracket)) {
// Array or slice
let t = self.parse_ty()?;
// Parse optional `; EXPR` in `[TYPE; EXPR]`
let t = match self.maybe_parse_fixed_length_of_vec()? {
None => TyKind::Slice(t),
Some(length) => TyKind::Array(t, AnonConst {
id: ast::DUMMY_NODE_ID,
value: length,
}),
};
self.expect(&token::CloseDelim(token::Bracket))?;
t
} else if self.check(&token::BinOp(token::And)) || self.check(&token::AndAnd) {
// Reference
self.expect_and()?;
self.parse_borrowed_pointee()?
} else if self.eat_keyword_noexpect(kw::Typeof) {
// `typeof(EXPR)`
// In order to not be ambiguous, the type must be surrounded by parens.
self.expect(&token::OpenDelim(token::Paren))?;
let e = AnonConst {
id: ast::DUMMY_NODE_ID,
value: self.parse_expr()?,
};
self.expect(&token::CloseDelim(token::Paren))?;
TyKind::Typeof(e)
} else if self.eat_keyword(kw::Underscore) {
// A type to be inferred `_`
TyKind::Infer
} else if self.token_is_bare_fn_keyword() {
// Function pointer type
self.parse_ty_bare_fn(Vec::new())?
} else if self.check_keyword(kw::For) {
// Function pointer type or bound list (trait object type) starting with a poly-trait.
// `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T`
// `for<'lt> Trait1<'lt> + Trait2 + 'a`
let lo = self.token.span;
2017-03-16 21:47:32 +00:00
let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
if self.token_is_bare_fn_keyword() {
self.parse_ty_bare_fn(lifetime_defs)?
} else {
2017-03-16 21:47:32 +00:00
let path = self.parse_path(PathStyle::Type)?;
let parse_plus = allow_plus && self.check_plus();
self.parse_remaining_bounds(lifetime_defs, path, lo, parse_plus)?
}
} else if self.eat_keyword(kw::Impl) {
// Always parse bounds greedily for better error recovery.
let bounds = self.parse_generic_bounds(None)?;
impl_dyn_multi = bounds.len() > 1 || self.prev_token_kind == PrevTokenKind::Plus;
TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds)
} else if self.check_keyword(kw::Dyn) &&
(self.token.span.rust_2018() ||
self.look_ahead(1, |t| t.can_begin_bound() &&
!can_continue_type_after_non_fn_ident(t))) {
self.bump(); // `dyn`
// Always parse bounds greedily for better error recovery.
let bounds = self.parse_generic_bounds(None)?;
impl_dyn_multi = bounds.len() > 1 || self.prev_token_kind == PrevTokenKind::Plus;
TyKind::TraitObject(bounds, TraitObjectSyntax::Dyn)
} else if self.check(&token::Question) ||
self.check_lifetime() && self.look_ahead(1, |t| t.is_like_plus()) {
// Bound list (trait object type)
TyKind::TraitObject(self.parse_generic_bounds_common(allow_plus, None)?,
TraitObjectSyntax::None)
} else if self.eat_lt() {
// Qualified path
let (qself, path) = self.parse_qpath(PathStyle::Type)?;
TyKind::Path(Some(qself), path)
} else if self.token.is_path_start() {
// Simple path
let path = self.parse_path(PathStyle::Type)?;
if self.eat(&token::Not) {
// Macro invocation in type position
let (delim, tts) = self.expect_delimited_token_tree()?;
let node = Mac_ {
path,
tts,
delim,
prior_type_ascription: self.last_type_ascription,
};
TyKind::Mac(respan(lo.to(self.prev_span), node))
} else {
// Just a type path or bound list (trait object type) starting with a trait.
// `Type`
// `Trait1 + Trait2 + 'a`
if allow_plus && self.check_plus() {
self.parse_remaining_bounds(Vec::new(), path, lo, true)?
} else {
TyKind::Path(None, path)
}
}
} else if self.check(&token::DotDotDot) {
if allow_c_variadic {
self.eat(&token::DotDotDot);
TyKind::CVarArgs
} else {
return Err(self.fatal(
"only foreign functions are allowed to be C-variadic"
));
}
} else {
let msg = format!("expected type, found {}", self.this_token_descr());
let mut err = self.fatal(&msg);
err.span_label(self.token.span, "expected type");
self.maybe_annotate_with_ascription(&mut err, true);
return Err(err);
};
let span = lo.to(self.prev_span);
let ty = P(Ty { node, span, id: ast::DUMMY_NODE_ID });
// Try to recover from use of `+` with incorrect priority.
self.maybe_report_ambiguous_plus(allow_plus, impl_dyn_multi, &ty);
self.maybe_recover_from_bad_type_plus(allow_plus, &ty)?;
self.maybe_recover_from_bad_qpath(ty, allow_qpath_recovery)
}
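// Illustrative sketch (not in the original source): a few of the type forms
// distinguished in `parse_ty_common` above:
//
//     let a: (i32) = 0;                          // parenthesized type, same as `i32`
//     let b: (i32,) = (0,);                      // one-element tuple type
//     let c: &(dyn std::fmt::Debug + Send) = &0; // `+` in a bound list needs parens
//                                                // behind `&` (see the `+` recovery
//                                                // at the end of `parse_ty_common`)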
fn parse_remaining_bounds(&mut self, generic_params: Vec<GenericParam>, path: ast::Path,
lo: Span, parse_plus: bool) -> PResult<'a, TyKind> {
let poly_trait_ref = PolyTraitRef::new(generic_params, path, lo.to(self.prev_span));
let mut bounds = vec![GenericBound::Trait(poly_trait_ref, TraitBoundModifier::None)];
if parse_plus {
self.eat_plus(); // `+`, or `+=` gets split and `+` is discarded
bounds.append(&mut self.parse_generic_bounds(Some(self.prev_span))?);
}
Ok(TyKind::TraitObject(bounds, TraitObjectSyntax::None))
}
fn parse_borrowed_pointee(&mut self) -> PResult<'a, TyKind> {
let opt_lifetime = if self.check_lifetime() { Some(self.expect_lifetime()) } else { None };
let mutbl = self.parse_mutability();
let ty = self.parse_ty_no_plus()?;
return Ok(TyKind::Rptr(opt_lifetime, MutTy { ty, mutbl }));
}
fn parse_ptr(&mut self) -> PResult<'a, MutTy> {
let mutbl = if self.eat_keyword(kw::Mut) {
Mutability::Mutable
} else if self.eat_keyword(kw::Const) {
Mutability::Immutable
} else {
let span = self.prev_span;
let msg = "expected mut or const in raw pointer type";
self.struct_span_err(span, msg)
.span_label(span, msg)
.help("use `*mut T` or `*const T` as appropriate")
.emit();
Mutability::Immutable
};
let t = self.parse_ty_no_plus()?;
Ok(MutTy { ty: t, mutbl })
}
fn is_named_argument(&self) -> bool {
let offset = match self.token.kind {
token::Interpolated(ref nt) => match **nt {
token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
_ => 0,
}
token::BinOp(token::And) | token::AndAnd => 1,
_ if self.token.is_keyword(kw::Mut) => 1,
_ => 0,
};
self.look_ahead(offset, |t| t.is_ident()) &&
self.look_ahead(offset + 1, |t| t == &token::Colon)
}
/// Skips unexpected attributes and doc comments in this position and emits an
/// appropriate error.
/// This version of `parse_arg` doesn't necessarily require identifier names.
fn parse_arg_general<F>(
&mut self,
is_trait_item: bool,
allow_c_variadic: bool,
is_name_required: F,
) -> PResult<'a, Arg>
where
F: Fn(&token::Token) -> bool
{
let lo = self.token.span;
let attrs = self.parse_arg_attributes()?;
if let Some(mut arg) = self.parse_self_arg()? {
arg.attrs = attrs.into();
return self.recover_bad_self_arg(arg, is_trait_item);
}
let is_name_required = is_name_required(&self.token);
let (pat, ty) = if is_name_required || self.is_named_argument() {
debug!("parse_arg_general parse_pat (is_name_required:{})", is_name_required);
let pat = self.parse_pat(Some("argument name"))?;
if let Err(mut err) = self.expect(&token::Colon) {
if let Some(ident) = self.argument_without_type(
&mut err,
pat,
is_name_required,
is_trait_item,
) {
err.emit();
return Ok(dummy_arg(ident));
} else {
return Err(err);
}
}
self.eat_incorrect_doc_comment_for_arg_type();
(pat, self.parse_ty_common(true, true, allow_c_variadic)?)
} else {
debug!("parse_arg_general ident_to_pat");
let parser_snapshot_before_ty = self.clone();
self.eat_incorrect_doc_comment_for_arg_type();
let mut ty = self.parse_ty_common(true, true, allow_c_variadic);
if ty.is_ok() && self.token != token::Comma &&
self.token != token::CloseDelim(token::Paren) {
// This wasn't actually a type, but a pattern looking like a type,
// so we are going to roll back and re-parse for recovery.
ty = self.unexpected();
}
match ty {
Ok(ty) => {
let ident = Ident::new(kw::Invalid, self.prev_span);
let pat = P(Pat {
id: ast::DUMMY_NODE_ID,
node: PatKind::Ident(
BindingMode::ByValue(Mutability::Immutable), ident, None),
span: ty.span,
});
(pat, ty)
}
Err(mut err) => {
// If this is a C-variadic argument and we hit an error, return the
// error.
if self.token == token::DotDotDot {
return Err(err);
}
// Recover from attempting to parse the argument as a type without pattern.
err.cancel();
mem::replace(self, parser_snapshot_before_ty);
self.recover_arg_parse()?
}
}
};
let span = lo.to(self.token.span);
Ok(Arg { attrs: attrs.into(), id: ast::DUMMY_NODE_ID, pat, span, ty })
}
/// Parses an argument in a lambda header (e.g., `|arg, arg|`).
fn parse_fn_block_arg(&mut self) -> PResult<'a, Arg> {
let lo = self.token.span;
let attrs = self.parse_arg_attributes()?;
let pat = self.parse_pat(Some("argument name"))?;
let t = if self.eat(&token::Colon) {
self.parse_ty()?
} else {
P(Ty {
id: ast::DUMMY_NODE_ID,
node: TyKind::Infer,
span: self.prev_span,
})
};
let span = lo.to(self.token.span);
Ok(Arg {
attrs: attrs.into(),
ty: t,
pat,
span,
id: ast::DUMMY_NODE_ID
})
}
fn maybe_parse_fixed_length_of_vec(&mut self) -> PResult<'a, Option<P<ast::Expr>>> {
if self.eat(&token::Semi) {
Ok(Some(self.parse_expr()?))
} else {
Ok(None)
}
}
/// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`).
crate fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
maybe_whole_expr!(self);
let minus_lo = self.token.span;
let minus_present = self.eat(&token::BinOp(token::Minus));
let lo = self.token.span;
let literal = self.parse_lit()?;
let hi = self.prev_span;
let expr = self.mk_expr(lo.to(hi), ExprKind::Lit(literal), ThinVec::new());
if minus_present {
let minus_hi = self.prev_span;
let unary = self.mk_unary(UnOp::Neg, expr);
Ok(self.mk_expr(minus_lo.to(minus_hi), unary, ThinVec::new()))
} else {
Ok(expr)
}
}
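// Illustrative sketch (not in the original source): this is what allows negative
// literals in pattern position, e.g.:
//
//     match x {
//         -1 => "minus one",   // `'-' lit`
//         2 => "two",          // plain `lit`
//         _ => "other",
//     }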
fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> {
match self.token.kind {
token::Ident(name, _) if name.is_path_segment_keyword() => {
let span = self.token.span;
self.bump();
Ok(Ident::new(name, span))
}
_ => self.parse_ident(),
}
}
fn parse_ident_or_underscore(&mut self) -> PResult<'a, ast::Ident> {
match self.token.kind {
token::Ident(name, false) if name == kw::Underscore => {
let span = self.token.span;
self.bump();
Ok(Ident::new(name, span))
}
_ => self.parse_ident(),
}
}
/// Parses a qualified path.
/// Assumes that the leading `<` has been parsed already.
///
/// `qualified_path = <type [as trait_ref]>::path`
///
/// # Examples
/// `<T>::default`
/// `<T as U>::a`
/// `<T as U>::F::a<S>` (without disambiguator)
/// `<T as U>::F::a::<S>` (with disambiguator)
fn parse_qpath(&mut self, style: PathStyle) -> PResult<'a, (QSelf, ast::Path)> {
let lo = self.prev_span;
let ty = self.parse_ty()?;
// `path` will contain the prefix of the path up to the `>`,
// if any (e.g., `U` in the `<T as U>::*` examples
// above). `path_span` has the span of that path, or an empty
// span in the case of something like `<T>::Bar`.
let (mut path, path_span);
if self.eat_keyword(kw::As) {
let path_lo = self.token.span;
path = self.parse_path(PathStyle::Type)?;
path_span = path_lo.to(self.prev_span);
} else {
path_span = self.token.span.to(self.token.span);
path = ast::Path { segments: Vec::new(), span: path_span };
}
// See doc comment for `unmatched_angle_bracket_count`.
self.expect(&token::Gt)?;
if self.unmatched_angle_bracket_count > 0 {
self.unmatched_angle_bracket_count -= 1;
debug!("parse_qpath: (decrement) count={:?}", self.unmatched_angle_bracket_count);
}
self.expect(&token::ModSep)?;
let qself = QSelf { ty, path_span, position: path.segments.len() };
self.parse_path_segments(&mut path.segments, style)?;
Ok((qself, ast::Path { segments: path.segments, span: lo.to(self.prev_span) }))
}
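// Illustrative sketch (not in the original source): qualified paths in the two
// forms described above:
//
//     let v = <Vec<u8> as Default>::default();  // `<type as trait_ref>::path`
//     let n = <u64>::from(3u32);                // `<type>::path`, no `as` clause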
/// Parses simple paths.
///
/// `path = [::] segment+`
/// `segment = ident | ident[::]<args> | ident[::](args) [-> type]`
///
/// # Examples
/// `a::b::C<D>` (without disambiguator)
/// `a::b::C::<D>` (with disambiguator)
/// `Fn(Args)` (without disambiguator)
/// `Fn::(Args)` (with disambiguator)
pub fn parse_path(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
maybe_whole!(self, NtPath, |path| {
if style == PathStyle::Mod &&
path.segments.iter().any(|segment| segment.args.is_some()) {
self.diagnostic().span_err(path.span, "unexpected generic arguments in path");
}
path
});
let lo = self.meta_var_span.unwrap_or(self.token.span);
let mut segments = Vec::new();
let mod_sep_ctxt = self.token.span.ctxt();
if self.eat(&token::ModSep) {
segments.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
}
self.parse_path_segments(&mut segments, style)?;
Ok(ast::Path { segments, span: lo.to(self.prev_span) })
}
/// Like `parse_path`, but also supports parsing `Word` meta items into paths for
/// backwards-compatibility. This is used when parsing derive macro paths in `#[derive]`
/// attributes.
pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
let meta_ident = match self.token.kind {
token::Interpolated(ref nt) => match **nt {
token::NtMeta(ref meta) => match meta.node {
ast::MetaItemKind::Word => Some(meta.path.clone()),
_ => None,
},
_ => None,
},
_ => None,
};
if let Some(path) = meta_ident {
self.bump();
return Ok(path);
}
self.parse_path(style)
}
crate fn parse_path_segments(&mut self,
segments: &mut Vec<PathSegment>,
style: PathStyle)
-> PResult<'a, ()> {
loop {
let segment = self.parse_path_segment(style)?;
if style == PathStyle::Expr {
// In order to check for trailing angle brackets, we must have finished
// recursing (`parse_path_segment` can indirectly call this function),
// that is, the next token must be the highlighted part of the below example:
//
// `Foo::<Bar as Baz<T>>::Qux`
// ^ here
//
// As opposed to the below highlight (if we had only finished the first
// recursion):
//
// `Foo::<Bar as Baz<T>>::Qux`
// ^ here
//
// `PathStyle::Expr` is only provided at the root invocation and never in
// `parse_path_segment` to recurse and therefore can be checked to maintain
// this invariant.
self.check_trailing_angle_brackets(&segment, token::ModSep);
}
segments.push(segment);
if self.is_import_coupler() || !self.eat(&token::ModSep) {
return Ok(());
}
}
}
fn parse_path_segment(&mut self, style: PathStyle) -> PResult<'a, PathSegment> {
let ident = self.parse_path_segment_ident()?;
let is_args_start = |token: &Token| match token.kind {
token::Lt | token::BinOp(token::Shl) | token::OpenDelim(token::Paren)
| token::LArrow => true,
_ => false,
};
let check_args_start = |this: &mut Self| {
this.expected_tokens.extend_from_slice(
&[TokenType::Token(token::Lt), TokenType::Token(token::OpenDelim(token::Paren))]
);
is_args_start(&this.token)
};
Ok(if style == PathStyle::Type && check_args_start(self) ||
style != PathStyle::Mod && self.check(&token::ModSep)
&& self.look_ahead(1, |t| is_args_start(t)) {
// We use `style == PathStyle::Expr` to check if this is in a recursion or not. If
// it isn't, then we reset the unmatched angle bracket count as we're about to start
// parsing a new path.
if style == PathStyle::Expr {
self.unmatched_angle_bracket_count = 0;
self.max_angle_bracket_count = 0;
}
// Generic arguments are found - `<`, `(`, `::<` or `::(`.
self.eat(&token::ModSep);
let lo = self.token.span;
let args = if self.eat_lt() {
// `<'a, T, A = U>`
let (args, constraints) =
self.parse_generic_args_with_leaning_angle_bracket_recovery(style, lo)?;
self.expect_gt()?;
let span = lo.to(self.prev_span);
AngleBracketedArgs { args, constraints, span }.into()
} else {
// `(T, U) -> R`
let (inputs, _) = self.parse_paren_comma_seq(|p| p.parse_ty())?;
let span = lo.to(self.prev_span);
let output = if self.eat(&token::RArrow) {
Some(self.parse_ty_common(false, false, false)?)
} else {
None
};
ParenthesizedArgs { inputs, output, span }.into()
};
PathSegment { ident, args, id: ast::DUMMY_NODE_ID }
} else {
// Generic arguments are not found.
PathSegment::from_ident(ident)
})
}
crate fn check_lifetime(&mut self) -> bool {
self.expected_tokens.push(TokenType::Lifetime);
self.token.is_lifetime()
}
/// Parses a single lifetime `'a` or panics.
crate fn expect_lifetime(&mut self) -> Lifetime {
if let Some(ident) = self.token.lifetime() {
let span = self.token.span;
self.bump();
Lifetime { ident: Ident::new(ident.name, span), id: ast::DUMMY_NODE_ID }
} else {
self.span_bug(self.token.span, "not a lifetime")
}
}
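/// Eats an optional label if one is present, e.g. (illustrative) the `'outer` in
/// `'outer: loop { .. }` or in `break 'outer`.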
fn eat_label(&mut self) -> Option<Label> {
if let Some(ident) = self.token.lifetime() {
let span = self.token.span;
self.bump();
Some(Label { ident: Ident::new(ident.name, span) })
} else {
None
}
}
/// Parses mutability (`mut` or nothing).
fn parse_mutability(&mut self) -> Mutability {
if self.eat_keyword(kw::Mut) {
Mutability::Mutable
} else {
Mutability::Immutable
}
}
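/// Parses a field name: either an identifier or a tuple-style integer index,
/// e.g. (illustrative) the `0` in `Foo { 0: x }`.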
fn parse_field_name(&mut self) -> PResult<'a, Ident> {
if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) =
self.token.kind {
self.expect_no_suffix(self.token.span, "a tuple index", suffix);
self.bump();
Ok(Ident::new(symbol, self.prev_span))
} else {
self.parse_ident_common(false)
}
}
/// Parses a single struct literal field: `ident (: expr)?`.
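/// # Examples (illustrative)
/// `x: 1` in `Foo { x: 1 }`, or the shorthand `y` in `Foo { y }`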
fn parse_field(&mut self) -> PResult<'a, Field> {
let attrs = self.parse_outer_attributes()?;
let lo = self.token.span;
// Check if a colon (or an erroneous `=`) exists one ahead; if so, we're parsing a field name.
let (fieldname, expr, is_shorthand) = if self.look_ahead(1, |t| {
t == &token::Colon || t == &token::Eq
}) {
let fieldname = self.parse_field_name()?;
// Check for an equals token. This means the source incorrectly attempts to
// initialize a field with an eq rather than a colon.
if self.token == token::Eq {
self.diagnostic()
.struct_span_err(self.token.span, "expected `:`, found `=`")
.span_suggestion(
fieldname.span.shrink_to_hi().to(self.token.span),
"replace equals symbol with a colon",
":".to_string(),
Applicability::MachineApplicable,
)
.emit();
}
self.bump(); // `:`
(fieldname, self.parse_expr()?, false)
} else {
let fieldname = self.parse_ident_common(false)?;
// Mimic `x: x` for the `x` field shorthand.
let path = ast::Path::from_ident(fieldname);
let expr = self.mk_expr(fieldname.span, ExprKind::Path(None, path), ThinVec::new());
(fieldname, expr, true)
};
Ok(ast::Field {
ident: fieldname,
span: lo.to(expr.span),
expr,
is_shorthand,
attrs: attrs.into(),
})
}
crate fn mk_expr(&self, span: Span, node: ExprKind, attrs: ThinVec<Attribute>) -> P<Expr> {
P(Expr { node, span, attrs, id: ast::DUMMY_NODE_ID })
}
fn mk_unary(&self, unop: ast::UnOp, expr: P<Expr>) -> ast::ExprKind {
ExprKind::Unary(unop, expr)
}
fn mk_binary(&self, binop: ast::BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ast::ExprKind {
ExprKind::Binary(binop, lhs, rhs)
}
fn mk_call(&self, f: P<Expr>, args: Vec<P<Expr>>) -> ast::ExprKind {
ExprKind::Call(f, args)
}
fn mk_index(&self, expr: P<Expr>, idx: P<Expr>) -> ast::ExprKind {
ExprKind::Index(expr, idx)
}
fn mk_range(&self,
start: Option<P<Expr>>,
end: Option<P<Expr>>,
limits: RangeLimits)
-> PResult<'a, ast::ExprKind> {
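// A closed range with no end, e.g. `1..=` or `..=`, is meaningless, so it is rejected here.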
if end.is_none() && limits == RangeLimits::Closed {
Err(self.span_fatal_err(self.token.span, Error::InclusiveRangeWithNoEnd))
} else {
Ok(ExprKind::Range(start, end, limits))
}
}
fn mk_assign_op(&self, binop: ast::BinOp,
lhs: P<Expr>, rhs: P<Expr>) -> ast::ExprKind {
ExprKind::AssignOp(binop, lhs, rhs)
}
fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, TokenStream)> {
let delim = match self.token.kind {
token::OpenDelim(delim) => delim,
_ => {
let msg = "expected open delimiter";
let mut err = self.fatal(msg);
err.span_label(self.token.span, msg);
return Err(err)
}
};
let tts = match self.parse_token_tree() {
TokenTree::Delimited(_, _, tts) => tts,
_ => unreachable!(),
};
let delim = match delim {
token::Paren => MacDelimiter::Parenthesis,
token::Bracket => MacDelimiter::Bracket,
token::Brace => MacDelimiter::Brace,
token::NoDelim => self.bug("unexpected no delimiter"),
};
Ok((delim, tts.into()))
}
/// At the bottom (top?) of the precedence hierarchy,
/// parses things like parenthesized exprs, macros, `return`, etc.
///
/// N.B., this does not parse outer attributes, and is private because it only works
/// correctly if called from `parse_dot_or_call_expr()`.
fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
maybe_recover_from_interpolated_ty_qpath!(self, true);
maybe_whole_expr!(self);
// Outer attributes are already parsed and will be
// added to the return value after the fact.
//
// Therefore, prevent the sub-parser from parsing
// attributes by giving it an empty "already parsed" list.
let mut attrs = ThinVec::new();
let lo = self.token.span;
let mut hi = self.token.span;
let ex: ExprKind;
macro_rules! parse_lit {
() => {
match self.parse_lit() {
Ok(literal) => {
hi = self.prev_span;
ex = ExprKind::Lit(literal);
}
Err(mut err) => {
self.cancel(&mut err);
return Err(self.expected_expression_found());
}
}
}
}
// Note: when adding new syntax here, don't forget to adjust TokenKind::can_begin_expr().
match self.token.kind {
// This match arm is a special-case of the `_` match arm below and
// could be removed without changing functionality, but it's faster
// to have it here, especially for programs with large constants.
token::Literal(_) => {
parse_lit!()
}
token::OpenDelim(token::Paren) => {
self.bump();
attrs.extend(self.parse_inner_attributes()?);
// (e) is parenthesized e
// (e,) is a tuple with only one field, e
let mut es = vec![];
let mut trailing_comma = false;
let mut recovered = false;
while self.token != token::CloseDelim(token::Paren) {
es.push(match self.parse_expr() {
Ok(es) => es,
Err(mut err) => {
// recover from parse error in tuple list
match self.token.kind {
token::Ident(name, false)
if name == kw::Underscore && self.look_ahead(1, |t| {
t == &token::Comma
}) => {
// Special-case handling of `Foo<(_, _, _)>`
err.emit();
let sp = self.token.span;
self.bump();
self.mk_expr(sp, ExprKind::Err, ThinVec::new())
}
_ => return Ok(
self.recover_seq_parse_error(token::Paren, lo, Err(err)),
),
}
}
});
recovered = self.expect_one_of(
&[],
&[token::Comma, token::CloseDelim(token::Paren)],
)?;
if self.eat(&token::Comma) {
trailing_comma = true;
} else {
trailing_comma = false;
break;
}
}
if !recovered {
self.bump();
}
hi = self.prev_span;
ex = if es.len() == 1 && !trailing_comma {
ExprKind::Paren(es.into_iter().nth(0).unwrap())
} else {
ExprKind::Tup(es)
};
}
token::OpenDelim(token::Brace) => {
return self.parse_block_expr(None, lo, BlockCheckMode::Default, attrs);
}
token::BinOp(token::Or) | token::OrOr => {
return self.parse_lambda_expr(attrs);
}
token::OpenDelim(token::Bracket) => {
self.bump();
attrs.extend(self.parse_inner_attributes()?);
if self.eat(&token::CloseDelim(token::Bracket)) {
// Empty vector.
ex = ExprKind::Array(Vec::new());
} else {
// Nonempty vector.
let first_expr = self.parse_expr()?;
if self.eat(&token::Semi) {
// Repeating array syntax: [ 0; 512 ]
let count = AnonConst {
id: ast::DUMMY_NODE_ID,
value: self.parse_expr()?,
};
self.expect(&token::CloseDelim(token::Bracket))?;
ex = ExprKind::Repeat(first_expr, count);
} else if self.eat(&token::Comma) {
// Vector with two or more elements.
let remaining_exprs = self.parse_seq_to_end(
&token::CloseDelim(token::Bracket),
SeqSep::trailing_allowed(token::Comma),
|p| Ok(p.parse_expr()?)
)?;
let mut exprs = vec![first_expr];
exprs.extend(remaining_exprs);
ex = ExprKind::Array(exprs);
} else {
// Vector with one element.
self.expect(&token::CloseDelim(token::Bracket))?;
ex = ExprKind::Array(vec![first_expr]);
}
}
hi = self.prev_span;
}
_ => {
if self.eat_lt() {
let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
hi = path.span;
return Ok(self.mk_expr(lo.to(hi), ExprKind::Path(Some(qself), path), attrs));
}
if self.check_keyword(kw::Move) || self.check_keyword(kw::Static) {
return self.parse_lambda_expr(attrs);
}
if self.eat_keyword(kw::If) {
return self.parse_if_expr(attrs);
}
if self.eat_keyword(kw::For) {
let lo = self.prev_span;
return self.parse_for_expr(None, lo, attrs);
}
if self.eat_keyword(kw::While) {
let lo = self.prev_span;
return self.parse_while_expr(None, lo, attrs);
}
if let Some(label) = self.eat_label() {
let lo = label.ident.span;
self.expect(&token::Colon)?;
if self.eat_keyword(kw::While) {
return self.parse_while_expr(Some(label), lo, attrs)
}
if self.eat_keyword(kw::For) {
return self.parse_for_expr(Some(label), lo, attrs)
}
if self.eat_keyword(kw::Loop) {
return self.parse_loop_expr(Some(label), lo, attrs)
}
if self.token == token::OpenDelim(token::Brace) {
return self.parse_block_expr(Some(label),
lo,
BlockCheckMode::Default,
attrs);
}
let msg = "expected `while`, `for`, `loop` or `{` after a label";
let mut err = self.fatal(msg);
err.span_label(self.token.span, msg);
return Err(err);
}
if self.eat_keyword(kw::Loop) {
let lo = self.prev_span;
return self.parse_loop_expr(None, lo, attrs);
}
if self.eat_keyword(kw::Continue) {
let label = self.eat_label();
let ex = ExprKind::Continue(label);
let hi = self.prev_span;
return Ok(self.mk_expr(lo.to(hi), ex, attrs));
}
if self.eat_keyword(kw::Match) {
let match_sp = self.prev_span;
return self.parse_match_expr(attrs).map_err(|mut err| {
err.span_label(match_sp, "while parsing this match expression");
err
});
}
if self.eat_keyword(kw::Unsafe) {
return self.parse_block_expr(
None,
lo,
BlockCheckMode::Unsafe(ast::UserProvided),
attrs);
}
if self.is_do_catch_block() {
let mut db = self.fatal("found removed `do catch` syntax");
db.help("Following RFC #2388, the new non-placeholder syntax is `try`");
return Err(db);
}
if self.is_try_block() {
let lo = self.token.span;
assert!(self.eat_keyword(kw::Try));
return self.parse_try_block(lo, attrs);
}
// Span::rust_2018() is somewhat expensive; don't get it repeatedly.
let is_span_rust_2018 = self.token.span.rust_2018();
if is_span_rust_2018 && self.check_keyword(kw::Async) {
return if self.is_async_block() { // check for `async {` and `async move {`
self.parse_async_block(attrs)
} else {
self.parse_lambda_expr(attrs)
};
}
if self.eat_keyword(kw::Return) {
if self.token.can_begin_expr() {
let e = self.parse_expr()?;
hi = e.span;
ex = ExprKind::Ret(Some(e));
} else {
ex = ExprKind::Ret(None);
}
} else if self.eat_keyword(kw::Break) {
let label = self.eat_label();
let e = if self.token.can_begin_expr()
&& !(self.token == token::OpenDelim(token::Brace)
&& self.restrictions.contains(
Restrictions::NO_STRUCT_LITERAL)) {
Some(self.parse_expr()?)
} else {
None
};
ex = ExprKind::Break(label, e);
hi = self.prev_span;
} else if self.eat_keyword(kw::Yield) {
if self.token.can_begin_expr() {
let e = self.parse_expr()?;
hi = e.span;
ex = ExprKind::Yield(Some(e));
} else {
ex = ExprKind::Yield(None);
}
} else if self.eat_keyword(kw::Let) {
return self.parse_let_expr(attrs);
} else if is_span_rust_2018 && self.eat_keyword(kw::Await) {
let (await_hi, e_kind) = self.parse_incorrect_await_syntax(lo, self.prev_span)?;
hi = await_hi;
ex = e_kind;
} else if self.token.is_path_start() {
let path = self.parse_path(PathStyle::Expr)?;
// `!`, as an operator, is prefix, so we know this isn't that
if self.eat(&token::Not) {
// MACRO INVOCATION expression
let (delim, tts) = self.expect_delimited_token_tree()?;
hi = self.prev_span;
ex = ExprKind::Mac(respan(lo.to(hi), Mac_ {
path,
tts,
delim,
prior_type_ascription: self.last_type_ascription,
}));
} else if self.check(&token::OpenDelim(token::Brace)) {
if let Some(expr) = self.maybe_parse_struct_expr(lo, &path, &attrs) {
return expr;
} else {
hi = path.span;
ex = ExprKind::Path(None, path);
}
} else {
hi = path.span;
ex = ExprKind::Path(None, path);
}
} else {
if !self.unclosed_delims.is_empty() && self.check(&token::Semi) {
// Don't complain about bare semicolons after unclosed braces
// recovery in order to keep the error count down. Fixing the
// delimiters will possibly also fix the bare semicolon found in
// expression context. For example, silence the following error:
// ```
// error: expected expression, found `;`
// --> file.rs:2:13
// |
// 2 | foo(bar(;
// | ^ expected expression
// ```
self.bump();
return Ok(self.mk_expr(self.token.span, ExprKind::Err, ThinVec::new()));
}
parse_lit!()
}
}
}
let expr = self.mk_expr(lo.to(hi), ex, attrs);
self.maybe_recover_from_bad_qpath(expr, true)
}
fn maybe_parse_struct_expr(
&mut self,
lo: Span,
path: &ast::Path,
attrs: &ThinVec<Attribute>,
) -> Option<PResult<'a, P<Expr>>> {
let struct_allowed = !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
let certainly_not_a_block = || self.look_ahead(1, |t| t.is_ident()) && (
// `{ ident, ` cannot start a block
self.look_ahead(2, |t| t == &token::Comma) ||
self.look_ahead(2, |t| t == &token::Colon) && (
// `{ ident: token, ` cannot start a block
self.look_ahead(4, |t| t == &token::Comma) ||
// `{ ident: ` cannot start a block unless it's a type ascription `ident: Type`
self.look_ahead(3, |t| !t.can_begin_type())
)
);
if struct_allowed || certainly_not_a_block() {
// This looks like a struct literal; parse it even when struct literals are not
// allowed here, so we can report a targeted error below.
let expr = self.parse_struct_expr(lo, path.clone(), attrs.clone());
if let (Ok(expr), false) = (&expr, struct_allowed) {
self.struct_span_err(
expr.span,
"struct literals are not allowed here",
)
.multipart_suggestion(
"surround the struct literal with parentheses",
vec![
(lo.shrink_to_lo(), "(".to_string()),
(expr.span.shrink_to_hi(), ")".to_string()),
],
Applicability::MachineApplicable,
)
.emit();
}
return Some(expr);
}
None
}
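/// Parses the body of a struct literal, e.g. (illustrative) the `{ x: 1, ..base }` part of
/// `Foo { x: 1, ..base }` (the path has already been parsed; the `{` is eaten here).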
fn parse_struct_expr(&mut self, lo: Span, pth: ast::Path, mut attrs: ThinVec<Attribute>)
-> PResult<'a, P<Expr>> {
let struct_sp = lo.to(self.prev_span);
self.bump();
let mut fields = Vec::new();
let mut base = None;
attrs.extend(self.parse_inner_attributes()?);
while self.token != token::CloseDelim(token::Brace) {
if self.eat(&token::DotDot) {
let exp_span = self.prev_span;
match self.parse_expr() {
Ok(e) => {
base = Some(e);
}
Err(mut e) => {
e.emit();
self.recover_stmt();
}
}
if self.token == token::Comma {
self.struct_span_err(
exp_span.to(self.prev_span),
"cannot use a comma after the base struct",
)
.span_suggestion_short(
self.token.span,
"remove this comma",
String::new(),
Applicability::MachineApplicable
)
.note("the base struct must always be the last field")
.emit();
self.recover_stmt();
}
break;
}
let mut recovery_field = None;
if let token::Ident(name, _) = self.token.kind {
if !self.token.is_reserved_ident() && self.look_ahead(1, |t| *t == token::Colon) {
// Use in case of error after field-looking code: `S { foo: () with a }`
recovery_field = Some(ast::Field {
ident: Ident::new(name, self.token.span),
span: self.token.span,
expr: self.mk_expr(self.token.span, ExprKind::Err, ThinVec::new()),
is_shorthand: false,
attrs: ThinVec::new(),
});
}
}
let mut parsed_field = None;
match self.parse_field() {
Ok(f) => parsed_field = Some(f),
Err(mut e) => {
e.span_label(struct_sp, "while parsing this struct");
e.emit();
// If the next token is a comma, then try to parse
// what comes next as additional fields, rather than
// bailing out until next `}`.
if self.token != token::Comma {
self.recover_stmt_(SemiColonMode::Comma, BlockMode::Ignore);
if self.token != token::Comma {
break;
}
}
}
}
match self.expect_one_of(&[token::Comma],
&[token::CloseDelim(token::Brace)]) {
Ok(_) => if let Some(f) = parsed_field.or(recovery_field) {
// only include the field if there's no parse error for the field name
fields.push(f);
}
Err(mut e) => {
if let Some(f) = recovery_field {
fields.push(f);
}
e.span_label(struct_sp, "while parsing this struct");
e.emit();
self.recover_stmt_(SemiColonMode::Comma, BlockMode::Ignore);
self.eat(&token::Comma);
}
}
}
let span = lo.to(self.token.span);
self.expect(&token::CloseDelim(token::Brace))?;
return Ok(self.mk_expr(span, ExprKind::Struct(pth, fields, base), attrs));
}
fn parse_or_use_outer_attributes(&mut self,
already_parsed_attrs: Option<ThinVec<Attribute>>)
-> PResult<'a, ThinVec<Attribute>> {
if let Some(attrs) = already_parsed_attrs {
Ok(attrs)
} else {
self.parse_outer_attributes().map(|a| a.into())
}
}
/// Parses a block or unsafe block.
crate fn parse_block_expr(
&mut self,
opt_label: Option<Label>,
lo: Span,
blk_mode: BlockCheckMode,
outer_attrs: ThinVec<Attribute>,
) -> PResult<'a, P<Expr>> {
self.expect(&token::OpenDelim(token::Brace))?;
let mut attrs = outer_attrs;
attrs.extend(self.parse_inner_attributes()?);
let blk = self.parse_block_tail(lo, blk_mode)?;
return Ok(self.mk_expr(blk.span, ExprKind::Block(blk, opt_label), attrs));
}
/// Parses `a.b` or `a(13)` or `a[4]` or just `a`.
fn parse_dot_or_call_expr(
&mut self,
already_parsed_attrs: Option<ThinVec<Attribute>>,
) -> PResult<'a, P<Expr>> {
let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?;
let b = self.parse_bottom_expr();
let (span, b) = self.interpolated_or_expr_span(b)?;
self.parse_dot_or_call_expr_with(b, span, attrs)
}
fn parse_dot_or_call_expr_with(
&mut self,
e0: P<Expr>,
lo: Span,
mut attrs: ThinVec<Attribute>,
) -> PResult<'a, P<Expr>> {
// Stitch the list of outer attributes onto the return value.
// A little bit ugly, but the best way given the current code
// structure
self.parse_dot_or_call_expr_with_(e0, lo).map(|expr|
expr.map(|mut expr| {
attrs.extend::<Vec<_>>(expr.attrs.into());
expr.attrs = attrs;
match expr.node {
ExprKind::If(..) if !expr.attrs.is_empty() => {
// Just point to the first attribute in there...
let span = expr.attrs[0].span;
self.span_err(span, "attributes are not yet allowed on `if` expressions");
}
_ => {}
}
expr
})
)
}
fn mk_await_expr(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
let span = lo.to(self.prev_span);
let await_expr = self.mk_expr(span, ExprKind::Await(self_arg), ThinVec::new());
self.recover_from_await_method_call();
Ok(await_expr)
}
/// Assuming we have just parsed `.`, continue parsing into an expression.
fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
if self.token.span.rust_2018() && self.eat_keyword(kw::Await) {
return self.mk_await_expr(self_arg, lo);
}
let segment = self.parse_path_segment(PathStyle::Expr)?;
self.check_trailing_angle_brackets(&segment, token::OpenDelim(token::Paren));
Ok(match self.token.kind {
token::OpenDelim(token::Paren) => {
// Method call `expr.f()`
let mut args = self.parse_paren_expr_seq()?;
args.insert(0, self_arg);
let span = lo.to(self.prev_span);
self.mk_expr(span, ExprKind::MethodCall(segment, args), ThinVec::new())
}
_ => {
// Field access `expr.f`
if let Some(args) = segment.args {
self.span_err(args.span(),
"field expressions may not have generic arguments");
}
let span = lo.to(self.prev_span);
self.mk_expr(span, ExprKind::Field(self_arg, segment.ident), ThinVec::new())
}
})
}
fn parse_dot_or_call_expr_with_(&mut self, e0: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
let mut e = e0;
let mut hi;
loop {
// expr?
while self.eat(&token::Question) {
let hi = self.prev_span;
e = self.mk_expr(lo.to(hi), ExprKind::Try(e), ThinVec::new());
}
// expr.f
if self.eat(&token::Dot) {
match self.token.kind {
token::Ident(..) => {
e = self.parse_dot_suffix(e, lo)?;
}
token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => {
let span = self.token.span;
self.bump();
let field = ExprKind::Field(e, Ident::new(symbol, span));
e = self.mk_expr(lo.to(span), field, ThinVec::new());
self.expect_no_suffix(span, "a tuple index", suffix);
}
token::Literal(token::Lit { kind: token::Float, symbol, .. }) => {
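// A float literal here usually comes from nested tuple indexing such as `x.1.2`,
// which the lexer tokenized as the float `1.2`; emit an error and suggest `(x.1).2` below.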
self.bump();
let fstr = symbol.as_str();
let msg = format!("unexpected token: `{}`", symbol);
let mut err = self.diagnostic().struct_span_err(self.prev_span, &msg);
err.span_label(self.prev_span, "unexpected token");
if fstr.chars().all(|x| "0123456789.".contains(x)) {
let float = match fstr.parse::<f64>().ok() {
Some(f) => f,
None => continue,
};
let sugg = pprust::to_string(|s| {
s.popen();
s.print_expr(&e);
s.s.word(".");
s.print_usize(float.trunc() as usize);
s.pclose();
s.s.word(".");
s.s.word(fstr.splitn(2, ".").last().unwrap().to_string())
});
err.span_suggestion(
lo.to(self.prev_span),
"try parenthesizing the first index",
sugg,
Applicability::MachineApplicable
);
}
return Err(err);
}
_ => {
// FIXME Could factor this out into non_fatal_unexpected or something.
let actual = self.this_token_to_string();
self.span_err(self.token.span, &format!("unexpected token: `{}`", actual));
}
}
continue;
}
if self.expr_is_complete(&e) { break; }
match self.token.kind {
// expr(...)
token::OpenDelim(token::Paren) => {
let seq = self.parse_paren_expr_seq().map(|es| {
let nd = self.mk_call(e, es);
let hi = self.prev_span;
self.mk_expr(lo.to(hi), nd, ThinVec::new())
});
e = self.recover_seq_parse_error(token::Paren, lo, seq);
}
// expr[...]
// Could be either an index expression or a slicing expression.
token::OpenDelim(token::Bracket) => {
self.bump();
let ix = self.parse_expr()?;
hi = self.token.span;
self.expect(&token::CloseDelim(token::Bracket))?;
let index = self.mk_index(e, ix);
e = self.mk_expr(lo.to(hi), index, ThinVec::new())
}
_ => return Ok(e)
}
}
return Ok(e);
}
fn parse_paren_expr_seq(&mut self) -> PResult<'a, Vec<P<Expr>>> {
self.parse_paren_comma_seq(|p| {
match p.parse_expr() {
Ok(expr) => Ok(expr),
Err(mut err) => match p.token.kind {
token::Ident(name, false)
if name == kw::Underscore && p.look_ahead(1, |t| {
t == &token::Comma
}) => {
// Special-case handling of `foo(_, _, _)`
err.emit();
let sp = p.token.span;
p.bump();
Ok(p.mk_expr(sp, ExprKind::Err, ThinVec::new()))
}
_ => Err(err),
},
}
}).map(|(r, _)| r)
}
crate fn process_potential_macro_variable(&mut self) {
self.token = match self.token.kind {
token::Dollar if self.token.span.ctxt() != SyntaxContext::empty() &&
self.look_ahead(1, |t| t.is_ident()) => {
self.bump();
let name = match self.token.kind {
token::Ident(name, _) => name,
_ => unreachable!()
};
let span = self.prev_span.to(self.token.span);
self.diagnostic()
.struct_span_fatal(span, &format!("unknown macro variable `{}`", name))
.span_label(span, "unknown macro variable")
.emit();
self.bump();
return
}
token::Interpolated(ref nt) => {
self.meta_var_span = Some(self.token.span);
// Interpolated identifier and lifetime tokens are replaced with usual identifier
// and lifetime tokens, so the former are never encountered during normal parsing.
match **nt {
token::NtIdent(ident, is_raw) =>
Token::new(token::Ident(ident.name, is_raw), ident.span),
token::NtLifetime(ident) =>
Token::new(token::Lifetime(ident.name), ident.span),
_ => return,
}
}
_ => return,
};
}
/// Parses a single token tree from the input.
crate fn parse_token_tree(&mut self) -> TokenTree {
match self.token.kind {
token::OpenDelim(..) => {
let frame = mem::replace(&mut self.token_cursor.frame,
self.token_cursor.stack.pop().unwrap());
self.token.span = frame.span.entire();
self.bump();
TokenTree::Delimited(
frame.span,
frame.delim,
frame.tree_cursor.stream.into(),
)
},
token::CloseDelim(_) | token::Eof => unreachable!(),
_ => {
let token = self.token.take();
self.bump();
TokenTree::Token(token)
}
}
}
/// Parses a stream of tokens into a list of `TokenTree`s, up to EOF.
pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> {
let mut tts = Vec::new();
while self.token != token::Eof {
tts.push(self.parse_token_tree());
}
Ok(tts)
}
pub fn parse_tokens(&mut self) -> TokenStream {
let mut result = Vec::new();
loop {
match self.token.kind {
token::Eof | token::CloseDelim(..) => break,
_ => result.push(self.parse_token_tree().into()),
}
}
TokenStream::new(result)
}
/// Parses a prefix-unary-operator expression.
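/// # Examples (illustrative)
/// `!x`, `-x`, `*ptr`, `&mut val`, `box expr`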
fn parse_prefix_expr(&mut self,
already_parsed_attrs: Option<ThinVec<Attribute>>)
-> PResult<'a, P<Expr>> {
let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?;
let lo = self.token.span;
// Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr()
let (hi, ex) = match self.token.kind {
token::Not => {
self.bump();
let e = self.parse_prefix_expr(None);
let (span, e) = self.interpolated_or_expr_span(e)?;
(lo.to(span), self.mk_unary(UnOp::Not, e))
}
// Suggest `!` for bitwise negation when encountering a `~`
token::Tilde => {
self.bump();
let e = self.parse_prefix_expr(None);
let (span, e) = self.interpolated_or_expr_span(e)?;
let span_of_tilde = lo;
self.struct_span_err(span_of_tilde, "`~` cannot be used as a unary operator")
.span_suggestion_short(
span_of_tilde,
"use `!` to perform bitwise negation",
"!".to_owned(),
Applicability::MachineApplicable
)
.emit();
(lo.to(span), self.mk_unary(UnOp::Not, e))
}
token::BinOp(token::Minus) => {
self.bump();
let e = self.parse_prefix_expr(None);
let (span, e) = self.interpolated_or_expr_span(e)?;
(lo.to(span), self.mk_unary(UnOp::Neg, e))
}
token::BinOp(token::Star) => {
self.bump();
let e = self.parse_prefix_expr(None);
let (span, e) = self.interpolated_or_expr_span(e)?;
(lo.to(span), self.mk_unary(UnOp::Deref, e))
}
token::BinOp(token::And) | token::AndAnd => {
self.expect_and()?;
let m = self.parse_mutability();
let e = self.parse_prefix_expr(None);
let (span, e) = self.interpolated_or_expr_span(e)?;
(lo.to(span), ExprKind::AddrOf(m, e))
}
token::Ident(..) if self.token.is_keyword(kw::Box) => {
self.bump();
let e = self.parse_prefix_expr(None);
let (span, e) = self.interpolated_or_expr_span(e)?;
(lo.to(span), ExprKind::Box(e))
}
token::Ident(..) if self.token.is_ident_named(sym::not) => {
// `not` is just an ordinary identifier in Rust-the-language,
// but as `rustc`-the-compiler, we can issue clever diagnostics
// for confused users who really want to say `!`
let token_cannot_continue_expr = |t: &Token| match t.kind {
// These tokens can start an expression after `!`, but
// can't continue an expression after an ident
token::Ident(name, is_raw) => token::ident_can_begin_expr(name, t.span, is_raw),
token::Literal(..) | token::Pound => true,
_ => t.is_whole_expr(),
};
let cannot_continue_expr = self.look_ahead(1, token_cannot_continue_expr);
if cannot_continue_expr {
self.bump();
// Emit the error ...
self.struct_span_err(
self.token.span,
&format!("unexpected {} after identifier",self.this_token_descr())
)
.span_suggestion_short(
// Span the `not` plus trailing whitespace to avoid
// trailing whitespace after the `!` in our suggestion
self.sess.source_map()
.span_until_non_whitespace(lo.to(self.token.span)),
"use `!` to perform logical negation",
"!".to_owned(),
Applicability::MachineApplicable
)
.emit();
// —and recover! (just as if we were in the block
// for the `token::Not` arm)
let e = self.parse_prefix_expr(None);
let (span, e) = self.interpolated_or_expr_span(e)?;
(lo.to(span), self.mk_unary(UnOp::Not, e))
} else {
return self.parse_dot_or_call_expr(Some(attrs));
}
}
_ => { return self.parse_dot_or_call_expr(Some(attrs)); }
};
return Ok(self.mk_expr(lo.to(hi), ex, attrs));
}
/// Parses an associative expression.
///
/// This parses an expression accounting for associativity and precedence of the operators in
/// the expression.
#[inline]
fn parse_assoc_expr(
&mut self,
already_parsed_attrs: Option<ThinVec<Attribute>>,
) -> PResult<'a, P<Expr>> {
self.parse_assoc_expr_with(0, already_parsed_attrs.into())
}
/// Parses an associative expression with operators of at least `min_prec` precedence.
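/// For example (illustrative): `a + b * c` is parsed as `a + (b * c)` because `*` binds
/// tighter than `+`, while `a - b - c` is parsed as `(a - b) - c` because `-` is
/// left-associative.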
fn parse_assoc_expr_with(
&mut self,
min_prec: usize,
lhs: LhsExpr,
) -> PResult<'a, P<Expr>> {
let mut lhs = if let LhsExpr::AlreadyParsed(expr) = lhs {
expr
} else {
let attrs = match lhs {
LhsExpr::AttributesParsed(attrs) => Some(attrs),
_ => None,
};
if [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token.kind) {
return self.parse_prefix_range_expr(attrs);
} else {
self.parse_prefix_expr(attrs)?
}
};
let last_type_ascription_set = self.last_type_ascription.is_some();
match (self.expr_is_complete(&lhs), AssocOp::from_token(&self.token)) {
(true, None) => {
self.last_type_ascription = None;
// Semi-statement forms are odd. See https://github.com/rust-lang/rust/issues/29071
return Ok(lhs);
}
(false, _) => {} // continue parsing the expression
// An exhaustive check is done in the following block, but these are checked first
// because they *are* ambiguous but also reasonable looking incorrect syntax, so we
// want to keep their span info to improve diagnostics in these cases in a later stage.
(true, Some(AssocOp::Multiply)) | // `{ 42 } *foo = bar;` or `{ 42 } * 3`
(true, Some(AssocOp::Subtract)) | // `{ 42 } -5`
(true, Some(AssocOp::LAnd)) | // `{ 42 } &&x` (#61475)
(true, Some(AssocOp::Add)) // `{ 42 } + 42`
// If the next token is a keyword, then the tokens above *are* unambiguously incorrect:
// `if x { a } else { b } && if y { c } else { d }`
if !self.look_ahead(1, |t| t.is_reserved_ident()) => {
self.last_type_ascription = None;
// These cases are ambiguous and can't be identified in the parser alone
let sp = self.sess.source_map().start_point(self.token.span);
self.sess.ambiguous_block_expr_parse.borrow_mut().insert(sp, lhs.span);
return Ok(lhs);
}
(true, Some(ref op)) if !op.can_continue_expr_unambiguously() => {
self.last_type_ascription = None;
return Ok(lhs);
}
(true, Some(_)) => {
// We've found an expression that would be parsed as a statement, but the next
// token implies this should be parsed as an expression.
// For example: `if let Some(x) = x { x } else { 0 } / 2`
let mut err = self.struct_span_err(self.token.span, &format!(
"expected expression, found `{}`",
pprust::token_to_string(&self.token),
));
err.span_label(self.token.span, "expected expression");
self.sess.expr_parentheses_needed(
&mut err,
lhs.span,
Some(pprust::expr_to_string(&lhs)),
);
err.emit();
}
}
self.expected_tokens.push(TokenType::Operator);
while let Some(op) = AssocOp::from_token(&self.token) {
// Adjust the span for interpolated LHS to point to the `$lhs` token and not to what
// it refers to. Interpolated identifiers are unwrapped early and never show up here
// as `PrevTokenKind::Interpolated` so if LHS is a single identifier we always process
// it as "interpolated", it doesn't change the answer for non-interpolated idents.
let lhs_span = match (self.prev_token_kind, &lhs.node) {
(PrevTokenKind::Interpolated, _) => self.prev_span,
(PrevTokenKind::Ident, &ExprKind::Path(None, ref path))
if path.segments.len() == 1 => self.prev_span,
_ => lhs.span,
};
let cur_op_span = self.token.span;
let restrictions = if op.is_assign_like() {
self.restrictions & Restrictions::NO_STRUCT_LITERAL
} else {
self.restrictions
};
let prec = op.precedence();
if prec < min_prec {
break;
}
// Check for deprecated `...` syntax
if self.token == token::DotDotDot && op == AssocOp::DotDotEq {
self.err_dotdotdot_syntax(self.token.span);
}
self.bump();
if op.is_comparison() {
self.check_no_chained_comparison(&lhs, &op);
}
// Special cases:
if op == AssocOp::As {
lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Cast)?;
continue
} else if op == AssocOp::Colon {
let maybe_path = self.could_ascription_be_path(&lhs.node);
self.last_type_ascription = Some((self.prev_span, maybe_path));
lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Type)?;
continue
} else if op == AssocOp::DotDot || op == AssocOp::DotDotEq {
// If we didn't have to handle `x..`/`x..=`, it would be pretty easy to
// generalise it to the Fixity::None code.
//
// We have 2 alternatives here: `x..y`/`x..=y` and `x..`/`x..=`. The other
// two variants are handled with `parse_prefix_range_expr` call above.
let rhs = if self.is_at_start_of_range_notation_rhs() {
Some(self.parse_assoc_expr_with(prec + 1, LhsExpr::NotYetParsed)?)
} else {
None
};
let (lhs_span, rhs_span) = (lhs.span, if let Some(ref x) = rhs {
x.span
} else {
cur_op_span
});
let limits = if op == AssocOp::DotDot {
RangeLimits::HalfOpen
} else {
RangeLimits::Closed
};
let r = self.mk_range(Some(lhs), rhs, limits)?;
lhs = self.mk_expr(lhs_span.to(rhs_span), r, ThinVec::new());
break
}
let fixity = op.fixity();
let prec_adjustment = match fixity {
Fixity::Right => 0,
Fixity::Left => 1,
// We currently have no non-associative operators that are not handled above by
// the special cases. The code is here only for future convenience.
Fixity::None => 1,
};
let rhs = self.with_res(
restrictions - Restrictions::STMT_EXPR,
|this| this.parse_assoc_expr_with(prec + prec_adjustment, LhsExpr::NotYetParsed)
)?;
// Make sure that the span of the parent node is larger than the span of lhs and rhs,
// including the attributes.
let lhs_span = lhs
.attrs
.iter()
.filter(|a| a.style == AttrStyle::Outer)
.next()
.map_or(lhs_span, |a| a.span);
let span = lhs_span.to(rhs.span);
lhs = match op {
AssocOp::Add | AssocOp::Subtract | AssocOp::Multiply | AssocOp::Divide |
AssocOp::Modulus | AssocOp::LAnd | AssocOp::LOr | AssocOp::BitXor |
AssocOp::BitAnd | AssocOp::BitOr | AssocOp::ShiftLeft | AssocOp::ShiftRight |
AssocOp::Equal | AssocOp::Less | AssocOp::LessEqual | AssocOp::NotEqual |
AssocOp::Greater | AssocOp::GreaterEqual => {
let ast_op = op.to_ast_binop().unwrap();
let binary = self.mk_binary(source_map::respan(cur_op_span, ast_op), lhs, rhs);
self.mk_expr(span, binary, ThinVec::new())
}
AssocOp::Assign => self.mk_expr(span, ExprKind::Assign(lhs, rhs), ThinVec::new()),
AssocOp::AssignOp(k) => {
let aop = match k {
token::Plus => BinOpKind::Add,
token::Minus => BinOpKind::Sub,
token::Star => BinOpKind::Mul,
token::Slash => BinOpKind::Div,
token::Percent => BinOpKind::Rem,
token::Caret => BinOpKind::BitXor,
token::And => BinOpKind::BitAnd,
token::Or => BinOpKind::BitOr,
token::Shl => BinOpKind::Shl,
token::Shr => BinOpKind::Shr,
};
let aopexpr = self.mk_assign_op(source_map::respan(cur_op_span, aop), lhs, rhs);
self.mk_expr(span, aopexpr, ThinVec::new())
}
AssocOp::As | AssocOp::Colon | AssocOp::DotDot | AssocOp::DotDotEq => {
self.bug("AssocOp should have been handled by special case")
}
};
if let Fixity::None = fixity { break }
}
if last_type_ascription_set {
self.last_type_ascription = None;
}
Ok(lhs)
}
fn parse_assoc_op_cast(&mut self, lhs: P<Expr>, lhs_span: Span,
expr_kind: fn(P<Expr>, P<Ty>) -> ExprKind)
-> PResult<'a, P<Expr>> {
let mk_expr = |this: &mut Self, rhs: P<Ty>| {
this.mk_expr(lhs_span.to(rhs.span), expr_kind(lhs, rhs), ThinVec::new())
};
// Save the state of the parser before parsing type normally, in case there is a
// LessThan comparison after this cast.
let parser_snapshot_before_type = self.clone();
match self.parse_ty_no_plus() {
Ok(rhs) => {
Ok(mk_expr(self, rhs))
}
Err(mut type_err) => {
// Rewind to before attempting to parse the type with generics, to recover
// from situations like `x as usize < y` in which we first tried to parse
// `usize < y` as a type with generic arguments.
let parser_snapshot_after_type = self.clone();
mem::replace(self, parser_snapshot_before_type);
match self.parse_path(PathStyle::Expr) {
Ok(path) => {
let (op_noun, op_verb) = match self.token.kind {
token::Lt => ("comparison", "comparing"),
token::BinOp(token::Shl) => ("shift", "shifting"),
_ => {
// We can end up here even without `<` being the next token, for
// example because `parse_ty_no_plus` returns `Err` on keywords,
// but `parse_path` returns `Ok` on them due to error recovery.
// Return original error and parser state.
mem::replace(self, parser_snapshot_after_type);
return Err(type_err);
}
};
// Successfully parsed the type path leaving a `<` yet to parse.
type_err.cancel();
// Report non-fatal diagnostics, keep `x as usize` as an expression
// in AST and continue parsing.
let msg = format!("`<` is interpreted as a start of generic \
arguments for `{}`, not a {}", path, op_noun);
let span_after_type = parser_snapshot_after_type.token.span;
let expr = mk_expr(self, P(Ty {
span: path.span,
node: TyKind::Path(None, path),
id: ast::DUMMY_NODE_ID
}));
let expr_str = self.span_to_snippet(expr.span)
.unwrap_or_else(|_| pprust::expr_to_string(&expr));
self.struct_span_err(self.token.span, &msg)
.span_label(
self.look_ahead(1, |t| t.span).to(span_after_type),
"interpreted as generic arguments"
)
.span_label(self.token.span, format!("not interpreted as {}", op_noun))
.span_suggestion(
expr.span,
&format!("try {} the cast value", op_verb),
format!("({})", expr_str),
Applicability::MachineApplicable
)
.emit();
Ok(expr)
}
Err(mut path_err) => {
// Couldn't parse as a path, return original error and parser state.
path_err.cancel();
mem::replace(self, parser_snapshot_after_type);
Err(type_err)
}
}
}
}
}
/// Parses prefix forms of range notation: `..expr`, `..`, `..=expr`.
fn parse_prefix_range_expr(&mut self,
already_parsed_attrs: Option<ThinVec<Attribute>>)
-> PResult<'a, P<Expr>> {
// Check for deprecated `...` syntax
if self.token == token::DotDotDot {
self.err_dotdotdot_syntax(self.token.span);
}
debug_assert!([token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token.kind),
"parse_prefix_range_expr: token {:?} is not DotDot/DotDotEq",
self.token);
let tok = self.token.clone();
let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?;
let lo = self.token.span;
let mut hi = self.token.span;
self.bump();
let opt_end = if self.is_at_start_of_range_notation_rhs() {
// RHS must be parsed with more associativity than the dots.
let next_prec = AssocOp::from_token(&tok).unwrap().precedence() + 1;
Some(self.parse_assoc_expr_with(next_prec,
LhsExpr::NotYetParsed)
.map(|x|{
hi = x.span;
x
})?)
} else {
None
};
let limits = if tok == token::DotDot {
RangeLimits::HalfOpen
} else {
RangeLimits::Closed
};
let r = self.mk_range(None, opt_end, limits)?;
Ok(self.mk_expr(lo.to(hi), r, attrs))
}
fn is_at_start_of_range_notation_rhs(&self) -> bool {
if self.token.can_begin_expr() {
// parse `for i in 1.. { }` as infinite loop, not as `for i in (1..{})`.
if self.token == token::OpenDelim(token::Brace) {
return !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
}
true
} else {
false
}
}
/// Parses an `if` expression (`if` token already eaten).
fn parse_if_expr(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
let lo = self.prev_span;
let cond = self.parse_cond_expr()?;
// Verify that the parsed `if` condition makes sense as a condition. If it is a block, then
// verify that the last statement is either an implicit return (no `;`) or an explicit
// return. This won't catch blocks with an explicit `return`, but that would be caught by
// the dead code lint.
if self.eat_keyword(kw::Else) || !cond.returns() {
let sp = self.sess.source_map().next_point(lo);
let mut err = self.diagnostic()
.struct_span_err(sp, "missing condition for `if` statement");
err.span_label(sp, "expected if condition here");
return Err(err)
}
let not_block = self.token != token::OpenDelim(token::Brace);
let thn = self.parse_block().map_err(|mut err| {
if not_block {
err.span_label(lo, "this `if` statement has a condition, but no block");
}
err
})?;
let mut els: Option<P<Expr>> = None;
let mut hi = thn.span;
if self.eat_keyword(kw::Else) {
let elexpr = self.parse_else_expr()?;
hi = elexpr.span;
els = Some(elexpr);
}
Ok(self.mk_expr(lo.to(hi), ExprKind::If(cond, thn, els), attrs))
}
/// Parses the condition of an `if` or `while` expression.
fn parse_cond_expr(&mut self) -> PResult<'a, P<Expr>> {
let cond = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
if let ExprKind::Let(..) = cond.node {
// Remove the last feature gating of a `let` expression since it's stable.
let last = self.sess.let_chains_spans.borrow_mut().pop();
debug_assert_eq!(cond.span, last.unwrap());
}
Ok(cond)
}
/// Parses a `let $pats = $expr` pseudo-expression.
/// The `let` token has already been eaten.
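/// For example (illustrative): the `let Some(x) = opt` inside `if let Some(x) = opt { .. }`.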
fn parse_let_expr(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
let lo = self.prev_span;
let pats = self.parse_pats()?;
self.expect(&token::Eq)?;
let expr = self.with_res(
Restrictions::NO_STRUCT_LITERAL,
|this| this.parse_assoc_expr_with(1 + prec_let_scrutinee_needs_par(), None.into())
)?;
let span = lo.to(expr.span);
self.sess.let_chains_spans.borrow_mut().push(span);
Ok(self.mk_expr(span, ExprKind::Let(pats, expr), attrs))
}
/// Parses `move |args| expr`.
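/// # Examples (illustrative)
/// `|x| x + 1`, `move || drop(v)`, and (on the 2018 edition) `async move || { .. }`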
fn parse_lambda_expr(&mut self,
attrs: ThinVec<Attribute>)
-> PResult<'a, P<Expr>>
{
let lo = self.token.span;
let movability = if self.eat_keyword(kw::Static) {
Movability::Static
} else {
Movability::Movable
};
let asyncness = if self.token.span.rust_2018() {
self.parse_asyncness()
} else {
IsAsync::NotAsync
};
if asyncness.is_async() {
// Feature gate `async ||` closures.
self.sess.async_closure_spans.borrow_mut().push(self.prev_span);
}
let capture_clause = self.parse_capture_clause();
let decl = self.parse_fn_block_decl()?;
let decl_hi = self.prev_span;
let body = match decl.output {
FunctionRetTy::Default(_) => {
let restrictions = self.restrictions - Restrictions::STMT_EXPR;
self.parse_expr_res(restrictions, None)?
},
_ => {
// If an explicit return type is given, require a
// block to appear (RFC 968).
let body_lo = self.token.span;
self.parse_block_expr(None, body_lo, BlockCheckMode::Default, ThinVec::new())?
}
};
Ok(self.mk_expr(
lo.to(body.span),
ExprKind::Closure(capture_clause, asyncness, movability, decl, body, lo.to(decl_hi)),
attrs))
}
/// `else` token already eaten
fn parse_else_expr(&mut self) -> PResult<'a, P<Expr>> {
if self.eat_keyword(kw::If) {
return self.parse_if_expr(ThinVec::new());
} else {
let blk = self.parse_block()?;
return Ok(self.mk_expr(blk.span, ExprKind::Block(blk, None), ThinVec::new()));
}
}
/// Parses a `for ... in` expression (`for` token already eaten).
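/// # Examples (illustrative)
/// `for x in 0..10 { .. }`, `for (key, value) in map { .. }`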
fn parse_for_expr(
&mut self,
opt_label: Option<Label>,
span_lo: Span,
mut attrs: ThinVec<Attribute>
) -> PResult<'a, P<Expr>> {
// Parse: `for <src_pat> in <src_expr> <src_loop_block>`
// Record whether we are about to parse `for (`.
// This is used below for recovery in case of `for ( $stuff ) $block`
// in which case we will suggest `for $stuff $block`.
let begin_paren = match self.token.kind {
token::OpenDelim(token::Paren) => Some(self.token.span),
_ => None,
};
let pat = self.parse_top_level_pat()?;
if !self.eat_keyword(kw::In) {
let in_span = self.prev_span.between(self.token.span);
self.struct_span_err(in_span, "missing `in` in `for` loop")
.span_suggestion_short(
in_span,
"try adding `in` here", " in ".into(),
// has been misleading, at least in the past (closed Issue #48492)
Applicability::MaybeIncorrect
)
.emit();
}
let in_span = self.prev_span;
self.check_for_for_in_in_typo(in_span);
let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
let pat = self.recover_parens_around_for_head(pat, &expr, begin_paren);
let (iattrs, loop_block) = self.parse_inner_attrs_and_block()?;
attrs.extend(iattrs);
let hi = self.prev_span;
Ok(self.mk_expr(span_lo.to(hi), ExprKind::ForLoop(pat, expr, loop_block, opt_label), attrs))
}
/// Parses a `while` or `while let` expression (`while` token already eaten).
fn parse_while_expr(&mut self, opt_label: Option<Label>,
span_lo: Span,
mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
let cond = self.parse_cond_expr()?;
let (iattrs, body) = self.parse_inner_attrs_and_block()?;
attrs.extend(iattrs);
let span = span_lo.to(body.span);
Ok(self.mk_expr(span, ExprKind::While(cond, body, opt_label), attrs))
}
/// Parse `loop {...}`, `loop` token already eaten.
fn parse_loop_expr(&mut self, opt_label: Option<Label>,
span_lo: Span,
mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
let (iattrs, body) = self.parse_inner_attrs_and_block()?;
attrs.extend(iattrs);
let span = span_lo.to(body.span);
Ok(self.mk_expr(span, ExprKind::Loop(body, opt_label), attrs))
}
/// Parses an optional `move` prefix to a closure-like construct.
fn parse_capture_clause(&mut self) -> CaptureBy {
if self.eat_keyword(kw::Move) {
CaptureBy::Value
} else {
CaptureBy::Ref
}
}
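// Sketch: `move` is the only capture-clause prefix, switching captures from
// by-reference to by-value:
//
//     let s = String::from("x");
//     let by_ref = || s.len();        // CaptureBy::Ref
//     let by_val = move || s.len();   // CaptureBy::Value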
/// Parses an `async move? {...}` expression.
pub fn parse_async_block(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
let span_lo = self.token.span;
self.expect_keyword(kw::Async)?;
let capture_clause = self.parse_capture_clause();
let (iattrs, body) = self.parse_inner_attrs_and_block()?;
attrs.extend(iattrs);
Ok(self.mk_expr(
span_lo.to(body.span),
ExprKind::Async(capture_clause, ast::DUMMY_NODE_ID, body), attrs))
}
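// Sketch, assuming a 2018-edition crate:
//
//     async { 1 + 1 }                 // ExprKind::Async with CaptureBy::Ref
//     async move { s.len() }          // ExprKind::Async with CaptureBy::Value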
/// Parses a `try {...}` expression (`try` token already eaten).
fn parse_try_block(&mut self, span_lo: Span, mut attrs: ThinVec<Attribute>)
-> PResult<'a, P<Expr>>
{
let (iattrs, body) = self.parse_inner_attrs_and_block()?;
attrs.extend(iattrs);
if self.eat_keyword(kw::Catch) {
let mut error = self.struct_span_err(self.prev_span,
"keyword `catch` cannot follow a `try` block");
error.help("try using `match` on the result of the `try` block instead");
error.emit();
Err(error)
} else {
Ok(self.mk_expr(span_lo.to(body.span), ExprKind::TryBlock(body), attrs))
}
}
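// Sketch, assuming the unstable `try_blocks` feature is enabled:
//
//     let r: Result<i32, std::num::ParseIntError> = try { "1".parse::<i32>()? + 1 };
//     try { helper()? } catch { }     // rejected: `catch` cannot follow a `try` block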
// `match` token already eaten
fn parse_match_expr(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
let match_span = self.prev_span;
let lo = self.prev_span;
let discriminant = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL,
None)?;
if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) {
if self.token == token::Semi {
e.span_suggestion_short(
match_span,
"try removing this `match`",
String::new(),
Applicability::MaybeIncorrect // speculative
);
}
return Err(e)
}
attrs.extend(self.parse_inner_attributes()?);
let mut arms: Vec<Arm> = Vec::new();
while self.token != token::CloseDelim(token::Brace) {
match self.parse_arm() {
Ok(arm) => arms.push(arm),
Err(mut e) => {
// Recover by skipping to the end of the block.
e.emit();
self.recover_stmt();
let span = lo.to(self.token.span);
if self.token == token::CloseDelim(token::Brace) {
self.bump();
}
return Ok(self.mk_expr(span, ExprKind::Match(discriminant, arms), attrs));
}
}
}
let hi = self.token.span;
self.bump();
return Ok(self.mk_expr(lo.to(hi), ExprKind::Match(discriminant, arms), attrs));
}
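// Sketch of a `match` handled above; a malformed arm is recovered from by
// skipping to the end of the block:
//
//     match value {
//         Some(n) if n > 0 => n,
//         None => 0,
//         _ => -1,
//     }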
crate fn parse_arm(&mut self) -> PResult<'a, Arm> {
let attrs = self.parse_outer_attributes()?;
let lo = self.token.span;
let pats = self.parse_pats()?;
let guard = if self.eat_keyword(kw::If) {
Some(self.parse_expr()?)
} else {
None
};
let arrow_span = self.token.span;
self.expect(&token::FatArrow)?;
let arm_start_span = self.token.span;
let expr = self.parse_expr_res(Restrictions::STMT_EXPR, None)
.map_err(|mut err| {
err.span_label(arrow_span, "while parsing the `match` arm starting here");
err
})?;
let require_comma = classify::expr_requires_semi_to_be_stmt(&expr)
&& self.token != token::CloseDelim(token::Brace);
let hi = self.token.span;
if require_comma {
let cm = self.sess.source_map();
self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)])
.map_err(|mut err| {
match (cm.span_to_lines(expr.span), cm.span_to_lines(arm_start_span)) {
(Ok(ref expr_lines), Ok(ref arm_start_lines))
if arm_start_lines.lines[0].end_col == expr_lines.lines[0].end_col
&& expr_lines.lines.len() == 2
&& self.token == token::FatArrow => {
// We check whether there's any trailing code in the parse span,
// if there isn't, we very likely have the following:
//
// X | &Y => "y"
// | -- - missing comma
// | |
// | arrow_span
// X | &X => "x"
// | - ^^ self.token.span
// | |
// | parsed until here as `"y" & X`
err.span_suggestion_short(
cm.next_point(arm_start_span),
"missing a comma here to end this `match` arm",
",".to_owned(),
Applicability::MachineApplicable
);
}
_ => {
err.span_label(arrow_span,
"while parsing the `match` arm starting here");
}
}
err
})?;
} else {
self.eat(&token::Comma);
}
Ok(ast::Arm {
attrs,
pats,
guard,
body: expr,
span: lo.to(hi),
})
}
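// Sketch of the missing-comma case diagnosed above: the first arm's body and
// the next pattern parse as one expression (`"y" & X`), so the error points
// back at the arrow and suggests adding the comma:
//
//     match v {
//         &Y => "y"
//         &X => "x",
//     }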
/// Parses an expression.
#[inline]
pub fn parse_expr(&mut self) -> PResult<'a, P<Expr>> {
self.parse_expr_res(Restrictions::empty(), None)
}
/// Evaluates the closure with restrictions in place.
///
/// After the closure is evaluated, restrictions are reset.
fn with_res<F, T>(&mut self, r: Restrictions, f: F) -> T
where F: FnOnce(&mut Self) -> T
{
let old = self.restrictions;
self.restrictions = r;
let r = f(self);
self.restrictions = old;
return r;
}
/// Parses an expression, subject to the given restrictions.
#[inline]
fn parse_expr_res(&mut self, r: Restrictions,
already_parsed_attrs: Option<ThinVec<Attribute>>)
-> PResult<'a, P<Expr>> {
self.with_res(r, |this| this.parse_assoc_expr(already_parsed_attrs))
}
/// Parses the RHS of a local variable declaration (e.g., '= 14;').
fn parse_initializer(&mut self, skip_eq: bool) -> PResult<'a, Option<P<Expr>>> {
if self.eat(&token::Eq) {
Ok(Some(self.parse_expr()?))
} else if skip_eq {
Ok(Some(self.parse_expr()?))
} else {
Ok(None)
}
}
/// Parses patterns, separated by `|`s.
fn parse_pats(&mut self) -> PResult<'a, Vec<P<Pat>>> {
// Allow a '|' before the pats (RFC 1925 + RFC 2530)
self.eat(&token::BinOp(token::Or));
let mut pats = Vec::new();
loop {
pats.push(self.parse_top_level_pat()?);
if self.token == token::OrOr {
self.struct_span_err(self.token.span, "unexpected token `||` after pattern")
.span_suggestion(
self.token.span,
"use a single `|` to specify multiple patterns",
"|".to_owned(),
Applicability::MachineApplicable
)
.emit();
self.bump();
} else if self.eat(&token::BinOp(token::Or)) {
// This is a No-op. Continue the loop to parse the next
// pattern.
} else {
return Ok(pats);
}
};
}
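// Sketch of `|`-separated pattern lists accepted here:
//
//     | A | B        // a leading `|` is allowed (RFC 1925 + RFC 2530)
//     A || B         // `||` is rejected with a suggestion to use a single `|`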
fn parse_pat_field(
&mut self,
lo: Span,
attrs: Vec<Attribute>
) -> PResult<'a, source_map::Spanned<ast::FieldPat>> {
// Check if a colon exists one ahead. This means we're parsing a fieldname.
let hi;
let (subpat, fieldname, is_shorthand) = if self.look_ahead(1, |t| t == &token::Colon) {
// Parsing a pattern of the form "fieldname: pat"
let fieldname = self.parse_field_name()?;
self.bump();
let pat = self.parse_pat(None)?;
hi = pat.span;
(pat, fieldname, false)
} else {
// Parsing a pattern of the form "(box) (ref) (mut) fieldname"
let is_box = self.eat_keyword(kw::Box);
let boxed_span = self.token.span;
let is_ref = self.eat_keyword(kw::Ref);
let is_mut = self.eat_keyword(kw::Mut);
let fieldname = self.parse_ident()?;
hi = self.prev_span;
let bind_type = match (is_ref, is_mut) {
(true, true) => BindingMode::ByRef(Mutability::Mutable),
(true, false) => BindingMode::ByRef(Mutability::Immutable),
(false, true) => BindingMode::ByValue(Mutability::Mutable),
(false, false) => BindingMode::ByValue(Mutability::Immutable),
};
let fieldpat = P(Pat {
id: ast::DUMMY_NODE_ID,
node: PatKind::Ident(bind_type, fieldname, None),
span: boxed_span.to(hi),
});
let subpat = if is_box {
P(Pat {
id: ast::DUMMY_NODE_ID,
node: PatKind::Box(fieldpat),
span: lo.to(hi),
})
} else {
fieldpat
};
(subpat, fieldname, true)
};
Ok(source_map::Spanned {
span: lo.to(hi),
node: ast::FieldPat {
ident: fieldname,
pat: subpat,
is_shorthand,
attrs: attrs.into(),
}
})
}
/// Parses the fields of a struct-like pattern.
fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<source_map::Spanned<ast::FieldPat>>, bool)> {
let mut fields = Vec::new();
let mut etc = false;
let mut ate_comma = true;
let mut delayed_err: Option<DiagnosticBuilder<'a>> = None;
let mut etc_span = None;
while self.token != token::CloseDelim(token::Brace) {
let attrs = match self.parse_outer_attributes() {
Ok(attrs) => attrs,
Err(err) => {
if let Some(mut delayed) = delayed_err {
delayed.emit();
}
return Err(err);
},
};
let lo = self.token.span;
// check that a comma comes after every field
if !ate_comma {
let err = self.struct_span_err(self.prev_span, "expected `,`");
if let Some(mut delayed) = delayed_err {
delayed.emit();
}
return Err(err);
}
ate_comma = false;
if self.check(&token::DotDot) || self.token == token::DotDotDot {
etc = true;
let mut etc_sp = self.token.span;
if self.token == token::DotDotDot { // Issue #46718
// Accept `...` as if it were `..` to avoid further errors
self.struct_span_err(self.token.span, "expected field pattern, found `...`")
.span_suggestion(
self.token.span,
"to omit remaining fields, use one fewer `.`",
"..".to_owned(),
Applicability::MachineApplicable
)
.emit();
}
self.bump(); // `..` || `...`
if self.token == token::CloseDelim(token::Brace) {
etc_span = Some(etc_sp);
break;
}
let token_str = self.this_token_descr();
let mut err = self.fatal(&format!("expected `}}`, found {}", token_str));
err.span_label(self.token.span, "expected `}`");
let mut comma_sp = None;
if self.token == token::Comma { // Issue #49257
let nw_span = self.sess.source_map().span_until_non_whitespace(self.token.span);
etc_sp = etc_sp.to(nw_span);
err.span_label(etc_sp,
"`..` must be at the end and cannot have a trailing comma");
comma_sp = Some(self.token.span);
self.bump();
ate_comma = true;
}
etc_span = Some(etc_sp.until(self.token.span));
if self.token == token::CloseDelim(token::Brace) {
// If the struct looks otherwise well formed, recover and continue.
if let Some(sp) = comma_sp {
err.span_suggestion_short(
sp,
"remove this comma",
String::new(),
Applicability::MachineApplicable,
);
}
err.emit();
break;
} else if self.token.is_ident() && ate_comma {
// Accept fields coming after `..,`.
// This way we avoid "pattern missing fields" errors afterwards.
// We delay this error until the end in order to have a span for a
// suggested fix.
if let Some(mut delayed_err) = delayed_err {
delayed_err.emit();
return Err(err);
} else {
delayed_err = Some(err);
}
} else {
if let Some(mut err) = delayed_err {
err.emit();
}
return Err(err);
}
}
fields.push(match self.parse_pat_field(lo, attrs) {
Ok(field) => field,
Err(err) => {
if let Some(mut delayed_err) = delayed_err {
delayed_err.emit();
}
return Err(err);
}
});
ate_comma = self.eat(&token::Comma);
}
if let Some(mut err) = delayed_err {
if let Some(etc_span) = etc_span {
err.multipart_suggestion(
"move the `..` to the end of the field list",
vec![
(etc_span, String::new()),
(self.token.span, format!("{}.. }}", if ate_comma { "" } else { ", " })),
],
Applicability::MachineApplicable,
);
}
err.emit();
}
return Ok((fields, etc));
}
fn parse_pat_range_end(&mut self) -> PResult<'a, P<Expr>> {
if self.token.is_path_start() {
let lo = self.token.span;
let (qself, path) = if self.eat_lt() {
// Parse a qualified path
let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
(Some(qself), path)
} else {
// Parse an unqualified path
(None, self.parse_path(PathStyle::Expr)?)
};
let hi = self.prev_span;
Ok(self.mk_expr(lo.to(hi), ExprKind::Path(qself, path), ThinVec::new()))
} else {
self.parse_literal_maybe_minus()
}
}
/// Is the current token suitable as the start of a range pattern's end?
fn is_pat_range_end_start(&self) -> bool {
self.token.is_path_start() // e.g. `MY_CONST`;
|| self.token == token::Dot // e.g. `.5` for recovery;
|| self.token.can_begin_literal_or_bool() // e.g. `42`.
|| self.token.is_whole_expr()
}
// Helper function to decide whether to parse as ident binding
// or to try to do something more complex like range patterns.
fn parse_as_ident(&mut self) -> bool {
self.look_ahead(1, |t| match t.kind {
token::OpenDelim(token::Paren) | token::OpenDelim(token::Brace) |
token::DotDotDot | token::DotDotEq | token::DotDot |
token::ModSep | token::Not => false,
_ => true,
})
}
/// Parse and throw away a parenthesized comma-separated
/// sequence of patterns until `)` is reached.
fn skip_pat_list(&mut self) -> PResult<'a, ()> {
while !self.check(&token::CloseDelim(token::Paren)) {
self.parse_pat(None)?;
if !self.eat(&token::Comma) {
return Ok(())
}
}
Ok(())
}
/// A wrapper around `parse_pat` with some special error handling for the
/// "top-level" patterns in a match arm, `for` loop, `let`, &c. (in contrast
/// to subpatterns within such).
fn parse_top_level_pat(&mut self) -> PResult<'a, P<Pat>> {
let pat = self.parse_pat(None)?;
if self.token == token::Comma {
// An unexpected comma after a top-level pattern is a clue that the
// user (perhaps more accustomed to some other language) forgot the
// parentheses in what should have been a tuple pattern; return a
// suggestion-enhanced error here rather than choking on the comma
// later.
let comma_span = self.token.span;
self.bump();
if let Err(mut err) = self.skip_pat_list() {
// We didn't expect this to work anyway; we just wanted
// to advance to the end of the comma-sequence so we know
// the span to suggest parenthesizing
err.cancel();
}
let seq_span = pat.span.to(self.prev_span);
let mut err = self.struct_span_err(comma_span,
"unexpected `,` in pattern");
if let Ok(seq_snippet) = self.span_to_snippet(seq_span) {
err.span_suggestion(
seq_span,
"try adding parentheses to match on a tuple..",
format!("({})", seq_snippet),
Applicability::MachineApplicable
).span_suggestion(
seq_span,
"..or a vertical bar to match on multiple alternatives",
format!("{}", seq_snippet.replace(",", " |")),
Applicability::MachineApplicable
);
}
return Err(err);
}
Ok(pat)
}
/// Parses a pattern.
pub fn parse_pat(&mut self, expected: Option<&'static str>) -> PResult<'a, P<Pat>> {
self.parse_pat_with_range_pat(true, expected)
}
/// Parse a range-to pattern, e.g. `..X` and `..=X` for recovery.
fn parse_pat_range_to(&mut self, re: RangeEnd, form: &str) -> PResult<'a, PatKind> {
let lo = self.prev_span;
let end = self.parse_pat_range_end()?;
let range_span = lo.to(end.span);
let begin = self.mk_expr(range_span, ExprKind::Err, ThinVec::new());
self.diagnostic()
.struct_span_err(range_span, &format!("`{}X` range patterns are not supported", form))
.span_suggestion(
range_span,
"try using the minimum value for the type",
format!("MIN{}{}", form, pprust::expr_to_string(&end)),
Applicability::HasPlaceholders,
)
.emit();
Ok(PatKind::Range(begin, end, respan(lo, re)))
}
/// Parse the end of a `X..Y`, `X..=Y`, or `X...Y` range pattern, or recover
/// if that end is missing, treating it as `X..`, `X..=`, or `X...` respectively.
fn parse_pat_range_end_opt(&mut self, begin: &Expr, form: &str) -> PResult<'a, P<Expr>> {
if self.is_pat_range_end_start() {
// Parsing e.g. `X..=Y`.
self.parse_pat_range_end()
} else {
// Parsing e.g. `X..`.
let range_span = begin.span.to(self.prev_span);
self.diagnostic()
.struct_span_err(
range_span,
&format!("`X{}` range patterns are not supported", form),
)
.span_suggestion(
range_span,
"try using the maximum value for the type",
format!("{}{}MAX", pprust::expr_to_string(&begin), form),
Applicability::HasPlaceholders,
)
.emit();
Ok(self.mk_expr(range_span, ExprKind::Err, ThinVec::new()))
}
}
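// Sketch of the half-open range-pattern recoveries above (placeholder
// suggestions, hence `Applicability::HasPlaceholders`):
//
//     ..=5 => ...      // recovered as if it were `MIN..=5`
//     5.. => ...       // recovered as if it were `5..MAX`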
/// Parses a pattern, with a setting whether modern range patterns (e.g., `a..=b`, `a..b` are
/// allowed).
fn parse_pat_with_range_pat(
&mut self,
allow_range_pat: bool,
expected: Option<&'static str>,
) -> PResult<'a, P<Pat>> {
maybe_recover_from_interpolated_ty_qpath!(self, true);
maybe_whole!(self, NtPat, |x| x);
let lo = self.token.span;
let pat;
match self.token.kind {
token::BinOp(token::And) | token::AndAnd => {
// Parse &pat / &mut pat
self.expect_and()?;
let mutbl = self.parse_mutability();
if let token::Lifetime(name) = self.token.kind {
let mut err = self.fatal(&format!("unexpected lifetime `{}` in pattern", name));
err.span_label(self.token.span, "unexpected lifetime");
return Err(err);
}
let subpat = self.parse_pat_with_range_pat(false, expected)?;
pat = PatKind::Ref(subpat, mutbl);
}
token::OpenDelim(token::Paren) => {
// Parse a tuple or parenthesis pattern.
let (fields, trailing_comma) = self.parse_paren_comma_seq(|p| p.parse_pat(None))?;
// Here, `(pat,)` is a tuple pattern.
// For backward compatibility, `(..)` is a tuple pattern as well.
pat = if fields.len() == 1 && !(trailing_comma || fields[0].is_rest()) {
PatKind::Paren(fields.into_iter().nth(0).unwrap())
} else {
PatKind::Tuple(fields)
};
}
token::OpenDelim(token::Bracket) => {
// Parse `[pat, pat,...]` as a slice pattern.
let (slice, _) = self.parse_delim_comma_seq(token::Bracket, |p| p.parse_pat(None))?;
pat = PatKind::Slice(slice);
}
token::DotDot => {
self.bump();
pat = if self.is_pat_range_end_start() {
// Parse `..42` for recovery.
self.parse_pat_range_to(RangeEnd::Excluded, "..")?
} else {
// A rest pattern `..`.
PatKind::Rest
};
}
token::DotDotEq => {
// Parse `..=42` for recovery.
self.bump();
pat = self.parse_pat_range_to(RangeEnd::Included(RangeSyntax::DotDotEq), "..=")?;
}
token::DotDotDot => {
// Parse `...42` for recovery.
self.bump();
pat = self.parse_pat_range_to(RangeEnd::Included(RangeSyntax::DotDotDot), "...")?;
}
// At this point, token != &, &&, (, [
_ => if self.eat_keyword(kw::Underscore) {
// Parse _
pat = PatKind::Wild;
} else if self.eat_keyword(kw::Mut) {
// Parse mut ident @ pat / mut ref ident @ pat
let mutref_span = self.prev_span.to(self.token.span);
let binding_mode = if self.eat_keyword(kw::Ref) {
self.diagnostic()
.struct_span_err(mutref_span, "the order of `mut` and `ref` is incorrect")
.span_suggestion(
mutref_span,
"try switching the order",
"ref mut".into(),
Applicability::MachineApplicable
).emit();
BindingMode::ByRef(Mutability::Mutable)
} else {
BindingMode::ByValue(Mutability::Mutable)
};
pat = self.parse_pat_ident(binding_mode)?;
} else if self.eat_keyword(kw::Ref) {
// Parse ref ident @ pat / ref mut ident @ pat
let mutbl = self.parse_mutability();
pat = self.parse_pat_ident(BindingMode::ByRef(mutbl))?;
} else if self.eat_keyword(kw::Box) {
// Parse box pat
let subpat = self.parse_pat_with_range_pat(false, None)?;
pat = PatKind::Box(subpat);
} else if self.token.is_ident() && !self.token.is_reserved_ident() &&
self.parse_as_ident() {
// Parse ident @ pat
// This can give false positives and parse nullary enums,
// they are dealt with later in resolve
let binding_mode = BindingMode::ByValue(Mutability::Immutable);
pat = self.parse_pat_ident(binding_mode)?;
} else if self.token.is_path_start() {
// Parse pattern starting with a path
let (qself, path) = if self.eat_lt() {
// Parse a qualified path
let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
(Some(qself), path)
} else {
// Parse an unqualified path
(None, self.parse_path(PathStyle::Expr)?)
};
match self.token.kind {
token::Not if qself.is_none() => {
// Parse macro invocation
self.bump();
let (delim, tts) = self.expect_delimited_token_tree()?;
let mac = respan(lo.to(self.prev_span), Mac_ {
path,
tts,
delim,
prior_type_ascription: self.last_type_ascription,
});
pat = PatKind::Mac(mac);
}
token::DotDotDot | token::DotDotEq | token::DotDot => {
let (end_kind, form) = match self.token.kind {
token::DotDot => (RangeEnd::Excluded, ".."),
token::DotDotDot => (RangeEnd::Included(RangeSyntax::DotDotDot), "..."),
token::DotDotEq => (RangeEnd::Included(RangeSyntax::DotDotEq), "..="),
_ => panic!("can only parse `..`/`...`/`..=` for ranges \
(checked above)"),
};
let op_span = self.token.span;
// Parse range
let span = lo.to(self.prev_span);
let begin = self.mk_expr(span, ExprKind::Path(qself, path), ThinVec::new());
self.bump();
let end = self.parse_pat_range_end_opt(&begin, form)?;
pat = PatKind::Range(begin, end, respan(op_span, end_kind));
}
token::OpenDelim(token::Brace) => {
if qself.is_some() {
let msg = "unexpected `{` after qualified path";
let mut err = self.fatal(msg);
err.span_label(self.token.span, msg);
return Err(err);
}
// Parse struct pattern
self.bump();
let (fields, etc) = self.parse_pat_fields().unwrap_or_else(|mut e| {
e.emit();
self.recover_stmt();
(vec![], true)
});
self.bump();
pat = PatKind::Struct(path, fields, etc);
}
token::OpenDelim(token::Paren) => {
if qself.is_some() {
let msg = "unexpected `(` after qualified path";
let mut err = self.fatal(msg);
err.span_label(self.token.span, msg);
return Err(err);
}
// Parse tuple struct or enum pattern
let (fields, _) = self.parse_paren_comma_seq(|p| p.parse_pat(None))?;
pat = PatKind::TupleStruct(path, fields)
}
_ => pat = PatKind::Path(qself, path),
}
} else {
// Try to parse everything else as literal with optional minus
match self.parse_literal_maybe_minus() {
Ok(begin) => {
let op_span = self.token.span;
if self.check(&token::DotDot) || self.check(&token::DotDotEq) ||
self.check(&token::DotDotDot) {
let (end_kind, form) = if self.eat(&token::DotDotDot) {
(RangeEnd::Included(RangeSyntax::DotDotDot), "...")
} else if self.eat(&token::DotDotEq) {
(RangeEnd::Included(RangeSyntax::DotDotEq), "..=")
} else if self.eat(&token::DotDot) {
(RangeEnd::Excluded, "..")
} else {
panic!("impossible case: we already matched \
on a range-operator token")
};
let end = self.parse_pat_range_end_opt(&begin, form)?;
pat = PatKind::Range(begin, end, respan(op_span, end_kind))
} else {
pat = PatKind::Lit(begin);
}
}
Err(mut err) => {
self.cancel(&mut err);
let expected = expected.unwrap_or("pattern");
let msg = format!(
"expected {}, found {}",
expected,
self.this_token_descr(),
);
let mut err = self.fatal(&msg);
err.span_label(self.token.span, format!("expected {}", expected));
let sp = self.sess.source_map().start_point(self.token.span);
if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
self.sess.expr_parentheses_needed(&mut err, *sp, None);
}
return Err(err);
}
}
}
}
let pat = P(Pat { node: pat, span: lo.to(self.prev_span), id: ast::DUMMY_NODE_ID });
let pat = self.maybe_recover_from_bad_qpath(pat, true)?;
if !allow_range_pat {
match pat.node {
PatKind::Range(
_, _, Spanned { node: RangeEnd::Included(RangeSyntax::DotDotDot), .. }
) => {},
PatKind::Range(..) => {
let mut err = self.struct_span_err(
pat.span,
"the range pattern here has ambiguous interpretation",
);
err.span_suggestion(
pat.span,
"add parentheses to clarify the precedence",
format!("({})", pprust::pat_to_string(&pat)),
// "ambiguous interpretation" implies that we have to be guessing
Applicability::MaybeIncorrect
);
return Err(err);
}
_ => {}
}
}
Ok(pat)
}
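// Sketch of the pattern forms dispatched on above:
//
//     &mut x               // PatKind::Ref
//     (a, b)               // PatKind::Tuple
//     [first, .., last]    // PatKind::Slice with a rest pattern
//     Some(ref mut v)      // tuple-struct pattern starting with a path
//     0..=9                // PatKind::Range
//     _                    // PatKind::Wild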
/// Parses `ident` or `ident @ pat`.
/// Used by the copy foo and ref foo patterns to give a good
/// error message when parsing mistakes like `ref foo(a, b)`.
fn parse_pat_ident(&mut self,
binding_mode: ast::BindingMode)
-> PResult<'a, PatKind> {
let ident = self.parse_ident()?;
let sub = if self.eat(&token::At) {
Some(self.parse_pat(Some("binding pattern"))?)
} else {
None
};
// just to be friendly, if they write something like
// ref Some(i)
// we end up here with ( as the current token. This shortly
// leads to a parse error. Note that if there is no explicit
// binding mode then we do not end up here, because the lookahead
// will direct us over to parse_enum_variant()
if self.token == token::OpenDelim(token::Paren) {
return Err(self.span_fatal(
self.prev_span,
"expected identifier, found enum pattern"))
}
Ok(PatKind::Ident(binding_mode, ident, sub))
}
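// Sketch:
//
//     ref x @ Some(_)      // identifier binding with a subpattern
//     ref foo(a, b)        // error: expected identifier, found enum pattern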
/// Parses a local variable declaration.
fn parse_local(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Local>> {
let lo = self.prev_span;
let pat = self.parse_top_level_pat()?;
let (err, ty) = if self.eat(&token::Colon) {
// Save the state of the parser before parsing type normally, in case there is a `:`
// instead of an `=` typo.
let parser_snapshot_before_type = self.clone();
let colon_sp = self.prev_span;
match self.parse_ty() {
Ok(ty) => (None, Some(ty)),
Err(mut err) => {
// Rewind to before attempting to parse the type and continue parsing
let parser_snapshot_after_type = self.clone();
mem::replace(self, parser_snapshot_before_type);
let snippet = self.span_to_snippet(pat.span).unwrap();
err.span_label(pat.span, format!("while parsing the type for `{}`", snippet));
(Some((parser_snapshot_after_type, colon_sp, err)), None)
}
}
} else {
(None, None)
};
let init = match (self.parse_initializer(err.is_some()), err) {
(Ok(init), None) => { // init parsed, ty parsed
init
}
(Ok(init), Some((_, colon_sp, mut err))) => { // init parsed, ty error
// Could parse the type as if it were the initializer, it is likely there was a
// typo in the code: `:` instead of `=`. Add suggestion and emit the error.
err.span_suggestion_short(
colon_sp,
"use `=` if you meant to assign",
"=".to_string(),
Applicability::MachineApplicable
);
err.emit();
// As this was parsed successfully, continue as if the code has been fixed for the
// rest of the file. It will still fail due to the emitted error, but we avoid
// extra noise.
init
}
(Err(mut init_err), Some((snapshot, _, ty_err))) => { // init error, ty error
init_err.cancel();
// Couldn't parse the type nor the initializer, only raise the type error and
// return to the parser state before parsing the type as the initializer.
// let x: <parse_error>;
mem::replace(self, snapshot);
return Err(ty_err);
}
(Err(err), None) => { // init error, ty parsed
// Couldn't parse the initializer and we're not attempting to recover a failed
// parse of the type, return the error.
return Err(err);
}
};
let hi = if self.token == token::Semi {
self.token.span
} else {
self.prev_span
};
Ok(P(ast::Local {
ty,
pat,
init,
id: ast::DUMMY_NODE_ID,
span: lo.to(hi),
attrs,
}))
}
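// Sketch of `let` declarations handled above, including the `:`-for-`=`
// typo recovery:
//
//     let x: u32 = 5;      // type and initializer both present
//     let x: 5;            // type fails to parse; suggests `let x = 5;`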
/// Parses a structure field.
fn parse_name_and_ty(&mut self,
lo: Span,
vis: Visibility,
attrs: Vec<Attribute>)
-> PResult<'a, StructField> {
let name = self.parse_ident()?;
self.expect(&token::Colon)?;
let ty = self.parse_ty()?;
Ok(StructField {
span: lo.to(self.prev_span),
ident: Some(name),
vis,
id: ast::DUMMY_NODE_ID,
ty,
attrs,
})
}
/// Emits an expected-item-after-attributes error.
fn expected_item_err(&mut self, attrs: &[Attribute]) -> PResult<'a, ()> {
let message = match attrs.last() {
Some(&Attribute { is_sugared_doc: true, .. }) => "expected item after doc comment",
_ => "expected item after attributes",
};
let mut err = self.diagnostic().struct_span_err(self.prev_span, message);
if attrs.last().unwrap().is_sugared_doc {
err.span_label(self.prev_span, "this doc comment doesn't document anything");
}
Err(err)
}
/// Parse a statement. This stops just before trailing semicolons on everything but items.
/// e.g., a `StmtKind::Semi` parses to a `StmtKind::Expr`, leaving the trailing `;` unconsumed.
pub fn parse_stmt(&mut self) -> PResult<'a, Option<Stmt>> {
Ok(self.parse_stmt_(true))
}
fn parse_stmt_(&mut self, macro_legacy_warnings: bool) -> Option<Stmt> {
self.parse_stmt_without_recovery(macro_legacy_warnings).unwrap_or_else(|mut e| {
e.emit();
self.recover_stmt_(SemiColonMode::Break, BlockMode::Ignore);
None
})
}
fn is_async_block(&self) -> bool {
self.token.is_keyword(kw::Async) &&
(
( // `async move {`
self.is_keyword_ahead(1, &[kw::Move]) &&
self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))
) || ( // `async {`
self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace))
)
)
}
fn is_async_fn(&self) -> bool {
self.token.is_keyword(kw::Async) &&
self.is_keyword_ahead(1, &[kw::Fn])
}
fn is_do_catch_block(&self) -> bool {
self.token.is_keyword(kw::Do) &&
self.is_keyword_ahead(1, &[kw::Catch]) &&
self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace)) &&
!self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
}
fn is_try_block(&self) -> bool {
self.token.is_keyword(kw::Try) &&
self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) &&
self.token.span.rust_2018() &&
// prevent `while try {} {}`, `if try {} {} else {}`, etc.
!self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
}
fn is_union_item(&self) -> bool {
self.token.is_keyword(kw::Union) &&
self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident())
}
fn is_crate_vis(&self) -> bool {
self.token.is_keyword(kw::Crate) && self.look_ahead(1, |t| t != &token::ModSep)
}
fn is_auto_trait_item(&self) -> bool {
// auto trait
(self.token.is_keyword(kw::Auto) &&
self.is_keyword_ahead(1, &[kw::Trait]))
|| // unsafe auto trait
(self.token.is_keyword(kw::Unsafe) &&
self.is_keyword_ahead(1, &[kw::Auto]) &&
self.is_keyword_ahead(2, &[kw::Trait]))
}
fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility, lo: Span)
-> PResult<'a, Option<P<Item>>> {
let token_lo = self.token.span;
let (ident, def) = if self.eat_keyword(kw::Macro) {
let ident = self.parse_ident()?;
let tokens = if self.check(&token::OpenDelim(token::Brace)) {
match self.parse_token_tree() {
TokenTree::Delimited(_, _, tts) => tts,
_ => unreachable!(),
}
} else if self.check(&token::OpenDelim(token::Paren)) {
let args = self.parse_token_tree();
let body = if self.check(&token::OpenDelim(token::Brace)) {
self.parse_token_tree()
} else {
self.unexpected()?;
unreachable!()
};
TokenStream::new(vec![
args.into(),
TokenTree::token(token::FatArrow, token_lo.to(self.prev_span)).into(),
body.into(),
])
} else {
self.unexpected()?;
unreachable!()
};
(ident, ast::MacroDef { tokens: tokens.into(), legacy: false })
} else if self.check_keyword(sym::macro_rules) &&
self.look_ahead(1, |t| *t == token::Not) &&
self.look_ahead(2, |t| t.is_ident()) {
let prev_span = self.prev_span;
self.complain_if_pub_macro(&vis.node, prev_span);
self.bump();
self.bump();
let ident = self.parse_ident()?;
let (delim, tokens) = self.expect_delimited_token_tree()?;
if delim != MacDelimiter::Brace && !self.eat(&token::Semi) {
self.report_invalid_macro_expansion_item();
}
(ident, ast::MacroDef { tokens, legacy: true })
} else {
return Ok(None);
};
let span = lo.to(self.prev_span);
Ok(Some(self.mk_item(span, ident, ItemKind::MacroDef(def), vis.clone(), attrs.to_vec())))
}
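// Sketch of the two macro-definition forms recognized above:
//
//     macro m($e:expr) { $e }                    // `macro` item, legacy: false
//     macro_rules! m { ($e:expr) => { $e }; }    // `macro_rules!`, legacy: true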
fn parse_stmt_without_recovery(
&mut self,
macro_legacy_warnings: bool,
) -> PResult<'a, Option<Stmt>> {
maybe_whole!(self, NtStmt, |x| Some(x));
let attrs = self.parse_outer_attributes()?;
let lo = self.token.span;
Ok(Some(if self.eat_keyword(kw::Let) {
Stmt {
id: ast::DUMMY_NODE_ID,
node: StmtKind::Local(self.parse_local(attrs.into())?),
span: lo.to(self.prev_span),
}
} else if let Some(macro_def) = self.eat_macro_def(
&attrs,
&source_map::respan(lo, VisibilityKind::Inherited),
lo,
)? {
Stmt {
id: ast::DUMMY_NODE_ID,
node: StmtKind::Item(macro_def),
span: lo.to(self.prev_span),
}
// Starts like a simple path, being careful to avoid contextual keywords
// such as union items, items with `crate` visibility, or auto trait items.
// Our goal here is to parse an arbitrary path `a::b::c` but not something that starts
// like a path (1 token), but is in fact not a path.
// `union::b::c` - path, `union U { ... }` - not a path.
// `crate::b::c` - path, `crate struct S;` - not a path.
} else if self.token.is_path_start() &&
!self.token.is_qpath_start() &&
!self.is_union_item() &&
!self.is_crate_vis() &&
!self.is_auto_trait_item() &&
!self.is_async_fn() {
let path = self.parse_path(PathStyle::Expr)?;
if !self.eat(&token::Not) {
let expr = if self.check(&token::OpenDelim(token::Brace)) {
self.parse_struct_expr(lo, path, ThinVec::new())?
} else {
let hi = self.prev_span;
self.mk_expr(lo.to(hi), ExprKind::Path(None, path), ThinVec::new())
};
let expr = self.with_res(Restrictions::STMT_EXPR, |this| {
let expr = this.parse_dot_or_call_expr_with(expr, lo, attrs.into())?;
this.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(expr))
})?;
return Ok(Some(Stmt {
id: ast::DUMMY_NODE_ID,
node: StmtKind::Expr(expr),
span: lo.to(self.prev_span),
}));
}
let (delim, tts) = self.expect_delimited_token_tree()?;
let hi = self.prev_span;
let style = if delim == MacDelimiter::Brace {
MacStmtStyle::Braces
} else {
MacStmtStyle::NoBraces
};
let mac = respan(lo.to(hi), Mac_ {
path,
tts,
delim,
prior_type_ascription: self.last_type_ascription,
});
let node = if delim == MacDelimiter::Brace ||
self.token == token::Semi || self.token == token::Eof {
StmtKind::Mac(P((mac, style, attrs.into())))
}
// We used to incorrectly stop parsing macro-expanded statements here.
// If the next token will be an error anyway but could have parsed with the
// earlier behavior, stop parsing here and emit a warning to avoid breakage.
else if macro_legacy_warnings &&
self.token.can_begin_expr() &&
match self.token.kind {
// These can continue an expression, so we can't stop parsing and warn.
token::OpenDelim(token::Paren) | token::OpenDelim(token::Bracket) |
token::BinOp(token::Minus) | token::BinOp(token::Star) |
token::BinOp(token::And) | token::BinOp(token::Or) |
token::AndAnd | token::OrOr |
token::DotDot | token::DotDotDot | token::DotDotEq => false,
_ => true,
} {
self.warn_missing_semicolon();
StmtKind::Mac(P((mac, style, attrs.into())))
} else {
let e = self.mk_expr(mac.span, ExprKind::Mac(mac), ThinVec::new());
let e = self.maybe_recover_from_bad_qpath(e, true)?;
let e = self.parse_dot_or_call_expr_with(e, lo, attrs.into())?;
let e = self.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(e))?;
StmtKind::Expr(e)
};
Stmt {
id: ast::DUMMY_NODE_ID,
span: lo.to(hi),
node,
}
} else {
// FIXME: Bad copy of attrs
let old_directory_ownership =
mem::replace(&mut self.directory.ownership, DirectoryOwnership::UnownedViaBlock);
let item = self.parse_item_(attrs.clone(), false, true)?;
self.directory.ownership = old_directory_ownership;
match item {
Some(i) => Stmt {
id: ast::DUMMY_NODE_ID,
span: lo.to(i.span),
node: StmtKind::Item(i),
},
None => {
let unused_attrs = |attrs: &[Attribute], s: &mut Self| {
if !attrs.is_empty() {
if s.prev_token_kind == PrevTokenKind::DocComment {
s.span_fatal_err(s.prev_span, Error::UselessDocComment).emit();
} else if attrs.iter().any(|a| a.style == AttrStyle::Outer) {
s.span_err(
s.token.span, "expected statement after outer attribute"
);
}
}
};
// Do not attempt to parse an expression if we're done here.
if self.token == token::Semi {
unused_attrs(&attrs, self);
self.bump();
return Ok(None);
}
if self.token == token::CloseDelim(token::Brace) {
unused_attrs(&attrs, self);
return Ok(None);
}
// Remainder are line-expr stmts.
let e = self.parse_expr_res(
Restrictions::STMT_EXPR, Some(attrs.into()))?;
Stmt {
id: ast::DUMMY_NODE_ID,
span: lo.to(e.span),
node: StmtKind::Expr(e),
}
}
}
}))
}
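// Sketch of the statement forms distinguished above:
//
//     let x = 1;           // StmtKind::Local
//     println!("hi");      // path followed by `!`: macro statement
//     S { x: 0 };          // struct literal, parsed via parse_struct_expr
//     fn helper() {}       // falls through to item parsing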
/// Checks if this expression is a successfully parsed statement.
fn expr_is_complete(&self, e: &Expr) -> bool {
self.restrictions.contains(Restrictions::STMT_EXPR) &&
!classify::expr_requires_semi_to_be_stmt(e)
}
/// Parses a block. No inner attributes are allowed.
pub fn parse_block(&mut self) -> PResult<'a, P<Block>> {
maybe_whole!(self, NtBlock, |x| x);
let lo = self.token.span;
if !self.eat(&token::OpenDelim(token::Brace)) {
let sp = self.token.span;
let tok = self.this_token_descr();
let mut e = self.span_fatal(sp, &format!("expected `{{`, found {}", tok));
let do_not_suggest_help =
self.token.is_keyword(kw::In) || self.token == token::Colon;
if self.token.is_ident_named(sym::and) {
e.span_suggestion_short(
self.token.span,
"use `&&` instead of `and` for the boolean operator",
"&&".to_string(),
Applicability::MaybeIncorrect,
);
}
if self.token.is_ident_named(sym::or) {
e.span_suggestion_short(
self.token.span,
"use `||` instead of `or` for the boolean operator",
"||".to_string(),
Applicability::MaybeIncorrect,
);
}
// Check to see if the user has written something like
//
// if (cond)
// bar;
//
// Which is valid in other languages, but not Rust.
match self.parse_stmt_without_recovery(false) {
Ok(Some(stmt)) => {
if self.look_ahead(1, |t| t == &token::OpenDelim(token::Brace))
|| do_not_suggest_help {
// if the next token is an open brace (e.g., `if a b {`), the place-
// inside-a-block suggestion would be more likely wrong than right
e.span_label(sp, "expected `{`");
return Err(e);
}
let mut stmt_span = stmt.span;
// expand the span to include the semicolon, if it exists
if self.eat(&token::Semi) {
stmt_span = stmt_span.with_hi(self.prev_span.hi());
}
if let Ok(snippet) = self.span_to_snippet(stmt_span) {
e.span_suggestion(
stmt_span,
"try placing this code inside a block",
format!("{{ {} }}", snippet),
// speculative, has been misleading in the past (#46836)
Applicability::MaybeIncorrect,
);
}
}
Err(mut e) => {
self.recover_stmt_(SemiColonMode::Break, BlockMode::Ignore);
self.cancel(&mut e);
}
_ => ()
}
e.span_label(sp, "expected `{`");
return Err(e);
}
self.parse_block_tail(lo, BlockCheckMode::Default)
}
/// Parses a block. Inner attributes are allowed.
crate fn parse_inner_attrs_and_block(&mut self) -> PResult<'a, (Vec<Attribute>, P<Block>)> {
maybe_whole!(self, NtBlock, |x| (Vec::new(), x));
let lo = self.token.span;
self.expect(&token::OpenDelim(token::Brace))?;
Ok((self.parse_inner_attributes()?,
self.parse_block_tail(lo, BlockCheckMode::Default)?))
}
/// Parses the rest of a block expression or function body.
/// Precondition: already parsed the '{'.
fn parse_block_tail(&mut self, lo: Span, s: BlockCheckMode) -> PResult<'a, P<Block>> {
let mut stmts = vec![];
while !self.eat(&token::CloseDelim(token::Brace)) {
if self.token == token::Eof {
break;
}
let stmt = match self.parse_full_stmt(false) {
Err(mut err) => {
err.emit();
self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore);
Some(Stmt {
id: ast::DUMMY_NODE_ID,
node: StmtKind::Expr(DummyResult::raw_expr(self.token.span, true)),
span: self.token.span,
})
}
Ok(stmt) => stmt,
};
if let Some(stmt) = stmt {
stmts.push(stmt);
} else {
// Found only `;` or `}`.
continue;
};
}
Ok(P(ast::Block {
stmts,
id: ast::DUMMY_NODE_ID,
rules: s,
span: lo.to(self.prev_span),
}))
}
/// Parses a statement, including the trailing semicolon.
crate fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> {
// skip looking for a trailing semicolon when we have an interpolated statement
maybe_whole!(self, NtStmt, |x| Some(x));
let mut stmt = match self.parse_stmt_without_recovery(macro_legacy_warnings)? {
Some(stmt) => stmt,
None => return Ok(None),
};
match stmt.node {
StmtKind::Expr(ref expr) if self.token != token::Eof => {
// expression without semicolon
if classify::expr_requires_semi_to_be_stmt(expr) {
// Just check for errors and recover; do not eat semicolon yet.
if let Err(mut e) =
self.expect_one_of(&[], &[token::Semi, token::CloseDelim(token::Brace)])
{
e.emit();
self.recover_stmt();
// Don't complain about type errors in body tail after parse error (#57383).
let sp = expr.span.to(self.prev_span);
stmt.node = StmtKind::Expr(DummyResult::raw_expr(sp, true));
}
}
}
StmtKind::Local(..) => {
// We used to incorrectly allow a macro-expanded let statement to lack a semicolon.
if macro_legacy_warnings && self.token != token::Semi {
self.warn_missing_semicolon();
} else {
self.expect_one_of(&[], &[token::Semi])?;
}
}
_ => {}
}
if self.eat(&token::Semi) {
stmt = stmt.add_trailing_semicolon();
}
stmt.span = stmt.span.to(self.prev_span);
Ok(Some(stmt))
}
fn warn_missing_semicolon(&self) {
self.diagnostic().struct_span_warn(self.token.span, {
&format!("expected `;`, found {}", self.this_token_descr())
}).note({
"This was erroneously allowed and will become a hard error in a future release"
}).emit();
}
fn err_dotdotdot_syntax(&self, span: Span) {
self.diagnostic().struct_span_err(span, {
"unexpected token: `...`"
}).span_suggestion(
span, "use `..` for an exclusive range", "..".to_owned(),
Applicability::MaybeIncorrect
).span_suggestion(
span, "or `..=` for an inclusive range", "..=".to_owned(),
Applicability::MaybeIncorrect
).emit();
}
/// Parses bounds of a type parameter `BOUND + BOUND + ...`, possibly with trailing `+`.
///
/// ```
/// BOUND = TY_BOUND | LT_BOUND
/// LT_BOUND = LIFETIME (e.g., `'a`)
/// TY_BOUND = TY_BOUND_NOPAREN | (TY_BOUND_NOPAREN)
/// TY_BOUND_NOPAREN = [?] [for<LT_PARAM_DEFS>] SIMPLE_PATH (e.g., `?for<'a: 'b> m::Trait<'a>`)
/// ```
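///
/// For example (an illustrative case, not the full grammar), a bound list such as
/// `'a + ?Sized + for<'b> Fn(&'b u8)` is accepted here, and a trailing `+` is tolerated.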
fn parse_generic_bounds_common(&mut self,
allow_plus: bool,
colon_span: Option<Span>) -> PResult<'a, GenericBounds> {
let mut bounds = Vec::new();
let mut negative_bounds = Vec::new();
let mut last_plus_span = None;
let mut was_negative = false;
loop {
// This needs to be synchronized with `TokenKind::can_begin_bound`.
let is_bound_start = self.check_path() || self.check_lifetime() ||
self.check(&token::Not) || // used for error reporting only
self.check(&token::Question) ||
self.check_keyword(kw::For) ||
self.check(&token::OpenDelim(token::Paren));
if is_bound_start {
let lo = self.token.span;
let has_parens = self.eat(&token::OpenDelim(token::Paren));
let inner_lo = self.token.span;
let is_negative = self.eat(&token::Not);
let question = if self.eat(&token::Question) { Some(self.prev_span) } else { None };
if self.token.is_lifetime() {
if let Some(question_span) = question {
self.span_err(question_span,
"`?` may only modify trait bounds, not lifetime bounds");
}
bounds.push(GenericBound::Outlives(self.expect_lifetime()));
if has_parens {
let inner_span = inner_lo.to(self.prev_span);
self.expect(&token::CloseDelim(token::Paren))?;
let mut err = self.struct_span_err(
lo.to(self.prev_span),
"parenthesized lifetime bounds are not supported"
);
if let Ok(snippet) = self.span_to_snippet(inner_span) {
err.span_suggestion_short(
lo.to(self.prev_span),
"remove the parentheses",
snippet.to_owned(),
Applicability::MachineApplicable
);
}
err.emit();
}
} else {
let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
let path = self.parse_path(PathStyle::Type)?;
if has_parens {
self.expect(&token::CloseDelim(token::Paren))?;
}
let poly_span = lo.to(self.prev_span);
if is_negative {
was_negative = true;
if let Some(sp) = last_plus_span.or(colon_span) {
negative_bounds.push(sp.to(poly_span));
}
} else {
let poly_trait = PolyTraitRef::new(lifetime_defs, path, poly_span);
let modifier = if question.is_some() {
TraitBoundModifier::Maybe
} else {
TraitBoundModifier::None
};
bounds.push(GenericBound::Trait(poly_trait, modifier));
}
}
} else {
break
}
if !allow_plus || !self.eat_plus() {
break
} else {
last_plus_span = Some(self.prev_span);
}
}
if !negative_bounds.is_empty() || was_negative {
let plural = negative_bounds.len() > 1;
let last_span = negative_bounds.last().map(|sp| *sp);
let mut err = self.struct_span_err(
negative_bounds,
"negative trait bounds are not supported",
);
if let Some(sp) = last_span {
err.span_label(sp, "negative trait bounds are not supported");
}
if let Some(bound_list) = colon_span {
let bound_list = bound_list.to(self.prev_span);
let mut new_bound_list = String::new();
if !bounds.is_empty() {
let mut snippets = bounds.iter().map(|bound| bound.span())
.map(|span| self.span_to_snippet(span));
while let Some(Ok(snippet)) = snippets.next() {
new_bound_list.push_str(" + ");
new_bound_list.push_str(&snippet);
}
new_bound_list = new_bound_list.replacen(" +", ":", 1);
}
err.span_suggestion_hidden(
bound_list,
&format!("remove the trait bound{}", if plural { "s" } else { "" }),
new_bound_list,
Applicability::MachineApplicable,
);
}
err.emit();
}
return Ok(bounds);
}
crate fn parse_generic_bounds(&mut self,
colon_span: Option<Span>) -> PResult<'a, GenericBounds> {
self.parse_generic_bounds_common(true, colon_span)
}
/// Parses bounds of a lifetime parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`.
///
/// ```
/// BOUND = LT_BOUND (e.g., `'a`)
/// ```
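///
/// For example, `'a + 'b + 'static` is accepted here (illustrative only).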
fn parse_lt_param_bounds(&mut self) -> GenericBounds {
let mut lifetimes = Vec::new();
while self.check_lifetime() {
lifetimes.push(ast::GenericBound::Outlives(self.expect_lifetime()));
if !self.eat_plus() {
break
}
}
lifetimes
}
/// Matches `typaram = IDENT (`?` unbound)? optbounds ( EQ ty )?`.
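/// For example, `T: Clone + Default = String` matches this grammar (illustrative only).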
fn parse_ty_param(&mut self,
preceding_attrs: Vec<Attribute>)
-> PResult<'a, GenericParam> {
let ident = self.parse_ident()?;
// Parse optional colon and param bounds.
let bounds = if self.eat(&token::Colon) {
self.parse_generic_bounds(Some(self.prev_span))?
} else {
Vec::new()
};
let default = if self.eat(&token::Eq) {
Some(self.parse_ty()?)
} else {
None
};
Ok(GenericParam {
ident,
id: ast::DUMMY_NODE_ID,
attrs: preceding_attrs.into(),
bounds,
kind: GenericParamKind::Type {
default,
}
})
}
/// Parses the following grammar:
///
/// TraitItemAssocTy = Ident ["<"...">"] [":" [GenericBounds]] ["where" ...] ["=" Ty]
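///
/// For example (illustrative), `Item<'a>: Clone = ();` matches; as the grammar shows,
/// parsing starts at the identifier rather than at the `type` keyword, and the trailing
/// `;` is consumed here.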
fn parse_trait_item_assoc_ty(&mut self)
-> PResult<'a, (Ident, TraitItemKind, ast::Generics)> {
let ident = self.parse_ident()?;
let mut generics = self.parse_generics()?;
// Parse optional colon and param bounds.
let bounds = if self.eat(&token::Colon) {
self.parse_generic_bounds(None)?
} else {
Vec::new()
};
generics.where_clause = self.parse_where_clause()?;
let default = if self.eat(&token::Eq) {
Some(self.parse_ty()?)
} else {
None
};
self.expect(&token::Semi)?;
Ok((ident, TraitItemKind::Type(bounds, default), generics))
}
fn parse_const_param(&mut self, preceding_attrs: Vec<Attribute>) -> PResult<'a, GenericParam> {
self.expect_keyword(kw::Const)?;
let ident = self.parse_ident()?;
self.expect(&token::Colon)?;
let ty = self.parse_ty()?;
Ok(GenericParam {
ident,
id: ast::DUMMY_NODE_ID,
attrs: preceding_attrs.into(),
bounds: Vec::new(),
kind: GenericParamKind::Const {
ty,
}
})
}
/// Parses a (possibly empty) list of lifetime and type parameters, possibly including
/// a trailing comma and erroneous trailing attributes.
crate fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> {
let mut params = Vec::new();
loop {
let attrs = self.parse_outer_attributes()?;
if self.check_lifetime() {
let lifetime = self.expect_lifetime();
// Parse lifetime parameter.
let bounds = if self.eat(&token::Colon) {
self.parse_lt_param_bounds()
} else {
Vec::new()
};
params.push(ast::GenericParam {
ident: lifetime.ident,
id: lifetime.id,
attrs: attrs.into(),
bounds,
kind: ast::GenericParamKind::Lifetime,
});
} else if self.check_keyword(kw::Const) {
// Parse const parameter.
params.push(self.parse_const_param(attrs)?);
} else if self.check_ident() {
// Parse type parameter.
params.push(self.parse_ty_param(attrs)?);
} else {
// Check for trailing attributes and stop parsing.
if !attrs.is_empty() {
if !params.is_empty() {
self.struct_span_err(
attrs[0].span,
&format!("trailing attribute after generic parameter"),
)
.span_label(attrs[0].span, "attributes must go before parameters")
.emit();
} else {
self.struct_span_err(
attrs[0].span,
&format!("attribute without generic parameters"),
)
.span_label(
attrs[0].span,
"attributes are only permitted when preceding parameters",
)
.emit();
}
}
break
}
if !self.eat(&token::Comma) {
break
}
}
Ok(params)
}
/// Parses a set of optional generic type parameter declarations. Where
/// clauses are not parsed here, and must be added later via
/// `parse_where_clause()`.
///
/// matches generics = ( ) | ( < > ) | ( < typaramseq ( , )? > ) | ( < lifetimes ( , )? > )
/// | ( < lifetimes , typaramseq ( , )? > )
/// where typaramseq = ( typaram ) | ( typaram , typaramseq )
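/// For example, `<'a, T: Clone + 'a, const N: usize>` is accepted here (illustrative;
/// const parameters are parsed by `parse_generic_params` even though the grammar above
/// omits them).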
fn parse_generics(&mut self) -> PResult<'a, ast::Generics> {
let span_lo = self.token.span;
let (params, span) = if self.eat_lt() {
let params = self.parse_generic_params()?;
self.expect_gt()?;
(params, span_lo.to(self.prev_span))
} else {
(vec![], self.prev_span.between(self.token.span))
};
Ok(ast::Generics {
params,
where_clause: WhereClause {
predicates: Vec::new(),
span: DUMMY_SP,
},
span,
})
}
/// Parses generic args (within a path segment) with recovery for extra leading angle brackets.
/// For the purposes of understanding the parsing logic of generic arguments, this function
/// can be thought of being the same as just calling `self.parse_generic_args()` if the source
/// had the correct amount of leading angle brackets.
///
/// ```ignore (diagnostics)
/// bar::<<<<T as Foo>::Output>();
/// ^^ help: remove extra angle brackets
/// ```
fn parse_generic_args_with_leaning_angle_bracket_recovery(
&mut self,
style: PathStyle,
lo: Span,
) -> PResult<'a, (Vec<GenericArg>, Vec<AssocTyConstraint>)> {
// We need to detect whether there are extra leading left angle brackets and produce an
// appropriate error and suggestion. This cannot be implemented by looking ahead at
// upcoming tokens for a matching `>` character - if there are unmatched `<` tokens
// then there won't be matching `>` tokens to find.
//
// To explain how this detection works, consider the following example:
//
// ```ignore (diagnostics)
// bar::<<<<T as Foo>::Output>();
// ^^ help: remove extra angle brackets
// ```
//
// Parsing of the left angle brackets starts in this function. We start by parsing the
// `<` token (incrementing the counter of unmatched angle brackets on `Parser` via
// `eat_lt`):
//
// *Upcoming tokens:* `<<<<T as Foo>::Output>;`
// *Unmatched count:* 1
// *`parse_path_segment` calls deep:* 0
//
// This has the effect of recursing as this function is called if a `<` character
// is found within the expected generic arguments:
//
// *Upcoming tokens:* `<<<T as Foo>::Output>;`
// *Unmatched count:* 2
// *`parse_path_segment` calls deep:* 1
//
// Eventually we will have recursed until having consumed all of the `<` tokens and
// this will be reflected in the count:
//
// *Upcoming tokens:* `T as Foo>::Output>;`
// *Unmatched count:* 4
// `parse_path_segment` calls deep:* 3
//
// The parser will continue until reaching the first `>` - this will decrement the
// unmatched angle bracket count and return to the parent invocation of this function
// having succeeded in parsing:
//
// *Upcoming tokens:* `::Output>;`
// *Unmatched count:* 3
// *`parse_path_segment` calls deep:* 2
//
// This will continue until the next `>` character which will also return successfully
// to the parent invocation of this function and decrement the count:
//
// *Upcoming tokens:* `;`
// *Unmatched count:* 2
// *`parse_path_segment` calls deep:* 1
//
// At this point, this function will expect to find another matching `>` character but
// won't be able to and will return an error. This will continue all the way up the
// call stack until the first invocation:
//
// *Upcoming tokens:* `;`
// *Unmatched count:* 2
// *`parse_path_segment` calls deep:* 0
//
// In doing this, we have managed to work out how many unmatched leading left angle
// brackets there are, but we cannot recover as the unmatched angle brackets have
// already been consumed. To remedy this, we keep a snapshot of the parser state
// before we do the above. We can then inspect whether we ended up with a parsing error
// and unmatched left angle brackets and if so, restore the parser state before we
// consumed any `<` characters to emit an error and consume the erroneous tokens to
// recover by attempting to parse again.
//
// In practice, the recursion of this function is indirect and there will be other
// locations that consume some `<` characters - as long as we update the count when
// this happens, it isn't an issue.
let is_first_invocation = style == PathStyle::Expr;
// Take a snapshot before attempting to parse - we can restore this later.
let snapshot = if is_first_invocation {
Some(self.clone())
} else {
None
};
debug!("parse_generic_args_with_leading_angle_bracket_recovery: (snapshotting)");
match self.parse_generic_args() {
Ok(value) => Ok(value),
Err(ref mut e) if is_first_invocation && self.unmatched_angle_bracket_count > 0 => {
// Cancel error from being unable to find `>`. We know the error
// must have been this due to a non-zero unmatched angle bracket
// count.
e.cancel();
// Swap `self` with our backup of the parser state before attempting to parse
// generic arguments.
let snapshot = mem::replace(self, snapshot.unwrap());
debug!(
"parse_generic_args_with_leading_angle_bracket_recovery: (snapshot failure) \
snapshot.count={:?}",
snapshot.unmatched_angle_bracket_count,
);
// Eat the unmatched angle brackets.
for _ in 0..snapshot.unmatched_angle_bracket_count {
self.eat_lt();
}
// Make a span over ${unmatched angle bracket count} characters.
let span = lo.with_hi(
lo.lo() + BytePos(snapshot.unmatched_angle_bracket_count)
);
let plural = snapshot.unmatched_angle_bracket_count > 1;
self.diagnostic()
.struct_span_err(
span,
&format!(
"unmatched angle bracket{}",
if plural { "s" } else { "" }
),
)
.span_suggestion(
span,
&format!(
"remove extra angle bracket{}",
if plural { "s" } else { "" }
),
String::new(),
Applicability::MachineApplicable,
)
.emit();
// Try again without unmatched angle bracket characters.
self.parse_generic_args()
},
Err(e) => Err(e),
}
}
/// Parses (possibly empty) list of lifetime and type arguments and associated type bindings,
/// possibly including trailing comma.
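/// For example, arguments such as `'a, Vec<u8>, 42, Item = u32` can appear between `<`
/// and `>` here (illustrative only; a const argument may also be a block like `{ N + 1 }`).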
fn parse_generic_args(&mut self) -> PResult<'a, (Vec<GenericArg>, Vec<AssocTyConstraint>)> {
let mut args = Vec::new();
let mut constraints = Vec::new();
let mut misplaced_assoc_ty_constraints: Vec<Span> = Vec::new();
let mut assoc_ty_constraints: Vec<Span> = Vec::new();
let args_lo = self.token.span;
loop {
if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
// Parse lifetime argument.
args.push(GenericArg::Lifetime(self.expect_lifetime()));
misplaced_assoc_ty_constraints.append(&mut assoc_ty_constraints);
} else if self.check_ident() && self.look_ahead(1,
|t| t == &token::Eq || t == &token::Colon) {
// Parse associated type constraint.
let lo = self.token.span;
let ident = self.parse_ident()?;
let kind = if self.eat(&token::Eq) {
AssocTyConstraintKind::Equality {
ty: self.parse_ty()?,
}
} else if self.eat(&token::Colon) {
AssocTyConstraintKind::Bound {
bounds: self.parse_generic_bounds(Some(self.prev_span))?,
}
} else {
unreachable!();
};
let span = lo.to(self.prev_span);
constraints.push(AssocTyConstraint {
id: ast::DUMMY_NODE_ID,
ident,
kind,
span,
});
assoc_ty_constraints.push(span);
} else if self.check_const_arg() {
// Parse const argument.
let expr = if let token::OpenDelim(token::Brace) = self.token.kind {
self.parse_block_expr(
None, self.token.span, BlockCheckMode::Default, ThinVec::new()
)?
} else if self.token.is_ident() {
// FIXME(const_generics): to distinguish between idents for types and consts,
// we should introduce a GenericArg::Ident in the AST and distinguish when
// lowering to the HIR. For now, idents for const args are not permitted.
if self.token.is_keyword(kw::True) || self.token.is_keyword(kw::False) {
self.parse_literal_maybe_minus()?
} else {
return Err(
self.fatal("identifiers may currently not be used for const generics")
);
}
} else {
self.parse_literal_maybe_minus()?
};
let value = AnonConst {
id: ast::DUMMY_NODE_ID,
value: expr,
};
args.push(GenericArg::Const(value));
misplaced_assoc_ty_constraints.append(&mut assoc_ty_constraints);
} else if self.check_type() {
// Parse type argument.
args.push(GenericArg::Type(self.parse_ty()?));
misplaced_assoc_ty_constraints.append(&mut assoc_ty_constraints);
} else {
break
}
if !self.eat(&token::Comma) {
break
}
}
// FIXME: we would like to report this in ast_validation instead, but we currently do not
// preserve ordering of generic parameters with respect to associated type binding, so we
// lose that information after parsing.
if misplaced_assoc_ty_constraints.len() > 0 {
let mut err = self.struct_span_err(
args_lo.to(self.prev_span),
"associated type bindings must be declared after generic parameters",
);
for span in misplaced_assoc_ty_constraints {
err.span_label(
span,
"this associated type binding should be moved after the generic parameters",
);
}
err.emit();
}
Ok((args, constraints))
}
/// Parses an optional where-clause and places it in `generics`.
///
/// ```ignore (only-for-syntax-highlight)
/// where T : Trait<U, V> + 'b, 'a : 'b
/// ```
fn parse_where_clause(&mut self) -> PResult<'a, WhereClause> {
let mut where_clause = WhereClause {
predicates: Vec::new(),
span: self.prev_span.to(self.prev_span),
};
if !self.eat_keyword(kw::Where) {
return Ok(where_clause);
}
let lo = self.prev_span;
// We are considering adding generics to the `where` keyword as an alternative higher-rank
// parameter syntax (as in `where<'a>` or `where<T>`). To avoid that being a breaking
// change, we parse those generics now, but report an error.
if self.choose_generics_over_qpath() {
let generics = self.parse_generics()?;
self.struct_span_err(
generics.span,
"generic parameters on `where` clauses are reserved for future use",
)
.span_label(generics.span, "currently unsupported")
.emit();
}
loop {
let lo = self.token.span;
if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
let lifetime = self.expect_lifetime();
// Bounds starting with a colon are mandatory, but possibly empty.
self.expect(&token::Colon)?;
let bounds = self.parse_lt_param_bounds();
where_clause.predicates.push(ast::WherePredicate::RegionPredicate(
ast::WhereRegionPredicate {
span: lo.to(self.prev_span),
lifetime,
bounds,
}
));
} else if self.check_type() {
// Parse optional `for<'a, 'b>`.
// This `for` is parsed greedily and applies to the whole predicate,
// the bounded type can have its own `for` applying only to it.
// Examples:
// * `for<'a> Trait1<'a>: Trait2<'a /* ok */>`
// * `(for<'a> Trait1<'a>): Trait2<'a /* not ok */>`
// * `for<'a> for<'b> Trait1<'a, 'b>: Trait2<'a /* ok */, 'b /* not ok */>`
let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
// Parse type with mandatory colon and (possibly empty) bounds,
// or with mandatory equality sign and the second type.
let ty = self.parse_ty()?;
if self.eat(&token::Colon) {
let bounds = self.parse_generic_bounds(Some(self.prev_span))?;
where_clause.predicates.push(ast::WherePredicate::BoundPredicate(
ast::WhereBoundPredicate {
span: lo.to(self.prev_span),
bound_generic_params: lifetime_defs,
bounded_ty: ty,
bounds,
}
));
// FIXME: Decide what should be used here, `=` or `==`.
// FIXME: We are just dropping the binders in lifetime_defs on the floor here.
} else if self.eat(&token::Eq) || self.eat(&token::EqEq) {
let rhs_ty = self.parse_ty()?;
where_clause.predicates.push(ast::WherePredicate::EqPredicate(
ast::WhereEqPredicate {
span: lo.to(self.prev_span),
lhs_ty: ty,
rhs_ty,
id: ast::DUMMY_NODE_ID,
}
));
} else {
return self.unexpected();
}
} else {
break
}
if !self.eat(&token::Comma) {
break
}
}
where_clause.span = lo.to(self.prev_span);
Ok(where_clause)
}
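/// Parses a parenthesized, comma-separated list of function arguments. When `named_args`
/// is set, arguments are expected in `name: type` form; when `allow_c_variadic` is set, a
/// trailing `...` is accepted, but only as the last argument.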
fn parse_fn_args(&mut self, named_args: bool, allow_c_variadic: bool)
-> PResult<'a, (Vec<Arg> , bool)> {
let sp = self.token.span;
let mut c_variadic = false;
let (args, _): (Vec<Option<Arg>>, _) = self.parse_paren_comma_seq(|p| {
let do_not_enforce_named_arguments_for_c_variadic =
|token: &token::Token| -> bool {
if token == &token::DotDotDot {
false
} else {
named_args
}
};
match p.parse_arg_general(
false,
allow_c_variadic,
do_not_enforce_named_arguments_for_c_variadic
) {
Ok(arg) => {
if let TyKind::CVarArgs = arg.ty.node {
c_variadic = true;
if p.token != token::CloseDelim(token::Paren) {
let span = p.token.span;
p.span_err(span,
"`...` must be the last argument of a C-variadic function");
Ok(None)
} else {
Ok(Some(arg))
}
} else {
Ok(Some(arg))
}
},
Err(mut e) => {
e.emit();
let lo = p.prev_span;
// Skip every token until next possible arg or end.
p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(token::Paren)]);
// Create a placeholder argument for proper arg count (issue #34264).
let span = lo.to(p.prev_span);
Ok(Some(dummy_arg(Ident::new(kw::Invalid, span))))
}
}
})?;
let args: Vec<_> = args.into_iter().filter_map(|x| x).collect();
if c_variadic && args.is_empty() {
self.span_err(sp,
"C-variadic function must be declared with at least one named argument");
}
Ok((args, c_variadic))
}
/// Parses the argument list and result type of a function declaration.
fn parse_fn_decl(&mut self, allow_c_variadic: bool) -> PResult<'a, P<FnDecl>> {
let (args, c_variadic) = self.parse_fn_args(true, allow_c_variadic)?;
let ret_ty = self.parse_ret_ty(true)?;
Ok(P(FnDecl {
inputs: args,
output: ret_ty,
c_variadic,
}))
}
/// Returns the parsed optional `self` argument, if any.
///
/// See `parse_self_arg_with_attrs` to collect attributes.
fn parse_self_arg(&mut self) -> PResult<'a, Option<Arg>> {
let expect_ident = |this: &mut Self| match this.token.kind {
// Preserve hygienic context.
token::Ident(name, _) =>
{ let span = this.token.span; this.bump(); Ident::new(name, span) }
_ => unreachable!()
};
let isolated_self = |this: &mut Self, n| {
this.look_ahead(n, |t| t.is_keyword(kw::SelfLower)) &&
this.look_ahead(n + 1, |t| t != &token::ModSep)
};
// Parse optional `self` parameter of a method.
// Only a limited set of initial token sequences is considered to be `self` parameters; anything
// else is parsed as a normal function parameter list, so some lookahead is required.
let eself_lo = self.token.span;
let (eself, eself_ident, eself_hi) = match self.token.kind {
token::BinOp(token::And) => {
// `&self`
// `&mut self`
// `&'lt self`
// `&'lt mut self`
// `&not_self`
(if isolated_self(self, 1) {
self.bump();
SelfKind::Region(None, Mutability::Immutable)
} else if self.is_keyword_ahead(1, &[kw::Mut]) &&
isolated_self(self, 2) {
self.bump();
self.bump();
SelfKind::Region(None, Mutability::Mutable)
} else if self.look_ahead(1, |t| t.is_lifetime()) &&
isolated_self(self, 2) {
self.bump();
let lt = self.expect_lifetime();
SelfKind::Region(Some(lt), Mutability::Immutable)
} else if self.look_ahead(1, |t| t.is_lifetime()) &&
self.is_keyword_ahead(2, &[kw::Mut]) &&
isolated_self(self, 3) {
self.bump();
let lt = self.expect_lifetime();
self.bump();
SelfKind::Region(Some(lt), Mutability::Mutable)
} else {
return Ok(None);
}, expect_ident(self), self.prev_span)
}
token::BinOp(token::Star) => {
// `*self`
// `*const self`
// `*mut self`
// `*not_self`
// Emit special error for `self` cases.
let msg = "cannot pass `self` by raw pointer";
(if isolated_self(self, 1) {
self.bump();
self.struct_span_err(self.token.span, msg)
.span_label(self.token.span, msg)
.emit();
SelfKind::Value(Mutability::Immutable)
} else if self.look_ahead(1, |t| t.is_mutability()) &&
isolated_self(self, 2) {
self.bump();
self.bump();
self.struct_span_err(self.token.span, msg)
.span_label(self.token.span, msg)
.emit();
SelfKind::Value(Mutability::Immutable)
} else {
return Ok(None);
}, expect_ident(self), self.prev_span)
}
token::Ident(..) => {
if isolated_self(self, 0) {
// `self`
// `self: TYPE`
let eself_ident = expect_ident(self);
let eself_hi = self.prev_span;
(if self.eat(&token::Colon) {
let ty = self.parse_ty()?;
SelfKind::Explicit(ty, Mutability::Immutable)
} else {
SelfKind::Value(Mutability::Immutable)
}, eself_ident, eself_hi)
} else if self.token.is_keyword(kw::Mut) &&
isolated_self(self, 1) {
// `mut self`
// `mut self: TYPE`
self.bump();
let eself_ident = expect_ident(self);
let eself_hi = self.prev_span;
(if self.eat(&token::Colon) {
let ty = self.parse_ty()?;
SelfKind::Explicit(ty, Mutability::Mutable)
} else {
SelfKind::Value(Mutability::Mutable)
}, eself_ident, eself_hi)
} else {
return Ok(None);
}
}
_ => return Ok(None),
};
let eself = source_map::respan(eself_lo.to(eself_hi), eself);
Ok(Some(Arg::from_self(ThinVec::default(), eself, eself_ident)))
}
/// Returns the parsed optional `self` argument, if any, with its attributes applied.
fn parse_self_arg_with_attrs(&mut self) -> PResult<'a, Option<Arg>> {
let attrs = self.parse_arg_attributes()?;
let arg_opt = self.parse_self_arg()?;
Ok(arg_opt.map(|mut arg| {
arg.attrs = attrs.into();
arg
}))
}
/// Parses the parameter list and result type of a function that may have a `self` parameter.
fn parse_fn_decl_with_self<F>(&mut self, parse_arg_fn: F) -> PResult<'a, P<FnDecl>>
where F: FnMut(&mut Parser<'a>) -> PResult<'a, Arg>,
{
self.expect(&token::OpenDelim(token::Paren))?;
// Parse optional self argument.
let self_arg = self.parse_self_arg_with_attrs()?;
// Parse the rest of the function parameter list.
let sep = SeqSep::trailing_allowed(token::Comma);
let (mut fn_inputs, recovered) = if let Some(self_arg) = self_arg {
if self.check(&token::CloseDelim(token::Paren)) {
(vec![self_arg], false)
} else if self.eat(&token::Comma) {
let mut fn_inputs = vec![self_arg];
let (mut input, _, recovered) = self.parse_seq_to_before_end(
&token::CloseDelim(token::Paren), sep, parse_arg_fn)?;
fn_inputs.append(&mut input);
(fn_inputs, recovered)
} else {
match self.expect_one_of(&[], &[]) {
Err(err) => return Err(err),
Ok(recovered) => (vec![self_arg], recovered),
}
}
} else {
let (input, _, recovered) =
self.parse_seq_to_before_end(&token::CloseDelim(token::Paren), sep, parse_arg_fn)?;
(input, recovered)
};
if !recovered {
// Parse closing paren and return type.
self.expect(&token::CloseDelim(token::Paren))?;
}
// Replace duplicated recovered arguments with `_` pattern to avoid unnecessary errors.
self.deduplicate_recovered_arg_names(&mut fn_inputs);
Ok(P(FnDecl {
inputs: fn_inputs,
output: self.parse_ret_ty(true)?,
c_variadic: false
}))
}
/// Parses the `|arg, arg|` header of a closure.
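/// For example, `|x, y: u32|` or the empty form `||` (illustrative).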
fn parse_fn_block_decl(&mut self) -> PResult<'a, P<FnDecl>> {
let inputs_captures = {
if self.eat(&token::OrOr) {
Vec::new()
} else {
self.expect(&token::BinOp(token::Or))?;
let args = self.parse_seq_to_before_tokens(
&[&token::BinOp(token::Or), &token::OrOr],
SeqSep::trailing_allowed(token::Comma),
TokenExpectType::NoExpect,
|p| p.parse_fn_block_arg()
)?.0;
self.expect_or()?;
args
}
};
let output = self.parse_ret_ty(true)?;
Ok(P(FnDecl {
inputs: inputs_captures,
output,
c_variadic: false
}))
}
/// Parses the name and optional generic types of a function header.
fn parse_fn_header(&mut self) -> PResult<'a, (Ident, ast::Generics)> {
let id = self.parse_ident()?;
let generics = self.parse_generics()?;
Ok((id, generics))
}
fn mk_item(&self, span: Span, ident: Ident, node: ItemKind, vis: Visibility,
attrs: Vec<Attribute>) -> P<Item> {
P(Item {
ident,
attrs,
id: ast::DUMMY_NODE_ID,
node,
vis,
span,
tokens: None,
})
}
/// Parses an item-position function declaration.
fn parse_item_fn(&mut self,
unsafety: Unsafety,
asyncness: Spanned<IsAsync>,
constness: Spanned<Constness>,
abi: Abi)
-> PResult<'a, ItemInfo> {
let (ident, mut generics) = self.parse_fn_header()?;
let allow_c_variadic = abi == Abi::C && unsafety == Unsafety::Unsafe;
let decl = self.parse_fn_decl(allow_c_variadic)?;
generics.where_clause = self.parse_where_clause()?;
let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
let header = FnHeader { unsafety, asyncness, constness, abi };
Ok((ident, ItemKind::Fn(decl, header, generics, body), Some(inner_attrs)))
}
/// Returns `true` if we are looking at `const ID`
/// (returns `false` for things like `const fn`, etc.).
fn is_const_item(&self) -> bool {
self.token.is_keyword(kw::Const) &&
!self.is_keyword_ahead(1, &[kw::Fn, kw::Unsafe])
}
/// Parses all the "front matter" for a `fn` declaration, up to
/// and including the `fn` keyword:
///
/// - `const fn`
/// - `unsafe fn`
/// - `const unsafe fn`
/// - `extern fn`
/// - etc.
fn parse_fn_front_matter(&mut self)
-> PResult<'a, (
Spanned<Constness>,
Unsafety,
Spanned<IsAsync>,
Abi
)>
{
let is_const_fn = self.eat_keyword(kw::Const);
let const_span = self.prev_span;
let asyncness = self.parse_asyncness();
if let IsAsync::Async { .. } = asyncness {
self.ban_async_in_2015(self.prev_span);
}
let asyncness = respan(self.prev_span, asyncness);
let unsafety = self.parse_unsafety();
let (constness, unsafety, abi) = if is_const_fn {
(respan(const_span, Constness::Const), unsafety, Abi::Rust)
} else {
let abi = if self.eat_keyword(kw::Extern) {
self.parse_opt_abi()?.unwrap_or(Abi::C)
} else {
Abi::Rust
};
(respan(self.prev_span, Constness::NotConst), unsafety, abi)
};
if !self.eat_keyword(kw::Fn) {
// It is possible for `expect_one_of` to recover given the contents of
// `self.expected_tokens`; therefore, do not use `self.unexpected()`, which doesn't
// account for this.
if !self.expect_one_of(&[], &[])? { unreachable!() }
}
Ok((constness, unsafety, asyncness, abi))
}
/// Parses an impl item.
pub fn parse_impl_item(&mut self, at_end: &mut bool) -> PResult<'a, ImplItem> {
maybe_whole!(self, NtImplItem, |x| x);
let attrs = self.parse_outer_attributes()?;
let mut unclosed_delims = vec![];
let (mut item, tokens) = self.collect_tokens(|this| {
let item = this.parse_impl_item_(at_end, attrs);
unclosed_delims.append(&mut this.unclosed_delims);
item
})?;
self.unclosed_delims.append(&mut unclosed_delims);
// See `parse_item` for why this clause is here.
if !item.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) {
item.tokens = Some(tokens);
}
Ok(item)
}
fn parse_impl_item_(&mut self,
at_end: &mut bool,
mut attrs: Vec<Attribute>) -> PResult<'a, ImplItem> {
let lo = self.token.span;
let vis = self.parse_visibility(false)?;
let defaultness = self.parse_defaultness();
let (name, node, generics) = if let Some(type_) = self.eat_type() {
let (name, alias, generics) = type_?;
let kind = match alias {
AliasKind::Weak(typ) => ast::ImplItemKind::TyAlias(typ),
AliasKind::OpaqueTy(bounds) => ast::ImplItemKind::OpaqueTy(bounds),
};
(name, kind, generics)
} else if self.is_const_item() {
// This parses the grammar:
// ImplItemConst = "const" Ident ":" Ty "=" Expr ";"
self.expect_keyword(kw::Const)?;
let name = self.parse_ident()?;
self.expect(&token::Colon)?;
let typ = self.parse_ty()?;
self.expect(&token::Eq)?;
let expr = self.parse_expr()?;
self.expect(&token::Semi)?;
(name, ast::ImplItemKind::Const(typ, expr), ast::Generics::default())
} else {
let (name, inner_attrs, generics, node) = self.parse_impl_method(&vis, at_end)?;
attrs.extend(inner_attrs);
(name, node, generics)
};
Ok(ImplItem {
id: ast::DUMMY_NODE_ID,
span: lo.to(self.prev_span),
ident: name,
vis,
defaultness,
attrs,
generics,
node,
tokens: None,
})
}
fn complain_if_pub_macro(&self, vis: &VisibilityKind, sp: Span) {
match *vis {
VisibilityKind::Inherited => {}
_ => {
let mut err = if self.token.is_keyword(sym::macro_rules) {
let mut err = self.diagnostic()
.struct_span_err(sp, "can't qualify macro_rules invocation with `pub`");
err.span_suggestion(
sp,
"try exporting the macro",
"#[macro_export]".to_owned(),
Applicability::MaybeIncorrect // speculative
);
err
} else {
let mut err = self.diagnostic()
.struct_span_err(sp, "can't qualify macro invocation with `pub`");
err.help("try adjusting the macro to put `pub` inside the invocation");
err
};
err.emit();
}
}
}
fn missing_assoc_item_kind_err(&self, item_type: &str, prev_span: Span)
-> DiagnosticBuilder<'a>
{
let expected_kinds = if item_type == "extern" {
"missing `fn`, `type`, or `static`"
} else {
"missing `fn`, `type`, or `const`"
};
// Given this code `path(`, it seems like this is not
// setting the visibility of a macro invocation, but rather
// a mistyped method declaration.
// Create a diagnostic pointing out that `fn` is missing.
//
// x | pub path(&self) {
// | ^ missing `fn`, `type`, or `const`
// pub path(
// ^^ `sp` below will point to this
let sp = prev_span.between(self.prev_span);
let mut err = self.diagnostic().struct_span_err(
sp,
&format!("{} for {}-item declaration",
expected_kinds, item_type));
err.span_label(sp, expected_kinds);
err
}
/// Parses a method or a macro invocation in a trait impl.
fn parse_impl_method(&mut self, vis: &Visibility, at_end: &mut bool)
-> PResult<'a, (Ident, Vec<Attribute>, ast::Generics,
ast::ImplItemKind)> {
// code copied from parse_macro_use_or_failure... abstraction!
if let Some(mac) = self.parse_assoc_macro_invoc("impl", Some(vis), at_end)? {
// method macro
Ok((Ident::invalid(), vec![], ast::Generics::default(),
ast::ImplItemKind::Macro(mac)))
} else {
let (constness, unsafety, asyncness, abi) = self.parse_fn_front_matter()?;
let ident = self.parse_ident()?;
let mut generics = self.parse_generics()?;
let decl = self.parse_fn_decl_with_self(|p| {
p.parse_arg_general(true, false, |_| true)
})?;
generics.where_clause = self.parse_where_clause()?;
*at_end = true;
let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
let header = ast::FnHeader { abi, unsafety, constness, asyncness };
Ok((ident, inner_attrs, generics, ast::ImplItemKind::Method(
ast::MethodSig { header, decl },
body
)))
}
}
/// Parses `trait Foo { ... }` or `trait Foo = Bar;`.
fn parse_item_trait(&mut self, is_auto: IsAuto, unsafety: Unsafety) -> PResult<'a, ItemInfo> {
let ident = self.parse_ident()?;
let mut tps = self.parse_generics()?;
// Parse optional colon and supertrait bounds.
let bounds = if self.eat(&token::Colon) {
self.parse_generic_bounds(Some(self.prev_span))?
} else {
Vec::new()
};
if self.eat(&token::Eq) {
// it's a trait alias
let bounds = self.parse_generic_bounds(None)?;
tps.where_clause = self.parse_where_clause()?;
self.expect(&token::Semi)?;
if is_auto == IsAuto::Yes {
let msg = "trait aliases cannot be `auto`";
self.struct_span_err(self.prev_span, msg)
.span_label(self.prev_span, msg)
.emit();
}
if unsafety != Unsafety::Normal {
let msg = "trait aliases cannot be `unsafe`";
self.struct_span_err(self.prev_span, msg)
.span_label(self.prev_span, msg)
.emit();
}
Ok((ident, ItemKind::TraitAlias(tps, bounds), None))
} else {
// it's a normal trait
tps.where_clause = self.parse_where_clause()?;
self.expect(&token::OpenDelim(token::Brace))?;
let mut trait_items = vec![];
while !self.eat(&token::CloseDelim(token::Brace)) {
if let token::DocComment(_) = self.token.kind {
if self.look_ahead(1,
|tok| tok == &token::CloseDelim(token::Brace)) {
self.diagnostic().struct_span_err_with_code(
self.token.span,
"found a documentation comment that doesn't document anything",
DiagnosticId::Error("E0584".into()),
)
.help(
"doc comments must come before what they document, maybe a \
comment was intended with `//`?",
)
.emit();
self.bump();
continue;
}
}
let mut at_end = false;
match self.parse_trait_item(&mut at_end) {
Ok(item) => trait_items.push(item),
Err(mut e) => {
e.emit();
if !at_end {
self.recover_stmt_(SemiColonMode::Break, BlockMode::Break);
}
}
}
}
Ok((ident, ItemKind::Trait(is_auto, unsafety, tps, bounds, trait_items), None))
}
}
fn choose_generics_over_qpath(&self) -> bool {
// There's an ambiguity between generic parameters and qualified paths in impls.
// If we see `<` it may start both, so we have to inspect some following tokens.
// The following combinations can only start generics,
// but not qualified paths (with one exception):
// `<` `>` - empty generic parameters
// `<` `#` - generic parameters with attributes
// `<` (LIFETIME|IDENT) `>` - single generic parameter
// `<` (LIFETIME|IDENT) `,` - first generic parameter in a list
// `<` (LIFETIME|IDENT) `:` - generic parameter with bounds
// `<` (LIFETIME|IDENT) `=` - generic parameter with a default
// `<` const - generic const parameter
// The only truly ambiguous case is
// `<` IDENT `>` `::` IDENT ...
// we disambiguate it in favor of generics (`impl<T> ::absolute::Path<T> { ... }`)
// because this is what is almost always expected in practice; qualified paths in impls
// (`impl <Type>::AssocTy { ... }`) aren't even allowed by type checker at the moment.
self.token == token::Lt &&
(self.look_ahead(1, |t| t == &token::Pound || t == &token::Gt) ||
self.look_ahead(1, |t| t.is_lifetime() || t.is_ident()) &&
self.look_ahead(2, |t| t == &token::Gt || t == &token::Comma ||
t == &token::Colon || t == &token::Eq) ||
self.is_keyword_ahead(1, &[kw::Const]))
}
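/// Parses the body of an impl block: the opening brace, any inner attributes, and the
/// impl items, up to and including the closing brace.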
fn parse_impl_body(&mut self) -> PResult<'a, (Vec<ImplItem>, Vec<Attribute>)> {
self.expect(&token::OpenDelim(token::Brace))?;
let attrs = self.parse_inner_attributes()?;
let mut impl_items = Vec::new();
while !self.eat(&token::CloseDelim(token::Brace)) {
let mut at_end = false;
match self.parse_impl_item(&mut at_end) {
Ok(impl_item) => impl_items.push(impl_item),
Err(mut err) => {
err.emit();
if !at_end {
self.recover_stmt_(SemiColonMode::Break, BlockMode::Break);
}
}
}
}
Ok((impl_items, attrs))
}
/// Parses an implementation item, `impl` keyword is already parsed.
///
/// impl<'a, T> TYPE { /* impl items */ }
/// impl<'a, T> TRAIT for TYPE { /* impl items */ }
/// impl<'a, T> !TRAIT for TYPE { /* impl items */ }
///
/// We actually parse a slightly more relaxed grammar for better error reporting and recovery.
/// `impl` GENERICS `!`? TYPE `for`? (TYPE | `..`) (`where` PREDICATES)? `{` BODY `}`
/// `impl` GENERICS `!`? TYPE (`where` PREDICATES)? `{` BODY `}`
fn parse_item_impl(&mut self, unsafety: Unsafety, defaultness: Defaultness)
-> PResult<'a, ItemInfo> {
// First, parse generic parameters if necessary.
let mut generics = if self.choose_generics_over_qpath() {
self.parse_generics()?
} else {
ast::Generics::default()
};
// Disambiguate `impl !Trait for Type { ... }` and `impl ! { ... }` for the never type.
let polarity = if self.check(&token::Not) && self.look_ahead(1, |t| t.can_begin_type()) {
self.bump(); // `!`
ast::ImplPolarity::Negative
} else {
ast::ImplPolarity::Positive
};
// Parse both types and traits as a type, then reinterpret if necessary.
let err_path = |span| ast::Path::from_ident(Ident::new(kw::Invalid, span));
let ty_first = if self.token.is_keyword(kw::For) &&
self.look_ahead(1, |t| t != &token::Lt) {
let span = self.prev_span.between(self.token.span);
self.struct_span_err(span, "missing trait in a trait impl").emit();
P(Ty { node: TyKind::Path(None, err_path(span)), span, id: ast::DUMMY_NODE_ID })
} else {
self.parse_ty()?
};
// If `for` is missing we try to recover.
let has_for = self.eat_keyword(kw::For);
let missing_for_span = self.prev_span.between(self.token.span);
let ty_second = if self.token == token::DotDot {
// We need to report this error after `cfg` expansion for compatibility reasons
self.bump(); // `..`, do not add it to expected tokens
Some(DummyResult::raw_ty(self.prev_span, true))
} else if has_for || self.token.can_begin_type() {
Some(self.parse_ty()?)
} else {
None
};
generics.where_clause = self.parse_where_clause()?;
let (impl_items, attrs) = self.parse_impl_body()?;
let item_kind = match ty_second {
Some(ty_second) => {
// impl Trait for Type
if !has_for {
self.struct_span_err(missing_for_span, "missing `for` in a trait impl")
.span_suggestion_short(
missing_for_span,
"add `for` here",
" for ".to_string(),
Applicability::MachineApplicable,
).emit();
}
let ty_first = ty_first.into_inner();
let path = match ty_first.node {
// This notably includes paths passed through `ty` macro fragments (#46438).
TyKind::Path(None, path) => path,
_ => {
self.span_err(ty_first.span, "expected a trait, found type");
err_path(ty_first.span)
}
};
let trait_ref = TraitRef { path, ref_id: ty_first.id };
ItemKind::Impl(unsafety, polarity, defaultness,
generics, Some(trait_ref), ty_second, impl_items)
}
None => {
// impl Type
ItemKind::Impl(unsafety, polarity, defaultness,
generics, None, ty_first, impl_items)
}
};
Ok((Ident::invalid(), item_kind, Some(attrs)))
}
fn parse_late_bound_lifetime_defs(&mut self) -> PResult<'a, Vec<GenericParam>> {
if self.eat_keyword(kw::For) {
self.expect_lt()?;
let params = self.parse_generic_params()?;
self.expect_gt()?;
// We rely on AST validation to rule out invalid cases: There must not be type
// parameters, and the lifetime parameters must not have bounds.
Ok(params)
} else {
Ok(Vec::new())
}
}
/// Parses `struct Foo { ... }`.
fn parse_item_struct(&mut self) -> PResult<'a, ItemInfo> {
let class_name = self.parse_ident()?;
let mut generics = self.parse_generics()?;
// There is a special case worth noting here, as reported in issue #17904.
// If we are parsing a tuple struct it is the case that the where clause
// should follow the field list. Like so:
//
// struct Foo<T>(T) where T: Copy;
//
// If we are parsing a normal record-style struct it is the case
// that the where clause comes before the body, and after the generics.
// So if we look ahead and see a brace or a where-clause we begin
// parsing a record style struct.
//
// Otherwise if we look ahead and see a paren we parse a tuple-style
// struct.
let vdata = if self.token.is_keyword(kw::Where) {
generics.where_clause = self.parse_where_clause()?;
if self.eat(&token::Semi) {
// If we see a: `struct Foo<T> where T: Copy;` style decl.
VariantData::Unit(ast::DUMMY_NODE_ID)
} else {
// If we see: `struct Foo<T> where T: Copy { ... }`
let (fields, recovered) = self.parse_record_struct_body()?;
VariantData::Struct(fields, recovered)
}
// No `where` so: `struct Foo<T>;`
} else if self.eat(&token::Semi) {
VariantData::Unit(ast::DUMMY_NODE_ID)
// Record-style struct definition
} else if self.token == token::OpenDelim(token::Brace) {
let (fields, recovered) = self.parse_record_struct_body()?;
VariantData::Struct(fields, recovered)
// Tuple-style struct definition with optional where-clause.
} else if self.token == token::OpenDelim(token::Paren) {
let body = VariantData::Tuple(self.parse_tuple_struct_body()?, ast::DUMMY_NODE_ID);
generics.where_clause = self.parse_where_clause()?;
self.expect(&token::Semi)?;
body
} else {
let token_str = self.this_token_descr();
let mut err = self.fatal(&format!(
"expected `where`, `{{`, `(`, or `;` after struct name, found {}",
token_str
));
err.span_label(self.token.span, "expected `where`, `{`, `(`, or `;` after struct name");
return Err(err);
};
Ok((class_name, ItemKind::Struct(vdata, generics), None))
}
/// Parses `union Foo { ... }`.
fn parse_item_union(&mut self) -> PResult<'a, ItemInfo> {
let class_name = self.parse_ident()?;
let mut generics = self.parse_generics()?;
let vdata = if self.token.is_keyword(kw::Where) {
generics.where_clause = self.parse_where_clause()?;
let (fields, recovered) = self.parse_record_struct_body()?;
VariantData::Struct(fields, recovered)
} else if self.token == token::OpenDelim(token::Brace) {
let (fields, recovered) = self.parse_record_struct_body()?;
VariantData::Struct(fields, recovered)
} else {
let token_str = self.this_token_descr();
let mut err = self.fatal(&format!(
"expected `where` or `{{` after union name, found {}", token_str));
err.span_label(self.token.span, "expected `where` or `{` after union name");
return Err(err);
};
Ok((class_name, ItemKind::Union(vdata, generics), None))
}
fn parse_record_struct_body(
&mut self,
) -> PResult<'a, (Vec<StructField>, /* recovered */ bool)> {
let mut fields = Vec::new();
let mut recovered = false;
if self.eat(&token::OpenDelim(token::Brace)) {
while self.token != token::CloseDelim(token::Brace) {
let field = self.parse_struct_decl_field().map_err(|e| {
self.recover_stmt();
recovered = true;
e
});
match field {
Ok(field) => fields.push(field),
Err(mut err) => {
err.emit();
}
}
}
self.eat(&token::CloseDelim(token::Brace));
} else {
let token_str = self.this_token_descr();
let mut err = self.fatal(&format!(
"expected `where`, or `{{` after struct name, found {}", token_str));
err.span_label(self.token.span, "expected `where`, or `{` after struct name");
return Err(err);
}
Ok((fields, recovered))
}
fn parse_tuple_struct_body(&mut self) -> PResult<'a, Vec<StructField>> {
// This is the case where we find `struct Foo<T>(T) where T: Copy;`
// Unit-like structs are handled in the parse_item_struct function
self.parse_paren_comma_seq(|p| {
let attrs = p.parse_outer_attributes()?;
let lo = p.token.span;
let vis = p.parse_visibility(true)?;
let ty = p.parse_ty()?;
Ok(StructField {
span: lo.to(ty.span),
vis,
ident: None,
id: ast::DUMMY_NODE_ID,
ty,
attrs,
})
}).map(|(r, _)| r)
}
/// Parses a structure field declaration.
fn parse_single_struct_field(&mut self,
lo: Span,
vis: Visibility,
attrs: Vec<Attribute> )
-> PResult<'a, StructField> {
let mut seen_comma: bool = false;
let a_var = self.parse_name_and_ty(lo, vis, attrs)?;
if self.token == token::Comma {
seen_comma = true;
}
match self.token.kind {
token::Comma => {
self.bump();
}
token::CloseDelim(token::Brace) => {}
token::DocComment(_) => {
let previous_span = self.prev_span;
let mut err = self.span_fatal_err(self.token.span, Error::UselessDocComment);
self.bump(); // consume the doc comment
let comma_after_doc_seen = self.eat(&token::Comma);
// `seen_comma` is always false here, because we are inside a doc block;
// the explicit check is kept to make the code easier to read.
if !seen_comma && comma_after_doc_seen {
seen_comma = true;
}
if comma_after_doc_seen || self.token == token::CloseDelim(token::Brace) {
err.emit();
} else {
if !seen_comma {
let sp = self.sess.source_map().next_point(previous_span);
err.span_suggestion(
sp,
"missing comma here",
",".into(),
Applicability::MachineApplicable
);
}
return Err(err);
}
}
_ => {
let sp = self.sess.source_map().next_point(self.prev_span);
let mut err = self.struct_span_err(sp, &format!("expected `,`, or `}}`, found {}",
self.this_token_descr()));
if self.token.is_ident() {
// This is likely another field; emit the diagnostic and keep going
err.span_suggestion(
sp,
"try adding a comma",
",".into(),
Applicability::MachineApplicable,
);
err.emit();
} else {
return Err(err)
}
}
}
Ok(a_var)
}
/// Parses an element of a struct declaration.
fn parse_struct_decl_field(&mut self) -> PResult<'a, StructField> {
let attrs = self.parse_outer_attributes()?;
let lo = self.token.span;
let vis = self.parse_visibility(false)?;
self.parse_single_struct_field(lo, vis, attrs)
}
/// Parses `pub`, `pub(crate)` and `pub(in path)`, plus the shortcuts `crate` for `pub(crate)`,
/// `pub(self)` for `pub(in self)` and `pub(super)` for `pub(in super)`.
/// If the following element can't be a tuple (i.e., it's a function definition), then
/// it's not a tuple struct field, and the contents within the parentheses aren't valid,
/// so emit a proper diagnostic.
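/// For illustration, the visibility forms recognized here (a sketch, not an
/// exhaustive grammar):
///
/// ```ignore (only-for-syntax-highlight)
/// pub fn a() {}
/// pub(crate) fn b() {}
/// pub(super) fn c() {}
/// pub(self) fn d() {}
/// pub(in crate::some_module) fn e() {}
/// crate fn f() {}
/// ```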
pub fn parse_visibility(&mut self, can_take_tuple: bool) -> PResult<'a, Visibility> {
maybe_whole!(self, NtVis, |x| x);
self.expected_tokens.push(TokenType::Keyword(kw::Crate));
if self.is_crate_vis() {
self.bump(); // `crate`
return Ok(respan(self.prev_span, VisibilityKind::Crate(CrateSugar::JustCrate)));
}
if !self.eat_keyword(kw::Pub) {
// We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
// keyword to grab a span from for inherited visibility; an empty span at the
// beginning of the current token would seem to be the "Schelling span".
return Ok(respan(self.token.span.shrink_to_lo(), VisibilityKind::Inherited))
}
let lo = self.prev_span;
if self.check(&token::OpenDelim(token::Paren)) {
// We don't `self.bump()` the `(` yet because this might be a struct definition where
// `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
// Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
// by the following tokens.
if self.is_keyword_ahead(1, &[kw::Crate]) &&
self.look_ahead(2, |t| t != &token::ModSep) // account for `pub(crate::foo)`
{
// `pub(crate)`
self.bump(); // `(`
self.bump(); // `crate`
self.expect(&token::CloseDelim(token::Paren))?; // `)`
let vis = respan(
lo.to(self.prev_span),
VisibilityKind::Crate(CrateSugar::PubCrate),
);
return Ok(vis)
} else if self.is_keyword_ahead(1, &[kw::In]) {
// `pub(in path)`
self.bump(); // `(`
self.bump(); // `in`
let path = self.parse_path(PathStyle::Mod)?; // `path`
self.expect(&token::CloseDelim(token::Paren))?; // `)`
let vis = respan(lo.to(self.prev_span), VisibilityKind::Restricted {
path: P(path),
id: ast::DUMMY_NODE_ID,
});
return Ok(vis)
} else if self.look_ahead(2, |t| t == &token::CloseDelim(token::Paren)) &&
self.is_keyword_ahead(1, &[kw::Super, kw::SelfLower])
{
// `pub(self)` or `pub(super)`
self.bump(); // `(`
let path = self.parse_path(PathStyle::Mod)?; // `super`/`self`
self.expect(&token::CloseDelim(token::Paren))?; // `)`
let vis = respan(lo.to(self.prev_span), VisibilityKind::Restricted {
path: P(path),
id: ast::DUMMY_NODE_ID,
});
return Ok(vis)
} else if !can_take_tuple { // Provide this diagnostic if this is not a tuple struct
// `pub(something) fn ...` or `struct X { pub(something) y: Z }`
self.bump(); // `(`
let msg = "incorrect visibility restriction";
let suggestion = r##"some possible visibility restrictions are:
`pub(crate)`: visible only on the current crate
`pub(super)`: visible only in the current module's parent
`pub(in path::to::module)`: visible only on the specified path"##;
let path = self.parse_path(PathStyle::Mod)?;
let sp = path.span;
let help_msg = format!("make this visible only to module `{}` with `in`", path);
self.expect(&token::CloseDelim(token::Paren))?; // `)`
struct_span_err!(self.sess.span_diagnostic, sp, E0704, "{}", msg)
.help(suggestion)
.span_suggestion(
sp,
&help_msg,
format!("in {}", path),
Applicability::MachineApplicable,
)
.emit(); // emit diagnostic, but continue with public visibility
}
}
Ok(respan(lo, VisibilityKind::Public))
}
/// Parses defaultness (i.e., `default` or nothing).
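/// For illustration, `default` as it appears on specializable impl items
/// (a sketch; the surrounding `impl` is assumed):
///
/// ```ignore (only-for-syntax-highlight)
/// default impl<T> Trait for T {}
/// default fn method(&self) {}
/// default const N: usize = 0;
/// ```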
fn parse_defaultness(&mut self) -> Defaultness {
// `pub` is included for better error messages
if self.check_keyword(kw::Default) &&
self.is_keyword_ahead(1, &[
kw::Impl,
kw::Const,
kw::Fn,
kw::Unsafe,
kw::Extern,
kw::Type,
kw::Pub,
])
{
self.bump(); // `default`
Defaultness::Default
} else {
Defaultness::Final
}
}
/// Given a termination token, parses all of the items in a module.
fn parse_mod_items(&mut self, term: &TokenKind, inner_lo: Span) -> PResult<'a, Mod> {
let mut items = vec![];
while let Some(item) = self.parse_item()? {
items.push(item);
self.maybe_consume_incorrect_semicolon(&items);
}
if !self.eat(term) {
let token_str = self.this_token_descr();
if !self.maybe_consume_incorrect_semicolon(&items) {
let mut err = self.fatal(&format!("expected item, found {}", token_str));
err.span_label(self.token.span, "expected item");
return Err(err);
}
}
let hi = if self.token.span.is_dummy() {
inner_lo
} else {
self.prev_span
};
Ok(ast::Mod {
inner: inner_lo.to(hi),
items,
inline: true
})
}
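/// Parses the remainder of a `static` or `const` item once its keyword has
/// been eaten, e.g. (illustrative; the names are arbitrary):
///
/// ```ignore (only-for-syntax-highlight)
/// static MAX_LEN: usize = 4096;
/// const RETRIES: u32 = 3;
/// ```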
fn parse_item_const(&mut self, m: Option<Mutability>) -> PResult<'a, ItemInfo> {
let id = if m.is_none() { self.parse_ident_or_underscore() } else { self.parse_ident() }?;
self.expect(&token::Colon)?;
let ty = self.parse_ty()?;
self.expect(&token::Eq)?;
let e = self.parse_expr()?;
self.expect(&token::Semi)?;
let item = match m {
Some(m) => ItemKind::Static(ty, m, e),
None => ItemKind::Const(ty, e),
};
Ok((id, item, None))
}
/// Parses a `mod <foo> { ... }` or `mod <foo>;` item.
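/// For example (illustrative):
///
/// ```ignore (only-for-syntax-highlight)
/// mod foo;                 // loaded from `foo.rs` or `foo/mod.rs`
/// mod bar { fn baz() {} }  // parsed inline
/// ```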
fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> PResult<'a, ItemInfo> {
let (in_cfg, outer_attrs) = {
let mut strip_unconfigured = crate::config::StripUnconfigured {
sess: self.sess,
features: None, // don't perform gated feature checking
};
let mut outer_attrs = outer_attrs.to_owned();
strip_unconfigured.process_cfg_attrs(&mut outer_attrs);
(!self.cfg_mods || strip_unconfigured.in_cfg(&outer_attrs), outer_attrs)
};
let id_span = self.token.span;
let id = self.parse_ident()?;
if self.eat(&token::Semi) {
if in_cfg && self.recurse_into_file_modules {
// This mod is in an external file. Let's go get it!
let ModulePathSuccess { path, directory_ownership, warn } =
self.submod_path(id, &outer_attrs, id_span)?;
let (module, mut attrs) =
self.eval_src_mod(path, directory_ownership, id.to_string(), id_span)?;
// Record that we fetched the mod from an external file
if warn {
let attr = attr::mk_attr_outer(
attr::mk_word_item(Ident::with_empty_ctxt(sym::warn_directory_ownership)));
attr::mark_known(&attr);
attrs.push(attr);
}
Ok((id, ItemKind::Mod(module), Some(attrs)))
} else {
let placeholder = ast::Mod {
inner: DUMMY_SP,
items: Vec::new(),
inline: false
};
Ok((id, ItemKind::Mod(placeholder), None))
}
} else {
let old_directory = self.directory.clone();
self.push_directory(id, &outer_attrs);
self.expect(&token::OpenDelim(token::Brace))?;
let mod_inner_lo = self.token.span;
let attrs = self.parse_inner_attributes()?;
let module = self.parse_mod_items(&token::CloseDelim(token::Brace), mod_inner_lo)?;
self.directory = old_directory;
Ok((id, ItemKind::Mod(module), Some(attrs)))
}
}
fn push_directory(&mut self, id: Ident, attrs: &[Attribute]) {
if let Some(path) = attr::first_attr_value_str_by_name(attrs, sym::path) {
self.directory.path.to_mut().push(&path.as_str());
self.directory.ownership = DirectoryOwnership::Owned { relative: None };
} else {
// We have to push on the current module name in the case of relative
// paths in order to ensure that any additional module paths from inline
// `mod x { ... }` come after the relative extension.
//
// For example, a `mod z { ... }` inside `x/y.rs` should set the current
// directory path to `/x/y/z`, not `/x/z` with a relative offset of `y`.
if let DirectoryOwnership::Owned { relative } = &mut self.directory.ownership {
if let Some(ident) = relative.take() { // remove the relative offset
self.directory.path.to_mut().push(ident.as_str());
}
}
self.directory.path.to_mut().push(&id.as_str());
}
}
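/// Derives a submodule path from a `#[path = "..."]` attribute, if one is
/// present, e.g. (illustrative; the file name is arbitrary):
///
/// ```ignore (only-for-syntax-highlight)
/// #[path = "generated/bindings.rs"]
/// mod bindings;
/// ```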
pub fn submod_path_from_attr(attrs: &[Attribute], dir_path: &Path) -> Option<PathBuf> {
if let Some(s) = attr::first_attr_value_str_by_name(attrs, sym::path) {
let s = s.as_str();
// On windows, the base path might have the form
// `\\?\foo\bar` in which case it does not tolerate
// mixed `/` and `\` separators, so canonicalize
// `/` to `\`.
#[cfg(windows)]
let s = s.replace("/", "\\");
Some(dir_path.join(s))
} else {
None
}
}
/// Returns a path to a module.
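/// For a `mod bar;` declared inside `foo.rs`, the candidate files checked
/// are, roughly (a sketch of the resolution, not the exact algorithm):
///
/// ```ignore (only-for-syntax-highlight)
/// foo/bar.rs      // the "default" path
/// foo/bar/mod.rs  // the "secondary" path
/// ```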
pub fn default_submod_path(
id: ast::Ident,
relative: Option<ast::Ident>,
dir_path: &Path,
source_map: &SourceMap) -> ModulePath
{
// If we're in a foo.rs file instead of a mod.rs file,
// we need to look for submodules in
// `./foo/<id>.rs` and `./foo/<id>/mod.rs` rather than
// `./<id>.rs` and `./<id>/mod.rs`.
let relative_prefix_string;
let relative_prefix = if let Some(ident) = relative {
relative_prefix_string = format!("{}{}", ident.as_str(), path::MAIN_SEPARATOR);
&relative_prefix_string
} else {
""
};
let mod_name = id.to_string();
let default_path_str = format!("{}{}.rs", relative_prefix, mod_name);
let secondary_path_str = format!("{}{}{}mod.rs",
relative_prefix, mod_name, path::MAIN_SEPARATOR);
let default_path = dir_path.join(&default_path_str);
let secondary_path = dir_path.join(&secondary_path_str);
let default_exists = source_map.file_exists(&default_path);
let secondary_exists = source_map.file_exists(&secondary_path);
let result = match (default_exists, secondary_exists) {
(true, false) => Ok(ModulePathSuccess {
path: default_path,
directory_ownership: DirectoryOwnership::Owned {
relative: Some(id),
},
warn: false,
}),
(false, true) => Ok(ModulePathSuccess {
path: secondary_path,
directory_ownership: DirectoryOwnership::Owned {
relative: None,
},
warn: false,
}),
(false, false) => Err(Error::FileNotFoundForModule {
mod_name: mod_name.clone(),
default_path: default_path_str,
secondary_path: secondary_path_str,
dir_path: dir_path.display().to_string(),
}),
(true, true) => Err(Error::DuplicatePaths {
mod_name: mod_name.clone(),
default_path: default_path_str,
secondary_path: secondary_path_str,
}),
};
ModulePath {
name: mod_name,
path_exists: default_exists || secondary_exists,
result,
}
}
fn submod_path(&mut self,
id: ast::Ident,
outer_attrs: &[Attribute],
id_sp: Span)
-> PResult<'a, ModulePathSuccess> {
if let Some(path) = Parser::submod_path_from_attr(outer_attrs, &self.directory.path) {
return Ok(ModulePathSuccess {
directory_ownership: match path.file_name().and_then(|s| s.to_str()) {
// All `#[path]` files are treated as though they are a `mod.rs` file.
// This means that `mod foo;` declarations inside `#[path]`-included
// files are siblings.
//
// Note that this will produce weirdness when a file named `foo.rs` is
// `#[path]` included and contains a `mod foo;` declaration.
// If you encounter this, it's your own darn fault :P
Some(_) => DirectoryOwnership::Owned { relative: None },
_ => DirectoryOwnership::UnownedViaMod(true),
},
path,
warn: false,
});
}
let relative = match self.directory.ownership {
DirectoryOwnership::Owned { relative } => relative,
DirectoryOwnership::UnownedViaBlock |
DirectoryOwnership::UnownedViaMod(_) => None,
};
let paths = Parser::default_submod_path(
id, relative, &self.directory.path, self.sess.source_map());
match self.directory.ownership {
DirectoryOwnership::Owned { .. } => {
paths.result.map_err(|err| self.span_fatal_err(id_sp, err))
},
DirectoryOwnership::UnownedViaBlock => {
let msg =
"cannot declare a non-inline module inside a block \
unless it has a path attribute";
let mut err = self.diagnostic().struct_span_err(id_sp, msg);
if paths.path_exists {
let msg = format!("Maybe `use` the module `{}` instead of redeclaring it",
paths.name);
err.span_note(id_sp, &msg);
}
Err(err)
}
DirectoryOwnership::UnownedViaMod(warn) => {
if warn {
if let Ok(result) = paths.result {
return Ok(ModulePathSuccess { warn: true, ..result });
}
}
let mut err = self.diagnostic().struct_span_err(id_sp,
"cannot declare a new module at this location");
if !id_sp.is_dummy() {
let src_path = self.sess.source_map().span_to_filename(id_sp);
if let FileName::Real(src_path) = src_path {
if let Some(stem) = src_path.file_stem() {
let mut dest_path = src_path.clone();
dest_path.set_file_name(stem);
dest_path.push("mod.rs");
err.span_note(id_sp,
&format!("maybe move this module `{}` to its own \
directory via `{}`", src_path.display(),
dest_path.display()));
}
}
}
if paths.path_exists {
err.span_note(id_sp,
&format!("... or maybe `use` the module `{}` instead \
of possibly redeclaring it",
paths.name));
}
Err(err)
}
}
}
/// Reads a module from a source file.
fn eval_src_mod(
&mut self,
path: PathBuf,
directory_ownership: DirectoryOwnership,
name: String,
id_sp: Span,
) -> PResult<'a, (ast::Mod, Vec<Attribute>)> {
let mut included_mod_stack = self.sess.included_mod_stack.borrow_mut();
if let Some(i) = included_mod_stack.iter().position(|p| *p == path) {
let mut err = String::from("circular modules: ");
let len = included_mod_stack.len();
for p in &included_mod_stack[i.. len] {
err.push_str(&p.to_string_lossy());
err.push_str(" -> ");
}
err.push_str(&path.to_string_lossy());
return Err(self.span_fatal(id_sp, &err[..]));
}
included_mod_stack.push(path.clone());
drop(included_mod_stack);
let mut p0 =
new_sub_parser_from_file(self.sess, &path, directory_ownership, Some(name), id_sp);
p0.cfg_mods = self.cfg_mods;
let mod_inner_lo = p0.token.span;
let mod_attrs = p0.parse_inner_attributes()?;
let mut m0 = p0.parse_mod_items(&token::Eof, mod_inner_lo)?;
m0.inline = false;
self.sess.included_mod_stack.borrow_mut().pop();
Ok((m0, mod_attrs))
}
/// Parses a function declaration from a foreign module.
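/// For example, the declaration inside this block (illustrative; the
/// signature is arbitrary):
///
/// ```ignore (only-for-syntax-highlight)
/// extern "C" {
///     fn write(fd: i32, buf: *const u8, count: usize) -> isize;
/// }
/// ```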
fn parse_item_foreign_fn(
&mut self,
vis: ast::Visibility,
lo: Span,
attrs: Vec<Attribute>,
extern_sp: Span,
) -> PResult<'a, ForeignItem> {
self.expect_keyword(kw::Fn)?;
let (ident, mut generics) = self.parse_fn_header()?;
let decl = self.parse_fn_decl(true)?;
generics.where_clause = self.parse_where_clause()?;
let hi = self.token.span;
self.parse_semi_or_incorrect_foreign_fn_body(&ident, extern_sp)?;
Ok(ast::ForeignItem {
ident,
attrs,
node: ForeignItemKind::Fn(decl, generics),
id: ast::DUMMY_NODE_ID,
span: lo.to(hi),
vis,
})
}
/// Parses a static item from a foreign module.
/// Assumes that the `static` keyword is already parsed.
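/// For example (illustrative):
///
/// ```ignore (only-for-syntax-highlight)
/// extern "C" {
///     static ERRNO: i32;
///     static mut COUNTER: u64;
/// }
/// ```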
fn parse_item_foreign_static(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
-> PResult<'a, ForeignItem> {
let mutbl = self.parse_mutability();
let ident = self.parse_ident()?;
self.expect(&token::Colon)?;
let ty = self.parse_ty()?;
let hi = self.token.span;
self.expect(&token::Semi)?;
Ok(ForeignItem {
ident,
attrs,
node: ForeignItemKind::Static(ty, mutbl),
id: ast::DUMMY_NODE_ID,
span: lo.to(hi),
vis,
})
}
/// Parses a type from a foreign module.
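/// For example (illustrative):
///
/// ```ignore (only-for-syntax-highlight)
/// extern "C" {
///     type Opaque;
/// }
/// ```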
fn parse_item_foreign_type(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
-> PResult<'a, ForeignItem> {
self.expect_keyword(kw::Type)?;
let ident = self.parse_ident()?;
let hi = self.token.span;
self.expect(&token::Semi)?;
Ok(ast::ForeignItem {
ident,
attrs,
node: ForeignItemKind::Ty,
id: ast::DUMMY_NODE_ID,
span: lo.to(hi),
vis
})
}
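/// Parses an `extern crate` name, recovering when it was written with
/// dashes, e.g. (illustrative; the crate name is arbitrary):
///
/// ```ignore (only-for-syntax-highlight)
/// extern crate some-crate-name; // recovered and suggested as `some_crate_name`
/// ```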
fn parse_crate_name_with_dashes(&mut self) -> PResult<'a, ast::Ident> {
let error_msg = "crate names using dashes are not valid in `extern crate` statements";
let suggestion_msg = "if the original crate name uses dashes, you need to use underscores \
in the code";
let mut ident = if self.token.is_keyword(kw::SelfLower) {
self.parse_path_segment_ident()
} else {
self.parse_ident()
}?;
let mut idents = vec![];
let mut replacement = vec![];
let mut fixed_crate_name = false;
// Accept `extern crate name-like-this` for better diagnostics
let dash = token::BinOp(token::BinOpToken::Minus);
if self.token == dash { // Do not include `-` as part of the expected tokens list
while self.eat(&dash) {
fixed_crate_name = true;
replacement.push((self.prev_span, "_".to_string()));
idents.push(self.parse_ident()?);
}
}
if fixed_crate_name {
let fixed_name_sp = ident.span.to(idents.last().unwrap().span);
let mut fixed_name = format!("{}", ident.name);
for part in idents {
fixed_name.push_str(&format!("_{}", part.name));
}
ident = Ident::from_str(&fixed_name).with_span_pos(fixed_name_sp);
self.struct_span_err(fixed_name_sp, error_msg)
.span_label(fixed_name_sp, "dash-separated idents are not valid")
.multipart_suggestion(suggestion_msg, replacement, Applicability::MachineApplicable)
.emit();
}
Ok(ident)
}
/// Parses `extern crate` links.
///
/// # Examples
///
/// ```
/// extern crate foo;
/// extern crate bar as foo;
/// ```
fn parse_item_extern_crate(&mut self,
lo: Span,
visibility: Visibility,
attrs: Vec<Attribute>)
-> PResult<'a, P<Item>> {
// Accept `extern crate name-like-this` for better diagnostics
let orig_name = self.parse_crate_name_with_dashes()?;
let (item_name, orig_name) = if let Some(rename) = self.parse_rename()? {
(rename, Some(orig_name.name))
} else {
(orig_name, None)
};
self.expect(&token::Semi)?;
let span = lo.to(self.prev_span);
Ok(self.mk_item(span, item_name, ItemKind::ExternCrate(orig_name), visibility, attrs))
}
/// Parses `extern` for foreign ABI modules.
///
/// `extern` is expected to have been
/// consumed before calling this method.
///
/// # Examples
///
/// ```ignore (only-for-syntax-highlight)
/// extern "C" {}
/// extern {}
/// ```
fn parse_item_foreign_mod(
&mut self,
lo: Span,
opt_abi: Option<Abi>,
visibility: Visibility,
mut attrs: Vec<Attribute>,
extern_sp: Span,
) -> PResult<'a, P<Item>> {
self.expect(&token::OpenDelim(token::Brace))?;
let abi = opt_abi.unwrap_or(Abi::C);
attrs.extend(self.parse_inner_attributes()?);
let mut foreign_items = vec![];
while !self.eat(&token::CloseDelim(token::Brace)) {
foreign_items.push(self.parse_foreign_item(extern_sp)?);
}
let prev_span = self.prev_span;
let m = ast::ForeignMod {
abi,
items: foreign_items
};
let invalid = Ident::invalid();
Ok(self.mk_item(lo.to(prev_span), invalid, ItemKind::ForeignMod(m), visibility, attrs))
}
/// Parses `type Foo = Bar;` or returns `None`
/// without modifying the parser state.
fn eat_type(&mut self) -> Option<PResult<'a, (Ident, AliasKind, ast::Generics)>> {
// This parses the grammar:
// Ident ["<"...">"] ["where" ...] ("=" | ":") Ty ";"
if self.eat_keyword(kw::Type) {
Some(self.parse_type_alias())
} else {
None
}
}
/// Parses a type alias or opaque type.
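/// For example (illustrative):
///
/// ```ignore (only-for-syntax-highlight)
/// type Bytes = Vec<u8>;                      // weak alias
/// type Counter = impl Iterator<Item = u32>;  // opaque type
/// ```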
fn parse_type_alias(&mut self) -> PResult<'a, (Ident, AliasKind, ast::Generics)> {
let ident = self.parse_ident()?;
let mut tps = self.parse_generics()?;
tps.where_clause = self.parse_where_clause()?;
self.expect(&token::Eq)?;
let alias = if self.check_keyword(kw::Impl) {
self.bump();
let bounds = self.parse_generic_bounds(Some(self.prev_span))?;
AliasKind::OpaqueTy(bounds)
} else {
let ty = self.parse_ty()?;
AliasKind::Weak(ty)
};
self.expect(&token::Semi)?;
Ok((ident, alias, tps))
}
/// Parses the part of an enum declaration following the `{`.
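/// For example, the body of (illustrative; the names are arbitrary):
///
/// ```ignore (only-for-syntax-highlight)
/// enum Shape {
///     Point = 0,
///     Circle { radius: f64 },
///     Segment(f64, f64),
/// }
/// ```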
fn parse_enum_def(&mut self, _generics: &ast::Generics) -> PResult<'a, EnumDef> {
let mut variants = Vec::new();
while self.token != token::CloseDelim(token::Brace) {
let variant_attrs = self.parse_outer_attributes()?;
let vlo = self.token.span;
self.eat_bad_pub();
let ident = self.parse_ident()?;
let struct_def = if self.check(&token::OpenDelim(token::Brace)) {
// Parse a struct variant.
let (fields, recovered) = self.parse_record_struct_body()?;
VariantData::Struct(fields, recovered)
} else if self.check(&token::OpenDelim(token::Paren)) {
VariantData::Tuple(
self.parse_tuple_struct_body()?,
ast::DUMMY_NODE_ID,
)
} else {
VariantData::Unit(ast::DUMMY_NODE_ID)
};
let disr_expr = if self.eat(&token::Eq) {
Some(AnonConst {
id: ast::DUMMY_NODE_ID,
value: self.parse_expr()?,
})
} else {
None
};
let vr = ast::Variant_ {
ident,
id: ast::DUMMY_NODE_ID,
attrs: variant_attrs,
data: struct_def,
disr_expr,
};
variants.push(respan(vlo.to(self.prev_span), vr));
if !self.eat(&token::Comma) {
if self.token.is_ident() && !self.token.is_reserved_ident() {
let sp = self.sess.source_map().next_point(self.prev_span);
self.struct_span_err(sp, "missing comma")
.span_suggestion_short(
sp,
"missing comma",
",".to_owned(),
Applicability::MaybeIncorrect,
)
.emit();
} else {
break;
}
}
}
self.expect(&token::CloseDelim(token::Brace))?;
Ok(ast::EnumDef { variants })
}
/// Parses an enum declaration.
fn parse_item_enum(&mut self) -> PResult<'a, ItemInfo> {
let id = self.parse_ident()?;
let mut generics = self.parse_generics()?;
generics.where_clause = self.parse_where_clause()?;
self.expect(&token::OpenDelim(token::Brace))?;
let enum_definition = self.parse_enum_def(&generics).map_err(|e| {
self.recover_stmt();
self.eat(&token::CloseDelim(token::Brace));
e
})?;
Ok((id, ItemKind::Enum(enum_definition, generics), None))
}
/// Parses a string as an ABI spec on an extern type or module. Consumes
/// the `extern` keyword, if one is found.
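/// For example, the ABI strings in (illustrative):
///
/// ```ignore (only-for-syntax-highlight)
/// extern "C" fn callback() {}
/// extern "system" {}
/// extern {} // no string: callers fall back to `"C"`
/// ```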
fn parse_opt_abi(&mut self) -> PResult<'a, Option<Abi>> {
match self.token.kind {
token::Literal(token::Lit { kind: token::Str, symbol, suffix }) |
token::Literal(token::Lit { kind: token::StrRaw(..), symbol, suffix }) => {
let sp = self.token.span;
self.expect_no_suffix(sp, "an ABI spec", suffix);
self.bump();
match abi::lookup(&symbol.as_str()) {
Some(abi) => Ok(Some(abi)),
None => {
let prev_span = self.prev_span;
struct_span_err!(
self.sess.span_diagnostic,
prev_span,
E0703,
"invalid ABI: found `{}`",
symbol
)
.span_label(prev_span, "invalid ABI")
.help(&format!("valid ABIs: {}", abi::all_names().join(", ")))
.emit();
Ok(None)
}
}
}
_ => Ok(None),
}
}
fn is_static_global(&mut self) -> bool {
if self.check_keyword(kw::Static) {
// Check if this could be a closure
!self.look_ahead(1, |token| {
if token.is_keyword(kw::Move) {
return true;
}
match token.kind {
token::BinOp(token::Or) | token::OrOr => true,
_ => false,
}
})
} else {
false
}
}
fn parse_item_(
&mut self,
attrs: Vec<Attribute>,
macros_allowed: bool,
attributes_allowed: bool,
) -> PResult<'a, Option<P<Item>>> {
let mut unclosed_delims = vec![];
let (ret, tokens) = self.collect_tokens(|this| {
let item = this.parse_item_implementation(attrs, macros_allowed, attributes_allowed);
unclosed_delims.append(&mut this.unclosed_delims);
item
})?;
self.unclosed_delims.append(&mut unclosed_delims);
// Once we've parsed an item and recorded the tokens we got while
// parsing we may want to store `tokens` into the item we're about to
// return. Note, though, that we specifically didn't capture tokens
// related to outer attributes. The `tokens` field here may later be
// used with procedural macros to convert this item back into a token
// stream, but during expansion we may be removing attributes as we go
// along.
//
// If we've got inner attributes then the `tokens` we've got above holds
// these inner attributes. If an inner attribute is expanded we won't
// actually remove it from the token stream, so we'll just keep yielding
// it (bad!). To work around this case for now we just avoid recording
// `tokens` if we detect any inner attributes. This should help keep
// expansion correct, but we should fix this bug one day!
Ok(ret.map(|item| {
item.map(|mut i| {
if !i.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) {
i.tokens = Some(tokens);
}
i
})
}))
}
/// Parses one of the items allowed by the flags.
fn parse_item_implementation(
&mut self,
attrs: Vec<Attribute>,
macros_allowed: bool,
attributes_allowed: bool,
) -> PResult<'a, Option<P<Item>>> {
maybe_whole!(self, NtItem, |item| {
let mut item = item.into_inner();
let mut attrs = attrs;
mem::swap(&mut item.attrs, &mut attrs);
item.attrs.extend(attrs);
Some(P(item))
});
let lo = self.token.span;
let visibility = self.parse_visibility(false)?;
if self.eat_keyword(kw::Use) {
// USE ITEM
let item_ = ItemKind::Use(P(self.parse_use_tree()?));
self.expect(&token::Semi)?;
let span = lo.to(self.prev_span);
let item =
self.mk_item(span, Ident::invalid(), item_, visibility, attrs);
return Ok(Some(item));
}
if self.eat_keyword(kw::Extern) {
let extern_sp = self.prev_span;
if self.eat_keyword(kw::Crate) {
return Ok(Some(self.parse_item_extern_crate(lo, visibility, attrs)?));
}
let opt_abi = self.parse_opt_abi()?;
if self.eat_keyword(kw::Fn) {
// EXTERN FUNCTION ITEM
let fn_span = self.prev_span;
let abi = opt_abi.unwrap_or(Abi::C);
let (ident, item_, extra_attrs) =
self.parse_item_fn(Unsafety::Normal,
respan(fn_span, IsAsync::NotAsync),
respan(fn_span, Constness::NotConst),
abi)?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
} else if self.check(&token::OpenDelim(token::Brace)) {
return Ok(Some(
self.parse_item_foreign_mod(lo, opt_abi, visibility, attrs, extern_sp)?,
));
}
self.unexpected()?;
}
if self.is_static_global() {
self.bump();
// STATIC ITEM
let m = if self.eat_keyword(kw::Mut) {
Mutability::Mutable
} else {
Mutability::Immutable
};
let (ident, item_, extra_attrs) = self.parse_item_const(Some(m))?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if self.eat_keyword(kw::Const) {
let const_span = self.prev_span;
if self.check_keyword(kw::Fn)
|| (self.check_keyword(kw::Unsafe)
&& self.is_keyword_ahead(1, &[kw::Fn])) {
// CONST FUNCTION ITEM
let unsafety = self.parse_unsafety();
self.bump();
let (ident, item_, extra_attrs) =
self.parse_item_fn(unsafety,
respan(const_span, IsAsync::NotAsync),
respan(const_span, Constness::Const),
Abi::Rust)?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
// CONST ITEM
if self.eat_keyword(kw::Mut) {
let prev_span = self.prev_span;
self.struct_span_err(prev_span, "const globals cannot be mutable")
.span_label(prev_span, "cannot be mutable")
.span_suggestion(
const_span,
"you might want to declare a static instead",
"static".to_owned(),
Applicability::MaybeIncorrect,
)
.emit();
}
let (ident, item_, extra_attrs) = self.parse_item_const(None)?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
// Parse `async unsafe? fn`.
if self.check_keyword(kw::Async) {
let async_span = self.token.span;
if self.is_keyword_ahead(1, &[kw::Fn])
|| self.is_keyword_ahead(2, &[kw::Fn])
{
// ASYNC FUNCTION ITEM
self.bump(); // `async`
let unsafety = self.parse_unsafety(); // `unsafe`?
self.expect_keyword(kw::Fn)?; // `fn`
let fn_span = self.prev_span;
let (ident, item_, extra_attrs) =
self.parse_item_fn(unsafety,
respan(async_span, IsAsync::Async {
closure_id: ast::DUMMY_NODE_ID,
return_impl_trait_id: ast::DUMMY_NODE_ID,
}),
respan(fn_span, Constness::NotConst),
Abi::Rust)?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
self.ban_async_in_2015(async_span);
return Ok(Some(item));
}
}
if self.check_keyword(kw::Unsafe) &&
self.is_keyword_ahead(1, &[kw::Trait, kw::Auto])
{
// UNSAFE TRAIT ITEM
self.bump(); // `unsafe`
let is_auto = if self.eat_keyword(kw::Trait) {
IsAuto::No
} else {
self.expect_keyword(kw::Auto)?;
self.expect_keyword(kw::Trait)?;
IsAuto::Yes
};
let (ident, item_, extra_attrs) =
self.parse_item_trait(is_auto, Unsafety::Unsafe)?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if self.check_keyword(kw::Impl) ||
self.check_keyword(kw::Unsafe) &&
self.is_keyword_ahead(1, &[kw::Impl]) ||
self.check_keyword(kw::Default) &&
self.is_keyword_ahead(1, &[kw::Impl, kw::Unsafe]) {
// IMPL ITEM
let defaultness = self.parse_defaultness();
let unsafety = self.parse_unsafety();
self.expect_keyword(kw::Impl)?;
let (ident, item, extra_attrs) = self.parse_item_impl(unsafety, defaultness)?;
let span = lo.to(self.prev_span);
return Ok(Some(self.mk_item(span, ident, item, visibility,
maybe_append(attrs, extra_attrs))));
}
if self.check_keyword(kw::Fn) {
// FUNCTION ITEM
self.bump();
let fn_span = self.prev_span;
let (ident, item_, extra_attrs) =
self.parse_item_fn(Unsafety::Normal,
respan(fn_span, IsAsync::NotAsync),
respan(fn_span, Constness::NotConst),
Abi::Rust)?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if self.check_keyword(kw::Unsafe)
&& self.look_ahead(1, |t| *t != token::OpenDelim(token::Brace)) {
// UNSAFE FUNCTION ITEM
self.bump(); // `unsafe`
// `{` is also expected after `unsafe`, in case of error, include it in the diagnostic
self.check(&token::OpenDelim(token::Brace));
let abi = if self.eat_keyword(kw::Extern) {
self.parse_opt_abi()?.unwrap_or(Abi::C)
} else {
Abi::Rust
};
self.expect_keyword(kw::Fn)?;
let fn_span = self.prev_span;
let (ident, item_, extra_attrs) =
self.parse_item_fn(Unsafety::Unsafe,
respan(fn_span, IsAsync::NotAsync),
respan(fn_span, Constness::NotConst),
abi)?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if self.eat_keyword(kw::Mod) {
// MODULE ITEM
let (ident, item_, extra_attrs) =
self.parse_item_mod(&attrs[..])?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if let Some(type_) = self.eat_type() {
let (ident, alias, generics) = type_?;
// TYPE ITEM
let item_ = match alias {
AliasKind::Weak(ty) => ItemKind::TyAlias(ty, generics),
AliasKind::OpaqueTy(bounds) => ItemKind::OpaqueTy(bounds, generics),
};
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
attrs);
return Ok(Some(item));
}
if self.eat_keyword(kw::Enum) {
// ENUM ITEM
let (ident, item_, extra_attrs) = self.parse_item_enum()?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if self.check_keyword(kw::Trait)
|| (self.check_keyword(kw::Auto)
&& self.is_keyword_ahead(1, &[kw::Trait]))
{
let is_auto = if self.eat_keyword(kw::Trait) {
IsAuto::No
} else {
self.expect_keyword(kw::Auto)?;
self.expect_keyword(kw::Trait)?;
IsAuto::Yes
};
// TRAIT ITEM
let (ident, item_, extra_attrs) =
self.parse_item_trait(is_auto, Unsafety::Normal)?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if self.eat_keyword(kw::Struct) {
// STRUCT ITEM
let (ident, item_, extra_attrs) = self.parse_item_struct()?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if self.is_union_item() {
// UNION ITEM
self.bump();
let (ident, item_, extra_attrs) = self.parse_item_union()?;
let prev_span = self.prev_span;
let item = self.mk_item(lo.to(prev_span),
ident,
item_,
visibility,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
if let Some(macro_def) = self.eat_macro_def(&attrs, &visibility, lo)? {
return Ok(Some(macro_def));
}
// Verify whether we have encountered a struct or method definition where the user forgot to
// add the `struct` or `fn` keyword after writing `pub`: `pub S {}`
if visibility.node.is_pub() &&
self.check_ident() &&
self.look_ahead(1, |t| *t != token::Not)
{
// Space between `pub` keyword and the identifier
//
// pub S {}
// ^^^ `sp` points here
let sp = self.prev_span.between(self.token.span);
let full_sp = self.prev_span.to(self.token.span);
let ident_sp = self.token.span;
if self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) {
// possible public struct definition where `struct` was forgotten
let ident = self.parse_ident().unwrap();
let msg = format!("add `struct` here to parse `{}` as a public struct",
ident);
let mut err = self.diagnostic()
.struct_span_err(sp, "missing `struct` for struct definition");
err.span_suggestion_short(
sp, &msg, " struct ".into(), Applicability::MaybeIncorrect // speculative
);
return Err(err);
} else if self.look_ahead(1, |t| *t == token::OpenDelim(token::Paren)) {
let ident = self.parse_ident().unwrap();
self.bump(); // `(`
let kw_name = if let Ok(Some(_)) = self.parse_self_arg_with_attrs()
.map_err(|mut e| e.cancel())
{
"method"
} else {
"function"
};
self.consume_block(token::Paren);
let (kw, kw_name, ambiguous) = if self.check(&token::RArrow) {
self.eat_to_tokens(&[&token::OpenDelim(token::Brace)]);
self.bump(); // `{`
("fn", kw_name, false)
} else if self.check(&token::OpenDelim(token::Brace)) {
self.bump(); // `{`
("fn", kw_name, false)
} else if self.check(&token::Colon) {
let kw = "struct";
(kw, kw, false)
} else {
("fn` or `struct", "function or struct", true)
};
let msg = format!("missing `{}` for {} definition", kw, kw_name);
let mut err = self.diagnostic().struct_span_err(sp, &msg);
if !ambiguous {
self.consume_block(token::Brace);
let suggestion = format!("add `{}` here to parse `{}` as a public {}",
kw,
ident,
kw_name);
err.span_suggestion_short(
sp, &suggestion, format!(" {} ", kw), Applicability::MachineApplicable
);
} else {
if let Ok(snippet) = self.span_to_snippet(ident_sp) {
err.span_suggestion(
full_sp,
"if you meant to call a macro, try",
format!("{}!", snippet),
// this is the `ambiguous` conditional branch
Applicability::MaybeIncorrect
);
} else {
err.help("if you meant to call a macro, remove the `pub` \
and add a trailing `!` after the identifier");
}
}
return Err(err);
} else if self.look_ahead(1, |t| *t == token::Lt) {
let ident = self.parse_ident().unwrap();
self.eat_to_tokens(&[&token::Gt]);
self.bump(); // `>`
let (kw, kw_name, ambiguous) = if self.eat(&token::OpenDelim(token::Paren)) {
if let Ok(Some(_)) = self.parse_self_arg_with_attrs()
.map_err(|mut e| e.cancel())
{
("fn", "method", false)
} else {
("fn", "function", false)
}
} else if self.check(&token::OpenDelim(token::Brace)) {
("struct", "struct", false)
} else {
("fn` or `struct", "function or struct", true)
};
let msg = format!("missing `{}` for {} definition", kw, kw_name);
let mut err = self.diagnostic().struct_span_err(sp, &msg);
if !ambiguous {
err.span_suggestion_short(
sp,
&format!("add `{}` here to parse `{}` as a public {}", kw, ident, kw_name),
format!(" {} ", kw),
Applicability::MachineApplicable,
);
}
return Err(err);
}
}
self.parse_macro_use_or_failure(attrs, macros_allowed, attributes_allowed, lo, visibility)
}
/// We are parsing `async fn`. If we are on Rust 2015, emit an error.
fn ban_async_in_2015(&self, async_span: Span) {
if async_span.rust_2015() {
self.diagnostic()
.struct_span_err_with_code(
async_span,
"`async fn` is not permitted in the 2015 edition",
DiagnosticId::Error("E0670".into())
)
.emit();
}
}
/// Parses a foreign item.
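/// For illustration, each item in this block is handled by one call (a
/// sketch; the names are arbitrary):
///
/// ```ignore (only-for-syntax-highlight)
/// extern "C" {
///     fn frobnicate(x: u32) -> u32;
///     static LEVEL: i32;
///     type Handle;
/// }
/// ```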
crate fn parse_foreign_item(&mut self, extern_sp: Span) -> PResult<'a, ForeignItem> {
maybe_whole!(self, NtForeignItem, |ni| ni);
let attrs = self.parse_outer_attributes()?;
let lo = self.token.span;
let visibility = self.parse_visibility(false)?;
// FOREIGN STATIC ITEM
// Treat `const` as `static` for error recovery, but don't add it to expected tokens.
if self.check_keyword(kw::Static) || self.token.is_keyword(kw::Const) {
if self.token.is_keyword(kw::Const) {
self.diagnostic()
.struct_span_err(self.token.span, "extern items cannot be `const`")
.span_suggestion(
self.token.span,
"try using a static value",
"static".to_owned(),
Applicability::MachineApplicable
).emit();
}
self.bump(); // `static` or `const`
return Ok(self.parse_item_foreign_static(visibility, lo, attrs)?);
}
// FOREIGN FUNCTION ITEM
if self.check_keyword(kw::Fn) {
return Ok(self.parse_item_foreign_fn(visibility, lo, attrs, extern_sp)?);
}
// FOREIGN TYPE ITEM
if self.check_keyword(kw::Type) {
return Ok(self.parse_item_foreign_type(visibility, lo, attrs)?);
}
match self.parse_assoc_macro_invoc("extern", Some(&visibility), &mut false)? {
Some(mac) => {
Ok(
ForeignItem {
ident: Ident::invalid(),
span: lo.to(self.prev_span),
id: ast::DUMMY_NODE_ID,
attrs,
vis: visibility,
node: ForeignItemKind::Macro(mac),
}
)
}
None => {
if !attrs.is_empty() {
self.expected_item_err(&attrs)?;
}
self.unexpected()
}
}
}
/// This is the fall-through for parsing items.
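    ///
    /// Item-position macro invocations are handled here; as a hedged sketch
    /// (`some_macro` is a placeholder name):
    ///
    /// ```
    /// some_macro! { /* items */ } // brace-delimited: no trailing `;` required
    /// some_macro!(tokens);        // paren/bracket-delimited: must be followed by `;`
    /// ```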
fn parse_macro_use_or_failure(
&mut self,
        attrs: Vec<Attribute>,
macros_allowed: bool,
attributes_allowed: bool,
lo: Span,
visibility: Visibility
) -> PResult<'a, Option<P<Item>>> {
if macros_allowed && self.token.is_path_start() &&
!(self.is_async_fn() && self.token.span.rust_2015()) {
// MACRO INVOCATION ITEM
let prev_span = self.prev_span;
self.complain_if_pub_macro(&visibility.node, prev_span);
let mac_lo = self.token.span;
// item macro.
let path = self.parse_path(PathStyle::Mod)?;
self.expect(&token::Not)?;
let (delim, tts) = self.expect_delimited_token_tree()?;
if delim != MacDelimiter::Brace && !self.eat(&token::Semi) {
self.report_invalid_macro_expansion_item();
}
let hi = self.prev_span;
let mac = respan(mac_lo.to(hi), Mac_ {
path,
tts,
delim,
prior_type_ascription: self.last_type_ascription,
});
let item =
self.mk_item(lo.to(hi), Ident::invalid(), ItemKind::Mac(mac), visibility, attrs);
return Ok(Some(item));
}
// FAILURE TO PARSE ITEM
match visibility.node {
VisibilityKind::Inherited => {}
_ => {
return Err(self.span_fatal(self.prev_span, "unmatched visibility `pub`"));
}
}
if !attributes_allowed && !attrs.is_empty() {
self.expected_item_err(&attrs)?;
}
Ok(None)
}
/// Parses a macro invocation inside a `trait`, `impl` or `extern` block.
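    ///
    /// As a hedged illustration (placeholder names):
    ///
    /// ```
    /// impl Foo {
    ///     some_macro!(args);      // non-brace delimiters require a trailing `;`
    ///     other_macro! { tokens } // brace-delimited: no `;` needed
    /// }
    /// ```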
fn parse_assoc_macro_invoc(&mut self, item_kind: &str, vis: Option<&Visibility>,
at_end: &mut bool) -> PResult<'a, Option<Mac>>
{
if self.token.is_path_start() &&
!(self.is_async_fn() && self.token.span.rust_2015()) {
let prev_span = self.prev_span;
let lo = self.token.span;
let path = self.parse_path(PathStyle::Mod)?;
if path.segments.len() == 1 {
if !self.eat(&token::Not) {
return Err(self.missing_assoc_item_kind_err(item_kind, prev_span));
}
} else {
self.expect(&token::Not)?;
}
if let Some(vis) = vis {
self.complain_if_pub_macro(&vis.node, prev_span);
}
*at_end = true;
// eat a matched-delimiter token tree:
let (delim, tts) = self.expect_delimited_token_tree()?;
if delim != MacDelimiter::Brace {
self.expect(&token::Semi)?;
}
Ok(Some(respan(lo.to(self.prev_span), Mac_ {
path,
tts,
delim,
prior_type_ascription: self.last_type_ascription,
})))
} else {
Ok(None)
}
}
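    /// Runs `f` and records every token it consumes, returning `f`'s result together
    /// with the collected `TokenStream`. A hedged usage sketch (`parse_some_item` is a
    /// placeholder, not a method that necessarily exists):
    ///
    /// ```
    /// let (item, tokens) = self.collect_tokens(|this| this.parse_some_item())?;
    /// // `tokens` now holds exactly the tokens consumed by the closure.
    /// ```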
fn collect_tokens<F, R>(&mut self, f: F) -> PResult<'a, (R, TokenStream)>
where F: FnOnce(&mut Self) -> PResult<'a, R>
{
// Record all tokens we parse when parsing this item.
let mut tokens = Vec::new();
let prev_collecting = match self.token_cursor.frame.last_token {
LastToken::Collecting(ref mut list) => {
Some(mem::take(list))
}
LastToken::Was(ref mut last) => {
tokens.extend(last.take());
None
}
};
self.token_cursor.frame.last_token = LastToken::Collecting(tokens);
let prev = self.token_cursor.stack.len();
let ret = f(self);
let last_token = if self.token_cursor.stack.len() == prev {
&mut self.token_cursor.frame.last_token
} else if self.token_cursor.stack.get(prev).is_none() {
// This can happen due to a bad interaction of two unrelated recovery mechanisms with
// mismatched delimiters *and* recovery lookahead on the likely typo `pub ident(`
// (#62881).
return Ok((ret?, TokenStream::new(vec![])));
} else {
&mut self.token_cursor.stack[prev].last_token
};
// Pull out the tokens that we've collected from the call to `f` above.
let mut collected_tokens = match *last_token {
LastToken::Collecting(ref mut v) => mem::take(v),
LastToken::Was(ref was) => {
let msg = format!("our vector went away? - found Was({:?})", was);
debug!("collect_tokens: {}", msg);
self.sess.span_diagnostic.delay_span_bug(self.token.span, &msg);
// This can happen due to a bad interaction of two unrelated recovery mechanisms
// with mismatched delimiters *and* recovery lookahead on the likely typo
// `pub ident(` (#62895, different but similar to the case above).
return Ok((ret?, TokenStream::new(vec![])));
}
};
        // If we're not at EOF, our current token wasn't actually consumed by
        // `f`, but it'll still be in the list we pulled out. In that case,
        // put it back.
let extra_token = if self.token != token::Eof {
collected_tokens.pop()
} else {
None
};
// If we were previously collecting tokens, then this was a recursive
// call. In that case we need to record all the tokens we collected in
// our parent list as well. To do that we push a clone of our stream
// onto the previous list.
match prev_collecting {
Some(mut list) => {
list.extend(collected_tokens.iter().cloned());
list.extend(extra_token);
*last_token = LastToken::Collecting(list);
}
None => {
*last_token = LastToken::Was(extra_token);
}
}
Ok((ret?, TokenStream::new(collected_tokens)))
}
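    /// Parses the next item, if any, after collecting its outer attributes.
    /// As a hedged summary: returns `Ok(None)` when the tokens at the current
    /// position do not begin an item.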
pub fn parse_item(&mut self) -> PResult<'a, Option<P<Item>>> {
let attrs = self.parse_outer_attributes()?;
self.parse_item_(attrs, true, false)
}
    /// Returns `true` if the next tokens are `::{` or `::*` (an import "coupler"
    /// that may follow a `use` path prefix).
fn is_import_coupler(&mut self) -> bool {
self.check(&token::ModSep) &&
self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace) ||
*t == token::BinOp(token::Star))
}
/// Parses a `UseTree`.
///
/// ```
/// USE_TREE = [`::`] `*` |
/// [`::`] `{` USE_TREE_LIST `}` |
/// PATH `::` `*` |
/// PATH `::` `{` USE_TREE_LIST `}` |
/// PATH [`as` IDENT]
/// ```
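    ///
    /// For illustration (not exhaustive), this accepts forms such as
    /// `use std::io;`, `use std::io::{self, Read, Write};`,
    /// `use std::io::Read as R;`, and `use std::collections::*;`.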
fn parse_use_tree(&mut self) -> PResult<'a, UseTree> {
let lo = self.token.span;
let mut prefix = ast::Path { segments: Vec::new(), span: lo.shrink_to_lo() };
let kind = if self.check(&token::OpenDelim(token::Brace)) ||
self.check(&token::BinOp(token::Star)) ||
self.is_import_coupler() {
// `use *;` or `use ::*;` or `use {...};` or `use ::{...};`
let mod_sep_ctxt = self.token.span.ctxt();
if self.eat(&token::ModSep) {
prefix.segments.push(
PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt))
);
}
if self.eat(&token::BinOp(token::Star)) {
UseTreeKind::Glob
} else {
UseTreeKind::Nested(self.parse_use_tree_list()?)
}
} else {
// `use path::*;` or `use path::{...};` or `use path;` or `use path as bar;`
prefix = self.parse_path(PathStyle::Mod)?;
if self.eat(&token::ModSep) {
if self.eat(&token::BinOp(token::Star)) {
UseTreeKind::Glob
} else {
UseTreeKind::Nested(self.parse_use_tree_list()?)
}
} else {
UseTreeKind::Simple(self.parse_rename()?, ast::DUMMY_NODE_ID, ast::DUMMY_NODE_ID)
}
};
Ok(UseTree { prefix, kind, span: lo.to(self.prev_span) })
}
/// Parses a `UseTreeKind::Nested(list)`.
///
/// ```
/// USE_TREE_LIST = Ø | (USE_TREE `,`)* USE_TREE [`,`]
/// ```
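    ///
    /// For illustration, this is the `{self, Read as R, *}` part of
    /// `use std::io::{self, Read as R, *};`; the empty list `{}` is also accepted.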
fn parse_use_tree_list(&mut self) -> PResult<'a, Vec<(UseTree, ast::NodeId)>> {
self.parse_delim_comma_seq(token::Brace, |p| Ok((p.parse_use_tree()?, ast::DUMMY_NODE_ID)))
.map(|(r, _)| r)
}
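    /// Parses the optional `as NAME` rename at the end of a use-tree path, e.g. the
    /// `as R` in `use std::io::Read as R;`; an underscore, as in
    /// `use std::fmt::Display as _;`, is also accepted.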
fn parse_rename(&mut self) -> PResult<'a, Option<Ident>> {
if self.eat_keyword(kw::As) {
self.parse_ident_or_underscore().map(Some)
} else {
Ok(None)
}
}
/// Parses a source module as a crate. This is the main entry point for the parser.
pub fn parse_crate_mod(&mut self) -> PResult<'a, Crate> {
let lo = self.token.span;
let krate = Ok(ast::Crate {
attrs: self.parse_inner_attributes()?,
module: self.parse_mod_items(&token::Eof, lo)?,
span: lo.to(self.token.span),
});
krate
}
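    /// If the current token is a string literal, bumps past it and returns its contents,
    /// style, and optional suffix; otherwise returns `None`. For illustration: `"foo"` is
    /// cooked, `r##"bar"##` is `StrStyle::Raw(2)`, and the `suf` in `"baz"suf` is returned
    /// as a suffix and rejected later (see `parse_str`).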
pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> {
let ret = match self.token.kind {
token::Literal(token::Lit { kind: token::Str, symbol, suffix }) =>
(symbol, ast::StrStyle::Cooked, suffix),
token::Literal(token::Lit { kind: token::StrRaw(n), symbol, suffix }) =>
(symbol, ast::StrStyle::Raw(n), suffix),
_ => return None
};
self.bump();
Some(ret)
}
pub fn parse_str(&mut self) -> PResult<'a, (Symbol, StrStyle)> {
match self.parse_optional_str() {
Some((s, style, suf)) => {
let sp = self.prev_span;
self.expect_no_suffix(sp, "a string literal", suf);
Ok((s, style))
}
_ => {
let msg = "expected string literal";
let mut err = self.fatal(msg);
err.span_label(self.token.span, msg);
Err(err)
}
}
}
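    /// Reports an item-position macro invocation that is neither brace-delimited nor
    /// followed by a semicolon. As a hedged sketch (placeholder name), `some_macro!(x)`
    /// in item position is offered the fixes `some_macro! { x }` and `some_macro!(x);`.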
fn report_invalid_macro_expansion_item(&self) {
self.struct_span_err(
self.prev_span,
"macros that expand to items must be delimited with braces or followed by a semicolon",
).multipart_suggestion(
"change the delimiters to curly braces",
vec![
(self.prev_span.with_hi(self.prev_span.lo() + BytePos(1)), String::from(" {")),
(self.prev_span.with_lo(self.prev_span.hi() - BytePos(1)), '}'.to_string()),
],
Applicability::MaybeIncorrect,
).span_suggestion(
self.sess.source_map.next_point(self.prev_span),
"add a semicolon",
';'.to_string(),
Applicability::MaybeIncorrect,
).emit();
}
}
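/// Emits one "incorrect close delimiter" error per recorded unmatched brace, labelling
/// the unexpected closing token and, when known, the still-unclosed delimiter and the
/// delimiter it was probably meant to close, then clears the list.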
pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, handler: &errors::Handler) {
for unmatched in unclosed_delims.iter() {
let mut err = handler.struct_span_err(unmatched.found_span, &format!(
"incorrect close delimiter: `{}`",
pprust::token_kind_to_string(&token::CloseDelim(unmatched.found_delim)),
));
err.span_label(unmatched.found_span, "incorrect close delimiter");
if let Some(sp) = unmatched.candidate_span {
err.span_label(sp, "close delimiter possibly meant for this");
}
if let Some(sp) = unmatched.unclosed_span {
err.span_label(sp, "un-closed delimiter");
}
err.emit();
}
unclosed_delims.clear();
}