// ignore-tidy-filelength

use crate::ast::{AngleBracketedArgs, AsyncArgument, ParenthesizedArgs, AttrStyle, BareFnTy};
use crate::ast::{GenericBound, TraitBoundModifier};
use crate::ast::Unsafety;
use crate::ast::{Mod, AnonConst, Arg, ArgSource, Arm, Guard, Attribute, BindingMode, TraitItemKind};
use crate::ast::Block;
use crate::ast::{BlockCheckMode, CaptureBy, Movability};
use crate::ast::{Constness, Crate};
use crate::ast::Defaultness;
use crate::ast::EnumDef;
use crate::ast::{Expr, ExprKind, RangeLimits};
use crate::ast::{Field, FnDecl, FnHeader};
use crate::ast::{ForeignItem, ForeignItemKind, FunctionRetTy};
use crate::ast::{GenericParam, GenericParamKind};
use crate::ast::GenericArg;
use crate::ast::{Ident, ImplItem, IsAsync, IsAuto, Item, ItemKind};
use crate::ast::{Label, Lifetime};
use crate::ast::{Local, LocalSource};
use crate::ast::MacStmtStyle;
use crate::ast::{Mac, Mac_, MacDelimiter};
use crate::ast::{MutTy, Mutability};
use crate::ast::{Pat, PatKind, PathSegment};
use crate::ast::{PolyTraitRef, QSelf};
use crate::ast::{Stmt, StmtKind};
use crate::ast::{VariantData, StructField};
use crate::ast::StrStyle;
use crate::ast::SelfKind;
use crate::ast::{TraitItem, TraitRef, TraitObjectSyntax};
use crate::ast::{Ty, TyKind, TypeBinding, GenericBounds};
use crate::ast::{Visibility, VisibilityKind, WhereClause, CrateSugar};
use crate::ast::{UseTree, UseTreeKind};
use crate::ast::{BinOpKind, UnOp};
use crate::ast::{RangeEnd, RangeSyntax};
use crate::{ast, attr};
use crate::ext::base::DummyResult;
use crate::source_map::{self, SourceMap, Spanned, respan};
use crate::parse::{SeqSep, classify, literal, token};
use crate::parse::lexer::{TokenAndSpan, UnmatchedBrace};
use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
use crate::parse::token::DelimToken;
use crate::parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
use crate::util::parser::{AssocOp, Fixity};
use crate::print::pprust;
use crate::ptr::P;
use crate::parse::PResult;
use crate::ThinVec;
use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
use crate::symbol::{kw, sym, Symbol};

use errors::{Applicability, DiagnosticBuilder, DiagnosticId, FatalError};
use rustc_target::spec::abi::{self, Abi};
use syntax_pos::{
    BytePos, DUMMY_SP, FileName, MultiSpan, Span,
    hygiene::CompilerDesugaringKind,
};
use log::{debug, trace};

use std::borrow::Cow;
use std::cmp;
use std::mem;
use std::path::{self, Path, PathBuf};
use std::slice;

#[derive(Debug)]
/// Whether the type alias or associated type is a concrete type or an existential type.
pub enum AliasKind {
    /// Just a new name for the same type.
    Weak(P<Ty>),
    /// Only trait impls of the type will be usable, not the actual type itself.
    Existential(GenericBounds),
}
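
// Illustration (not part of the parser logic, based on the variant docs above): at the time
// of writing, a plain alias such as `type Foo = Bar;` corresponds to `AliasKind::Weak`, while
// the unstable `existential type Foo: Trait;` form corresponds to `AliasKind::Existential`,
// where only the listed trait bounds of the hidden type are usable.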

bitflags::bitflags! {
    struct Restrictions: u8 {
        const STMT_EXPR         = 1 << 0;
        const NO_STRUCT_LITERAL = 1 << 1;
    }
}

type ItemInfo = (Ident, ItemKind, Option<Vec<Attribute>>);

/// Specifies how to parse a path.
#[derive(Copy, Clone, PartialEq)]
pub enum PathStyle {
    /// In some contexts, notably in expressions, paths with generic arguments are ambiguous
    /// with something else. For example, in expressions `segment < ....` can be interpreted
    /// as a comparison and `segment ( ....` can be interpreted as a function call.
    /// In all such contexts the non-path interpretation is preferred by default for practical
    /// reasons, but the path interpretation can be forced by the disambiguator `::`, e.g.
    /// `x<y>` - comparisons, `x::<y>` - unambiguously a path.
    Expr,
    /// In other contexts, notably in types, no ambiguity exists and paths can be written
    /// without the disambiguator, e.g., `x<y>` - unambiguously a path.
    /// Paths with disambiguators are still accepted, `x::<Y>` - unambiguously a path too.
    Type,
    /// A path with generic arguments disallowed, e.g., `foo::bar::Baz`, used in imports,
    /// visibilities or attributes.
    /// Technically, this variant is unnecessary and e.g., `Expr` can be used instead
    /// (paths in "mod" contexts have to be checked later for absence of generic arguments
    /// anyway, due to macros), but it is used to avoid weird suggestions about expected
    /// tokens when something goes wrong.
    Mod,
}
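
// Illustrative sketch of the distinction described above: in expression position
// (`PathStyle::Expr`) `a < b` is parsed as a comparison and only `a::<b>` as a generic path,
// while in type position (`PathStyle::Type`) `Vec<u8>` is unambiguously a path.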

#[derive(Clone, Copy, PartialEq, Debug)]
crate enum SemiColonMode {
    Break,
    Ignore,
    Comma,
}

#[derive(Clone, Copy, PartialEq, Debug)]
crate enum BlockMode {
    Break,
    Ignore,
}

/// Possibly accepts a `token::Interpolated` expression (a pre-parsed expression
/// dropped into the token stream, which happens while parsing the result of
/// macro expansion). Placement of these is not as complex as I feared it would
/// be. The important thing is to make sure that lookahead doesn't balk at
/// `token::Interpolated` tokens.
macro_rules! maybe_whole_expr {
    ($p:expr) => {
        if let token::Interpolated(nt) = &$p.token {
            match &**nt {
                token::NtExpr(e) | token::NtLiteral(e) => {
                    let e = e.clone();
                    $p.bump();
                    return Ok(e);
                }
                token::NtPath(path) => {
                    let path = path.clone();
                    $p.bump();
                    return Ok($p.mk_expr($p.span, ExprKind::Path(None, path), ThinVec::new()));
                }
                token::NtBlock(block) => {
                    let block = block.clone();
                    $p.bump();
                    return Ok($p.mk_expr($p.span, ExprKind::Block(block, None), ThinVec::new()));
                }
                _ => {},
            };
        }
    }
}
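
// Usage sketch (assumed, based on the `return Ok(..)` arms in the expansion): this macro is
// invoked as `maybe_whole_expr!($p)` at the start of an expression-parsing method so that an
// interpolated expression, literal, path, or block produced by macro expansion is returned
// directly instead of being re-parsed token by token.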

/// As `maybe_whole_expr`, but for things other than expressions.
macro_rules! maybe_whole {
    ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
        if let token::Interpolated(nt) = &$p.token {
            if let token::$constructor(x) = &**nt {
                let $x = x.clone();
                $p.bump();
                return Ok($e);
            }
        }
    };
}
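
// Usage sketch (assumed, for illustration only): an invocation shaped like
// `maybe_whole!(self, NtItem, |item| Some(item))` early-returns the pre-parsed payload when
// the current token is the matching interpolated nonterminal; the closure maps the cloned
// payload into the caller's return type.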

/// If the next tokens are ill-formed `$ty::` recover them as `<$ty>::`.
macro_rules! maybe_recover_from_interpolated_ty_qpath {
    ($self: expr, $allow_qpath_recovery: expr) => {
        if $allow_qpath_recovery && $self.look_ahead(1, |t| t == &token::ModSep) {
            if let token::Interpolated(nt) = &$self.token {
                if let token::NtTy(ty) = &**nt {
                    let ty = ty.clone();
                    $self.bump();
                    return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_span, ty);
                }
            }
        }
    }
}

fn maybe_append(mut lhs: Vec<Attribute>, mut rhs: Option<Vec<Attribute>>) -> Vec<Attribute> {
    if let Some(ref mut rhs) = rhs {
        lhs.append(rhs);
    }
    lhs
}

#[derive(Debug, Clone, Copy, PartialEq)]
enum PrevTokenKind {
    DocComment,
    Comma,
    Plus,
    Interpolated,
    Eof,
    Ident,
    BitOr,
    Other,
}

/* ident is handled by common.rs */

#[derive(Clone)]
pub struct Parser<'a> {
    pub sess: &'a ParseSess,
    /// The current token.
    pub token: token::Token,
    /// The span of the current token.
    pub span: Span,
    meta_var_span: Option<Span>,
    /// The span of the previous token.
    pub prev_span: Span,
    /// The kind of the previous token.
    prev_token_kind: PrevTokenKind,
    restrictions: Restrictions,
    /// Used to determine the path to externally loaded source files.
    crate directory: Directory<'a>,
    /// Whether to parse sub-modules in other files.
    pub recurse_into_file_modules: bool,
    /// Name of the root module this parser originated from. If `None`, then the
    /// name is not known. This does not change while the parser is descending
    /// into modules, and sub-parsers have new values for this name.
    pub root_module_name: Option<String>,
    crate expected_tokens: Vec<TokenType>,
    token_cursor: TokenCursor,
    desugar_doc_comments: bool,
    /// Whether we should configure out-of-line modules as we parse.
    pub cfg_mods: bool,
    /// This field is used to keep track of how many left angle brackets we have seen. This is
    /// required in order to detect extra leading left angle brackets (`<` characters) and error
    /// appropriately.
    ///
    /// See the comments in the `parse_path_segment` function for more details.
    crate unmatched_angle_bracket_count: u32,
    crate max_angle_bracket_count: u32,
    /// List of all unclosed delimiters found by the lexer. If an entry is used for error recovery
    /// it gets removed from here. Every entry left at the end gets emitted as an independent
    /// error.
    crate unclosed_delims: Vec<UnmatchedBrace>,
    last_unexpected_token_span: Option<Span>,
    /// If `true`, this `Parser` is not parsing Rust code but rather a macro call.
    is_subparser: Option<&'static str>,
}

impl<'a> Drop for Parser<'a> {
    fn drop(&mut self) {
        let diag = self.diagnostic();
        emit_unclosed_delims(&mut self.unclosed_delims, diag);
    }
}

#[derive(Clone)]
struct TokenCursor {
    frame: TokenCursorFrame,
    stack: Vec<TokenCursorFrame>,
}

#[derive(Clone)]
struct TokenCursorFrame {
    delim: token::DelimToken,
    span: DelimSpan,
    open_delim: bool,
    tree_cursor: tokenstream::Cursor,
    close_delim: bool,
    last_token: LastToken,
}

/// This is used in `TokenCursorFrame` above to track tokens that are consumed
/// by the parser, and then that's transitively used to record the tokens that
/// each parsed AST item is created with.
///
/// Right now this has two states, either collecting tokens or not collecting
/// tokens. If we're collecting tokens we just save everything off into a local
/// `Vec`. Eventually, though, this should likely save tokens from the original
/// token stream and just use slicing of token streams to avoid creation of a
/// whole new vector.
///
/// The second state is where we're passively not recording tokens, but the last
/// token is still tracked for when we want to start recording tokens. This
/// "last token" means that when we start recording tokens we'll want to ensure
/// that this, the first token, is included in the output.
///
/// You can find some more example usage of this in the `collect_tokens` method
/// on the parser.
#[derive(Clone)]
enum LastToken {
    Collecting(Vec<TreeAndJoint>),
    Was(Option<TreeAndJoint>),
}

impl TokenCursorFrame {
    fn new(sp: DelimSpan, delim: DelimToken, tts: &TokenStream) -> Self {
        TokenCursorFrame {
            delim,
            span: sp,
            open_delim: delim == token::NoDelim,
            tree_cursor: tts.clone().into_trees(),
            close_delim: delim == token::NoDelim,
            last_token: LastToken::Was(None),
        }
    }
}

impl TokenCursor {
    fn next(&mut self) -> TokenAndSpan {
        loop {
            let tree = if !self.frame.open_delim {
                self.frame.open_delim = true;
                TokenTree::open_tt(self.frame.span.open, self.frame.delim)
            } else if let Some(tree) = self.frame.tree_cursor.next() {
                tree
            } else if !self.frame.close_delim {
                self.frame.close_delim = true;
                TokenTree::close_tt(self.frame.span.close, self.frame.delim)
            } else if let Some(frame) = self.stack.pop() {
                self.frame = frame;
                continue
            } else {
                return TokenAndSpan { tok: token::Eof, sp: DUMMY_SP }
            };

            match self.frame.last_token {
                LastToken::Collecting(ref mut v) => v.push(tree.clone().into()),
                LastToken::Was(ref mut t) => *t = Some(tree.clone().into()),
            }

            match tree {
                TokenTree::Token(sp, tok) => return TokenAndSpan { tok, sp },
                TokenTree::Delimited(sp, delim, tts) => {
                    let frame = TokenCursorFrame::new(sp, delim, &tts);
                    self.stack.push(mem::replace(&mut self.frame, frame));
                }
            }
        }
    }

    fn next_desugared(&mut self) -> TokenAndSpan {
        let (sp, name) = match self.next() {
            TokenAndSpan { sp, tok: token::DocComment(name) } => (sp, name),
            tok => return tok,
        };

        let stripped = strip_doc_comment_decoration(&name.as_str());

        // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
        // required to wrap the text.
        let mut num_of_hashes = 0;
        let mut count = 0;
        for ch in stripped.chars() {
            count = match ch {
                '"' => 1,
                '#' if count > 0 => count + 1,
                _ => 0,
            };
            num_of_hashes = cmp::max(num_of_hashes, count);
        }

        let delim_span = DelimSpan::from_single(sp);
        let body = TokenTree::Delimited(
            delim_span,
            token::Bracket,
            [
                TokenTree::Token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)),
                TokenTree::Token(sp, token::Eq),
                TokenTree::Token(sp, token::Token::lit(
                    token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None
                )),
            ]
            .iter().cloned().collect::<TokenStream>().into(),
        );

        self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new(
            delim_span,
            token::NoDelim,
            &if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
                [TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body]
                    .iter().cloned().collect::<TokenStream>().into()
            } else {
                [TokenTree::Token(sp, token::Pound), body]
                    .iter().cloned().collect::<TokenStream>().into()
            },
        )));

        self.next()
    }
}
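
// In effect (illustration of the desugaring above): with `desugar_doc_comments` enabled, an
// outer comment such as `/// Foo` is replayed as the attribute tokens `#[doc = r"Foo"]`, and
// an inner `//! Foo` as `#![doc = r"Foo"]`, with enough `#`s on the raw string to safely wrap
// the comment text.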

#[derive(Clone, PartialEq)]
crate enum TokenType {
    Token(token::Token),
    Keyword(Symbol),
    Operator,
    Lifetime,
    Ident,
    Path,
    Type,
    Const,
}

impl TokenType {
    crate fn to_string(&self) -> String {
        match *self {
            TokenType::Token(ref t) => format!("`{}`", pprust::token_to_string(t)),
            TokenType::Keyword(kw) => format!("`{}`", kw),
            TokenType::Operator => "an operator".to_string(),
            TokenType::Lifetime => "lifetime".to_string(),
            TokenType::Ident => "identifier".to_string(),
            TokenType::Path => "path".to_string(),
            TokenType::Type => "type".to_string(),
            TokenType::Const => "const".to_string(),
        }
    }
}

/// Returns `true` if `IDENT t` can start a type -- `IDENT::a::b`, `IDENT<u8, u8>`,
/// `IDENT<<u8 as Trait>::AssocTy>`.
///
/// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes
/// that `IDENT` is not the ident of a fn trait.
fn can_continue_type_after_non_fn_ident(t: &token::Token) -> bool {
    t == &token::ModSep || t == &token::Lt ||
    t == &token::BinOp(token::Shl)
}

/// Information about the path to a module.
pub struct ModulePath {
    name: String,
    path_exists: bool,
    pub result: Result<ModulePathSuccess, Error>,
}

pub struct ModulePathSuccess {
    pub path: PathBuf,
    pub directory_ownership: DirectoryOwnership,
    warn: bool,
}

pub enum Error {
    FileNotFoundForModule {
        mod_name: String,
        default_path: String,
        secondary_path: String,
        dir_path: String,
    },
    DuplicatePaths {
        mod_name: String,
        default_path: String,
        secondary_path: String,
    },
    UselessDocComment,
    InclusiveRangeWithNoEnd,
}

impl Error {
    fn span_err<S: Into<MultiSpan>>(self,
                                    sp: S,
                                    handler: &errors::Handler) -> DiagnosticBuilder<'_> {
        match self {
            Error::FileNotFoundForModule { ref mod_name,
                                           ref default_path,
                                           ref secondary_path,
                                           ref dir_path } => {
                let mut err = struct_span_err!(handler, sp, E0583,
                                               "file not found for module `{}`", mod_name);
                err.help(&format!("name the file either {} or {} inside the directory \"{}\"",
                                  default_path,
                                  secondary_path,
                                  dir_path));
                err
            }
            Error::DuplicatePaths { ref mod_name, ref default_path, ref secondary_path } => {
                let mut err = struct_span_err!(handler, sp, E0584,
                                               "file for module `{}` found at both {} and {}",
                                               mod_name,
                                               default_path,
                                               secondary_path);
                err.help("delete or rename one of them to remove the ambiguity");
                err
            }
            Error::UselessDocComment => {
                let mut err = struct_span_err!(handler, sp, E0585,
                                  "found a documentation comment that doesn't document anything");
                err.help("doc comments must come before what they document, maybe a comment was \
                          intended with `//`?");
                err
            }
            Error::InclusiveRangeWithNoEnd => {
                let mut err = struct_span_err!(handler, sp, E0586,
                                               "inclusive range with no end");
                err.help("inclusive ranges must be bounded at the end (`..=b` or `a..=b`)");
                err
            }
        }
    }
}

#[derive(Debug)]
enum LhsExpr {
    NotYetParsed,
    AttributesParsed(ThinVec<Attribute>),
    AlreadyParsed(P<Expr>),
}

impl From<Option<ThinVec<Attribute>>> for LhsExpr {
    fn from(o: Option<ThinVec<Attribute>>) -> Self {
        if let Some(attrs) = o {
            LhsExpr::AttributesParsed(attrs)
        } else {
            LhsExpr::NotYetParsed
        }
    }
}

impl From<P<Expr>> for LhsExpr {
    fn from(expr: P<Expr>) -> Self {
        LhsExpr::AlreadyParsed(expr)
    }
}

/// Creates a placeholder argument.
fn dummy_arg(span: Span) -> Arg {
    let ident = Ident::new(kw::Invalid, span);
    let pat = P(Pat {
        id: ast::DUMMY_NODE_ID,
        node: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), ident, None),
        span,
    });
    let ty = Ty {
        node: TyKind::Err,
        span,
        id: ast::DUMMY_NODE_ID
    };
    Arg { ty: P(ty), pat, id: ast::DUMMY_NODE_ID, source: ast::ArgSource::Normal }
}

#[derive(Copy, Clone, Debug)]
enum TokenExpectType {
    Expect,
    NoExpect,
}

impl<'a> Parser<'a> {
    pub fn new(
        sess: &'a ParseSess,
        tokens: TokenStream,
        directory: Option<Directory<'a>>,
        recurse_into_file_modules: bool,
        desugar_doc_comments: bool,
        is_subparser: Option<&'static str>,
    ) -> Self {
        let mut parser = Parser {
            sess,
            token: token::Whitespace,
            span: DUMMY_SP,
            prev_span: DUMMY_SP,
            meta_var_span: None,
            prev_token_kind: PrevTokenKind::Other,
            restrictions: Restrictions::empty(),
            recurse_into_file_modules,
            directory: Directory {
                path: Cow::from(PathBuf::new()),
                ownership: DirectoryOwnership::Owned { relative: None }
            },
            root_module_name: None,
            expected_tokens: Vec::new(),
            token_cursor: TokenCursor {
                frame: TokenCursorFrame::new(
                    DelimSpan::dummy(),
                    token::NoDelim,
                    &tokens.into(),
                ),
                stack: Vec::new(),
            },
            desugar_doc_comments,
            cfg_mods: true,
            unmatched_angle_bracket_count: 0,
            max_angle_bracket_count: 0,
            unclosed_delims: Vec::new(),
            last_unexpected_token_span: None,
            is_subparser,
        };

        let tok = parser.next_tok();
        parser.token = tok.tok;
        parser.span = tok.sp;

        if let Some(directory) = directory {
            parser.directory = directory;
        } else if !parser.span.is_dummy() {
            if let FileName::Real(mut path) = sess.source_map().span_to_unmapped_path(parser.span) {
                path.pop();
                parser.directory.path = Cow::from(path);
            }
        }

        parser.process_potential_macro_variable();
        parser
    }
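
    // Note (descriptive, not normative): callers usually obtain a `Parser` through the
    // helpers in `crate::parse` (for example `new_sub_parser_from_file`, imported above)
    // rather than by calling `Parser::new` directly with a hand-built `TokenStream`.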

    fn next_tok(&mut self) -> TokenAndSpan {
        let mut next = if self.desugar_doc_comments {
            self.token_cursor.next_desugared()
        } else {
            self.token_cursor.next()
        };
        if next.sp.is_dummy() {
            // Tweak the location for better diagnostics, but keep syntactic context intact.
            next.sp = self.prev_span.with_ctxt(next.sp.ctxt());
        }
        next
    }

    /// Converts the current token to a string using `self`'s reader.
    pub fn this_token_to_string(&self) -> String {
        pprust::token_to_string(&self.token)
    }

    fn token_descr(&self) -> Option<&'static str> {
        Some(match &self.token {
            t if t.is_special_ident() => "reserved identifier",
            t if t.is_used_keyword() => "keyword",
            t if t.is_unused_keyword() => "reserved keyword",
            token::DocComment(..) => "doc comment",
            _ => return None,
        })
    }

    crate fn this_token_descr(&self) -> String {
        if let Some(prefix) = self.token_descr() {
            format!("{} `{}`", prefix, self.this_token_to_string())
        } else {
            format!("`{}`", self.this_token_to_string())
        }
    }

    crate fn unexpected<T>(&mut self) -> PResult<'a, T> {
        match self.expect_one_of(&[], &[]) {
            Err(e) => Err(e),
            Ok(_) => unreachable!(),
        }
    }

    /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
    pub fn expect(&mut self, t: &token::Token) -> PResult<'a, bool /* recovered */> {
        if self.expected_tokens.is_empty() {
            if self.token == *t {
                self.bump();
                Ok(false)
            } else {
                let token_str = pprust::token_to_string(t);
                let this_token_str = self.this_token_descr();
                let (prev_sp, sp) = match (&self.token, self.is_subparser) {
                    // Point at the end of the macro call when reaching end of macro arguments.
                    (token::Token::Eof, Some(_)) => {
                        let sp = self.sess.source_map().next_point(self.span);
                        (sp, sp)
                    }
                    // We don't want to point at the following span after DUMMY_SP.
                    // This happens when the parser finds an empty TokenStream.
                    _ if self.prev_span == DUMMY_SP => (self.span, self.span),
                    // EOF, don't want to point at the following char, but rather the last token.
                    (token::Token::Eof, None) => (self.prev_span, self.span),
                    _ => (self.sess.source_map().next_point(self.prev_span), self.span),
                };
                let msg = format!(
                    "expected `{}`, found {}",
                    token_str,
                    match (&self.token, self.is_subparser) {
                        (token::Token::Eof, Some(origin)) => format!("end of {}", origin),
                        _ => this_token_str,
                    },
                );
                let mut err = self.struct_span_err(sp, &msg);
                let label_exp = format!("expected `{}`", token_str);
                match self.recover_closing_delimiter(&[t.clone()], err) {
                    Err(e) => err = e,
                    Ok(recovered) => {
                        return Ok(recovered);
                    }
                }
                let cm = self.sess.source_map();
                match (cm.lookup_line(prev_sp.lo()), cm.lookup_line(sp.lo())) {
                    (Ok(ref a), Ok(ref b)) if a.line == b.line => {
                        // When the spans are in the same line, it means that the only content
                        // between them is whitespace, point only at the found token.
                        err.span_label(sp, label_exp);
                    }
                    _ => {
                        err.span_label(prev_sp, label_exp);
                        err.span_label(sp, "unexpected token");
                    }
                }
                Err(err)
            }
        } else {
            self.expect_one_of(slice::from_ref(t), &[])
        }
    }

    /// Expects the next token to be an edible or inedible token. If edible,
    /// consumes it; if inedible, returns without consuming anything.
    /// Signals a fatal error if the next token is unexpected.
    pub fn expect_one_of(
        &mut self,
        edible: &[token::Token],
        inedible: &[token::Token],
    ) -> PResult<'a, bool /* recovered */> {
        fn tokens_to_string(tokens: &[TokenType]) -> String {
            let mut i = tokens.iter();
            // This might be a sign we need a connect method on Iterator.
            let b = i.next()
                     .map_or(String::new(), |t| t.to_string());
            i.enumerate().fold(b, |mut b, (i, a)| {
                if tokens.len() > 2 && i == tokens.len() - 2 {
                    b.push_str(", or ");
                } else if tokens.len() == 2 && i == tokens.len() - 2 {
                    b.push_str(" or ");
                } else {
                    b.push_str(", ");
                }
                b.push_str(&a.to_string());
                b
            })
        }
        if edible.contains(&self.token) {
            self.bump();
            Ok(false)
        } else if inedible.contains(&self.token) {
            // leave it in the input
            Ok(false)
        } else if self.last_unexpected_token_span == Some(self.span) {
            FatalError.raise();
        } else {
            let mut expected = edible.iter()
                .map(|x| TokenType::Token(x.clone()))
                .chain(inedible.iter().map(|x| TokenType::Token(x.clone())))
                .chain(self.expected_tokens.iter().cloned())
                .collect::<Vec<_>>();
            expected.sort_by_cached_key(|x| x.to_string());
            expected.dedup();
            let expect = tokens_to_string(&expected[..]);
            let actual = self.this_token_to_string();
            let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 {
                let short_expect = if expected.len() > 6 {
                    format!("{} possible tokens", expected.len())
                } else {
                    expect.clone()
                };
                (format!("expected one of {}, found `{}`", expect, actual),
                 (self.sess.source_map().next_point(self.prev_span),
                  format!("expected one of {} here", short_expect)))
            } else if expected.is_empty() {
                (format!("unexpected token: `{}`", actual),
                 (self.prev_span, "unexpected token after this".to_string()))
            } else {
                (format!("expected {}, found `{}`", expect, actual),
                 (self.sess.source_map().next_point(self.prev_span),
                  format!("expected {} here", expect)))
            };
            self.last_unexpected_token_span = Some(self.span);
            let mut err = self.fatal(&msg_exp);
            if self.token.is_ident_named("and") {
                err.span_suggestion_short(
                    self.span,
                    "use `&&` instead of `and` for the boolean operator",
                    "&&".to_string(),
                    Applicability::MaybeIncorrect,
                );
            }
            if self.token.is_ident_named("or") {
                err.span_suggestion_short(
                    self.span,
                    "use `||` instead of `or` for the boolean operator",
                    "||".to_string(),
                    Applicability::MaybeIncorrect,
                );
            }
            let sp = if self.token == token::Token::Eof {
                // This is EOF, don't want to point at the following char, but rather the last token
                self.prev_span
            } else {
                label_sp
            };
            match self.recover_closing_delimiter(&expected.iter().filter_map(|tt| match tt {
                TokenType::Token(t) => Some(t.clone()),
                _ => None,
            }).collect::<Vec<_>>(), err) {
                Err(e) => err = e,
                Ok(recovered) => {
                    return Ok(recovered);
                }
            }

            let is_semi_suggestable = expected.iter().any(|t| match t {
                TokenType::Token(token::Semi) => true, // we expect a `;` here
                _ => false,
            }) && ( // a `;` would be expected before the current keyword
                self.token.is_keyword(kw::Break) ||
                self.token.is_keyword(kw::Continue) ||
                self.token.is_keyword(kw::For) ||
                self.token.is_keyword(kw::If) ||
                self.token.is_keyword(kw::Let) ||
                self.token.is_keyword(kw::Loop) ||
                self.token.is_keyword(kw::Match) ||
                self.token.is_keyword(kw::Return) ||
                self.token.is_keyword(kw::While)
            );
            let cm = self.sess.source_map();
            match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
                (Ok(ref a), Ok(ref b)) if a.line != b.line && is_semi_suggestable => {
                    // The spans are in different lines, expected `;` and found `let` or `return`.
                    // High likelihood that it is only a missing `;`.
                    err.span_suggestion_short(
                        label_sp,
                        "a semicolon may be missing here",
                        ";".to_string(),
                        Applicability::MaybeIncorrect,
                    );
                    err.emit();
                    return Ok(true);
                }
                (Ok(ref a), Ok(ref b)) if a.line == b.line => {
                    // When the spans are in the same line, it means that the only content between
                    // them is whitespace, point at the found token in that case:
                    //
                    // X |     () => { syntax error };
                    //   |                    ^^^^^ expected one of 8 possible tokens here
                    //
                    // instead of having:
                    //
                    // X |     () => { syntax error };
                    //   |                   -^^^^^ unexpected token
                    //   |                   |
                    //   |                   expected one of 8 possible tokens here
                    err.span_label(self.span, label_exp);
                }
                _ if self.prev_span == DUMMY_SP => {
                    // Account for macro context where the previous span might not be
                    // available to avoid incorrect output (#54841).
                    err.span_label(self.span, "unexpected token");
                }
                _ => {
                    err.span_label(sp, label_exp);
                    err.span_label(self.span, "unexpected token");
                }
            }
            Err(err)
        }
    }

    /// Returns the span of `expr`, if it was not interpolated, or the span of
    /// the interpolated token.
    fn interpolated_or_expr_span(&self,
                                 expr: PResult<'a, P<Expr>>)
                                 -> PResult<'a, (Span, P<Expr>)> {
        expr.map(|e| {
            if self.prev_token_kind == PrevTokenKind::Interpolated {
                (self.prev_span, e)
            } else {
                (e.span, e)
            }
        })
    }

    fn expected_ident_found(&self) -> DiagnosticBuilder<'a> {
        let mut err = self.struct_span_err(self.span,
                                           &format!("expected identifier, found {}",
                                                    self.this_token_descr()));
        if let token::Ident(ident, false) = &self.token {
            if ident.is_raw_guess() {
                err.span_suggestion(
                    self.span,
                    "you can escape reserved keywords to use them as identifiers",
                    format!("r#{}", ident),
                    Applicability::MaybeIncorrect,
                );
            }
        }
        if let Some(token_descr) = self.token_descr() {
            err.span_label(self.span, format!("expected identifier, found {}", token_descr));
        } else {
            err.span_label(self.span, "expected identifier");
            if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) {
                err.span_suggestion(
                    self.span,
                    "remove this comma",
                    String::new(),
                    Applicability::MachineApplicable,
                );
            }
        }
        err
    }

    pub fn parse_ident(&mut self) -> PResult<'a, ast::Ident> {
        self.parse_ident_common(true)
    }

    fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> {
        match self.token {
            token::Ident(ident, _) => {
                if self.token.is_reserved_ident() {
                    let mut err = self.expected_ident_found();
                    if recover {
                        err.emit();
                    } else {
                        return Err(err);
                    }
                }
                let span = self.span;
                self.bump();
                Ok(Ident::new(ident.name, span))
            }
            _ => {
                Err(if self.prev_token_kind == PrevTokenKind::DocComment {
                    self.span_fatal_err(self.prev_span, Error::UselessDocComment)
                } else {
                    self.expected_ident_found()
                })
            }
        }
    }

    /// Checks if the next token is `tok`, and returns `true` if so.
    ///
    /// This method will automatically add `tok` to `expected_tokens` if `tok` is not
    /// encountered.
    crate fn check(&mut self, tok: &token::Token) -> bool {
        let is_present = self.token == *tok;
        if !is_present { self.expected_tokens.push(TokenType::Token(tok.clone())); }
        is_present
    }

    /// Consumes the token `tok` if it is present. Returns whether the given token was present.
    pub fn eat(&mut self, tok: &token::Token) -> bool {
        let is_present = self.check(tok);
        if is_present { self.bump() }
        is_present
    }
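
    // Relationship between these helpers (as implemented above): `check` only records the
    // expectation in `expected_tokens` for diagnostics, `eat` is `check` plus `bump`, and
    // `expect`/`expect_one_of` additionally emit an error when the token is absent.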

    fn check_keyword(&mut self, kw: Symbol) -> bool {
        self.expected_tokens.push(TokenType::Keyword(kw));
        self.token.is_keyword(kw)
    }

    /// If the next token is the given keyword, eats it and returns
    /// `true`. Otherwise, returns `false`.
    pub fn eat_keyword(&mut self, kw: Symbol) -> bool {
        if self.check_keyword(kw) {
            self.bump();
            true
        } else {
            false
        }
    }

    fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
        if self.token.is_keyword(kw) {
            self.bump();
            true
        } else {
            false
        }
    }

    /// If the given word is not a keyword, signals an error.
    /// If the next token is not the given word, signals an error.
    /// Otherwise, eats it.
    fn expect_keyword(&mut self, kw: Symbol) -> PResult<'a, ()> {
        if !self.eat_keyword(kw) {
            self.unexpected()
        } else {
            Ok(())
        }
    }

    fn check_ident(&mut self) -> bool {
        if self.token.is_ident() {
            true
        } else {
            self.expected_tokens.push(TokenType::Ident);
            false
        }
    }

    fn check_path(&mut self) -> bool {
        if self.token.is_path_start() {
            true
        } else {
            self.expected_tokens.push(TokenType::Path);
            false
        }
    }

    fn check_type(&mut self) -> bool {
        if self.token.can_begin_type() {
            true
        } else {
            self.expected_tokens.push(TokenType::Type);
            false
        }
    }

    fn check_const_arg(&mut self) -> bool {
        if self.token.can_begin_const_arg() {
            true
        } else {
            self.expected_tokens.push(TokenType::Const);
            false
        }
    }

    /// Expects and consumes a `+`. If a `+=` is seen, replaces it with a `=`
    /// and continues. If a `+` is not seen, returns `false`.
    ///
    /// This is used when token-splitting `+=` into `+`.
    /// See issue #47856 for an example of when this may occur.
    fn eat_plus(&mut self) -> bool {
        self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus)));
        match self.token {
            token::BinOp(token::Plus) => {
                self.bump();
                true
            }
            token::BinOpEq(token::Plus) => {
                let span = self.span.with_lo(self.span.lo() + BytePos(1));
                self.bump_with(token::Eq, span);
                true
            }
            _ => false,
        }
    }
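
    // Sketch (hypothetical input, for illustration): given source like `T+=U` while parsing
    // bounds, the lexer produced a single `+=` token. `eat_plus` consumes only the `+` by
    // replacing the current token with `=` and trimming the first byte off its span:
    //
    //     // before: token = BinOpEq(Plus), span covers "+="
    //     // after:  token = Eq,            span covers "="
    //
    // so bound parsing sees the `+` it expected and `=` is left as the current token.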

    /// Checks to see if the next token is either `+` or `+=` and returns `true` if so.
    /// Otherwise returns `false`.
    fn check_plus(&mut self) -> bool {
        if self.token.is_like_plus() {
            true
        } else {
            self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus)));
            false
        }
    }

    /// Expects and consumes an `&`. If `&&` is seen, replaces it with a single
    /// `&` and continues. If an `&` is not seen, signals an error.
    fn expect_and(&mut self) -> PResult<'a, ()> {
        self.expected_tokens.push(TokenType::Token(token::BinOp(token::And)));
        match self.token {
            token::BinOp(token::And) => {
                self.bump();
                Ok(())
            }
            token::AndAnd => {
                let span = self.span.with_lo(self.span.lo() + BytePos(1));
                Ok(self.bump_with(token::BinOp(token::And), span))
            }
            _ => self.unexpected()
        }
    }

    /// Expects and consumes an `|`. If `||` is seen, replaces it with a single
    /// `|` and continues. If an `|` is not seen, signals an error.
    fn expect_or(&mut self) -> PResult<'a, ()> {
        self.expected_tokens.push(TokenType::Token(token::BinOp(token::Or)));
        match self.token {
            token::BinOp(token::Or) => {
                self.bump();
                Ok(())
            }
            token::OrOr => {
                let span = self.span.with_lo(self.span.lo() + BytePos(1));
                Ok(self.bump_with(token::BinOp(token::Or), span))
            }
            _ => self.unexpected()
        }
    }

    fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<ast::Name>) {
        literal::expect_no_suffix(&self.sess.span_diagnostic, sp, kind, suffix)
    }

    /// Attempts to consume a `<`. If `<<` is seen, replaces it with a single
    /// `<` and continues. If `<-` is seen, replaces it with a single `<`
    /// and continues. If a `<` is not seen, returns `false`.
    ///
    /// This is meant to be used when parsing generics on a path to get the
    /// starting token.
    fn eat_lt(&mut self) -> bool {
        self.expected_tokens.push(TokenType::Token(token::Lt));
        let ate = match self.token {
            token::Lt => {
                self.bump();
                true
            }
            token::BinOp(token::Shl) => {
                let span = self.span.with_lo(self.span.lo() + BytePos(1));
                self.bump_with(token::Lt, span);
                true
            }
            token::LArrow => {
                let span = self.span.with_lo(self.span.lo() + BytePos(1));
                self.bump_with(token::BinOp(token::Minus), span);
                true
            }
            _ => false,
        };

        if ate {
            // See doc comment for `unmatched_angle_bracket_count`.
            self.unmatched_angle_bracket_count += 1;
            self.max_angle_bracket_count += 1;
            debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
        }

        ate
    }

    fn expect_lt(&mut self) -> PResult<'a, ()> {
        if !self.eat_lt() {
            self.unexpected()
        } else {
            Ok(())
        }
    }

    /// Expects and consumes a single `>` token. If a `>>` is seen, replaces it
    /// with a single `>` and continues. If a `>` is not seen, signals an error.
    fn expect_gt(&mut self) -> PResult<'a, ()> {
        self.expected_tokens.push(TokenType::Token(token::Gt));
        let ate = match self.token {
            token::Gt => {
                self.bump();
                Some(())
            }
            token::BinOp(token::Shr) => {
                let span = self.span.with_lo(self.span.lo() + BytePos(1));
                Some(self.bump_with(token::Gt, span))
            }
            token::BinOpEq(token::Shr) => {
                let span = self.span.with_lo(self.span.lo() + BytePos(1));
                Some(self.bump_with(token::Ge, span))
            }
            token::Ge => {
                let span = self.span.with_lo(self.span.lo() + BytePos(1));
                Some(self.bump_with(token::Eq, span))
            }
            _ => None,
        };

        match ate {
            Some(_) => {
                // See doc comment for `unmatched_angle_bracket_count`.
                if self.unmatched_angle_bracket_count > 0 {
                    self.unmatched_angle_bracket_count -= 1;
                    debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
                }

                Ok(())
            },
            None => self.unexpected(),
        }
    }
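
    // Sketch (hypothetical input): when closing nested generics such as `Vec<Vec<u8>>`, the
    // final `>>` arrives as one `Shr` token. `expect_gt` consumes just the first `>` via
    // `bump_with(token::Gt, span)`, so the outer angle bracket can be closed by the remaining
    // `>` on the next call. `>>=` and `>=` are split the same way, leaving `>=` and `=`.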

    /// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
    /// passes through any errors encountered. Used for error recovery.
    fn eat_to_tokens(&mut self, kets: &[&token::Token]) {
        let handler = self.diagnostic();

        if let Err(ref mut err) = self.parse_seq_to_before_tokens(kets,
                                                                  SeqSep::none(),
                                                                  TokenExpectType::Expect,
                                                                  |p| Ok(p.parse_token_tree())) {
            handler.cancel(err);
        }
    }

    /// Parses a sequence, including the closing delimiter. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    pub fn parse_seq_to_end<T, F>(&mut self,
                                  ket: &token::Token,
                                  sep: SeqSep,
                                  f: F)
                                  -> PResult<'a, Vec<T>> where
        F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    {
        let (val, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
        if !recovered {
            self.bump();
        }
        Ok(val)
    }

    /// Parses a sequence, not including the closing delimiter. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    pub fn parse_seq_to_before_end<T, F>(
        &mut self,
        ket: &token::Token,
        sep: SeqSep,
        f: F,
    ) -> PResult<'a, (Vec<T>, bool)>
        where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>
    {
        self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
    }

    fn parse_seq_to_before_tokens<T, F>(
        &mut self,
        kets: &[&token::Token],
        sep: SeqSep,
        expect: TokenExpectType,
        mut f: F,
    ) -> PResult<'a, (Vec<T>, bool /* recovered */)>
        where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>
    {
        let mut first = true;
        let mut recovered = false;
        let mut v = vec![];
        while !kets.iter().any(|k| {
            match expect {
                TokenExpectType::Expect => self.check(k),
                TokenExpectType::NoExpect => self.token == **k,
            }
        }) {
            match self.token {
                token::CloseDelim(..) | token::Eof => break,
                _ => {}
            };
            if let Some(ref t) = sep.sep {
                if first {
                    first = false;
                } else {
                    match self.expect(t) {
                        Ok(false) => {}
                        Ok(true) => {
                            recovered = true;
                            break;
                        }
                        Err(mut e) => {
                            // Attempt to keep parsing if it was a similar separator
                            if let Some(ref tokens) = t.similar_tokens() {
                                if tokens.contains(&self.token) {
                                    self.bump();
                                }
                            }
                            e.emit();
                            // Attempt to keep parsing if it was an omitted separator
                            match f(self) {
                                Ok(t) => {
                                    v.push(t);
                                    continue;
                                },
                                Err(mut e) => {
                                    e.cancel();
                                    break;
                                }
                            }
                        }
                    }
                }
            }
            if sep.trailing_sep_allowed && kets.iter().any(|k| {
                match expect {
                    TokenExpectType::Expect => self.check(k),
                    TokenExpectType::NoExpect => self.token == **k,
                }
            }) {
                break;
            }

            let t = f(self)?;
            v.push(t);
        }

        Ok((v, recovered))
    }
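
    // Sketch (hypothetical input): for a call like `foo(a b)` the missing `,` makes
    // `self.expect(t)` fail. The error is emitted rather than returned, `f` is tried again
    // immediately, and if `b` still parses as an element it is pushed and the loop continues,
    // so the caller gets a best-effort list after the error has been reported.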

    /// Parses a sequence, including the closing delimiter. The function
    /// `f` must consume tokens until reaching the next separator or
    /// closing bracket.
    fn parse_unspanned_seq<T, F>(
        &mut self,
        bra: &token::Token,
        ket: &token::Token,
        sep: SeqSep,
        f: F,
    ) -> PResult<'a, Vec<T>> where
        F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    {
        self.expect(bra)?;
        let (result, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
        if !recovered {
            self.eat(ket);
        }
        Ok(result)
    }

    /// Advances the parser by one token.
    pub fn bump(&mut self) {
        if self.prev_token_kind == PrevTokenKind::Eof {
            // Bumping after EOF is a bad sign, usually an infinite loop.
            self.bug("attempted to bump the parser past EOF (may be stuck in a loop)");
        }

        self.prev_span = self.meta_var_span.take().unwrap_or(self.span);

        // Record last token kind for possible error recovery.
        self.prev_token_kind = match self.token {
            token::DocComment(..) => PrevTokenKind::DocComment,
            token::Comma => PrevTokenKind::Comma,
            token::BinOp(token::Plus) => PrevTokenKind::Plus,
            token::BinOp(token::Or) => PrevTokenKind::BitOr,
            token::Interpolated(..) => PrevTokenKind::Interpolated,
            token::Eof => PrevTokenKind::Eof,
            token::Ident(..) => PrevTokenKind::Ident,
            _ => PrevTokenKind::Other,
        };

        let next = self.next_tok();
        self.span = next.sp;
        self.token = next.tok;
        self.expected_tokens.clear();
        // check after each token
        self.process_potential_macro_variable();
    }

    /// Advances the parser using the provided token as the next one. Use this when
    /// consuming a part of a token. For example a single `<` from `<<`.
    fn bump_with(&mut self, next: token::Token, span: Span) {
        self.prev_span = self.span.with_hi(span.lo());
        // It would be incorrect to record the kind of the current token, but
        // fortunately for tokens currently using `bump_with`, the
        // prev_token_kind will be of no use anyway.
        self.prev_token_kind = PrevTokenKind::Other;
        self.span = span;
        self.token = next;
        self.expected_tokens.clear();
    }

    pub fn look_ahead<R, F>(&self, dist: usize, f: F) -> R where
        F: FnOnce(&token::Token) -> R,
    {
        if dist == 0 {
            return f(&self.token)
        }

        f(&match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
            Some(tree) => match tree {
                TokenTree::Token(_, tok) => tok,
                TokenTree::Delimited(_, delim, _) => token::OpenDelim(delim),
            },
            None => token::CloseDelim(self.token_cursor.frame.delim),
        })
    }

    crate fn look_ahead_span(&self, dist: usize) -> Span {
        if dist == 0 {
            return self.span
        }

        match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
            Some(TokenTree::Token(span, _)) => span,
            Some(TokenTree::Delimited(span, ..)) => span.entire(),
            None => self.look_ahead_span(dist - 1),
        }
    }

    pub fn fatal(&self, m: &str) -> DiagnosticBuilder<'a> {
        self.sess.span_diagnostic.struct_span_fatal(self.span, m)
    }

    pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
        self.sess.span_diagnostic.struct_span_fatal(sp, m)
    }

    fn span_fatal_err<S: Into<MultiSpan>>(&self, sp: S, err: Error) -> DiagnosticBuilder<'a> {
        err.span_err(sp, self.diagnostic())
    }

    fn bug(&self, m: &str) -> ! {
        self.sess.span_diagnostic.span_bug(self.span, m)
    }

    fn span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) {
        self.sess.span_diagnostic.span_err(sp, m)
    }

    crate fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
        self.sess.span_diagnostic.struct_span_err(sp, m)
    }

    crate fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> ! {
        self.sess.span_diagnostic.span_bug(sp, m)
    }

    fn cancel(&self, err: &mut DiagnosticBuilder<'_>) {
        self.sess.span_diagnostic.cancel(err)
    }

    crate fn diagnostic(&self) -> &'a errors::Handler {
        &self.sess.span_diagnostic
    }

    /// Is the current token one of the keywords that signals a bare function type?
    fn token_is_bare_fn_keyword(&mut self) -> bool {
        self.check_keyword(kw::Fn) ||
            self.check_keyword(kw::Unsafe) ||
            self.check_keyword(kw::Extern)
    }

    /// Parses a `TyKind::BareFn` type.
    fn parse_ty_bare_fn(&mut self, generic_params: Vec<GenericParam>) -> PResult<'a, TyKind> {
        /*

        [unsafe] [extern "ABI"] fn (S) -> T
         ^~~~^           ^~~~^     ^~^    ^
           |               |        |     |
           |               |        |   Return type
           |               |     Argument types
           |               |
           |              ABI
        Function Style
        */

        let unsafety = self.parse_unsafety();
        let abi = if self.eat_keyword(kw::Extern) {
            self.parse_opt_abi()?.unwrap_or(Abi::C)
        } else {
            Abi::Rust
        };

        self.expect_keyword(kw::Fn)?;
        let (inputs, c_variadic) = self.parse_fn_args(false, true)?;
        let ret_ty = self.parse_ret_ty(false)?;
        let decl = P(FnDecl {
            inputs,
            output: ret_ty,
            c_variadic,
        });
        Ok(TyKind::BareFn(P(BareFnTy {
            abi,
            unsafety,
            generic_params,
            decl,
        })))
    }

    /// Parses asyncness: `async` or nothing.
    fn parse_asyncness(&mut self) -> IsAsync {
        if self.eat_keyword(kw::Async) {
            IsAsync::Async {
                closure_id: ast::DUMMY_NODE_ID,
                return_impl_trait_id: ast::DUMMY_NODE_ID,
                arguments: Vec::new(),
            }
        } else {
            IsAsync::NotAsync
        }
    }

    /// Parses unsafety: `unsafe` or nothing.
    fn parse_unsafety(&mut self) -> Unsafety {
        if self.eat_keyword(kw::Unsafe) {
            Unsafety::Unsafe
        } else {
            Unsafety::Normal
        }
    }

    /// Parses the items in a trait declaration.
    pub fn parse_trait_item(&mut self, at_end: &mut bool) -> PResult<'a, TraitItem> {
        maybe_whole!(self, NtTraitItem, |x| x);
        let attrs = self.parse_outer_attributes()?;
        let mut unclosed_delims = vec![];
        let (mut item, tokens) = self.collect_tokens(|this| {
            let item = this.parse_trait_item_(at_end, attrs);
            unclosed_delims.append(&mut this.unclosed_delims);
            item
        })?;
        self.unclosed_delims.append(&mut unclosed_delims);
        // See `parse_item` for why this clause is here.
        if !item.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) {
            item.tokens = Some(tokens);
        }
        Ok(item)
    }

    fn parse_trait_item_(&mut self,
                         at_end: &mut bool,
                         mut attrs: Vec<Attribute>) -> PResult<'a, TraitItem> {
        let lo = self.span;
        self.eat_bad_pub();
        let (name, node, generics) = if self.eat_keyword(kw::Type) {
            self.parse_trait_item_assoc_ty()?
        } else if self.is_const_item() {
            self.expect_keyword(kw::Const)?;
            let ident = self.parse_ident()?;
            self.expect(&token::Colon)?;
            let ty = self.parse_ty()?;
            let default = if self.eat(&token::Eq) {
                let expr = self.parse_expr()?;
                self.expect(&token::Semi)?;
                Some(expr)
            } else {
                self.expect(&token::Semi)?;
                None
            };
            (ident, TraitItemKind::Const(ty, default), ast::Generics::default())
        } else if let Some(mac) = self.parse_assoc_macro_invoc("trait", None, &mut false)? {
            // trait item macro.
            (Ident::invalid(), ast::TraitItemKind::Macro(mac), ast::Generics::default())
        } else {
            let (constness, unsafety, mut asyncness, abi) = self.parse_fn_front_matter()?;

            let ident = self.parse_ident()?;
            let mut generics = self.parse_generics()?;

            let mut decl = self.parse_fn_decl_with_self(|p: &mut Parser<'a>| {
                // This is somewhat dubious; We don't want to allow
                // argument names to be left off if there is a
                // definition...

                // We don't allow argument names to be left off in edition 2018.
                p.parse_arg_general(p.span.rust_2018(), true, false)
            })?;
            generics.where_clause = self.parse_where_clause()?;
            self.construct_async_arguments(&mut asyncness, &mut decl);

            let sig = ast::MethodSig {
                header: FnHeader {
                    unsafety,
                    constness,
                    abi,
                    asyncness,
                },
                decl,
            };

            let body = match self.token {
                token::Semi => {
                    self.bump();
                    *at_end = true;
                    debug!("parse_trait_methods(): parsing required method");
                    None
                }
                token::OpenDelim(token::Brace) => {
                    debug!("parse_trait_methods(): parsing provided method");
                    *at_end = true;
                    let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
                    attrs.extend(inner_attrs.iter().cloned());
                    Some(body)
                }
                token::Interpolated(ref nt) => {
                    match **nt {
                        token::NtBlock(..) => {
                            *at_end = true;
                            let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
                            attrs.extend(inner_attrs.iter().cloned());
                            Some(body)
                        }
                        _ => {
                            let token_str = self.this_token_descr();
                            let mut err = self.fatal(&format!("expected `;` or `{{`, found {}",
                                                              token_str));
                            err.span_label(self.span, "expected `;` or `{`");
                            return Err(err);
                        }
                    }
                }
                _ => {
                    let token_str = self.this_token_descr();
                    let mut err = self.fatal(&format!("expected `;` or `{{`, found {}",
                                                      token_str));
                    err.span_label(self.span, "expected `;` or `{`");
                    return Err(err);
                }
            };
            (ident, ast::TraitItemKind::Method(sig, body), generics)
        };

        Ok(TraitItem {
            id: ast::DUMMY_NODE_ID,
            ident: name,
            attrs,
            generics,
            node,
            span: lo.to(self.prev_span),
            tokens: None,
        })
    }

    /// Parses an optional return type `[ -> TY ]` in a function declaration.
    fn parse_ret_ty(&mut self, allow_plus: bool) -> PResult<'a, FunctionRetTy> {
        if self.eat(&token::RArrow) {
            Ok(FunctionRetTy::Ty(self.parse_ty_common(allow_plus, true, false)?))
        } else {
            Ok(FunctionRetTy::Default(self.span.shrink_to_lo()))
        }
    }

    /// Parses a type.
    pub fn parse_ty(&mut self) -> PResult<'a, P<Ty>> {
        self.parse_ty_common(true, true, false)
    }

    /// Parses a type in restricted contexts where `+` is not permitted.
    ///
    /// Example 1: `&'a TYPE`
    ///     `+` is prohibited to maintain operator priority (P(+) < P(&)).
    /// Example 2: `value1 as TYPE + value2`
    ///     `+` is prohibited to avoid interactions with expression grammar.
    fn parse_ty_no_plus(&mut self) -> PResult<'a, P<Ty>> {
        self.parse_ty_common(false, true, false)
    }

    fn parse_ty_common(&mut self, allow_plus: bool, allow_qpath_recovery: bool,
                       allow_c_variadic: bool) -> PResult<'a, P<Ty>> {
        maybe_recover_from_interpolated_ty_qpath!(self, allow_qpath_recovery);
        maybe_whole!(self, NtTy, |x| x);

        let lo = self.span;
        let mut impl_dyn_multi = false;
        let node = if self.eat(&token::OpenDelim(token::Paren)) {
            // `(TYPE)` is a parenthesized type.
            // `(TYPE,)` is a tuple with a single field of type TYPE.
            let mut ts = vec![];
            let mut last_comma = false;
            while self.token != token::CloseDelim(token::Paren) {
                ts.push(self.parse_ty()?);
                if self.eat(&token::Comma) {
                    last_comma = true;
                } else {
                    last_comma = false;
                    break;
                }
            }
            let trailing_plus = self.prev_token_kind == PrevTokenKind::Plus;
            self.expect(&token::CloseDelim(token::Paren))?;

            if ts.len() == 1 && !last_comma {
                let ty = ts.into_iter().nth(0).unwrap().into_inner();
                let maybe_bounds = allow_plus && self.token.is_like_plus();
                match ty.node {
                    // `(TY_BOUND_NOPAREN) + BOUND + ...`.
                    TyKind::Path(None, ref path) if maybe_bounds => {
                        self.parse_remaining_bounds(Vec::new(), path.clone(), lo, true)?
                    }
                    TyKind::TraitObject(ref bounds, TraitObjectSyntax::None)
                            if maybe_bounds && bounds.len() == 1 && !trailing_plus => {
                        let path = match bounds[0] {
                            GenericBound::Trait(ref pt, ..) => pt.trait_ref.path.clone(),
                            GenericBound::Outlives(..) => self.bug("unexpected lifetime bound"),
                        };
                        self.parse_remaining_bounds(Vec::new(), path, lo, true)?
                    }
                    // `(TYPE)`
                    _ => TyKind::Paren(P(ty))
                }
            } else {
                TyKind::Tup(ts)
            }
        } else if self.eat(&token::Not) {
            // Never type `!`
            TyKind::Never
        } else if self.eat(&token::BinOp(token::Star)) {
            // Raw pointer
            TyKind::Ptr(self.parse_ptr()?)
        } else if self.eat(&token::OpenDelim(token::Bracket)) {
            // Array or slice
            let t = self.parse_ty()?;
            // Parse optional `; EXPR` in `[TYPE; EXPR]`
            let t = match self.maybe_parse_fixed_length_of_vec()? {
                None => TyKind::Slice(t),
                Some(length) => TyKind::Array(t, AnonConst {
                    id: ast::DUMMY_NODE_ID,
                    value: length,
                }),
            };
            self.expect(&token::CloseDelim(token::Bracket))?;
            t
        } else if self.check(&token::BinOp(token::And)) || self.check(&token::AndAnd) {
            // Reference
            self.expect_and()?;
            self.parse_borrowed_pointee()?
        } else if self.eat_keyword_noexpect(kw::Typeof) {
            // `typeof(EXPR)`
            // In order to not be ambiguous, the type must be surrounded by parens.
            self.expect(&token::OpenDelim(token::Paren))?;
            let e = AnonConst {
                id: ast::DUMMY_NODE_ID,
                value: self.parse_expr()?,
            };
            self.expect(&token::CloseDelim(token::Paren))?;
            TyKind::Typeof(e)
        } else if self.eat_keyword(kw::Underscore) {
            // A type to be inferred `_`
            TyKind::Infer
        } else if self.token_is_bare_fn_keyword() {
            // Function pointer type
            self.parse_ty_bare_fn(Vec::new())?
        } else if self.check_keyword(kw::For) {
            // Function pointer type or bound list (trait object type) starting with a poly-trait.
            //   `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T`
            //   `for<'lt> Trait1<'lt> + Trait2 + 'a`
            let lo = self.span;
            let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
            if self.token_is_bare_fn_keyword() {
                self.parse_ty_bare_fn(lifetime_defs)?
            } else {
                let path = self.parse_path(PathStyle::Type)?;
                let parse_plus = allow_plus && self.check_plus();
                self.parse_remaining_bounds(lifetime_defs, path, lo, parse_plus)?
            }
        } else if self.eat_keyword(kw::Impl) {
            // Always parse bounds greedily for better error recovery.
            let bounds = self.parse_generic_bounds(None)?;
            impl_dyn_multi = bounds.len() > 1 || self.prev_token_kind == PrevTokenKind::Plus;
            TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds)
        } else if self.check_keyword(kw::Dyn) &&
                  (self.span.rust_2018() ||
                   self.look_ahead(1, |t| t.can_begin_bound() &&
                                          !can_continue_type_after_non_fn_ident(t))) {
            self.bump(); // `dyn`
            // Always parse bounds greedily for better error recovery.
            let bounds = self.parse_generic_bounds(None)?;
            impl_dyn_multi = bounds.len() > 1 || self.prev_token_kind == PrevTokenKind::Plus;
            TyKind::TraitObject(bounds, TraitObjectSyntax::Dyn)
        } else if self.check(&token::Question) ||
                  self.check_lifetime() && self.look_ahead(1, |t| t.is_like_plus()) {
            // Bound list (trait object type)
            TyKind::TraitObject(self.parse_generic_bounds_common(allow_plus, None)?,
                                TraitObjectSyntax::None)
        } else if self.eat_lt() {
            // Qualified path
            let (qself, path) = self.parse_qpath(PathStyle::Type)?;
            TyKind::Path(Some(qself), path)
        } else if self.token.is_path_start() {
            // Simple path
            let path = self.parse_path(PathStyle::Type)?;
            if self.eat(&token::Not) {
                // Macro invocation in type position
                let (delim, tts) = self.expect_delimited_token_tree()?;
                let node = Mac_ { path, tts, delim };
                TyKind::Mac(respan(lo.to(self.prev_span), node))
            } else {
                // Just a type path or bound list (trait object type) starting with a trait.
                //   `Type`
                //   `Trait1 + Trait2 + 'a`
                if allow_plus && self.check_plus() {
                    self.parse_remaining_bounds(Vec::new(), path, lo, true)?
                } else {
                    TyKind::Path(None, path)
                }
            }
        } else if self.check(&token::DotDotDot) {
            if allow_c_variadic {
                self.eat(&token::DotDotDot);
                TyKind::CVarArgs
            } else {
                return Err(self.fatal(
                    "only foreign functions are allowed to be C-variadic"
                ));
            }
        } else {
            let msg = format!("expected type, found {}", self.this_token_descr());
            return Err(self.fatal(&msg));
        };

        let span = lo.to(self.prev_span);
        let ty = P(Ty { node, span, id: ast::DUMMY_NODE_ID });

        // Try to recover from use of `+` with incorrect priority.
        self.maybe_report_ambiguous_plus(allow_plus, impl_dyn_multi, &ty);
        self.maybe_recover_from_bad_type_plus(allow_plus, &ty)?;
        self.maybe_recover_from_bad_qpath(ty, allow_qpath_recovery)
    }
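
    // Sketch (hypothetical inputs): examples of the forms dispatched above:
    // `(T)` / `(T,)`, `!`, `*const u8`, `[u8]` / `[u8; 4]`, `&'a mut T`, `typeof(x)`, `_`,
    // `unsafe extern "C" fn(u32) -> u32`, `for<'a> Fn(&'a str)`, `impl Iterator<Item = u8>`,
    // `dyn Send + Sync`, `<T as IntoIterator>::IntoIter`, `Vec<u8>`, and `...`
    // (C-variadic, foreign functions only).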

    fn parse_remaining_bounds(&mut self, generic_params: Vec<GenericParam>, path: ast::Path,
                              lo: Span, parse_plus: bool) -> PResult<'a, TyKind> {
        let poly_trait_ref = PolyTraitRef::new(generic_params, path, lo.to(self.prev_span));
        let mut bounds = vec![GenericBound::Trait(poly_trait_ref, TraitBoundModifier::None)];
        if parse_plus {
            self.eat_plus(); // `+`, or `+=` gets split and `+` is discarded
            bounds.append(&mut self.parse_generic_bounds(Some(self.prev_span))?);
        }
        Ok(TyKind::TraitObject(bounds, TraitObjectSyntax::None))
    }

    fn parse_borrowed_pointee(&mut self) -> PResult<'a, TyKind> {
        let opt_lifetime = if self.check_lifetime() { Some(self.expect_lifetime()) } else { None };
        let mutbl = self.parse_mutability();
        let ty = self.parse_ty_no_plus()?;
        return Ok(TyKind::Rptr(opt_lifetime, MutTy { ty: ty, mutbl: mutbl }));
    }

    fn parse_ptr(&mut self) -> PResult<'a, MutTy> {
        let mutbl = if self.eat_keyword(kw::Mut) {
            Mutability::Mutable
        } else if self.eat_keyword(kw::Const) {
            Mutability::Immutable
        } else {
            let span = self.prev_span;
            let msg = "expected mut or const in raw pointer type";
            self.struct_span_err(span, msg)
                .span_label(span, msg)
                .help("use `*mut T` or `*const T` as appropriate")
                .emit();
            Mutability::Immutable
        };
        let t = self.parse_ty_no_plus()?;
        Ok(MutTy { ty: t, mutbl: mutbl })
    }

    fn is_named_argument(&self) -> bool {
        let offset = match self.token {
            token::Interpolated(ref nt) => match **nt {
                token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
                _ => 0,
            }
            token::BinOp(token::And) | token::AndAnd => 1,
            _ if self.token.is_keyword(kw::Mut) => 1,
            _ => 0,
        };

        self.look_ahead(offset, |t| t.is_ident()) &&
        self.look_ahead(offset + 1, |t| t == &token::Colon)
    }

    /// Skips unexpected attributes and doc comments in this position and emits an appropriate
    /// error.
    fn eat_incorrect_doc_comment(&mut self, applied_to: &str) {
        if let token::DocComment(_) = self.token {
            let mut err = self.diagnostic().struct_span_err(
                self.span,
                &format!("documentation comments cannot be applied to {}", applied_to),
            );
            err.span_label(self.span, "doc comments are not allowed here");
            err.emit();
            self.bump();
        } else if self.token == token::Pound && self.look_ahead(1, |t| {
            *t == token::OpenDelim(token::Bracket)
        }) {
            let lo = self.span;
            // Skip every token until next possible arg.
            while self.token != token::CloseDelim(token::Bracket) {
                self.bump();
            }
            let sp = lo.to(self.span);
            self.bump();
            let mut err = self.diagnostic().struct_span_err(
                sp,
                &format!("attributes cannot be applied to {}", applied_to),
            );
            err.span_label(sp, "attributes are not allowed here");
            err.emit();
        }
    }

    /// This version of parse arg doesn't necessarily require identifier names.
    fn parse_arg_general(&mut self, require_name: bool, is_trait_item: bool,
                         allow_c_variadic: bool) -> PResult<'a, Arg> {
        if let Ok(Some(_)) = self.parse_self_arg() {
            let mut err = self.struct_span_err(self.prev_span,
                "unexpected `self` argument in function");
            err.span_label(self.prev_span,
                "`self` is only valid as the first argument of an associated function");
            return Err(err);
        }

        let (pat, ty) = if require_name || self.is_named_argument() {
            debug!("parse_arg_general parse_pat (require_name:{})",
                   require_name);
            self.eat_incorrect_doc_comment("method arguments");
            let pat = self.parse_pat(Some("argument name"))?;

            if let Err(mut err) = self.expect(&token::Colon) {
                // If we find a pattern followed by an identifier, it could be an (incorrect)
                // C-style parameter declaration.
                if self.check_ident() && self.look_ahead(1, |t| {
                    *t == token::Comma || *t == token::CloseDelim(token::Paren)
                }) {
                    let ident = self.parse_ident().unwrap();
                    let span = pat.span.with_hi(ident.span.hi());

                    err.span_suggestion(
                        span,
                        "declare the type after the parameter binding",
                        String::from("<identifier>: <type>"),
                        Applicability::HasPlaceholders,
                    );
                } else if require_name && is_trait_item {
                    if let PatKind::Ident(_, ident, _) = pat.node {
                        err.span_suggestion(
                            pat.span,
                            "explicitly ignore parameter",
                            format!("_: {}", ident),
                            Applicability::MachineApplicable,
                        );
                    }

                    err.note("anonymous parameters are removed in the 2018 edition (see RFC 1685)");
                }

                return Err(err);
            }

            self.eat_incorrect_doc_comment("a method argument's type");
            (pat, self.parse_ty_common(true, true, allow_c_variadic)?)
        } else {
            debug!("parse_arg_general ident_to_pat");
            let parser_snapshot_before_ty = self.clone();
            self.eat_incorrect_doc_comment("a method argument's type");
            let mut ty = self.parse_ty_common(true, true, allow_c_variadic);
            if ty.is_ok() && self.token != token::Comma &&
               self.token != token::CloseDelim(token::Paren) {
                // This wasn't actually a type, but a pattern looking like a type,
                // so we are going to rollback and re-parse for recovery.
                ty = self.unexpected();
            }
            match ty {
                Ok(ty) => {
                    let ident = Ident::new(kw::Invalid, self.prev_span);
                    let pat = P(Pat {
                        id: ast::DUMMY_NODE_ID,
                        node: PatKind::Ident(
                            BindingMode::ByValue(Mutability::Immutable), ident, None),
                        span: ty.span,
                    });
                    (pat, ty)
                }
                Err(mut err) => {
                    // If this is a C-variadic argument and we hit an error, return the
                    // error.
                    if self.token == token::DotDotDot {
                        return Err(err);
                    }
                    // Recover from attempting to parse the argument as a type without pattern.
                    err.cancel();
                    mem::replace(self, parser_snapshot_before_ty);
                    let pat = self.parse_pat(Some("argument name"))?;
                    self.expect(&token::Colon)?;
                    let ty = self.parse_ty()?;

                    let mut err = self.diagnostic().struct_span_err_with_code(
                        pat.span,
                        "patterns aren't allowed in methods without bodies",
                        DiagnosticId::Error("E0642".into()),
                    );
                    err.span_suggestion_short(
                        pat.span,
                        "give this argument a name or use an underscore to ignore it",
                        "_".to_owned(),
                        Applicability::MachineApplicable,
                    );
                    err.emit();

                    // Pretend the pattern is `_`, to avoid duplicate errors from AST validation.
                    let pat = P(Pat {
                        node: PatKind::Wild,
                        span: pat.span,
                        id: ast::DUMMY_NODE_ID
                    });
                    (pat, ty)
                }
            }
        };

        Ok(Arg { ty, pat, id: ast::DUMMY_NODE_ID, source: ast::ArgSource::Normal })
    }

    /// Parses a single function argument.
    crate fn parse_arg(&mut self) -> PResult<'a, Arg> {
        self.parse_arg_general(true, false, false)
    }

    /// Parses an argument in a lambda header (e.g., `|arg, arg|`).
    fn parse_fn_block_arg(&mut self) -> PResult<'a, Arg> {
        let pat = self.parse_pat(Some("argument name"))?;
        let t = if self.eat(&token::Colon) {
            self.parse_ty()?
        } else {
            P(Ty {
                id: ast::DUMMY_NODE_ID,
                node: TyKind::Infer,
                span: self.prev_span,
            })
        };
        Ok(Arg {
            ty: t,
            pat,
            id: ast::DUMMY_NODE_ID,
            source: ast::ArgSource::Normal,
        })
    }

    fn maybe_parse_fixed_length_of_vec(&mut self) -> PResult<'a, Option<P<ast::Expr>>> {
        if self.eat(&token::Semi) {
            Ok(Some(self.parse_expr()?))
        } else {
            Ok(None)
        }
    }

    /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`).
    crate fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
        maybe_whole_expr!(self);

        let minus_lo = self.span;
        let minus_present = self.eat(&token::BinOp(token::Minus));
        let lo = self.span;
        let literal = self.parse_lit()?;
        let hi = self.prev_span;
        let expr = self.mk_expr(lo.to(hi), ExprKind::Lit(literal), ThinVec::new());

        if minus_present {
            let minus_hi = self.prev_span;
            let unary = self.mk_unary(UnOp::Neg, expr);
            Ok(self.mk_expr(minus_lo.to(minus_hi), unary, ThinVec::new()))
        } else {
            Ok(expr)
        }
    }

    fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> {
        match self.token {
            token::Ident(ident, _) if self.token.is_path_segment_keyword() => {
                let span = self.span;
                self.bump();
                Ok(Ident::new(ident.name, span))
            }
            _ => self.parse_ident(),
        }
    }

    fn parse_ident_or_underscore(&mut self) -> PResult<'a, ast::Ident> {
        match self.token {
            token::Ident(ident, false) if ident.name == kw::Underscore => {
                let span = self.span;
                self.bump();
                Ok(Ident::new(ident.name, span))
            }
            _ => self.parse_ident(),
        }
    }

    /// Parses a qualified path.
    /// Assumes that the leading `<` has been parsed already.
    ///
    /// `qualified_path = <type [as trait_ref]>::path`
    ///
    /// # Examples
    /// `<T>::default`
    /// `<T as U>::a`
    /// `<T as U>::F::a<S>` (without disambiguator)
    /// `<T as U>::F::a::<S>` (with disambiguator)
    fn parse_qpath(&mut self, style: PathStyle) -> PResult<'a, (QSelf, ast::Path)> {
        let lo = self.prev_span;
        let ty = self.parse_ty()?;

        // `path` will contain the prefix of the path up to the `>`,
        // if any (e.g., `U` in the `<T as U>::*` examples
        // above). `path_span` has the span of that path, or an empty
        // span in the case of something like `<T>::Bar`.
        let (mut path, path_span);
        if self.eat_keyword(kw::As) {
            let path_lo = self.span;
            path = self.parse_path(PathStyle::Type)?;
            path_span = path_lo.to(self.prev_span);
        } else {
            path = ast::Path { segments: Vec::new(), span: DUMMY_SP };
            path_span = self.span.to(self.span);
        }

        // See doc comment for `unmatched_angle_bracket_count`.
        self.expect(&token::Gt)?;
        if self.unmatched_angle_bracket_count > 0 {
            self.unmatched_angle_bracket_count -= 1;
            debug!("parse_qpath: (decrement) count={:?}", self.unmatched_angle_bracket_count);
        }

        self.expect(&token::ModSep)?;

        let qself = QSelf { ty, path_span, position: path.segments.len() };
        self.parse_path_segments(&mut path.segments, style)?;

        Ok((qself, ast::Path { segments: path.segments, span: lo.to(self.prev_span) }))
    }
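
    // Illustrative sketch of the behavior above (editorial note, not in the original
    // source): for `<T as U>::a`, `ty` is `T`, the prefix `path` is `U`, and
    // `qself.position` is 1, so the trailing `a` segment is appended after the prefix;
    // for `<T>::default` the prefix is empty and the position is 0.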

    /// Parses simple paths.
    ///
    /// `path = [::] segment+`
    /// `segment = ident | ident[::]<args> | ident[::](args) [-> type]`
    ///
    /// # Examples
    /// `a::b::C<D>` (without disambiguator)
    /// `a::b::C::<D>` (with disambiguator)
    /// `Fn(Args)` (without disambiguator)
    /// `Fn::(Args)` (with disambiguator)
    pub fn parse_path(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
        maybe_whole!(self, NtPath, |path| {
            if style == PathStyle::Mod &&
               path.segments.iter().any(|segment| segment.args.is_some()) {
                self.diagnostic().span_err(path.span, "unexpected generic arguments in path");
            }
            path
        });

        let lo = self.meta_var_span.unwrap_or(self.span);
        let mut segments = Vec::new();
        let mod_sep_ctxt = self.span.ctxt();
        if self.eat(&token::ModSep) {
            segments.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
        }
        self.parse_path_segments(&mut segments, style)?;

        Ok(ast::Path { segments, span: lo.to(self.prev_span) })
    }
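
    // Illustrative sketch of the behavior above (editorial note, not in the original
    // source): `::a::b` gets a synthetic `PathSegment::path_root` pushed for the leading
    // `::` before the `a` and `b` segments, whereas `a::b` starts directly with the `a`
    // segment.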

    /// Like `parse_path`, but also supports parsing `Word` meta items into paths for
    /// backwards-compatibility. This is used when parsing derive macro paths in `#[derive]`
    /// attributes.
    pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
        let meta_ident = match self.token {
            token::Interpolated(ref nt) => match **nt {
                token::NtMeta(ref meta) => match meta.node {
                    ast::MetaItemKind::Word => Some(meta.path.clone()),
                    _ => None,
                },
                _ => None,
            },
            _ => None,
        };
        if let Some(path) = meta_ident {
            self.bump();
            return Ok(path);
        }
        self.parse_path(style)
    }

    crate fn parse_path_segments(&mut self,
                                 segments: &mut Vec<PathSegment>,
                                 style: PathStyle)
                                 -> PResult<'a, ()> {
        loop {
            let segment = self.parse_path_segment(style)?;
            if style == PathStyle::Expr {
                // In order to check for trailing angle brackets, we must have finished
                // recursing (`parse_path_segment` can indirectly call this function),
                // that is, the next token must be the highlighted part of the below example:
                //
                // `Foo::<Bar as Baz<T>>::Qux`
                //                      ^ here
                //
                // As opposed to the below highlight (if we had only finished the first
                // recursion):
                //
                // `Foo::<Bar as Baz<T>>::Qux`
                //                     ^ here
                //
                // `PathStyle::Expr` is only provided at the root invocation and is never
                // passed when `parse_path_segment` recurses, so it can be checked here to
                // maintain this invariant.
                self.check_trailing_angle_brackets(&segment, token::ModSep);
            }
            segments.push(segment);

            if self.is_import_coupler() || !self.eat(&token::ModSep) {
                return Ok(());
            }
        }
    }
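
    // Illustrative sketch of the behavior above (editorial note, not in the original
    // source): segments are accumulated until the next token is not `::`, or until an
    // import coupler such as the `::{` in `use a::b::{c, d};` is reached (assuming the
    // usual `is_import_coupler` lookahead), at which point the collected `a` and `b`
    // segments are left in `segments` for the caller.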

    fn parse_path_segment(&mut self, style: PathStyle) -> PResult<'a, PathSegment> {
        let ident = self.parse_path_segment_ident()?;

        let is_args_start = |token: &token::Token| match *token {
            token::Lt | token::BinOp(token::Shl) | token::OpenDelim(token::Paren)
            | token::LArrow => true,
            _ => false,
        };
        let check_args_start = |this: &mut Self| {
            this.expected_tokens.extend_from_slice(
                &[TokenType::Token(token::Lt), TokenType::Token(token::OpenDelim(token::Paren))]
            );
            is_args_start(&this.token)
        };

        Ok(if style == PathStyle::Type && check_args_start(self) ||
              style != PathStyle::Mod && self.check(&token::ModSep)
                                      && self.look_ahead(1, |t| is_args_start(t)) {
            // We use `style == PathStyle::Expr` to check if this is in a recursion or not. If
            // it isn't, then we reset the unmatched angle bracket count as we're about to start
            // parsing a new path.
            if style == PathStyle::Expr {
                self.unmatched_angle_bracket_count = 0;
                self.max_angle_bracket_count = 0;
            }

            // Generic arguments are found - `<`, `(`, `::<` or `::(`.
            self.eat(&token::ModSep);
            let lo = self.span;
            let args = if self.eat_lt() {
                // `<'a, T, A = U>`
                let (args, bindings) =
                    self.parse_generic_args_with_leaning_angle_bracket_recovery(style, lo)?;
                self.expect_gt()?;
                let span = lo.to(self.prev_span);
                AngleBracketedArgs { args, bindings, span }.into()
            } else {
                // `(T, U) -> R`
                self.bump(); // `(`
                let (inputs, recovered) = self.parse_seq_to_before_tokens(
                    &[&token::CloseDelim(token::Paren)],
                    SeqSep::trailing_allowed(token::Comma),
                    TokenExpectType::Expect,
                    |p| p.parse_ty())?;
                if !recovered {
                    self.bump(); // `)`
                }
                let span = lo.to(self.prev_span);
                let output = if self.eat(&token::RArrow) {
                    Some(self.parse_ty_common(false, false, false)?)
                } else {
                    None
                };
                ParenthesizedArgs { inputs, output, span }.into()
            };

            PathSegment { ident, args, id: ast::DUMMY_NODE_ID }
        } else {
            // Generic arguments are not found.
            PathSegment::from_ident(ident)
        })
    }
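
    // Illustrative sketch of the behavior above (editorial note, not in the original
    // source): `C<D>` and `C::<D>` both yield a segment carrying `AngleBracketedArgs`,
    // `Fn(A, B) -> R` yields one carrying
    // `ParenthesizedArgs { inputs: [A, B], output: Some(R), .. }`, and a bare identifier
    // with no argument list becomes `PathSegment::from_ident(ident)`.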

    crate fn check_lifetime(&mut self) -> bool {
        self.expected_tokens.push(TokenType::Lifetime);
        self.token.is_lifetime()
    }

    /// Parses a single lifetime `'a` or panics.
    crate fn expect_lifetime(&mut self) -> Lifetime {
        if let Some(ident) = self.token.lifetime() {
            let span = self.span;
            self.bump();
            Lifetime { ident: Ident::new(ident.name, span), id: ast::DUMMY_NODE_ID }
        } else {
            self.span_bug(self.span, "not a lifetime")
        }
    }

    fn eat_label(&mut self) -> Option<Label> {
        if let Some(ident) = self.token.lifetime() {
            let span = self.span;
            self.bump();
            Some(Label { ident: Ident::new(ident.name, span) })
        } else {
            None
        }
    }

    /// Parses mutability (`mut` or nothing).
    fn parse_mutability(&mut self) -> Mutability {
        if self.eat_keyword(kw::Mut) {
            Mutability::Mutable
        } else {
            Mutability::Immutable
        }
    }

    fn parse_field_name(&mut self) -> PResult<'a, Ident> {
        if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token {
            self.expect_no_suffix(self.span, "a tuple index", suffix);
            self.bump();
            Ok(Ident::new(symbol, self.prev_span))
        } else {
            self.parse_ident_common(false)
        }
    }

    /// Parses `ident (COLON expr)?`.
    fn parse_field(&mut self) -> PResult<'a, Field> {
        let attrs = self.parse_outer_attributes()?;
        let lo = self.span;

        // Check if a colon exists one ahead. This means we're parsing a fieldname.
        let (fieldname, expr, is_shorthand) = if self.look_ahead(1, |t| {
            t == &token::Colon || t == &token::Eq
        }) {
            let fieldname = self.parse_field_name()?;

            // Check for an equals token. This means the source incorrectly attempts to
            // initialize a field with an eq rather than a colon.
            if self.token == token::Eq {
                self.diagnostic()
                    .struct_span_err(self.span, "expected `:`, found `=`")
                    .span_suggestion(
                        fieldname.span.shrink_to_hi().to(self.span),
                        "replace equals symbol with a colon",
                        ":".to_string(),
                        Applicability::MachineApplicable,
                    )
                    .emit();
            }
            self.bump(); // `:`
            (fieldname, self.parse_expr()?, false)
        } else {
            let fieldname = self.parse_ident_common(false)?;

            // Mimic `x: x` for the `x` field shorthand.
            let path = ast::Path::from_ident(fieldname);
            let expr = self.mk_expr(fieldname.span, ExprKind::Path(None, path), ThinVec::new());
            (fieldname, expr, true)
        };
        Ok(ast::Field {
            ident: fieldname,
            span: lo.to(expr.span),
            expr,
            is_shorthand,
            attrs: attrs.into(),
        })
    }
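
    // Illustrative sketch of the behavior above (editorial note, not in the original
    // source): in `Foo { x: 1, y }` the first field parses as `x: 1` with
    // `is_shorthand == false`, while `y` takes the shorthand branch and is expanded to the
    // equivalent of `y: y` with `is_shorthand == true`; `Foo { x = 1 }` is recovered with
    // the "replace equals symbol with a colon" suggestion.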

    crate fn mk_expr(&self, span: Span, node: ExprKind, attrs: ThinVec<Attribute>) -> P<Expr> {
        P(Expr { node, span, attrs, id: ast::DUMMY_NODE_ID })
    }

    fn mk_unary(&self, unop: ast::UnOp, expr: P<Expr>) -> ast::ExprKind {
        ExprKind::Unary(unop, expr)
    }

    fn mk_binary(&self, binop: ast::BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ast::ExprKind {
        ExprKind::Binary(binop, lhs, rhs)
    }

    fn mk_call(&self, f: P<Expr>, args: Vec<P<Expr>>) -> ast::ExprKind {
        ExprKind::Call(f, args)
    }

    fn mk_index(&self, expr: P<Expr>, idx: P<Expr>) -> ast::ExprKind {
        ExprKind::Index(expr, idx)
    }

    fn mk_range(&self,
                start: Option<P<Expr>>,
                end: Option<P<Expr>>,
                limits: RangeLimits)
                -> PResult<'a, ast::ExprKind> {
        if end.is_none() && limits == RangeLimits::Closed {
            Err(self.span_fatal_err(self.span, Error::InclusiveRangeWithNoEnd))
        } else {
            Ok(ExprKind::Range(start, end, limits))
        }
    }
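
    // Illustrative sketch of the behavior above (editorial note, not in the original
    // source): `0..10`, `0..=10` and `..10` all map onto `ExprKind::Range`, but a closed
    // range with no end such as `0..=` is rejected here with
    // `Error::InclusiveRangeWithNoEnd`.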

    fn mk_assign_op(&self, binop: ast::BinOp,
                    lhs: P<Expr>, rhs: P<Expr>) -> ast::ExprKind {
        ExprKind::AssignOp(binop, lhs, rhs)
    }

    fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, TokenStream)> {
        let delim = match self.token {
            token::OpenDelim(delim) => delim,
            _ => {
                let msg = "expected open delimiter";
                let mut err = self.fatal(msg);
                err.span_label(self.span, msg);
                return Err(err)
            }
        };
        let tts = match self.parse_token_tree() {
            TokenTree::Delimited(_, _, tts) => tts,
            _ => unreachable!(),
        };
        let delim = match delim {
            token::Paren => MacDelimiter::Parenthesis,
            token::Bracket => MacDelimiter::Bracket,
            token::Brace => MacDelimiter::Brace,
            token::NoDelim => self.bug("unexpected no delimiter"),
        };
        Ok((delim, tts.into()))
    }

    /// At the bottom (top?) of the precedence hierarchy,
    /// parses things like parenthesized exprs, macros, `return`, etc.
    ///
    /// N.B., this does not parse outer attributes, and is private because it only works
    /// correctly if called from `parse_dot_or_call_expr()`.
    fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
        maybe_recover_from_interpolated_ty_qpath!(self, true);
        maybe_whole_expr!(self);

        // Outer attributes are already parsed and will be
        // added to the return value after the fact.
        //
        // Therefore, prevent sub-parser from parsing
        // attributes by giving them an empty "already parsed" list.
        let mut attrs = ThinVec::new();

        let lo = self.span;
        let mut hi = self.span;

        let ex: ExprKind;

        // Note: when adding new syntax here, don't forget to adjust Token::can_begin_expr().
        match self.token {
            token::OpenDelim(token::Paren) => {
                self.bump();

                attrs.extend(self.parse_inner_attributes()?);

                // (e) is parenthesized e
                // (e,) is a tuple with only one field, e
                let mut es = vec![];
                let mut trailing_comma = false;
                let mut recovered = false;
                while self.token != token::CloseDelim(token::Paren) {
                    es.push(match self.parse_expr() {
                        Ok(es) => es,
                        Err(err) => {
                            // recover from parse error in tuple list
                            return Ok(self.recover_seq_parse_error(token::Paren, lo, Err(err)));
                        }
                    });
                    recovered = self.expect_one_of(
                        &[],
                        &[token::Comma, token::CloseDelim(token::Paren)],
                    )?;
                    if self.eat(&token::Comma) {
                        trailing_comma = true;
                    } else {
                        trailing_comma = false;
                        break;
                    }
                }
                if !recovered {
                    self.bump();
                }

                hi = self.prev_span;
                ex = if es.len() == 1 && !trailing_comma {
                    ExprKind::Paren(es.into_iter().nth(0).unwrap())
                } else {
                    ExprKind::Tup(es)
                };
            }
            token::OpenDelim(token::Brace) => {
                return self.parse_block_expr(None, lo, BlockCheckMode::Default, attrs);
            }
            token::BinOp(token::Or) | token::OrOr => {
                return self.parse_lambda_expr(attrs);
            }
            token::OpenDelim(token::Bracket) => {
                self.bump();

                attrs.extend(self.parse_inner_attributes()?);

                if self.eat(&token::CloseDelim(token::Bracket)) {
                    // Empty vector.
                    ex = ExprKind::Array(Vec::new());
                } else {
                    // Nonempty vector.
                    let first_expr = self.parse_expr()?;
                    if self.eat(&token::Semi) {
                        // Repeating array syntax: [ 0; 512 ]
                        let count = AnonConst {
                            id: ast::DUMMY_NODE_ID,
                            value: self.parse_expr()?,
                        };
                        self.expect(&token::CloseDelim(token::Bracket))?;
                        ex = ExprKind::Repeat(first_expr, count);
                    } else if self.eat(&token::Comma) {
                        // Vector with two or more elements.
                        let remaining_exprs = self.parse_seq_to_end(
                            &token::CloseDelim(token::Bracket),
                            SeqSep::trailing_allowed(token::Comma),
                            |p| Ok(p.parse_expr()?)
                        )?;
                        let mut exprs = vec![first_expr];
                        exprs.extend(remaining_exprs);
                        ex = ExprKind::Array(exprs);
                    } else {
                        // Vector with one element.
                        self.expect(&token::CloseDelim(token::Bracket))?;
                        ex = ExprKind::Array(vec![first_expr]);
                    }
                }
                hi = self.prev_span;
            }
            _ => {
                if self.eat_lt() {
                    let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
                    hi = path.span;
                    return Ok(self.mk_expr(lo.to(hi), ExprKind::Path(Some(qself), path), attrs));
                }
                if self.span.rust_2018() && self.check_keyword(kw::Async) {
                    return if self.is_async_block() { // check for `async {` and `async move {`
                        self.parse_async_block(attrs)
                    } else {
                        self.parse_lambda_expr(attrs)
                    };
                }
                if self.check_keyword(kw::Move) || self.check_keyword(kw::Static) {
                    return self.parse_lambda_expr(attrs);
                }
                if self.eat_keyword(kw::If) {
                    return self.parse_if_expr(attrs);
                }
                if self.eat_keyword(kw::For) {
                    let lo = self.prev_span;
                    return self.parse_for_expr(None, lo, attrs);
                }
                if self.eat_keyword(kw::While) {
                    let lo = self.prev_span;
                    return self.parse_while_expr(None, lo, attrs);
                }
                if let Some(label) = self.eat_label() {
                    let lo = label.ident.span;
                    self.expect(&token::Colon)?;
                    if self.eat_keyword(kw::While) {
                        return self.parse_while_expr(Some(label), lo, attrs)
                    }
                    if self.eat_keyword(kw::For) {
                        return self.parse_for_expr(Some(label), lo, attrs)
                    }
                    if self.eat_keyword(kw::Loop) {
                        return self.parse_loop_expr(Some(label), lo, attrs)
                    }
                    if self.token == token::OpenDelim(token::Brace) {
                        return self.parse_block_expr(Some(label),
                                                     lo,
                                                     BlockCheckMode::Default,
                                                     attrs);
                    }
                    let msg = "expected `while`, `for`, `loop` or `{` after a label";
                    let mut err = self.fatal(msg);
                    err.span_label(self.span, msg);
                    return Err(err);
                }
                if self.eat_keyword(kw::Loop) {
                    let lo = self.prev_span;
                    return self.parse_loop_expr(None, lo, attrs);
                }
                if self.eat_keyword(kw::Continue) {
                    let label = self.eat_label();
                    let ex = ExprKind::Continue(label);
                    let hi = self.prev_span;
                    return Ok(self.mk_expr(lo.to(hi), ex, attrs));
                }
                if self.eat_keyword(kw::Match) {
                    let match_sp = self.prev_span;
                    return self.parse_match_expr(attrs).map_err(|mut err| {
                        err.span_label(match_sp, "while parsing this match expression");
                        err
                    });
                }
                if self.eat_keyword(kw::Unsafe) {
                    return self.parse_block_expr(
                        None,
                        lo,
                        BlockCheckMode::Unsafe(ast::UserProvided),
                        attrs);
                }
                if self.is_do_catch_block() {
                    let mut db = self.fatal("found removed `do catch` syntax");
                    db.help("Following RFC #2388, the new non-placeholder syntax is `try`");
                    return Err(db);
                }
                if self.is_try_block() {
                    let lo = self.span;
                    assert!(self.eat_keyword(kw::Try));
                    return self.parse_try_block(lo, attrs);
                }
                if self.eat_keyword(kw::Return) {
                    if self.token.can_begin_expr() {
                        let e = self.parse_expr()?;
                        hi = e.span;
                        ex = ExprKind::Ret(Some(e));
                    } else {
                        ex = ExprKind::Ret(None);
                    }
                } else if self.eat_keyword(kw::Break) {
                    let label = self.eat_label();
                    let e = if self.token.can_begin_expr()
                               && !(self.token == token::OpenDelim(token::Brace)
                                    && self.restrictions.contains(
                                           Restrictions::NO_STRUCT_LITERAL)) {
                        Some(self.parse_expr()?)
                    } else {
                        None
                    };
                    ex = ExprKind::Break(label, e);
                    hi = self.prev_span;
                } else if self.eat_keyword(kw::Yield) {
                    if self.token.can_begin_expr() {
                        let e = self.parse_expr()?;
                        hi = e.span;
                        ex = ExprKind::Yield(Some(e));
                    } else {
                        ex = ExprKind::Yield(None);
                    }
                } else if self.token.is_keyword(kw::Let) {
                    // Catch this syntax error here, instead of in `parse_ident`, so
                    // that we can explicitly mention that let is not to be used as an expression
                    let mut db = self.fatal("expected expression, found statement (`let`)");
                    db.span_label(self.span, "expected expression");
                    db.note("variable declaration using `let` is a statement");
                    return Err(db);
                } else if self.span.rust_2018() && self.eat_keyword(kw::Await) {
                    let (await_hi, e_kind) = self.parse_await_macro_or_alt(lo, self.prev_span)?;
                    hi = await_hi;
                    ex = e_kind;
                } else if self.token.is_path_start() {
                    let path = self.parse_path(PathStyle::Expr)?;

                    // `!`, as an operator, is prefix, so we know this isn't that
                    if self.eat(&token::Not) {
                        // MACRO INVOCATION expression
                        let (delim, tts) = self.expect_delimited_token_tree()?;
                        hi = self.prev_span;
                        ex = ExprKind::Mac(respan(lo.to(hi), Mac_ { path, tts, delim }));
                    } else if self.check(&token::OpenDelim(token::Brace)) {
                        if let Some(expr) = self.maybe_parse_struct_expr(lo, &path, &attrs) {
                            return expr;
                        } else {
                            hi = path.span;
                            ex = ExprKind::Path(None, path);
                        }
                    } else {
                        hi = path.span;
                        ex = ExprKind::Path(None, path);
                    }
                } else {
                    if !self.unclosed_delims.is_empty() && self.check(&token::Semi) {
                        // Don't complain about bare semicolons after unclosed braces
                        // recovery in order to keep the error count down. Fixing the
                        // delimiters will possibly also fix the bare semicolon found in
                        // expression context. For example, silence the following error:
                        // ```
                        // error: expected expression, found `;`
                        //  --> file.rs:2:13
                        //   |
                        // 2 |     foo(bar(;
                        //   |             ^ expected expression
                        // ```
                        self.bump();
                        return Ok(self.mk_expr(self.span, ExprKind::Err, ThinVec::new()));
                    }
                    match self.parse_literal_maybe_minus() {
                        Ok(expr) => {
                            hi = expr.span;
                            ex = expr.node.clone();
                        }
                        Err(mut err) => {
                            self.cancel(&mut err);
                            let (span, msg) = match (&self.token, self.is_subparser) {
                                (&token::Token::Eof, Some(origin)) => {
                                    let sp = self.sess.source_map().next_point(self.span);
                                    (sp, format!("expected expression, found end of {}", origin))
                                }
                                _ => (self.span, format!(
                                    "expected expression, found {}",
                                    self.this_token_descr(),
                                )),
                            };
                            let mut err = self.struct_span_err(span, &msg);
                            let sp = self.sess.source_map().start_point(self.span);
                            if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow()
                                .get(&sp)
                            {
                                self.sess.expr_parentheses_needed(&mut err, *sp, None);
                            }
                            err.span_label(span, "expected expression");
                            return Err(err);
                        }
                    }
|
|
|
|
}
|
2014-07-06 21:29:29 +00:00
|
|
|
|
}
|
2012-05-23 22:06:11 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-03-09 14:41:01 +00:00
|
|
|
|
let expr = self.mk_expr(lo.to(hi), ex, attrs);
|
|
|
|
|
self.maybe_recover_from_bad_qpath(expr, true)
|
2015-11-03 16:39:51 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-05-16 20:33:26 +00:00
|
|
|
|
/// Parses `await!(<expr>)` calls, or alternatively recovers from the incorrect but reasonable
|
|
|
|
|
/// alternative syntaxes `await <expr>`, `await? <expr>`, `await(<expr>)` and
|
|
|
|
|
/// `await { <expr> }`.
|
|
|
|
|
fn parse_await_macro_or_alt(
|
2019-05-16 02:59:56 +00:00
|
|
|
|
&mut self,
|
|
|
|
|
lo: Span,
|
|
|
|
|
await_sp: Span,
|
|
|
|
|
) -> PResult<'a, (Span, ExprKind)> {
|
2019-05-16 20:33:26 +00:00
|
|
|
|
if self.token == token::Not {
|
|
|
|
|
// Handle correct `await!(<expr>)`.
|
|
|
|
|
// FIXME: make this an error when `await!` is no longer supported
|
|
|
|
|
// https://github.com/rust-lang/rust/issues/60610
|
|
|
|
|
self.expect(&token::Not)?;
|
|
|
|
|
self.expect(&token::OpenDelim(token::Paren))?;
|
|
|
|
|
let expr = self.parse_expr().map_err(|mut err| {
|
|
|
|
|
err.span_label(await_sp, "while parsing this await macro call");
|
|
|
|
|
err
|
|
|
|
|
})?;
|
|
|
|
|
self.expect(&token::CloseDelim(token::Paren))?;
|
2019-05-16 21:30:39 +00:00
|
|
|
|
Ok((self.prev_span, ExprKind::Await(ast::AwaitOrigin::MacroLike, expr)))
|
2019-05-16 20:33:26 +00:00
|
|
|
|
} else { // Handle `await <expr>`.
|
|
|
|
|
self.parse_incorrect_await_syntax(lo, await_sp)
|
|
|
|
|
}
|
2019-05-16 02:59:56 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-04-18 18:35:11 +00:00
|
|
|
|
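/// Attempts to parse a struct literal starting at the already-parsed `path`. Returns `None` when
/// struct literals are not allowed here and the upcoming braces could plausibly start a block.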
fn maybe_parse_struct_expr(
|
2019-04-15 00:09:03 +00:00
|
|
|
|
&mut self,
|
|
|
|
|
lo: Span,
|
2019-04-15 15:08:46 +00:00
|
|
|
|
path: &ast::Path,
|
|
|
|
|
attrs: &ThinVec<Attribute>,
|
2019-04-15 00:09:03 +00:00
|
|
|
|
) -> Option<PResult<'a, P<Expr>>> {
|
2019-04-18 20:58:57 +00:00
|
|
|
|
let struct_allowed = !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
|
|
|
|
|
let certainly_not_a_block = || self.look_ahead(1, |t| t.is_ident()) && (
|
|
|
|
|
// `{ ident, ` cannot start a block
|
|
|
|
|
self.look_ahead(2, |t| t == &token::Comma) ||
|
|
|
|
|
self.look_ahead(2, |t| t == &token::Colon) && (
|
|
|
|
|
// `{ ident: token, ` cannot start a block
|
|
|
|
|
self.look_ahead(4, |t| t == &token::Comma) ||
|
|
|
|
|
// `{ ident: ` cannot start a block unless it's a type ascription `ident: Type`
|
|
|
|
|
self.look_ahead(3, |t| !t.can_begin_type())
|
|
|
|
|
)
|
2019-04-15 00:09:03 +00:00
|
|
|
|
);
|
2019-04-18 20:58:57 +00:00
|
|
|
|
|
|
|
|
|
if struct_allowed || certainly_not_a_block() {
|
2019-04-15 00:09:03 +00:00
|
|
|
|
// This is a struct literal, but struct literals may not be allowed here (an error is reported below if not).
|
2019-04-18 18:35:11 +00:00
|
|
|
|
let expr = self.parse_struct_expr(lo, path.clone(), attrs.clone());
|
2019-04-18 20:58:57 +00:00
|
|
|
|
if let (Ok(expr), false) = (&expr, struct_allowed) {
|
2019-04-18 18:35:11 +00:00
|
|
|
|
let mut err = self.diagnostic().struct_span_err(
|
|
|
|
|
expr.span,
|
|
|
|
|
"struct literals are not allowed here",
|
|
|
|
|
);
|
|
|
|
|
err.multipart_suggestion(
|
2019-05-02 22:53:09 +00:00
|
|
|
|
"surround the struct literal with parentheses",
|
2019-04-18 18:35:11 +00:00
|
|
|
|
vec![
|
|
|
|
|
(lo.shrink_to_lo(), "(".to_string()),
|
|
|
|
|
(expr.span.shrink_to_hi(), ")".to_string()),
|
|
|
|
|
],
|
|
|
|
|
Applicability::MachineApplicable,
|
|
|
|
|
);
|
|
|
|
|
err.emit();
|
2019-04-15 00:09:03 +00:00
|
|
|
|
}
|
2019-04-18 18:35:11 +00:00
|
|
|
|
return Some(expr);
|
2019-04-15 00:09:03 +00:00
|
|
|
|
}
|
|
|
|
|
None
|
|
|
|
|
}
|
|
|
|
|
|
2017-03-15 00:22:48 +00:00
|
|
|
|
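/// Parses the body of a struct literal (fields and an optional `..base`) for the already-parsed
/// path `pth`, consuming the surrounding braces and recovering from malformed fields.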
fn parse_struct_expr(&mut self, lo: Span, pth: ast::Path, mut attrs: ThinVec<Attribute>)
|
2016-09-22 04:45:29 +00:00
|
|
|
|
-> PResult<'a, P<Expr>> {
|
2018-01-06 22:43:20 +00:00
|
|
|
|
let struct_sp = lo.to(self.prev_span);
|
2016-09-22 04:45:29 +00:00
|
|
|
|
self.bump();
|
|
|
|
|
let mut fields = Vec::new();
|
|
|
|
|
let mut base = None;
|
|
|
|
|
|
|
|
|
|
attrs.extend(self.parse_inner_attributes()?);
|
|
|
|
|
|
|
|
|
|
while self.token != token::CloseDelim(token::Brace) {
|
|
|
|
|
if self.eat(&token::DotDot) {
|
2017-10-10 02:11:22 +00:00
|
|
|
|
let exp_span = self.prev_span;
|
2016-09-22 04:45:29 +00:00
|
|
|
|
match self.parse_expr() {
|
|
|
|
|
Ok(e) => {
|
|
|
|
|
base = Some(e);
|
|
|
|
|
}
|
|
|
|
|
Err(mut e) => {
|
|
|
|
|
e.emit();
|
|
|
|
|
self.recover_stmt();
|
|
|
|
|
}
|
|
|
|
|
}
|
2017-10-10 02:11:22 +00:00
|
|
|
|
if self.token == token::Comma {
|
|
|
|
|
let mut err = self.sess.span_diagnostic.mut_span_err(
|
|
|
|
|
exp_span.to(self.prev_span),
|
|
|
|
|
"cannot use a comma after the base struct",
|
|
|
|
|
);
|
2019-01-25 21:03:27 +00:00
|
|
|
|
err.span_suggestion_short(
|
suggestion applicabilities for libsyntax and librustc, run-rustfix tests
Consider this a down payment on #50723. To recap, an `Applicability`
enum was recently (#50204) added, to convey to Rustfix and other tools
whether we think it's OK for them to blindly apply the suggestion, or
whether to prompt a human for guidance (because the suggestion might
contain placeholders that we can't infer, or because we think it has a
sufficiently high probability of being wrong even though it's—
presumably—right often enough to be worth emitting in the first place).
When a suggestion is marked as `MaybeIncorrect`, we try to use comments
to indicate precisely why (although there are a few places where we just
say `// speculative` because the present author's subjective judgement
balked at the idea that the suggestion has no false positives).
The `run-rustfix` directive is opportunistically set on some relevant UI
tests (and a couple tests that were in the `test/ui/suggestions`
directory, even if the suggestions didn't originate in librustc or
libsyntax). This is less trivial than it sounds, because a surprising
number of test files aren't equipped to be tested as fixed even when
they contain successfully fixable errors, because, e.g., there are more,
not-directly-related errors after fixing. Some test files need an
attribute or underscore to avoid unused warnings tripping up the "fixed
code is still producing diagnostics" check despite the fixes being
correct; this is an interesting contrast-to/inconsistency-with the
behavior of UI tests (which secretly pass `-A unused`), a behavior which
we probably ought to resolve one way or the other (filed issue #50926).
A few suggestion labels are reworded (e.g., to avoid phrasing it as a
question, which is discouraged by the style guidelines listed in
`.span_suggestion`'s doc-comment).
2018-05-19 21:52:24 +00:00
|
|
|
|
self.span,
|
|
|
|
|
"remove this comma",
|
2018-08-23 08:14:52 +00:00
|
|
|
|
String::new(),
|
|
|
|
|
Applicability::MachineApplicable
|
|
|
|
|
);
|
2017-10-10 02:11:22 +00:00
|
|
|
|
err.note("the base struct must always be the last field");
|
|
|
|
|
err.emit();
|
|
|
|
|
self.recover_stmt();
|
|
|
|
|
}
|
2016-09-22 04:45:29 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-21 23:28:51 +00:00
|
|
|
|
let mut recovery_field = None;
|
|
|
|
|
if let token::Ident(ident, _) = self.token {
|
|
|
|
|
if !self.token.is_reserved_ident() && self.look_ahead(1, |t| *t == token::Colon) {
|
|
|
|
|
// Use in case of error after field-looking code: `S { foo: () with a }`
|
|
|
|
|
let mut ident = ident.clone();
|
|
|
|
|
ident.span = self.span;
|
|
|
|
|
recovery_field = Some(ast::Field {
|
|
|
|
|
ident,
|
|
|
|
|
span: self.span,
|
|
|
|
|
expr: self.mk_expr(self.span, ExprKind::Err, ThinVec::new()),
|
|
|
|
|
is_shorthand: false,
|
|
|
|
|
attrs: ThinVec::new(),
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
}
|
2019-01-20 23:16:36 +00:00
|
|
|
|
let mut parsed_field = None;
|
2016-09-22 04:45:29 +00:00
|
|
|
|
match self.parse_field() {
|
2019-01-20 23:16:36 +00:00
|
|
|
|
Ok(f) => parsed_field = Some(f),
|
2016-09-22 04:45:29 +00:00
|
|
|
|
Err(mut e) => {
|
2018-01-06 22:43:20 +00:00
|
|
|
|
e.span_label(struct_sp, "while parsing this struct");
|
2016-09-22 04:45:29 +00:00
|
|
|
|
e.emit();
|
2018-05-29 11:19:58 +00:00
|
|
|
|
|
2018-06-05 17:04:15 +00:00
|
|
|
|
// If the next token is a comma, then try to parse
|
|
|
|
|
// what comes next as additional fields, rather than
|
|
|
|
|
// bailing out until next `}`.
|
2018-05-29 11:19:58 +00:00
|
|
|
|
if self.token != token::Comma {
|
2019-01-20 08:37:06 +00:00
|
|
|
|
self.recover_stmt_(SemiColonMode::Comma, BlockMode::Ignore);
|
|
|
|
|
if self.token != token::Comma {
|
|
|
|
|
break;
|
|
|
|
|
}
|
2018-05-29 11:19:58 +00:00
|
|
|
|
}
|
2016-09-22 04:45:29 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
match self.expect_one_of(&[token::Comma],
|
|
|
|
|
&[token::CloseDelim(token::Brace)]) {
|
2019-01-28 05:04:50 +00:00
|
|
|
|
Ok(_) => if let Some(f) = parsed_field.or(recovery_field) {
|
2019-01-21 23:28:51 +00:00
|
|
|
|
// only include the field if there's no parse error for the field name
|
2019-01-20 23:16:36 +00:00
|
|
|
|
fields.push(f);
|
|
|
|
|
}
|
2016-09-22 04:45:29 +00:00
|
|
|
|
Err(mut e) => {
|
2019-01-21 23:28:51 +00:00
|
|
|
|
if let Some(f) = recovery_field {
|
|
|
|
|
fields.push(f);
|
|
|
|
|
}
|
2019-01-20 08:37:06 +00:00
|
|
|
|
e.span_label(struct_sp, "while parsing this struct");
|
2016-09-22 04:45:29 +00:00
|
|
|
|
e.emit();
|
2019-01-20 08:37:06 +00:00
|
|
|
|
self.recover_stmt_(SemiColonMode::Comma, BlockMode::Ignore);
|
|
|
|
|
self.eat(&token::Comma);
|
2016-09-22 04:45:29 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2017-03-15 00:22:48 +00:00
|
|
|
|
let span = lo.to(self.span);
|
2016-09-22 04:45:29 +00:00
|
|
|
|
self.expect(&token::CloseDelim(token::Brace))?;
|
2017-03-15 00:22:48 +00:00
|
|
|
|
return Ok(self.mk_expr(span, ExprKind::Struct(pth, fields, base), attrs));
|
2016-09-22 04:45:29 +00:00
|
|
|
|
}
|
|
|
|
|
|
2015-11-03 16:39:51 +00:00
|
|
|
|
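/// Returns `already_parsed_attrs` if the caller has already collected the outer attributes,
/// otherwise parses them from the current position.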
fn parse_or_use_outer_attributes(&mut self,
|
2016-06-18 04:01:57 +00:00
|
|
|
|
already_parsed_attrs: Option<ThinVec<Attribute>>)
|
|
|
|
|
-> PResult<'a, ThinVec<Attribute>> {
|
2015-11-03 16:39:51 +00:00
|
|
|
|
if let Some(attrs) = already_parsed_attrs {
|
|
|
|
|
Ok(attrs)
|
|
|
|
|
} else {
|
2016-06-18 04:01:57 +00:00
|
|
|
|
self.parse_outer_attributes().map(|a| a.into())
|
2015-11-03 16:39:51 +00:00
|
|
|
|
}
|
2012-05-23 22:06:11 +00:00
|
|
|
|
}
|
2010-09-28 01:25:02 +00:00
|
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
|
/// Parses a block or unsafe block.
|
2019-05-16 20:33:26 +00:00
|
|
|
|
crate fn parse_block_expr(
|
|
|
|
|
&mut self,
|
|
|
|
|
opt_label: Option<Label>,
|
|
|
|
|
lo: Span,
|
|
|
|
|
blk_mode: BlockCheckMode,
|
|
|
|
|
outer_attrs: ThinVec<Attribute>,
|
|
|
|
|
) -> PResult<'a, P<Expr>> {
|
2016-03-23 03:01:37 +00:00
|
|
|
|
self.expect(&token::OpenDelim(token::Brace))?;
|
2015-11-03 16:39:51 +00:00
|
|
|
|
|
2016-06-18 04:01:57 +00:00
|
|
|
|
let mut attrs = outer_attrs;
|
|
|
|
|
attrs.extend(self.parse_inner_attributes()?);
|
2015-11-03 16:39:51 +00:00
|
|
|
|
|
2016-03-23 03:01:37 +00:00
|
|
|
|
let blk = self.parse_block_tail(lo, blk_mode)?;
|
2018-04-16 03:44:39 +00:00
|
|
|
|
return Ok(self.mk_expr(blk.span, ExprKind::Block(blk, opt_label), attrs));
|
2012-05-23 22:06:11 +00:00
|
|
|
|
}
|
2011-10-06 23:42:27 +00:00
|
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
|
/// Parses `a.b` or `a(13)` or `a[4]` or just `a`.
|
2018-05-31 22:53:30 +00:00
|
|
|
|
fn parse_dot_or_call_expr(&mut self,
|
2016-06-18 04:01:57 +00:00
|
|
|
|
already_parsed_attrs: Option<ThinVec<Attribute>>)
|
2015-12-20 21:00:43 +00:00
|
|
|
|
-> PResult<'a, P<Expr>> {
|
2016-03-23 03:01:37 +00:00
|
|
|
|
let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?;
|
2015-11-03 16:39:51 +00:00
|
|
|
|
|
2016-01-27 09:47:33 +00:00
|
|
|
|
let b = self.parse_bottom_expr();
|
2016-03-23 03:01:37 +00:00
|
|
|
|
let (span, b) = self.interpolated_or_expr_span(b)?;
|
2017-03-15 00:22:48 +00:00
|
|
|
|
self.parse_dot_or_call_expr_with(b, span, attrs)
|
2015-11-03 16:39:51 +00:00
|
|
|
|
}
|
|
|
|
|
|
2018-05-31 22:53:30 +00:00
|
|
|
|
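/// Parses any suffixes of the already-parsed expression `e0` and stitches the given outer
/// attributes onto the resulting expression.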
fn parse_dot_or_call_expr_with(&mut self,
|
2015-11-03 16:39:51 +00:00
|
|
|
|
e0: P<Expr>,
|
2017-03-15 00:22:48 +00:00
|
|
|
|
lo: Span,
|
2016-06-18 04:01:57 +00:00
|
|
|
|
mut attrs: ThinVec<Attribute>)
|
2015-12-20 21:00:43 +00:00
|
|
|
|
-> PResult<'a, P<Expr>> {
|
2015-11-03 16:39:51 +00:00
|
|
|
|
// Stitch the list of outer attributes onto the return value.
|
|
|
|
|
// A little bit ugly, but the best way given the current code
|
|
|
|
|
// structure
|
2016-01-23 23:52:43 +00:00
|
|
|
|
self.parse_dot_or_call_expr_with_(e0, lo)
|
2015-11-03 16:39:51 +00:00
|
|
|
|
.map(|expr|
|
|
|
|
|
expr.map(|mut expr| {
|
2016-06-18 04:01:57 +00:00
|
|
|
|
attrs.extend::<Vec<_>>(expr.attrs.into());
|
|
|
|
|
expr.attrs = attrs;
|
2015-11-03 16:39:51 +00:00
|
|
|
|
match expr.node {
|
2016-02-08 15:05:05 +00:00
|
|
|
|
ExprKind::If(..) | ExprKind::IfLet(..) => {
|
2016-06-18 04:01:57 +00:00
|
|
|
|
if !expr.attrs.is_empty() {
|
2015-11-03 16:39:51 +00:00
|
|
|
|
// Just point to the first attribute in there...
|
2016-06-18 04:01:57 +00:00
|
|
|
|
let span = expr.attrs[0].span;
|
2015-11-03 16:39:51 +00:00
|
|
|
|
|
|
|
|
|
self.span_err(span,
|
|
|
|
|
"attributes are not yet allowed on `if` \
|
|
|
|
|
expressions");
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
_ => {}
|
|
|
|
|
}
|
|
|
|
|
expr
|
|
|
|
|
})
|
|
|
|
|
)
|
2012-05-23 22:06:11 +00:00
|
|
|
|
}
|
2011-12-21 04:12:52 +00:00
|
|
|
|
|
2017-07-19 23:39:34 +00:00
|
|
|
|
// Assuming we have just parsed `.`, continue parsing into an expression.
|
|
|
|
|
fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
|
2019-05-11 14:41:37 +00:00
|
|
|
|
if self.span.rust_2018() && self.eat_keyword(kw::Await) {
|
2019-04-18 19:55:23 +00:00
|
|
|
|
let span = lo.to(self.prev_span);
|
|
|
|
|
let await_expr = self.mk_expr(
|
|
|
|
|
span,
|
|
|
|
|
ExprKind::Await(ast::AwaitOrigin::FieldLike, self_arg),
|
|
|
|
|
ThinVec::new(),
|
|
|
|
|
);
|
2019-05-16 21:31:07 +00:00
|
|
|
|
self.recover_from_await_method_call();
|
2019-04-18 19:55:23 +00:00
|
|
|
|
return Ok(await_expr);
|
|
|
|
|
}
|
2019-01-13 01:52:59 +00:00
|
|
|
|
let segment = self.parse_path_segment(PathStyle::Expr)?;
|
2019-01-21 23:35:31 +00:00
|
|
|
|
self.check_trailing_angle_brackets(&segment, token::OpenDelim(token::Paren));
|
2019-01-21 18:42:06 +00:00
|
|
|
|
|
2016-01-20 04:15:03 +00:00
|
|
|
|
Ok(match self.token {
|
|
|
|
|
token::OpenDelim(token::Paren) => {
|
2017-07-19 23:39:34 +00:00
|
|
|
|
// Method call `expr.f()`
|
|
|
|
|
let mut args = self.parse_unspanned_seq(
|
2016-01-20 04:15:03 +00:00
|
|
|
|
&token::OpenDelim(token::Paren),
|
|
|
|
|
&token::CloseDelim(token::Paren),
|
2016-02-23 04:24:42 +00:00
|
|
|
|
SeqSep::trailing_allowed(token::Comma),
|
2016-03-23 03:01:37 +00:00
|
|
|
|
|p| Ok(p.parse_expr()?)
|
|
|
|
|
)?;
|
2017-07-19 23:39:34 +00:00
|
|
|
|
args.insert(0, self_arg);
|
2016-01-20 04:15:03 +00:00
|
|
|
|
|
2017-07-19 23:39:34 +00:00
|
|
|
|
let span = lo.to(self.prev_span);
|
|
|
|
|
self.mk_expr(span, ExprKind::MethodCall(segment, args), ThinVec::new())
|
2016-01-20 04:15:03 +00:00
|
|
|
|
}
|
|
|
|
|
_ => {
|
2017-07-19 23:39:34 +00:00
|
|
|
|
// Field access `expr.f`
|
2018-02-23 17:48:54 +00:00
|
|
|
|
if let Some(args) = segment.args {
|
|
|
|
|
self.span_err(args.span(),
|
2017-07-06 23:39:55 +00:00
|
|
|
|
"field expressions may not have generic arguments");
|
2016-01-20 04:15:03 +00:00
|
|
|
|
}
|
|
|
|
|
|
2017-07-19 23:39:34 +00:00
|
|
|
|
let span = lo.to(self.prev_span);
|
2018-03-18 13:47:09 +00:00
|
|
|
|
self.mk_expr(span, ExprKind::Field(self_arg, segment.ident), ThinVec::new())
|
2016-01-20 04:15:03 +00:00
|
|
|
|
}
|
|
|
|
|
})
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-21 18:42:06 +00:00
|
|
|
|
/// This function checks if there are trailing angle brackets and produces
|
|
|
|
|
/// a diagnostic to suggest removing them.
|
|
|
|
|
///
|
|
|
|
|
/// ```ignore (diagnostic)
|
|
|
|
|
/// let _ = vec![1, 2, 3].into_iter().collect::<Vec<usize>>>>();
|
|
|
|
|
/// ^^ help: remove extra angle brackets
|
|
|
|
|
/// ```
|
2019-01-21 23:35:31 +00:00
|
|
|
|
fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: token::Token) {
|
|
|
|
|
// This function is intended to be invoked after parsing a path segment where there are two
|
2019-01-21 18:42:06 +00:00
|
|
|
|
// cases:
|
|
|
|
|
//
|
2019-01-21 23:35:31 +00:00
|
|
|
|
// 1. A specific token is expected after the path segment.
|
|
|
|
|
// e.g., `x.foo(`, `x.foo::<u32>(` (parenthesis - method call),
|
|
|
|
|
// `Foo::`, or `Foo::<Bar>::` (mod sep - continued path).
|
|
|
|
|
// 2. No specific token is expected after the path segment.
|
|
|
|
|
// e.g., `x.foo` (field access)
|
2019-01-21 18:42:06 +00:00
|
|
|
|
//
|
2019-01-21 23:35:31 +00:00
|
|
|
|
// This function is called after parsing `.foo` and before parsing the token `end` (if
|
|
|
|
|
// present). This includes any angle bracket arguments, such as `.foo::<u32>` or
|
|
|
|
|
// `Foo::<Bar>`.
|
2019-01-21 18:42:06 +00:00
|
|
|
|
|
|
|
|
|
// We only care about trailing angle brackets if we previously parsed angle bracket
|
|
|
|
|
// arguments. This helps stop us incorrectly suggesting that extra angle brackets be
|
|
|
|
|
// removed in this case:
|
|
|
|
|
//
|
|
|
|
|
// `x.foo >> (3)` (where `x.foo` is a `u32` for example)
|
|
|
|
|
//
|
|
|
|
|
// This case is particularly tricky as we won't notice it just looking at the tokens -
|
|
|
|
|
// it will appear the same (in terms of upcoming tokens) as below (since the `::<u32>` will
|
|
|
|
|
// have already been parsed):
|
|
|
|
|
//
|
|
|
|
|
// `x.foo::<u32>>>(3)`
|
|
|
|
|
let parsed_angle_bracket_args = segment.args
|
|
|
|
|
.as_ref()
|
|
|
|
|
.map(|args| args.is_angle_bracketed())
|
|
|
|
|
.unwrap_or(false);
|
|
|
|
|
|
|
|
|
|
debug!(
|
|
|
|
|
"check_trailing_angle_brackets: parsed_angle_bracket_args={:?}",
|
|
|
|
|
parsed_angle_bracket_args,
|
|
|
|
|
);
|
|
|
|
|
if !parsed_angle_bracket_args {
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Keep the span at the start so we can highlight the sequence of `>` characters to be
|
|
|
|
|
// removed.
|
|
|
|
|
let lo = self.span;
|
|
|
|
|
|
|
|
|
|
// We need to look-ahead to see if we have `>` characters without moving the cursor forward
|
|
|
|
|
// (since we might have the field access case and the characters we're eating are
|
|
|
|
|
// actual operators and not trailing characters, i.e., `x.foo >> 3`).
|
|
|
|
|
let mut position = 0;
|
|
|
|
|
|
2019-01-21 23:35:31 +00:00
|
|
|
|
// We can encounter `>` or `>>` tokens in any order, so we need to keep track of how
|
|
|
|
|
// many of each (so we can correctly pluralize our error messages) and continue to
|
|
|
|
|
// advance.
|
2019-01-21 18:42:06 +00:00
|
|
|
|
let mut number_of_shr = 0;
|
2019-01-21 23:35:31 +00:00
|
|
|
|
let mut number_of_gt = 0;
|
|
|
|
|
while self.look_ahead(position, |t| {
|
|
|
|
|
trace!("check_trailing_angle_brackets: t={:?}", t);
|
|
|
|
|
if *t == token::BinOp(token::BinOpToken::Shr) {
|
|
|
|
|
number_of_shr += 1;
|
|
|
|
|
true
|
|
|
|
|
} else if *t == token::Gt {
|
|
|
|
|
number_of_gt += 1;
|
|
|
|
|
true
|
|
|
|
|
} else {
|
|
|
|
|
false
|
|
|
|
|
}
|
|
|
|
|
}) {
|
2019-01-21 18:42:06 +00:00
|
|
|
|
position += 1;
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-21 23:35:31 +00:00
|
|
|
|
// If we didn't find any trailing `>` characters, then we have nothing to error about.
|
2019-01-21 18:42:06 +00:00
|
|
|
|
debug!(
|
2019-01-21 23:35:31 +00:00
|
|
|
|
"check_trailing_angle_brackets: number_of_gt={:?} number_of_shr={:?}",
|
|
|
|
|
number_of_gt, number_of_shr,
|
2019-01-21 18:42:06 +00:00
|
|
|
|
);
|
2019-01-21 23:35:31 +00:00
|
|
|
|
if number_of_gt < 1 && number_of_shr < 1 {
|
2019-01-21 18:42:06 +00:00
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-21 23:35:31 +00:00
|
|
|
|
// Finally, double check that we have our end token as otherwise this is the
|
|
|
|
|
// second case.
|
|
|
|
|
if self.look_ahead(position, |t| {
|
|
|
|
|
trace!("check_trailing_angle_brackets: t={:?}", t);
|
|
|
|
|
*t == end
|
|
|
|
|
}) {
|
|
|
|
|
// Eat from where we started until the end token so that parsing can continue
|
2019-01-21 18:42:06 +00:00
|
|
|
|
// as if we didn't have those extra angle brackets.
|
2019-01-21 23:35:31 +00:00
|
|
|
|
self.eat_to_tokens(&[&end]);
|
2019-01-21 18:42:06 +00:00
|
|
|
|
let span = lo.until(self.span);
|
|
|
|
|
|
2019-01-21 23:35:31 +00:00
|
|
|
|
let plural = number_of_gt > 1 || number_of_shr >= 1;
|
2019-01-21 18:42:06 +00:00
|
|
|
|
self.diagnostic()
|
2019-01-21 20:16:46 +00:00
|
|
|
|
.struct_span_err(
|
|
|
|
|
span,
|
|
|
|
|
&format!("unmatched angle bracket{}", if plural { "s" } else { "" }),
|
|
|
|
|
)
|
2019-01-25 21:03:27 +00:00
|
|
|
|
.span_suggestion(
|
2019-01-21 18:42:06 +00:00
|
|
|
|
span,
|
2019-01-21 20:16:46 +00:00
|
|
|
|
&format!("remove extra angle bracket{}", if plural { "s" } else { "" }),
|
2019-01-21 18:42:06 +00:00
|
|
|
|
String::new(),
|
|
|
|
|
Applicability::MachineApplicable,
|
|
|
|
|
)
|
|
|
|
|
.emit();
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2017-03-15 00:22:48 +00:00
|
|
|
|
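/// Repeatedly parses the `?`, `.field`/`.method(...)`, call `(...)`, and index `[...]` suffixes
/// that may follow the already-parsed expression `e0`.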
fn parse_dot_or_call_expr_with_(&mut self, e0: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
|
2012-05-23 22:06:11 +00:00
|
|
|
|
let mut e = e0;
|
2012-05-24 20:35:57 +00:00
|
|
|
|
let mut hi;
|
2012-05-23 22:06:11 +00:00
|
|
|
|
loop {
|
2016-02-28 22:38:48 +00:00
|
|
|
|
// expr?
|
|
|
|
|
while self.eat(&token::Question) {
|
2017-03-15 00:22:48 +00:00
|
|
|
|
let hi = self.prev_span;
|
|
|
|
|
e = self.mk_expr(lo.to(hi), ExprKind::Try(e), ThinVec::new());
|
2016-02-28 22:38:48 +00:00
|
|
|
|
}
|
|
|
|
|
|
2012-05-23 22:06:11 +00:00
|
|
|
|
// expr.f
|
2015-12-30 23:11:53 +00:00
|
|
|
|
if self.eat(&token::Dot) {
|
2013-12-30 23:09:41 +00:00
|
|
|
|
match self.token {
|
2019-03-25 23:11:21 +00:00
|
|
|
|
token::Ident(..) => {
|
|
|
|
|
e = self.parse_dot_suffix(e, lo)?;
|
2014-10-18 02:39:44 +00:00
|
|
|
|
}
|
2019-05-18 22:04:26 +00:00
|
|
|
|
token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => {
|
2019-03-25 23:11:21 +00:00
|
|
|
|
let span = self.span;
|
|
|
|
|
self.bump();
|
2019-05-18 22:04:26 +00:00
|
|
|
|
let field = ExprKind::Field(e, Ident::new(symbol, span));
|
2019-03-25 23:11:21 +00:00
|
|
|
|
e = self.mk_expr(lo.to(span), field, ThinVec::new());
|
|
|
|
|
|
2019-03-26 19:09:13 +00:00
|
|
|
|
self.expect_no_suffix(span, "a tuple index", suffix);
|
2019-03-25 23:11:21 +00:00
|
|
|
|
}
|
2019-05-18 22:04:26 +00:00
|
|
|
|
token::Literal(token::Lit { kind: token::Float, symbol, .. }) => {
|
2019-03-25 23:11:21 +00:00
|
|
|
|
self.bump();
|
2019-05-18 22:04:26 +00:00
|
|
|
|
let fstr = symbol.as_str();
|
|
|
|
|
let msg = format!("unexpected token: `{}`", symbol);
|
|
|
|
|
let mut err = self.diagnostic().struct_span_err(self.prev_span, &msg);
|
2019-03-25 23:11:21 +00:00
|
|
|
|
err.span_label(self.prev_span, "unexpected token");
|
|
|
|
|
if fstr.chars().all(|x| "0123456789.".contains(x)) {
|
|
|
|
|
let float = match fstr.parse::<f64>().ok() {
|
|
|
|
|
Some(f) => f,
|
|
|
|
|
None => continue,
|
|
|
|
|
};
|
|
|
|
|
let sugg = pprust::to_string(|s| {
|
|
|
|
|
use crate::print::pprust::PrintState;
|
|
|
|
|
s.popen()?;
|
|
|
|
|
s.print_expr(&e)?;
|
|
|
|
|
s.s.word(".")?;
|
|
|
|
|
s.print_usize(float.trunc() as usize)?;
|
|
|
|
|
s.pclose()?;
|
|
|
|
|
s.s.word(".")?;
|
|
|
|
|
s.s.word(fstr.splitn(2, ".").last().unwrap().to_string())
|
|
|
|
|
});
|
|
|
|
|
err.span_suggestion(
|
|
|
|
|
lo.to(self.prev_span),
|
|
|
|
|
"try parenthesizing the first index",
|
|
|
|
|
sugg,
|
|
|
|
|
Applicability::MachineApplicable
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
return Err(err);
|
2014-08-10 03:54:33 +00:00
|
|
|
|
|
2019-03-25 23:11:21 +00:00
|
|
|
|
}
|
|
|
|
|
_ => {
|
|
|
|
|
// FIXME Could factor this out into non_fatal_unexpected or something.
|
|
|
|
|
let actual = self.this_token_to_string();
|
|
|
|
|
self.span_err(self.span, &format!("unexpected token: `{}`", actual));
|
|
|
|
|
}
|
2012-05-23 22:06:11 +00:00
|
|
|
|
}
|
2013-10-01 21:31:03 +00:00
|
|
|
|
continue;
|
2012-01-31 12:31:02 +00:00
|
|
|
|
}
|
2016-02-08 22:55:55 +00:00
|
|
|
|
if self.expr_is_complete(&e) { break; }
|
2013-12-30 23:09:41 +00:00
|
|
|
|
match self.token {
|
2019-03-29 02:58:45 +00:00
|
|
|
|
// expr(...)
|
|
|
|
|
token::OpenDelim(token::Paren) => {
|
|
|
|
|
let seq = self.parse_unspanned_seq(
|
|
|
|
|
&token::OpenDelim(token::Paren),
|
|
|
|
|
&token::CloseDelim(token::Paren),
|
|
|
|
|
SeqSep::trailing_allowed(token::Comma),
|
|
|
|
|
|p| Ok(p.parse_expr()?)
|
|
|
|
|
).map(|es| {
|
2019-03-27 00:36:07 +00:00
|
|
|
|
let nd = self.mk_call(e, es);
|
2019-03-29 02:58:45 +00:00
|
|
|
|
let hi = self.prev_span;
|
|
|
|
|
self.mk_expr(lo.to(hi), nd, ThinVec::new())
|
|
|
|
|
});
|
|
|
|
|
e = self.recover_seq_parse_error(token::Paren, lo, seq);
|
2019-03-27 00:36:07 +00:00
|
|
|
|
}
|
2012-05-31 01:14:40 +00:00
|
|
|
|
|
2019-03-29 02:58:45 +00:00
|
|
|
|
// expr[...]
|
|
|
|
|
// Could be either an index expression or a slicing expression.
|
|
|
|
|
token::OpenDelim(token::Bracket) => {
|
|
|
|
|
self.bump();
|
|
|
|
|
let ix = self.parse_expr()?;
|
|
|
|
|
hi = self.span;
|
|
|
|
|
self.expect(&token::CloseDelim(token::Bracket))?;
|
|
|
|
|
let index = self.mk_index(e, ix);
|
|
|
|
|
e = self.mk_expr(lo.to(hi), index, ThinVec::new())
|
|
|
|
|
}
|
|
|
|
|
_ => return Ok(e)
|
2012-05-31 01:14:40 +00:00
|
|
|
|
}
|
2010-09-28 17:30:34 +00:00
|
|
|
|
}
|
2015-03-28 21:58:51 +00:00
|
|
|
|
return Ok(e);
|
2010-09-28 17:30:34 +00:00
|
|
|
|
}
|
2010-09-28 01:25:02 +00:00
|
|
|
|
|
2018-05-31 22:53:30 +00:00
|
|
|
|
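/// Normalizes the current token: reports a stray `$ident` as an unknown macro variable and
/// replaces interpolated identifier and lifetime tokens with ordinary `Ident`/`Lifetime` tokens.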
crate fn process_potential_macro_variable(&mut self) {
|
2018-03-24 16:49:50 +00:00
|
|
|
|
let (token, span) = match self.token {
|
2017-07-31 20:04:34 +00:00
|
|
|
|
token::Dollar if self.span.ctxt() != syntax_pos::hygiene::SyntaxContext::empty() &&
|
2017-03-28 05:32:43 +00:00
|
|
|
|
self.look_ahead(1, |t| t.is_ident()) => {
|
|
|
|
|
self.bump();
|
2018-03-10 05:56:40 +00:00
|
|
|
|
let name = match self.token {
|
|
|
|
|
token::Ident(ident, _) => ident,
|
|
|
|
|
_ => unreachable!()
|
|
|
|
|
};
|
2018-02-19 07:08:23 +00:00
|
|
|
|
let mut err = self.fatal(&format!("unknown macro variable `{}`", name));
|
|
|
|
|
err.span_label(self.span, "unknown macro variable");
|
|
|
|
|
err.emit();
|
2018-12-20 23:58:55 +00:00
|
|
|
|
self.bump();
|
2017-03-29 07:17:18 +00:00
|
|
|
|
return
|
|
|
|
|
}
|
|
|
|
|
token::Interpolated(ref nt) => {
|
|
|
|
|
self.meta_var_span = Some(self.span);
|
2018-03-24 16:49:50 +00:00
|
|
|
|
// Interpolated identifier and lifetime tokens are replaced with usual identifier
|
|
|
|
|
// and lifetime tokens, so the former are never encountered during normal parsing.
|
2019-02-14 22:10:02 +00:00
|
|
|
|
match **nt {
|
2018-03-24 16:49:50 +00:00
|
|
|
|
token::NtIdent(ident, is_raw) => (token::Ident(ident, is_raw), ident.span),
|
|
|
|
|
token::NtLifetime(ident) => (token::Lifetime(ident), ident.span),
|
2017-03-29 07:17:18 +00:00
|
|
|
|
_ => return,
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
_ => return,
|
|
|
|
|
};
|
2018-03-24 16:49:50 +00:00
|
|
|
|
self.token = token;
|
|
|
|
|
self.span = span;
|
2012-07-05 21:30:56 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
|
/// Parses a single token tree from the input.
|
2018-05-31 22:53:30 +00:00
|
|
|
|
crate fn parse_token_tree(&mut self) -> TokenTree {
|
2014-10-29 10:37:54 +00:00
|
|
|
|
match self.token {
|
2017-01-29 08:38:44 +00:00
|
|
|
|
token::OpenDelim(..) => {
|
2017-02-20 05:44:06 +00:00
|
|
|
|
let frame = mem::replace(&mut self.token_cursor.frame,
|
|
|
|
|
self.token_cursor.stack.pop().unwrap());
|
2018-09-09 01:07:02 +00:00
|
|
|
|
self.span = frame.span.entire();
|
2017-01-13 04:49:20 +00:00
|
|
|
|
self.bump();
|
2018-11-29 23:02:04 +00:00
|
|
|
|
TokenTree::Delimited(
|
|
|
|
|
frame.span,
|
|
|
|
|
frame.delim,
|
2018-12-19 03:53:52 +00:00
|
|
|
|
frame.tree_cursor.stream.into(),
|
2018-11-29 23:02:04 +00:00
|
|
|
|
)
|
2014-10-29 10:37:54 +00:00
|
|
|
|
},
|
2017-01-29 08:38:44 +00:00
|
|
|
|
token::CloseDelim(_) | token::Eof => unreachable!(),
|
2017-03-04 19:37:45 +00:00
|
|
|
|
_ => {
|
2018-03-08 11:27:23 +00:00
|
|
|
|
let (token, span) = (mem::replace(&mut self.token, token::Whitespace), self.span);
|
2017-03-04 19:37:45 +00:00
|
|
|
|
self.bump();
|
2017-03-29 07:17:18 +00:00
|
|
|
|
TokenTree::Token(span, token)
|
2017-03-04 19:37:45 +00:00
|
|
|
|
}
|
2013-02-04 21:15:17 +00:00
|
|
|
|
}
|
2012-05-21 17:45:56 +00:00
|
|
|
|
}
|
|
|
|
|
|
2014-01-09 13:05:33 +00:00
|
|
|
|
/// Parses a stream of tokens into a list of `TokenTree`s,
|
2013-04-15 23:13:42 +00:00
|
|
|
|
/// up to EOF.
|
2015-12-20 21:00:43 +00:00
|
|
|
|
pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> {
|
2014-02-28 21:09:09 +00:00
|
|
|
|
let mut tts = Vec::new();
|
2014-10-27 08:22:52 +00:00
|
|
|
|
while self.token != token::Eof {
|
2017-03-06 06:45:28 +00:00
|
|
|
|
tts.push(self.parse_token_tree());
|
2012-11-21 00:07:57 +00:00
|
|
|
|
}
|
2015-03-28 21:58:51 +00:00
|
|
|
|
Ok(tts)
|
2012-11-21 00:07:57 +00:00
|
|
|
|
}
|
|
|
|
|
|
2017-03-08 23:13:35 +00:00
|
|
|
|
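/// Parses token trees until EOF or a closing delimiter is reached, collecting them into a
/// `TokenStream`.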
pub fn parse_tokens(&mut self) -> TokenStream {
|
|
|
|
|
let mut result = Vec::new();
|
|
|
|
|
loop {
|
|
|
|
|
match self.token {
|
|
|
|
|
token::Eof | token::CloseDelim(..) => break,
|
|
|
|
|
_ => result.push(self.parse_token_tree().into()),
|
|
|
|
|
}
|
|
|
|
|
}
|
2018-12-11 23:01:08 +00:00
|
|
|
|
TokenStream::new(result)
|
2017-03-08 23:13:35 +00:00
|
|
|
|
}
|
|
|
|
|
|
2015-10-15 12:51:30 +00:00
|
|
|
|
/// Parses a prefix-unary-operator expression.
|
2018-05-31 22:53:30 +00:00
|
|
|
|
fn parse_prefix_expr(&mut self,
|
2016-06-18 04:01:57 +00:00
|
|
|
|
already_parsed_attrs: Option<ThinVec<Attribute>>)
|
2015-12-20 21:00:43 +00:00
|
|
|
|
-> PResult<'a, P<Expr>> {
|
2016-03-23 03:01:37 +00:00
|
|
|
|
let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?;
|
2017-03-15 00:22:48 +00:00
|
|
|
|
let lo = self.span;
|
2015-01-10 23:14:03 +00:00
|
|
|
|
// Note: when adding new unary operators, don't forget to adjust Token::can_begin_expr()
|
2017-03-15 00:22:48 +00:00
|
|
|
|
let (hi, ex) = match self.token {
|
2015-10-15 18:37:21 +00:00
|
|
|
|
token::Not => {
|
2015-12-30 23:11:53 +00:00
|
|
|
|
self.bump();
|
2016-01-27 09:47:33 +00:00
|
|
|
|
let e = self.parse_prefix_expr(None);
|
2016-03-23 03:01:37 +00:00
|
|
|
|
let (span, e) = self.interpolated_or_expr_span(e)?;
|
2017-09-21 16:19:34 +00:00
|
|
|
|
(lo.to(span), self.mk_unary(UnOp::Not, e))
|
2015-10-15 18:37:21 +00:00
|
|
|
|
}
|
2017-05-03 06:31:47 +00:00
|
|
|
|
// Suggest `!` for bitwise negation when encountering a `~`
|
|
|
|
|
token::Tilde => {
|
|
|
|
|
self.bump();
|
|
|
|
|
let e = self.parse_prefix_expr(None);
|
|
|
|
|
let (span, e) = self.interpolated_or_expr_span(e)?;
|
|
|
|
|
let span_of_tilde = lo;
|
2018-09-19 23:23:21 +00:00
|
|
|
|
let mut err = self.diagnostic()
|
|
|
|
|
.struct_span_err(span_of_tilde, "`~` cannot be used as a unary operator");
|
2019-01-25 21:03:27 +00:00
|
|
|
|
err.span_suggestion_short(
|
|
|
|
|
span_of_tilde,
|
|
|
|
|
"use `!` to perform bitwise negation",
|
|
|
|
|
"!".to_owned(),
|
|
|
|
|
Applicability::MachineApplicable
|
|
|
|
|
);
|
2017-05-03 06:31:47 +00:00
|
|
|
|
err.emit();
|
2017-09-21 16:19:34 +00:00
|
|
|
|
(lo.to(span), self.mk_unary(UnOp::Not, e))
|
2017-05-03 06:31:47 +00:00
|
|
|
|
}
|
2015-10-15 18:37:21 +00:00
|
|
|
|
token::BinOp(token::Minus) => {
|
2015-12-30 23:11:53 +00:00
|
|
|
|
self.bump();
|
2016-01-27 09:47:33 +00:00
|
|
|
|
let e = self.parse_prefix_expr(None);
|
2016-03-23 03:01:37 +00:00
|
|
|
|
let (span, e) = self.interpolated_or_expr_span(e)?;
|
2017-09-21 16:19:34 +00:00
|
|
|
|
(lo.to(span), self.mk_unary(UnOp::Neg, e))
|
2015-10-15 18:37:21 +00:00
|
|
|
|
}
|
|
|
|
|
token::BinOp(token::Star) => {
|
2015-12-30 23:11:53 +00:00
|
|
|
|
self.bump();
|
2016-01-27 09:47:33 +00:00
|
|
|
|
let e = self.parse_prefix_expr(None);
|
2016-03-23 03:01:37 +00:00
|
|
|
|
let (span, e) = self.interpolated_or_expr_span(e)?;
|
2017-09-21 16:19:34 +00:00
|
|
|
|
(lo.to(span), self.mk_unary(UnOp::Deref, e))
|
2015-10-15 18:37:21 +00:00
|
|
|
|
}
|
|
|
|
|
token::BinOp(token::And) | token::AndAnd => {
|
2016-03-23 03:01:37 +00:00
|
|
|
|
self.expect_and()?;
|
2017-03-16 21:47:32 +00:00
|
|
|
|
let m = self.parse_mutability();
|
2016-01-27 09:47:33 +00:00
|
|
|
|
let e = self.parse_prefix_expr(None);
|
2016-03-23 03:01:37 +00:00
|
|
|
|
let (span, e) = self.interpolated_or_expr_span(e)?;
|
2017-09-21 16:19:34 +00:00
|
|
|
|
(lo.to(span), ExprKind::AddrOf(m, e))
|
2015-10-15 18:37:21 +00:00
|
|
|
|
}
|
2019-05-11 14:41:37 +00:00
|
|
|
|
token::Ident(..) if self.token.is_keyword(kw::Box) => {
|
2015-12-30 23:11:53 +00:00
|
|
|
|
self.bump();
|
2016-01-27 09:47:33 +00:00
|
|
|
|
let e = self.parse_prefix_expr(None);
|
2016-03-23 03:01:37 +00:00
|
|
|
|
let (span, e) = self.interpolated_or_expr_span(e)?;
|
2017-09-21 16:19:34 +00:00
|
|
|
|
(lo.to(span), ExprKind::Box(e))
|
2015-10-15 18:37:21 +00:00
|
|
|
|
}
|
2018-03-22 05:38:24 +00:00
|
|
|
|
token::Ident(..) if self.token.is_ident_named("not") => {
|
|
|
|
|
// `not` is just an ordinary identifier in Rust-the-language,
|
|
|
|
|
// but as `rustc`-the-compiler, we can issue clever diagnostics
|
|
|
|
|
// for confused users who really want to say `!`
|
|
|
|
|
let token_cannot_continue_expr = |t: &token::Token| match *t {
|
|
|
|
|
// These tokens can start an expression after `!`, but
|
|
|
|
|
// can't continue an expression after an ident
|
|
|
|
|
token::Ident(ident, is_raw) => token::ident_can_begin_expr(ident, is_raw),
|
|
|
|
|
token::Literal(..) | token::Pound => true,
|
2019-02-14 22:10:02 +00:00
|
|
|
|
token::Interpolated(ref nt) => match **nt {
|
2018-03-22 05:38:24 +00:00
|
|
|
|
token::NtIdent(..) | token::NtExpr(..) |
|
|
|
|
|
token::NtBlock(..) | token::NtPath(..) => true,
|
|
|
|
|
_ => false,
|
|
|
|
|
},
|
|
|
|
|
_ => false
|
|
|
|
|
};
|
|
|
|
|
let cannot_continue_expr = self.look_ahead(1, token_cannot_continue_expr);
|
|
|
|
|
if cannot_continue_expr {
|
|
|
|
|
self.bump();
|
|
|
|
|
// Emit the error ...
|
|
|
|
|
let mut err = self.diagnostic()
|
|
|
|
|
.struct_span_err(self.span,
|
|
|
|
|
&format!("unexpected {} after identifier",
|
|
|
|
|
self.this_token_descr()));
|
|
|
|
|
// span the `not` plus trailing whitespace to avoid
|
|
|
|
|
// trailing whitespace after the `!` in our suggestion
|
2018-08-18 10:14:09 +00:00
|
|
|
|
let to_replace = self.sess.source_map()
|
2018-03-22 05:38:24 +00:00
|
|
|
|
.span_until_non_whitespace(lo.to(self.span));
|
2019-01-25 21:03:27 +00:00
|
|
|
|
err.span_suggestion_short(
|
|
|
|
|
to_replace,
|
|
|
|
|
"use `!` to perform logical negation",
|
|
|
|
|
"!".to_owned(),
|
|
|
|
|
Applicability::MachineApplicable
|
|
|
|
|
);
|
2018-03-22 05:38:24 +00:00
|
|
|
|
err.emit();
|
|
|
|
|
// —and recover! (just as if we were in the block
|
|
|
|
|
// for the `token::Not` arm)
|
|
|
|
|
let e = self.parse_prefix_expr(None);
|
|
|
|
|
let (span, e) = self.interpolated_or_expr_span(e)?;
|
|
|
|
|
(lo.to(span), self.mk_unary(UnOp::Not, e))
|
|
|
|
|
} else {
|
|
|
|
|
return self.parse_dot_or_call_expr(Some(attrs));
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
_ => { return self.parse_dot_or_call_expr(Some(attrs)); }
|
2015-10-15 18:37:21 +00:00
|
|
|
|
};
|
2017-03-15 00:22:48 +00:00
|
|
|
|
return Ok(self.mk_expr(lo.to(hi), ex, attrs));
|
2012-05-23 22:06:11 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
|
/// Parses an associative expression.
|
2015-10-15 12:51:30 +00:00
|
|
|
|
///
|
|
|
|
|
/// This parses an expression accounting for associativity and precedence of the operators in
|
|
|
|
|
/// the expression.
|
2018-12-14 09:48:41 +00:00
|
|
|
|
#[inline]
|
2018-05-31 22:53:30 +00:00
|
|
|
|
fn parse_assoc_expr(&mut self,
|
2016-06-18 04:01:57 +00:00
|
|
|
|
already_parsed_attrs: Option<ThinVec<Attribute>>)
|
2015-12-20 21:00:43 +00:00
|
|
|
|
-> PResult<'a, P<Expr>> {
|
2015-11-03 16:39:51 +00:00
|
|
|
|
self.parse_assoc_expr_with(0, already_parsed_attrs.into())
|
2012-05-23 22:06:11 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
|
/// Parses an associative expression with operators of at least `min_prec` precedence.
|
2018-05-31 22:53:30 +00:00
|
|
|
|
fn parse_assoc_expr_with(&mut self,
|
2015-10-16 21:06:25 +00:00
|
|
|
|
min_prec: usize,
|
2015-11-03 16:39:51 +00:00
|
|
|
|
lhs: LhsExpr)
|
2015-12-20 21:00:43 +00:00
|
|
|
|
-> PResult<'a, P<Expr>> {
|
2015-11-03 16:39:51 +00:00
|
|
|
|
let mut lhs = if let LhsExpr::AlreadyParsed(expr) = lhs {
|
|
|
|
|
expr
|
2015-10-15 12:51:30 +00:00
|
|
|
|
} else {
|
2015-11-03 16:39:51 +00:00
|
|
|
|
let attrs = match lhs {
|
|
|
|
|
LhsExpr::AttributesParsed(attrs) => Some(attrs),
|
|
|
|
|
_ => None,
|
|
|
|
|
};
|
2017-09-19 05:40:04 +00:00
|
|
|
|
if [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token) {
|
2015-11-03 16:39:51 +00:00
|
|
|
|
return self.parse_prefix_range_expr(attrs);
|
|
|
|
|
} else {
|
2016-03-23 03:01:37 +00:00
|
|
|
|
self.parse_prefix_expr(attrs)?
|
2015-11-03 16:39:51 +00:00
|
|
|
|
}
|
2015-10-15 12:51:30 +00:00
|
|
|
|
};
|
2016-01-24 21:46:39 +00:00
|
|
|
|
|
2019-04-23 02:37:23 +00:00
|
|
|
|
match (self.expr_is_complete(&lhs), AssocOp::from_token(&self.token)) {
|
|
|
|
|
(true, None) => {
|
|
|
|
|
// Semi-statement forms are odd. See https://github.com/rust-lang/rust/issues/29071
|
|
|
|
|
return Ok(lhs);
|
|
|
|
|
}
|
|
|
|
|
(false, _) => {} // continue parsing the expression
|
2019-05-06 23:00:21 +00:00
|
|
|
|
// An exhaustive check is done in the following block, but these are checked first
|
|
|
|
|
// because they *are* ambiguous but also reasonable looking incorrect syntax, so we
|
|
|
|
|
// want to keep their span info to improve diagnostics in these cases in a later stage.
|
|
|
|
|
(true, Some(AssocOp::Multiply)) | // `{ 42 } *foo = bar;` or `{ 42 } * 3`
|
2019-04-23 02:37:23 +00:00
|
|
|
|
(true, Some(AssocOp::Subtract)) | // `{ 42 } -5`
|
|
|
|
|
(true, Some(AssocOp::Add)) => { // `{ 42 } + 42`
|
|
|
|
|
// These cases are ambiguous and can't be identified in the parser alone
|
|
|
|
|
let sp = self.sess.source_map().start_point(self.span);
|
2019-05-06 23:00:21 +00:00
|
|
|
|
self.sess.ambiguous_block_expr_parse.borrow_mut().insert(sp, lhs.span);
|
2019-04-23 02:37:23 +00:00
|
|
|
|
return Ok(lhs);
|
|
|
|
|
}
|
|
|
|
|
(true, Some(ref op)) if !op.can_continue_expr_unambiguously() => {
|
|
|
|
|
return Ok(lhs);
|
|
|
|
|
}
|
|
|
|
|
(true, Some(_)) => {
|
|
|
|
|
// We've found an expression that would be parsed as a statement, but the next
|
|
|
|
|
// token implies this should be parsed as an expression.
|
2019-05-01 03:37:42 +00:00
|
|
|
|
// For example: `if let Some(x) = x { x } else { 0 } / 2`
|
|
|
|
|
let mut err = self.sess.span_diagnostic.struct_span_err(self.span, &format!(
|
|
|
|
|
"expected expression, found `{}`",
|
|
|
|
|
pprust::token_to_string(&self.token),
|
|
|
|
|
));
|
|
|
|
|
err.span_label(self.span, "expected expression");
|
2019-05-02 23:13:28 +00:00
|
|
|
|
self.sess.expr_parentheses_needed(
|
|
|
|
|
&mut err,
|
2019-04-23 02:37:23 +00:00
|
|
|
|
lhs.span,
|
2019-05-02 23:13:28 +00:00
|
|
|
|
Some(pprust::expr_to_string(&lhs)),
|
|
|
|
|
);
|
2019-04-23 02:37:23 +00:00
|
|
|
|
err.emit();
|
|
|
|
|
}
|
2015-10-15 12:51:30 +00:00
|
|
|
|
}
|
Make the parser’s ‘expected <foo>, found <bar>’ errors more accurate
As an example of what this changes, the following code:
let x: [int ..4];
Currently spits out ‘expected `]`, found `..`’. However, a comma would also be
valid there, as would a number of other tokens. This change adjusts the parser
to produce more accurate errors, so that that example now produces ‘expected one
of `(`, `+`, `,`, `::`, or `]`, found `..`’.
2014-12-03 09:47:53 +00:00
|
|
|
|
self.expected_tokens.push(TokenType::Operator);
|
2015-10-15 12:51:30 +00:00
|
|
|
|
while let Some(op) = AssocOp::from_token(&self.token) {
|
2016-01-24 21:46:39 +00:00
|
|
|
|
|
2017-07-04 14:04:34 +00:00
|
|
|
|
// Adjust the span for interpolated LHS to point to the `$lhs` token and not to what
|
|
|
|
|
// it refers to. Interpolated identifiers are unwrapped early and never show up here
|
|
|
|
|
// as `PrevTokenKind::Interpolated` so if LHS is a single identifier we always process
|
|
|
|
|
// it as "interpolated", it doesn't change the answer for non-interpolated idents.
|
|
|
|
|
let lhs_span = match (self.prev_token_kind, &lhs.node) {
|
|
|
|
|
(PrevTokenKind::Interpolated, _) => self.prev_span,
|
|
|
|
|
(PrevTokenKind::Ident, &ExprKind::Path(None, ref path))
|
|
|
|
|
if path.segments.len() == 1 => self.prev_span,
|
|
|
|
|
_ => lhs.span,
|
2016-01-24 21:46:39 +00:00
|
|
|
|
};
|
|
|
|
|
|
2015-11-12 20:34:41 +00:00
|
|
|
|
let cur_op_span = self.span;
|
2015-10-16 19:42:06 +00:00
|
|
|
|
let restrictions = if op.is_assign_like() {
|
2017-09-08 19:08:01 +00:00
|
|
|
|
self.restrictions & Restrictions::NO_STRUCT_LITERAL
|
2015-10-16 19:42:06 +00:00
|
|
|
|
} else {
|
|
|
|
|
self.restrictions
|
|
|
|
|
};
|
2019-05-12 00:17:34 +00:00
|
|
|
|
let prec = op.precedence();
|
|
|
|
|
if prec < min_prec {
|
2015-10-15 12:51:30 +00:00
|
|
|
|
break;
|
|
|
|
|
}
|
2017-11-04 23:46:41 +00:00
|
|
|
|
// Check for deprecated `...` syntax
|
|
|
|
|
if self.token == token::DotDotDot && op == AssocOp::DotDotEq {
|
|
|
|
|
self.err_dotdotdot_syntax(self.span);
|
2017-09-19 05:40:04 +00:00
|
|
|
|
}
|
2017-11-04 23:46:41 +00:00
|
|
|
|
|
2015-12-30 23:11:53 +00:00
|
|
|
|
self.bump();
|
2015-10-15 12:51:30 +00:00
|
|
|
|
if op.is_comparison() {
|
2016-02-08 22:55:55 +00:00
|
|
|
|
self.check_no_chained_comparison(&lhs, &op);
|
2015-10-15 12:51:30 +00:00
|
|
|
|
}
|
|
|
|
|
// Special cases:
|
|
|
|
|
if op == AssocOp::As {
|
2017-07-03 23:17:01 +00:00
|
|
|
|
lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Cast)?;
|
2015-10-15 12:51:30 +00:00
|
|
|
|
continue
|
2015-12-03 02:37:48 +00:00
|
|
|
|
} else if op == AssocOp::Colon {
|
2019-03-13 02:27:10 +00:00
|
|
|
|
let maybe_path = self.could_ascription_be_path(&lhs.node);
|
|
|
|
|
let next_sp = self.span;
|
|
|
|
|
|
2017-07-06 21:29:55 +00:00
|
|
|
|
lhs = match self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Type) {
|
|
|
|
|
Ok(lhs) => lhs,
|
|
|
|
|
Err(mut err) => {
|
2019-03-13 02:27:10 +00:00
|
|
|
|
self.bad_type_ascription(
|
|
|
|
|
&mut err,
|
|
|
|
|
lhs_span,
|
|
|
|
|
cur_op_span,
|
|
|
|
|
next_sp,
|
|
|
|
|
maybe_path,
|
|
|
|
|
);
|
2017-07-06 21:29:55 +00:00
|
|
|
|
return Err(err);
|
|
|
|
|
}
|
|
|
|
|
};
|
2015-12-03 02:37:48 +00:00
|
|
|
|
continue
|
2017-09-19 05:40:04 +00:00
|
|
|
|
} else if op == AssocOp::DotDot || op == AssocOp::DotDotEq {
|
|
|
|
|
// If we didn’t have to handle `x..`/`x..=`, it would be pretty easy to
|
2016-03-17 01:35:36 +00:00
|
|
|
|
// generalise it to the Fixity::None code.
|
2016-01-13 06:23:31 +00:00
|
|
|
|
//
|
2017-09-19 05:40:04 +00:00
|
|
|
|
// We have 2 alternatives here: `x..y`/`x..=y` and `x..`/`x..=`. The other
|
2016-03-17 01:35:36 +00:00
|
|
|
|
// two variants are handled with `parse_prefix_range_expr` call above.
|
2016-01-13 06:23:31 +00:00
|
|
|
|
let rhs = if self.is_at_start_of_range_notation_rhs() {
|
2019-05-12 00:17:34 +00:00
|
|
|
|
Some(self.parse_assoc_expr_with(prec + 1, LhsExpr::NotYetParsed)?)
|
2016-01-13 06:23:31 +00:00
|
|
|
|
} else {
|
|
|
|
|
None
|
|
|
|
|
};
|
|
|
|
|
let (lhs_span, rhs_span) = (lhs.span, if let Some(ref x) = rhs {
|
|
|
|
|
x.span
|
|
|
|
|
} else {
|
|
|
|
|
cur_op_span
|
|
|
|
|
});
|
2016-03-17 01:35:36 +00:00
|
|
|
|
let limits = if op == AssocOp::DotDot {
|
|
|
|
|
RangeLimits::HalfOpen
|
|
|
|
|
} else {
|
|
|
|
|
RangeLimits::Closed
|
|
|
|
|
};
|
|
|
|
|
|
2018-12-01 21:48:55 +00:00
|
|
|
|
let r = self.mk_range(Some(lhs), rhs, limits)?;
|
2017-03-15 00:22:48 +00:00
|
|
|
|
lhs = self.mk_expr(lhs_span.to(rhs_span), r, ThinVec::new());
|
2016-01-13 06:23:31 +00:00
|
|
|
|
break
|
2015-10-15 12:51:30 +00:00
|
|
|
|
}
|
2013-07-18 03:04:37 +00:00
|
|
|
|
|
2019-05-12 00:17:34 +00:00
|
|
|
|
let fixity = op.fixity();
|
|
|
|
|
let prec_adjustment = match fixity {
|
|
|
|
|
Fixity::Right => 0,
|
|
|
|
|
Fixity::Left => 1,
|
2016-03-18 23:04:43 +00:00
|
|
|
|
// We currently have no non-associative operators that are not handled above by
|
|
|
|
|
// the special cases. The code is here only for future convenience.
|
2019-05-12 00:17:34 +00:00
|
|
|
|
Fixity::None => 1,
|
|
|
|
|
};
|
|
|
|
|
let rhs = self.with_res(
|
|
|
|
|
restrictions - Restrictions::STMT_EXPR,
|
|
|
|
|
|this| this.parse_assoc_expr_with(prec + prec_adjustment, LhsExpr::NotYetParsed)
|
|
|
|
|
)?;
|
2015-10-15 12:51:30 +00:00
|
|
|
|
|
2019-02-03 15:35:12 +00:00
|
|
|
|
// Make sure that the span of the parent node is larger than the span of lhs and rhs,
|
|
|
|
|
// including the attributes.
|
|
|
|
|
let lhs_span = lhs
|
|
|
|
|
.attrs
|
|
|
|
|
.iter()
|
|
|
|
|
.filter(|a| a.style == AttrStyle::Outer)
|
|
|
|
|
.next()
|
|
|
|
|
.map_or(lhs_span, |a| a.span);
|
2017-03-15 00:22:48 +00:00
|
|
|
|
let span = lhs_span.to(rhs.span);
|
2015-10-15 12:51:30 +00:00
|
|
|
|
lhs = match op {
|
|
|
|
|
AssocOp::Add | AssocOp::Subtract | AssocOp::Multiply | AssocOp::Divide |
|
|
|
|
|
AssocOp::Modulus | AssocOp::LAnd | AssocOp::LOr | AssocOp::BitXor |
|
|
|
|
|
AssocOp::BitAnd | AssocOp::BitOr | AssocOp::ShiftLeft | AssocOp::ShiftRight |
|
|
|
|
|
AssocOp::Equal | AssocOp::Less | AssocOp::LessEqual | AssocOp::NotEqual |
|
|
|
|
|
AssocOp::Greater | AssocOp::GreaterEqual => {
|
|
|
|
|
let ast_op = op.to_ast_binop().unwrap();
|
2018-08-18 10:14:03 +00:00
|
|
|
|
let binary = self.mk_binary(source_map::respan(cur_op_span, ast_op), lhs, rhs);
|
2017-03-15 00:22:48 +00:00
|
|
|
|
self.mk_expr(span, binary, ThinVec::new())
|
2013-01-31 18:32:57 +00:00
|
|
|
|
}
|
2019-05-17 02:56:11 +00:00
|
|
|
|
AssocOp::Assign => self.mk_expr(span, ExprKind::Assign(lhs, rhs), ThinVec::new()),
|
2015-10-15 12:51:30 +00:00
|
|
|
|
AssocOp::AssignOp(k) => {
|
|
|
|
|
let aop = match k {
|
2016-02-08 12:16:12 +00:00
|
|
|
|
token::Plus => BinOpKind::Add,
|
|
|
|
|
token::Minus => BinOpKind::Sub,
|
|
|
|
|
token::Star => BinOpKind::Mul,
|
|
|
|
|
token::Slash => BinOpKind::Div,
|
|
|
|
|
token::Percent => BinOpKind::Rem,
|
|
|
|
|
token::Caret => BinOpKind::BitXor,
|
|
|
|
|
token::And => BinOpKind::BitAnd,
|
|
|
|
|
token::Or => BinOpKind::BitOr,
|
|
|
|
|
token::Shl => BinOpKind::Shl,
|
|
|
|
|
token::Shr => BinOpKind::Shr,
|
2015-10-15 12:51:30 +00:00
|
|
|
|
};
|
2018-08-18 10:14:03 +00:00
|
|
|
|
let aopexpr = self.mk_assign_op(source_map::respan(cur_op_span, aop), lhs, rhs);
|
2017-03-15 00:22:48 +00:00
|
|
|
|
self.mk_expr(span, aopexpr, ThinVec::new())
|
2013-01-31 18:32:57 +00:00
|
|
|
|
}
|
2017-09-19 05:40:04 +00:00
|
|
|
|
AssocOp::As | AssocOp::Colon | AssocOp::DotDot | AssocOp::DotDotEq => {
|
|
|
|
|
self.bug("AssocOp should have been handled by special case")
|
2015-12-03 02:37:48 +00:00
|
|
|
|
}
|
2015-10-15 12:51:30 +00:00
|
|
|
|
};
|
|
|
|
|
|
2019-05-12 00:17:34 +00:00
|
|
|
|
if let Fixity::None = fixity { break }
|
2012-05-23 22:06:11 +00:00
|
|
|
|
}
|
2015-10-15 12:51:30 +00:00
|
|
|
|
Ok(lhs)
|
2012-05-23 22:06:11 +00:00
|
|
|
|
}
|
2010-09-28 17:30:34 +00:00
|
|
|
|
|
2017-07-03 23:17:01 +00:00
|
|
|
|
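/// Parses the type on the right-hand side of a cast (`as`) or type ascription (`:`), recovering
/// from cases like `x as usize < y` where `<` would otherwise start a generic argument list.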
fn parse_assoc_op_cast(&mut self, lhs: P<Expr>, lhs_span: Span,
|
|
|
|
|
expr_kind: fn(P<Expr>, P<Ty>) -> ExprKind)
|
|
|
|
|
-> PResult<'a, P<Expr>> {
|
|
|
|
|
let mk_expr = |this: &mut Self, rhs: P<Ty>| {
|
|
|
|
|
this.mk_expr(lhs_span.to(rhs.span), expr_kind(lhs, rhs), ThinVec::new())
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
// Save the state of the parser before parsing type normally, in case there is a
|
|
|
|
|
// LessThan comparison after this cast.
|
|
|
|
|
let parser_snapshot_before_type = self.clone();
|
2017-06-15 03:42:24 +00:00
|
|
|
|
match self.parse_ty_no_plus() {
|
|
|
|
|
Ok(rhs) => {
|
2017-07-03 23:17:01 +00:00
|
|
|
|
Ok(mk_expr(self, rhs))
|
2017-06-15 03:42:24 +00:00
|
|
|
|
}
|
2017-07-03 23:17:01 +00:00
|
|
|
|
Err(mut type_err) => {
|
|
|
|
|
// Rewind to before attempting to parse the type with generics, to recover
|
|
|
|
|
// from situations like `x as usize < y` in which we first tried to parse
|
|
|
|
|
// `usize < y` as a type with generic arguments.
|
|
|
|
|
let parser_snapshot_after_type = self.clone();
|
|
|
|
|
mem::replace(self, parser_snapshot_before_type);
|
2017-06-15 03:42:24 +00:00
|
|
|
|
|
2017-07-19 23:39:34 +00:00
|
|
|
|
match self.parse_path(PathStyle::Expr) {
|
2017-06-15 03:42:24 +00:00
|
|
|
|
Ok(path) => {
|
2017-10-09 17:02:17 +00:00
|
|
|
|
let (op_noun, op_verb) = match self.token {
|
|
|
|
|
token::Lt => ("comparison", "comparing"),
|
|
|
|
|
token::BinOp(token::Shl) => ("shift", "shifting"),
|
|
|
|
|
_ => {
|
|
|
|
|
// We can end up here even without `<` being the next token, for
|
|
|
|
|
// example because `parse_ty_no_plus` returns `Err` on keywords,
|
|
|
|
|
// but `parse_path` returns `Ok` on them due to error recovery.
|
|
|
|
|
// Return original error and parser state.
|
|
|
|
|
mem::replace(self, parser_snapshot_after_type);
|
|
|
|
|
return Err(type_err);
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
|
2017-07-03 23:17:01 +00:00
|
|
|
|
// Successfully parsed the type path leaving a `<` yet to parse.
|
|
|
|
|
type_err.cancel();
|
|
|
|
|
|
|
|
|
|
// Report non-fatal diagnostics, keep `x as usize` as an expression
|
|
|
|
|
// in AST and continue parsing.
|
2017-06-15 03:42:24 +00:00
|
|
|
|
let msg = format!("`<` is interpreted as a start of generic \
|
2017-10-09 17:02:17 +00:00
|
|
|
|
arguments for `{}`, not a {}", path, op_noun);
|
2017-07-03 23:17:01 +00:00
|
|
|
|
let mut err = self.sess.span_diagnostic.struct_span_err(self.span, &msg);
|
|
|
|
|
err.span_label(self.look_ahead_span(1).to(parser_snapshot_after_type.span),
|
|
|
|
|
"interpreted as generic arguments");
|
2017-10-09 17:02:17 +00:00
|
|
|
|
err.span_label(self.span, format!("not interpreted as {}", op_noun));
|
2017-07-03 23:17:01 +00:00
|
|
|
|
|
|
|
|
|
let expr = mk_expr(self, P(Ty {
|
|
|
|
|
span: path.span,
|
|
|
|
|
node: TyKind::Path(None, path),
|
|
|
|
|
id: ast::DUMMY_NODE_ID
|
|
|
|
|
}));
|
|
|
|
|
|
2018-08-18 10:14:09 +00:00
|
|
|
|
let expr_str = self.sess.source_map().span_to_snippet(expr.span)
|
2018-10-12 14:16:00 +00:00
|
|
|
|
.unwrap_or_else(|_| pprust::expr_to_string(&expr));
|
2019-01-25 21:03:27 +00:00
|
|
|
|
err.span_suggestion(
|
|
|
|
|
expr.span,
|
|
|
|
|
&format!("try {} the cast value", op_verb),
|
|
|
|
|
format!("({})", expr_str),
|
|
|
|
|
Applicability::MachineApplicable
|
|
|
|
|
);
|
2017-06-15 03:42:24 +00:00
|
|
|
|
err.emit();
|
|
|
|
|
|
2017-07-03 23:17:01 +00:00
|
|
|
|
Ok(expr)
|
2017-06-15 03:42:24 +00:00
|
|
|
|
}
|
|
|
|
|
Err(mut path_err) => {
|
2017-07-03 23:17:01 +00:00
|
|
|
|
// Couldn't parse as a path, return original error and parser state.
|
2017-06-15 03:42:24 +00:00
|
|
|
|
path_err.cancel();
|
2017-07-03 23:17:01 +00:00
|
|
|
|
mem::replace(self, parser_snapshot_after_type);
|
|
|
|
|
Err(type_err)
|
2017-06-15 03:42:24 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
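
    // Illustrative input for the recovery above (a sketch, not an actual test case):
    //
    //     let x = 1;
    //     let _ = x as usize < 2;   // `usize < 2` is first tried as a generic type path;
    //                               // the parser then recovers and suggests `(x as usize) < 2`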

    /// Produce an error if comparison operators are chained (RFC #558).
    /// We only need to check lhs, not rhs, because all comparison ops
    /// have the same precedence and are left-associative.
    fn check_no_chained_comparison(&self, lhs: &Expr, outer_op: &AssocOp) {
        debug_assert!(outer_op.is_comparison(),
                      "check_no_chained_comparison: {:?} is not comparison",
                      outer_op);
        match lhs.node {
            ExprKind::Binary(op, _, _) if op.node.is_comparison() => {
                // respan to include both operators
                let op_span = op.span.to(self.span);
                let mut err = self.diagnostic().struct_span_err(op_span,
                    "chained comparison operators require parentheses");
                if op.node == BinOpKind::Lt &&
                    *outer_op == AssocOp::Less ||  // Include `<` to provide this recommendation
                    *outer_op == AssocOp::Greater  // even in a case like the following:
                {                                  //     Foo<Bar<Baz<Qux, ()>>>
                    err.help(
                        "use `::<...>` instead of `<...>` if you meant to specify type arguments");
                    err.help("or use `(...)` if you meant to specify fn arguments");
                }
                err.emit();
            }
            _ => {}
        }
    }
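
    // Illustrative input for the check above (a sketch):
    //
    //     let _ = 1 < 2 < 3;   // error: chained comparison operators require parentheses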

    /// Parse prefix-forms of range notation: `..expr`, `..`, `..=expr`
    fn parse_prefix_range_expr(&mut self,
                               already_parsed_attrs: Option<ThinVec<Attribute>>)
                               -> PResult<'a, P<Expr>> {
        // Check for deprecated `...` syntax
        if self.token == token::DotDotDot {
            self.err_dotdotdot_syntax(self.span);
        }

        debug_assert!([token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token),
                      "parse_prefix_range_expr: token {:?} is not DotDot/DotDotEq",
                      self.token);
        let tok = self.token.clone();
        let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?;
        let lo = self.span;
        let mut hi = self.span;
        self.bump();
        let opt_end = if self.is_at_start_of_range_notation_rhs() {
            // RHS must be parsed with more associativity than the dots.
            let next_prec = AssocOp::from_token(&tok).unwrap().precedence() + 1;
            Some(self.parse_assoc_expr_with(next_prec,
                                            LhsExpr::NotYetParsed)
                .map(|x| {
                    hi = x.span;
                    x
                })?)
        } else {
            None
        };
        let limits = if tok == token::DotDot {
            RangeLimits::HalfOpen
        } else {
            RangeLimits::Closed
        };

        let r = self.mk_range(None, opt_end, limits)?;
        Ok(self.mk_expr(lo.to(hi), r, attrs))
    }

    fn is_at_start_of_range_notation_rhs(&self) -> bool {
        if self.token.can_begin_expr() {
            // parse `for i in 1.. { }` as infinite loop, not as `for i in (1..{})`.
            if self.token == token::OpenDelim(token::Brace) {
                return !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
            }
            true
        } else {
            false
        }
    }

    /// Parses an `if` or `if let` expression (`if` token already eaten).
    fn parse_if_expr(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
        if self.check_keyword(kw::Let) {
            return self.parse_if_let_expr(attrs);
        }
        let lo = self.prev_span;
        let cond = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;

        // Verify that the parsed `if` condition makes sense as a condition. If it is a block, then
        // verify that the last statement is either an implicit return (no `;`) or an explicit
        // return. This won't catch blocks with an explicit `return`, but that would be caught by
        // the dead code lint.
        if self.eat_keyword(kw::Else) || !cond.returns() {
            let sp = self.sess.source_map().next_point(lo);
            let mut err = self.diagnostic()
                .struct_span_err(sp, "missing condition for `if` statement");
            err.span_label(sp, "expected if condition here");
            return Err(err)
        }
        let not_block = self.token != token::OpenDelim(token::Brace);
        let thn = self.parse_block().map_err(|mut err| {
            if not_block {
                err.span_label(lo, "this `if` statement has a condition, but no block");
            }
            err
        })?;
        let mut els: Option<P<Expr>> = None;
        let mut hi = thn.span;
        if self.eat_keyword(kw::Else) {
            let elexpr = self.parse_else_expr()?;
            hi = elexpr.span;
            els = Some(elexpr);
        }
        Ok(self.mk_expr(lo.to(hi), ExprKind::If(cond, thn, els), attrs))
    }

    /// Parses an `if let` expression (`if` token already eaten).
    fn parse_if_let_expr(&mut self, attrs: ThinVec<Attribute>)
                         -> PResult<'a, P<Expr>> {
        let lo = self.prev_span;
        self.expect_keyword(kw::Let)?;
        let pats = self.parse_pats()?;
        self.expect(&token::Eq)?;
        let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
        let thn = self.parse_block()?;
        let (hi, els) = if self.eat_keyword(kw::Else) {
            let expr = self.parse_else_expr()?;
            (expr.span, Some(expr))
        } else {
            (thn.span, None)
        };
        Ok(self.mk_expr(lo.to(hi), ExprKind::IfLet(pats, expr, thn, els), attrs))
    }
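
    // Illustrative input for `parse_if_let_expr` (a sketch):
    //
    //     if let Some(x) = opt {
    //         x
    //     } else {
    //         0
    //     }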

    /// Parses `move |args| expr`.
    fn parse_lambda_expr(&mut self,
                         attrs: ThinVec<Attribute>)
                         -> PResult<'a, P<Expr>>
    {
        let lo = self.span;
        let movability = if self.eat_keyword(kw::Static) {
            Movability::Static
        } else {
            Movability::Movable
        };
        let asyncness = if self.span.rust_2018() {
            self.parse_asyncness()
        } else {
            IsAsync::NotAsync
        };
        let capture_clause = if self.eat_keyword(kw::Move) {
            CaptureBy::Value
        } else {
            CaptureBy::Ref
        };
        let decl = self.parse_fn_block_decl()?;
        let decl_hi = self.prev_span;
        let body = match decl.output {
            FunctionRetTy::Default(_) => {
                let restrictions = self.restrictions - Restrictions::STMT_EXPR;
                self.parse_expr_res(restrictions, None)?
            },
            _ => {
                // If an explicit return type is given, require a
                // block to appear (RFC 968).
                let body_lo = self.span;
                self.parse_block_expr(None, body_lo, BlockCheckMode::Default, ThinVec::new())?
            }
        };

        Ok(self.mk_expr(
            lo.to(body.span),
            ExprKind::Closure(capture_clause, asyncness, movability, decl, body, lo.to(decl_hi)),
            attrs))
    }
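
    // Illustrative inputs for `parse_lambda_expr` (a sketch):
    //
    //     move |x: i32| x + 1            // `move` closure with an expression body
    //     |x: i32| -> i32 { x + 1 }      // explicit return type requires a block (RFC 968)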

    // `else` token already eaten
    fn parse_else_expr(&mut self) -> PResult<'a, P<Expr>> {
        if self.eat_keyword(kw::If) {
            return self.parse_if_expr(ThinVec::new());
        } else {
            let blk = self.parse_block()?;
            return Ok(self.mk_expr(blk.span, ExprKind::Block(blk, None), ThinVec::new()));
        }
    }

    /// Parse a 'for' .. 'in' expression ('for' token already eaten)
    fn parse_for_expr(&mut self, opt_label: Option<Label>,
                      span_lo: Span,
                      mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
        // Parse: `for <src_pat> in <src_expr> <src_loop_block>`

        let pat = self.parse_top_level_pat()?;
        if !self.eat_keyword(kw::In) {
            let in_span = self.prev_span.between(self.span);
            let mut err = self.sess.span_diagnostic
                .struct_span_err(in_span, "missing `in` in `for` loop");
            err.span_suggestion_short(
                in_span, "try adding `in` here", " in ".into(),
                // has been misleading, at least in the past (closed Issue #48492)
                Applicability::MaybeIncorrect
            );
            err.emit();
        }
        let in_span = self.prev_span;
        if self.eat_keyword(kw::In) {
            // a common typo: `for _ in in bar {}`
            let mut err = self.sess.span_diagnostic.struct_span_err(
                self.prev_span,
                "expected iterable, found keyword `in`",
            );
            err.span_suggestion_short(
                in_span.until(self.prev_span),
                "remove the duplicated `in`",
                String::new(),
                Applicability::MachineApplicable,
            );
            err.emit();
        }
        let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
        let (iattrs, loop_block) = self.parse_inner_attrs_and_block()?;
        attrs.extend(iattrs);

        let hi = self.prev_span;
        Ok(self.mk_expr(span_lo.to(hi), ExprKind::ForLoop(pat, expr, loop_block, opt_label), attrs))
    }
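
    // Illustrative inputs for `parse_for_expr` (a sketch):
    //
    //     for x in 0..10 { }      // ordinary `for` loop
    //     for x 0..10 { }         // recovered with "missing `in` in `for` loop"
    //     for x in in 0..10 { }   // recovered with "expected iterable, found keyword `in`"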

    /// Parses a `while` or `while let` expression (`while` token already eaten).
    fn parse_while_expr(&mut self, opt_label: Option<Label>,
                        span_lo: Span,
                        mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
        if self.token.is_keyword(kw::Let) {
            return self.parse_while_let_expr(opt_label, span_lo, attrs);
        }
        let cond = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
        let (iattrs, body) = self.parse_inner_attrs_and_block()?;
        attrs.extend(iattrs);
        let span = span_lo.to(body.span);
        return Ok(self.mk_expr(span, ExprKind::While(cond, body, opt_label), attrs));
    }

    /// Parses a `while let` expression (`while` token already eaten).
    fn parse_while_let_expr(&mut self, opt_label: Option<Label>,
                            span_lo: Span,
                            mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
        self.expect_keyword(kw::Let)?;
        let pats = self.parse_pats()?;
        self.expect(&token::Eq)?;
        let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
        let (iattrs, body) = self.parse_inner_attrs_and_block()?;
        attrs.extend(iattrs);
        let span = span_lo.to(body.span);
        return Ok(self.mk_expr(span, ExprKind::WhileLet(pats, expr, body, opt_label), attrs));
    }
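
    // Illustrative input for `parse_while_let_expr` (a sketch):
    //
    //     while let Some(x) = stack.pop() {
    //         drop(x);
    //     }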

    // parse `loop {...}`, `loop` token already eaten
    fn parse_loop_expr(&mut self, opt_label: Option<Label>,
                       span_lo: Span,
                       mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
        let (iattrs, body) = self.parse_inner_attrs_and_block()?;
        attrs.extend(iattrs);
        let span = span_lo.to(body.span);
        Ok(self.mk_expr(span, ExprKind::Loop(body, opt_label), attrs))
    }

    /// Parses an `async move {...}` expression.
    pub fn parse_async_block(&mut self, mut attrs: ThinVec<Attribute>)
        -> PResult<'a, P<Expr>>
    {
        let span_lo = self.span;
        self.expect_keyword(kw::Async)?;
        let capture_clause = if self.eat_keyword(kw::Move) {
            CaptureBy::Value
        } else {
            CaptureBy::Ref
        };
        let (iattrs, body) = self.parse_inner_attrs_and_block()?;
        attrs.extend(iattrs);
        Ok(self.mk_expr(
            span_lo.to(body.span),
            ExprKind::Async(capture_clause, ast::DUMMY_NODE_ID, body), attrs))
    }
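
    // Illustrative inputs for `parse_async_block` (a sketch):
    //
    //     async { compute() }        // captures by reference (CaptureBy::Ref)
    //     async move { compute() }   // captures by value (CaptureBy::Value)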

    /// Parses a `try {...}` expression (`try` token already eaten).
    fn parse_try_block(&mut self, span_lo: Span, mut attrs: ThinVec<Attribute>)
        -> PResult<'a, P<Expr>>
    {
        let (iattrs, body) = self.parse_inner_attrs_and_block()?;
        attrs.extend(iattrs);
        if self.eat_keyword(kw::Catch) {
            let mut error = self.struct_span_err(self.prev_span,
                                                 "keyword `catch` cannot follow a `try` block");
            error.help("try using `match` on the result of the `try` block instead");
            error.emit();
            Err(error)
        } else {
            Ok(self.mk_expr(span_lo.to(body.span), ExprKind::TryBlock(body), attrs))
        }
    }
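
    // Illustrative inputs for `parse_try_block` (a sketch):
    //
    //     try { maybe_fail()? }             // parsed as ExprKind::TryBlock
    //     try { maybe_fail()? } catch { }   // rejected: `catch` cannot follow a `try` block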

    // `match` token already eaten
    fn parse_match_expr(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
        let match_span = self.prev_span;
        let lo = self.prev_span;
        let discriminant = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL,
                                               None)?;
        if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) {
            if self.token == token::Token::Semi {
                e.span_suggestion_short(
                    match_span,
                    "try removing this `match`",
                    String::new(),
                    Applicability::MaybeIncorrect // speculative
                );
            }
            return Err(e)
        }
        attrs.extend(self.parse_inner_attributes()?);

        let mut arms: Vec<Arm> = Vec::new();
        while self.token != token::CloseDelim(token::Brace) {
            match self.parse_arm() {
                Ok(arm) => arms.push(arm),
                Err(mut e) => {
                    // Recover by skipping to the end of the block.
                    e.emit();
                    self.recover_stmt();
                    let span = lo.to(self.span);
                    if self.token == token::CloseDelim(token::Brace) {
                        self.bump();
                    }
                    return Ok(self.mk_expr(span, ExprKind::Match(discriminant, arms), attrs));
                }
            }
        }
        let hi = self.span;
        self.bump();
        return Ok(self.mk_expr(lo.to(hi), ExprKind::Match(discriminant, arms), attrs));
    }

    crate fn parse_arm(&mut self) -> PResult<'a, Arm> {
        let attrs = self.parse_outer_attributes()?;
        let lo = self.span;
        let pats = self.parse_pats()?;
        let guard = if self.eat_keyword(kw::If) {
            Some(Guard::If(self.parse_expr()?))
        } else {
            None
        };
        let arrow_span = self.span;
        self.expect(&token::FatArrow)?;
        let arm_start_span = self.span;

        let expr = self.parse_expr_res(Restrictions::STMT_EXPR, None)
            .map_err(|mut err| {
                err.span_label(arrow_span, "while parsing the `match` arm starting here");
                err
            })?;

        let require_comma = classify::expr_requires_semi_to_be_stmt(&expr)
            && self.token != token::CloseDelim(token::Brace);

        let hi = self.span;

        if require_comma {
            let cm = self.sess.source_map();
            self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)])
                .map_err(|mut err| {
                    match (cm.span_to_lines(expr.span), cm.span_to_lines(arm_start_span)) {
                        (Ok(ref expr_lines), Ok(ref arm_start_lines))
                        if arm_start_lines.lines[0].end_col == expr_lines.lines[0].end_col
                            && expr_lines.lines.len() == 2
                            && self.token == token::FatArrow => {
                            // We check whether there's any trailing code in the parse span,
                            // if there isn't, we very likely have the following:
                            //
                            // X |     &Y => "y"
                            //   |        -- - missing comma
                            //   |        |
                            //   |        arrow_span
                            // X |     &X => "x"
                            //   |      - ^^ self.span
                            //   |      |
                            //   |      parsed until here as `"y" & X`
                            err.span_suggestion_short(
                                cm.next_point(arm_start_span),
                                "missing a comma here to end this `match` arm",
                                ",".to_owned(),
                                Applicability::MachineApplicable
                            );
                        }
                        _ => {
                            err.span_label(arrow_span,
                                           "while parsing the `match` arm starting here");
                        }
                    }
                    err
                })?;
        } else {
            self.eat(&token::Comma);
        }

        Ok(ast::Arm {
            attrs,
            pats,
            guard,
            body: expr,
            span: lo.to(hi),
        })
    }
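
    // Illustrative input for `parse_arm` (a sketch): a guard plus the missing-comma recovery.
    //
    //     match n {
    //         x if x > 0 => "positive",
    //         0 => "zero"          // a missing `,` here may get the "missing a comma" suggestion
    //         _ => "negative",
    //     }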

    /// Parses an expression.
    #[inline]
    pub fn parse_expr(&mut self) -> PResult<'a, P<Expr>> {
        self.parse_expr_res(Restrictions::empty(), None)
    }

    /// Evaluates the closure with restrictions in place.
    ///
    /// After the closure is evaluated, restrictions are reset.
    fn with_res<F, T>(&mut self, r: Restrictions, f: F) -> T
        where F: FnOnce(&mut Self) -> T
    {
        let old = self.restrictions;
        self.restrictions = r;
        let r = f(self);
        self.restrictions = old;
        return r;
    }

    /// Parses an expression, subject to the given restrictions.
    #[inline]
    fn parse_expr_res(&mut self, r: Restrictions,
                      already_parsed_attrs: Option<ThinVec<Attribute>>)
                      -> PResult<'a, P<Expr>> {
        self.with_res(r, |this| this.parse_assoc_expr(already_parsed_attrs))
    }

    /// Parses the RHS of a local variable declaration (e.g., `= 14;`).
    fn parse_initializer(&mut self, skip_eq: bool) -> PResult<'a, Option<P<Expr>>> {
        if self.eat(&token::Eq) {
            Ok(Some(self.parse_expr()?))
        } else if skip_eq {
            Ok(Some(self.parse_expr()?))
        } else {
            Ok(None)
        }
    }

    /// Parses patterns, separated by `|`s.
    fn parse_pats(&mut self) -> PResult<'a, Vec<P<Pat>>> {
        // Allow a '|' before the pats (RFC 1925 + RFC 2530)
        self.eat(&token::BinOp(token::Or));

        let mut pats = Vec::new();
        loop {
            pats.push(self.parse_top_level_pat()?);

            if self.token == token::OrOr {
                let mut err = self.struct_span_err(self.span,
                                                   "unexpected token `||` after pattern");
                err.span_suggestion(
                    self.span,
                    "use a single `|` to specify multiple patterns",
                    "|".to_owned(),
                    Applicability::MachineApplicable
                );
                err.emit();
                self.bump();
            } else if self.eat(&token::BinOp(token::Or)) {
                // This is a no-op. Continue the loop to parse the next
                // pattern.
            } else {
                return Ok(pats);
            }
        };
    }
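
    // Illustrative match-arm patterns for `parse_pats` (a sketch):
    //
    //     | Some(0) | None => ()   // a leading `|` and `|`-separated patterns are accepted
    //     Some(0) || None => ()    // `||` is rejected, with a suggestion to use a single `|`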

    // Parses a parenthesized list of patterns like
    // `()`, `(p)`, `(p,)`, `(p, q)`, or `(p, .., q)`. Returns:
    // - a vector of the patterns that were parsed
    // - an option indicating the index of the `..` element
    // - a boolean indicating whether a trailing comma was present.
    // Trailing commas are significant because (p) and (p,) are different patterns.
    fn parse_parenthesized_pat_list(&mut self) -> PResult<'a, (Vec<P<Pat>>, Option<usize>, bool)> {
        self.expect(&token::OpenDelim(token::Paren))?;
        let result = match self.parse_pat_list() {
            Ok(result) => result,
            Err(mut err) => { // recover from parse error in tuple pattern list
                err.emit();
                self.consume_block(token::Paren);
                return Ok((vec![], Some(0), false));
            }
        };
        self.expect(&token::CloseDelim(token::Paren))?;
        Ok(result)
    }

    fn parse_pat_list(&mut self) -> PResult<'a, (Vec<P<Pat>>, Option<usize>, bool)> {
        let mut fields = Vec::new();
        let mut ddpos = None;
        let mut prev_dd_sp = None;
        let mut trailing_comma = false;
        loop {
            if self.eat(&token::DotDot) {
                if ddpos.is_none() {
                    ddpos = Some(fields.len());
                    prev_dd_sp = Some(self.prev_span);
                } else {
                    // Emit a friendly error, ignore `..` and continue parsing
                    let mut err = self.struct_span_err(
                        self.prev_span,
                        "`..` can only be used once per tuple or tuple struct pattern",
                    );
                    err.span_label(self.prev_span, "can only be used once per pattern");
                    if let Some(sp) = prev_dd_sp {
                        err.span_label(sp, "previously present here");
                    }
                    err.emit();
                }
            } else if !self.check(&token::CloseDelim(token::Paren)) {
                fields.push(self.parse_pat(None)?);
            } else {
                break
            }

            trailing_comma = self.eat(&token::Comma);
            if !trailing_comma {
                break
            }
        }

        if ddpos == Some(fields.len()) && trailing_comma {
            // `..` needs to be followed by `)` or `, pat`, `..,)` is disallowed.
            let msg = "trailing comma is not permitted after `..`";
            self.struct_span_err(self.prev_span, msg)
                .span_label(self.prev_span, msg)
                .emit();
        }

        Ok((fields, ddpos, trailing_comma))
    }
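
    // Illustrative tuple-pattern inputs for `parse_pat_list` (a sketch):
    //
    //     (a, b)       // two elements, no `..`
    //     (a, .., z)   // `..` recorded at index 1
    //     (a, ..,)     // rejected: trailing comma is not permitted after `..`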

    fn parse_pat_vec_elements(
        &mut self,
    ) -> PResult<'a, (Vec<P<Pat>>, Option<P<Pat>>, Vec<P<Pat>>)> {
        let mut before = Vec::new();
        let mut slice = None;
        let mut after = Vec::new();
        let mut first = true;
        let mut before_slice = true;

        while self.token != token::CloseDelim(token::Bracket) {
            if first {
                first = false;
            } else {
                self.expect(&token::Comma)?;

                if self.token == token::CloseDelim(token::Bracket)
                        && (before_slice || !after.is_empty()) {
                    break
                }
            }

            if before_slice {
                if self.eat(&token::DotDot) {

                    if self.check(&token::Comma) ||
                            self.check(&token::CloseDelim(token::Bracket)) {
                        slice = Some(P(Pat {
                            id: ast::DUMMY_NODE_ID,
                            node: PatKind::Wild,
                            span: self.prev_span,
                        }));
                        before_slice = false;
                    }
                    continue
                }
            }

            let subpat = self.parse_pat(None)?;
            if before_slice && self.eat(&token::DotDot) {
                slice = Some(subpat);
                before_slice = false;
            } else if before_slice {
                before.push(subpat);
            } else {
                after.push(subpat);
            }
        }

        Ok((before, slice, after))
    }

    fn parse_pat_field(
        &mut self,
        lo: Span,
        attrs: Vec<Attribute>
    ) -> PResult<'a, source_map::Spanned<ast::FieldPat>> {
        // Check if a colon exists one ahead. This means we're parsing a fieldname.
        let hi;
        let (subpat, fieldname, is_shorthand) = if self.look_ahead(1, |t| t == &token::Colon) {
            // Parsing a pattern of the form "fieldname: pat"
            let fieldname = self.parse_field_name()?;
            self.bump();
            let pat = self.parse_pat(None)?;
            hi = pat.span;
            (pat, fieldname, false)
        } else {
            // Parsing a pattern of the form "(box) (ref) (mut) fieldname"
            let is_box = self.eat_keyword(kw::Box);
            let boxed_span = self.span;
            let is_ref = self.eat_keyword(kw::Ref);
            let is_mut = self.eat_keyword(kw::Mut);
            let fieldname = self.parse_ident()?;
            hi = self.prev_span;

            let bind_type = match (is_ref, is_mut) {
                (true, true) => BindingMode::ByRef(Mutability::Mutable),
                (true, false) => BindingMode::ByRef(Mutability::Immutable),
                (false, true) => BindingMode::ByValue(Mutability::Mutable),
                (false, false) => BindingMode::ByValue(Mutability::Immutable),
            };
            let fieldpat = P(Pat {
                id: ast::DUMMY_NODE_ID,
                node: PatKind::Ident(bind_type, fieldname, None),
                span: boxed_span.to(hi),
            });

            let subpat = if is_box {
                P(Pat {
                    id: ast::DUMMY_NODE_ID,
                    node: PatKind::Box(fieldpat),
                    span: lo.to(hi),
                })
            } else {
                fieldpat
            };
            (subpat, fieldname, true)
        };

        Ok(source_map::Spanned {
            span: lo.to(hi),
            node: ast::FieldPat {
                ident: fieldname,
                pat: subpat,
                is_shorthand,
                attrs: attrs.into(),
            }
        })
    }
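
    // Illustrative struct-pattern fields handled by `parse_pat_field` (a sketch):
    //
    //     Foo { x: Some(0), .. }   // "fieldname: pat" form
    //     Foo { ref mut y, .. }    // shorthand form with binding modes
    //     Foo { box z, .. }        // shorthand `box` form (behind the `box_patterns` feature)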

    /// Parses the fields of a struct-like pattern.
    fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<source_map::Spanned<ast::FieldPat>>, bool)> {
        let mut fields = Vec::new();
        let mut etc = false;
        let mut ate_comma = true;
        let mut delayed_err: Option<DiagnosticBuilder<'a>> = None;
        let mut etc_span = None;

        while self.token != token::CloseDelim(token::Brace) {
            let attrs = self.parse_outer_attributes()?;
            let lo = self.span;

            // check that a comma comes after every field
            if !ate_comma {
                let err = self.struct_span_err(self.prev_span, "expected `,`");
                if let Some(mut delayed) = delayed_err {
                    delayed.emit();
                }
                return Err(err);
            }
            ate_comma = false;

            if self.check(&token::DotDot) || self.token == token::DotDotDot {
                etc = true;
                let mut etc_sp = self.span;

                if self.token == token::DotDotDot { // Issue #46718
                    // Accept `...` as if it were `..` to avoid further errors
                    let mut err = self.struct_span_err(self.span,
                                                       "expected field pattern, found `...`");
                    err.span_suggestion(
                        self.span,
                        "to omit remaining fields, use one fewer `.`",
                        "..".to_owned(),
                        Applicability::MachineApplicable
                    );
                    err.emit();
                }
                self.bump();  // `..` || `...`

                if self.token == token::CloseDelim(token::Brace) {
                    etc_span = Some(etc_sp);
                    break;
                }
                let token_str = self.this_token_descr();
                let mut err = self.fatal(&format!("expected `}}`, found {}", token_str));

                err.span_label(self.span, "expected `}`");
                let mut comma_sp = None;
                if self.token == token::Comma { // Issue #49257
                    etc_sp = etc_sp.to(self.sess.source_map().span_until_non_whitespace(self.span));
                    err.span_label(etc_sp,
                                   "`..` must be at the end and cannot have a trailing comma");
                    comma_sp = Some(self.span);
                    self.bump();
                    ate_comma = true;
                }

                etc_span = Some(etc_sp.until(self.span));
                if self.token == token::CloseDelim(token::Brace) {
                    // If the struct looks otherwise well formed, recover and continue.
                    if let Some(sp) = comma_sp {
                        err.span_suggestion_short(
                            sp,
                            "remove this comma",
                            String::new(),
                            Applicability::MachineApplicable,
                        );
                    }
                    err.emit();
                    break;
                } else if self.token.is_ident() && ate_comma {
                    // Accept fields coming after `..,`.
                    // This way we avoid "pattern missing fields" errors afterwards.
                    // We delay this error until the end in order to have a span for a
                    // suggested fix.
                    if let Some(mut delayed_err) = delayed_err {
                        delayed_err.emit();
                        return Err(err);
                    } else {
                        delayed_err = Some(err);
                    }
                } else {
                    if let Some(mut err) = delayed_err {
                        err.emit();
|
|
|
|
}
|
2018-02-19 00:59:33 +00:00
|
|
|
|
return Err(err);
|
2012-08-07 00:01:14 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2018-05-30 05:31:00 +00:00
|
|
|
|
fields.push(match self.parse_pat_field(lo, attrs) {
|
|
|
|
|
Ok(field) => field,
|
|
|
|
|
Err(err) => {
|
|
|
|
|
if let Some(mut delayed_err) = delayed_err {
|
|
|
|
|
delayed_err.emit();
|
|
|
|
|
}
|
|
|
|
|
return Err(err);
|
|
|
|
|
}
|
2017-01-04 03:13:01 +00:00
|
|
|
|
});
|
2018-05-30 05:31:00 +00:00
|
|
|
|
ate_comma = self.eat(&token::Comma);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if let Some(mut err) = delayed_err {
|
|
|
|
|
if let Some(etc_span) = etc_span {
|
2019-01-25 21:03:27 +00:00
|
|
|
|
err.multipart_suggestion(
|
2018-05-30 05:31:00 +00:00
|
|
|
|
"move the `..` to the end of the field list",
|
|
|
|
|
vec![
|
2018-08-23 08:14:52 +00:00
|
|
|
|
(etc_span, String::new()),
|
2018-05-30 18:04:39 +00:00
|
|
|
|
(self.span, format!("{}.. }}", if ate_comma { "" } else { ", " })),
|
2018-05-30 05:31:00 +00:00
|
|
|
|
],
|
2019-01-17 15:18:56 +00:00
|
|
|
|
Applicability::MachineApplicable,
|
2018-05-30 05:31:00 +00:00
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
err.emit();
|
2012-08-07 00:01:14 +00:00
|
|
|
|
}
|
2015-03-28 21:58:51 +00:00
|
|
|
|
return Ok((fields, etc));
|
2012-08-07 00:01:14 +00:00
|
|
|
|
}
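
    // Editor's illustration (not part of the original source): field lists this
    // method accepts or recovers from:
    //
    //     S { x, y: 0, .. }    // ok: `..` elides the remaining fields
    //     S { x, ... }         // recovered: suggests using one fewer `.`
    //     S { .., x }          // recovered: suggests moving `..` to the end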

    fn parse_pat_range_end(&mut self) -> PResult<'a, P<Expr>> {
        if self.token.is_path_start() {
            let lo = self.span;
            let (qself, path) = if self.eat_lt() {
                // Parse a qualified path
                let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
                (Some(qself), path)
            } else {
                // Parse an unqualified path
                (None, self.parse_path(PathStyle::Expr)?)
            };
            let hi = self.prev_span;
            Ok(self.mk_expr(lo.to(hi), ExprKind::Path(qself, path), ThinVec::new()))
        } else {
            self.parse_literal_maybe_minus()
        }
    }
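
    // Editor's illustration (not part of the original source): a range-pattern
    // endpoint is either a (possibly qualified) path or a literal with an
    // optional leading minus, e.g. `0`, `-5`, `'a'`, `std::u8::MAX`,
    // `<T as Trait>::CONST`.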

    // helper function to decide whether to parse as ident binding or to try to do
    // something more complex like range patterns
    fn parse_as_ident(&mut self) -> bool {
        self.look_ahead(1, |t| match *t {
            token::OpenDelim(token::Paren) | token::OpenDelim(token::Brace) |
            token::DotDotDot | token::DotDotEq | token::ModSep | token::Not => Some(false),
            // ensure slice patterns [a, b.., c] and [a, b, c..] don't go into the
            // range pattern branch
            token::DotDot => None,
            _ => Some(true),
        }).unwrap_or_else(|| self.look_ahead(2, |t| match *t {
            token::Comma | token::CloseDelim(token::Bracket) => true,
            _ => false,
        }))
    }
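
    // Editor's note (illustration only): one or two tokens of lookahead decide
    // how a leading identifier is treated, e.g.:
    //
    //     a @ Some(_)     // `@` follows: parsed as an ident binding
    //     a..=b           // `..=` follows: not an ident binding (range pattern)
    //     [a, b.., c]     // `..` then `,` or `]`: `b` stays an ident binding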

    /// A wrapper around `parse_pat` with some special error handling for the
    /// "top-level" patterns in a match arm, `for` loop, `let`, &c. (in contrast
    /// to subpatterns within such).
    fn parse_top_level_pat(&mut self) -> PResult<'a, P<Pat>> {
        let pat = self.parse_pat(None)?;
        if self.token == token::Comma {
            // An unexpected comma after a top-level pattern is a clue that the
            // user (perhaps more accustomed to some other language) forgot the
            // parentheses in what should have been a tuple pattern; return a
            // suggestion-enhanced error here rather than choking on the comma
            // later.
            let comma_span = self.span;
            self.bump();
            if let Err(mut err) = self.parse_pat_list() {
                // We didn't expect this to work anyway; we just wanted
                // to advance to the end of the comma-sequence so we know
                // the span to suggest parenthesizing
                err.cancel();
            }
            let seq_span = pat.span.to(self.prev_span);
            let mut err = self.struct_span_err(comma_span,
                                               "unexpected `,` in pattern");
            if let Ok(seq_snippet) = self.sess.source_map().span_to_snippet(seq_span) {
                err.span_suggestion(
                    seq_span,
                    "try adding parentheses to match on a tuple..",
                    format!("({})", seq_snippet),
                    Applicability::MachineApplicable
                ).span_suggestion(
                    seq_span,
                    "..or a vertical bar to match on multiple alternatives",
                    format!("{}", seq_snippet.replace(",", " |")),
                    Applicability::MachineApplicable
                );
            }
            return Err(err);
        }
        Ok(pat)
    }
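
    // Editor's illustration (not part of the original source): the recovery
    // above fires on code such as
    //
    //     let a, b = (1, 2);
    //
    // and suggests either `let (a, b) = (1, 2);` or `let a | b = (1, 2);`.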

    /// Parses a pattern.
    pub fn parse_pat(&mut self, expected: Option<&'static str>) -> PResult<'a, P<Pat>> {
        self.parse_pat_with_range_pat(true, expected)
    }

    /// Parses a pattern, with a setting whether modern range patterns (e.g., `a..=b`, `a..b`)
    /// are allowed.
    fn parse_pat_with_range_pat(
        &mut self,
        allow_range_pat: bool,
        expected: Option<&'static str>,
    ) -> PResult<'a, P<Pat>> {
        maybe_recover_from_interpolated_ty_qpath!(self, true);
        maybe_whole!(self, NtPat, |x| x);

        let lo = self.span;
        let pat;
        match self.token {
            token::BinOp(token::And) | token::AndAnd => {
                // Parse &pat / &mut pat
                self.expect_and()?;
                let mutbl = self.parse_mutability();
                if let token::Lifetime(ident) = self.token {
                    let mut err = self.fatal(&format!("unexpected lifetime `{}` in pattern",
                                                      ident));
                    err.span_label(self.span, "unexpected lifetime");
                    return Err(err);
                }
                let subpat = self.parse_pat_with_range_pat(false, expected)?;
                pat = PatKind::Ref(subpat, mutbl);
            }
            token::OpenDelim(token::Paren) => {
                // Parse (pat,pat,pat,...) as tuple pattern
                let (fields, ddpos, trailing_comma) = self.parse_parenthesized_pat_list()?;
                pat = if fields.len() == 1 && ddpos.is_none() && !trailing_comma {
                    PatKind::Paren(fields.into_iter().nth(0).unwrap())
                } else {
                    PatKind::Tuple(fields, ddpos)
                };
            }
            token::OpenDelim(token::Bracket) => {
                // Parse [pat,pat,...] as slice pattern
                self.bump();
                let (before, slice, after) = self.parse_pat_vec_elements()?;
                self.expect(&token::CloseDelim(token::Bracket))?;
                pat = PatKind::Slice(before, slice, after);
            }
            // At this point, token != &, &&, (, [
            _ => if self.eat_keyword(kw::Underscore) {
                // Parse _
                pat = PatKind::Wild;
            } else if self.eat_keyword(kw::Mut) {
                // Parse mut ident @ pat / mut ref ident @ pat
                let mutref_span = self.prev_span.to(self.span);
                let binding_mode = if self.eat_keyword(kw::Ref) {
                    self.diagnostic()
                        .struct_span_err(mutref_span, "the order of `mut` and `ref` is incorrect")
                        .span_suggestion(
                            mutref_span,
                            "try switching the order",
                            "ref mut".into(),
                            Applicability::MachineApplicable
                        ).emit();
                    BindingMode::ByRef(Mutability::Mutable)
                } else {
                    BindingMode::ByValue(Mutability::Mutable)
                };
                pat = self.parse_pat_ident(binding_mode)?;
            } else if self.eat_keyword(kw::Ref) {
                // Parse ref ident @ pat / ref mut ident @ pat
                let mutbl = self.parse_mutability();
                pat = self.parse_pat_ident(BindingMode::ByRef(mutbl))?;
            } else if self.eat_keyword(kw::Box) {
                // Parse box pat
                let subpat = self.parse_pat_with_range_pat(false, None)?;
                pat = PatKind::Box(subpat);
            } else if self.token.is_ident() && !self.token.is_reserved_ident() &&
                      self.parse_as_ident() {
                // Parse ident @ pat
                // This can give false positives and parse nullary enums,
                // they are dealt with later in resolve
                let binding_mode = BindingMode::ByValue(Mutability::Immutable);
                pat = self.parse_pat_ident(binding_mode)?;
            } else if self.token.is_path_start() {
                // Parse pattern starting with a path
                let (qself, path) = if self.eat_lt() {
                    // Parse a qualified path
                    let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
                    (Some(qself), path)
                } else {
                    // Parse an unqualified path
                    (None, self.parse_path(PathStyle::Expr)?)
                };
                match self.token {
                    token::Not if qself.is_none() => {
                        // Parse macro invocation
                        self.bump();
                        let (delim, tts) = self.expect_delimited_token_tree()?;
                        let mac = respan(lo.to(self.prev_span), Mac_ { path, tts, delim });
                        pat = PatKind::Mac(mac);
                    }
                    token::DotDotDot | token::DotDotEq | token::DotDot => {
                        let end_kind = match self.token {
                            token::DotDot => RangeEnd::Excluded,
                            token::DotDotDot => RangeEnd::Included(RangeSyntax::DotDotDot),
                            token::DotDotEq => RangeEnd::Included(RangeSyntax::DotDotEq),
                            _ => panic!("can only parse `..`/`...`/`..=` for ranges \
                                         (checked above)"),
                        };
                        let op_span = self.span;
                        // Parse range
                        let span = lo.to(self.prev_span);
                        let begin = self.mk_expr(span, ExprKind::Path(qself, path), ThinVec::new());
                        self.bump();
                        let end = self.parse_pat_range_end()?;
                        let op = Spanned { span: op_span, node: end_kind };
                        pat = PatKind::Range(begin, end, op);
                    }
                    token::OpenDelim(token::Brace) => {
                        if qself.is_some() {
                            let msg = "unexpected `{` after qualified path";
                            let mut err = self.fatal(msg);
                            err.span_label(self.span, msg);
                            return Err(err);
                        }
                        // Parse struct pattern
                        self.bump();
                        let (fields, etc) = self.parse_pat_fields().unwrap_or_else(|mut e| {
                            e.emit();
                            self.recover_stmt();
                            (vec![], true)
                        });
                        self.bump();
                        pat = PatKind::Struct(path, fields, etc);
                    }
                    token::OpenDelim(token::Paren) => {
                        if qself.is_some() {
                            let msg = "unexpected `(` after qualified path";
                            let mut err = self.fatal(msg);
                            err.span_label(self.span, msg);
                            return Err(err);
                        }
                        // Parse tuple struct or enum pattern
                        let (fields, ddpos, _) = self.parse_parenthesized_pat_list()?;
                        pat = PatKind::TupleStruct(path, fields, ddpos)
                    }
                    _ => pat = PatKind::Path(qself, path),
                }
            } else {
                // Try to parse everything else as literal with optional minus
                match self.parse_literal_maybe_minus() {
                    Ok(begin) => {
                        let op_span = self.span;
                        if self.check(&token::DotDot) || self.check(&token::DotDotEq) ||
                                self.check(&token::DotDotDot) {
                            let end_kind = if self.eat(&token::DotDotDot) {
                                RangeEnd::Included(RangeSyntax::DotDotDot)
                            } else if self.eat(&token::DotDotEq) {
                                RangeEnd::Included(RangeSyntax::DotDotEq)
                            } else if self.eat(&token::DotDot) {
                                RangeEnd::Excluded
                            } else {
                                panic!("impossible case: we already matched \
                                        on a range-operator token")
                            };
                            let end = self.parse_pat_range_end()?;
                            let op = Spanned { span: op_span, node: end_kind };
                            pat = PatKind::Range(begin, end, op);
                        } else {
                            pat = PatKind::Lit(begin);
                        }
                    }
                    Err(mut err) => {
                        self.cancel(&mut err);
                        let expected = expected.unwrap_or("pattern");
                        let msg = format!(
                            "expected {}, found {}",
                            expected,
                            self.this_token_descr(),
                        );
                        let mut err = self.fatal(&msg);
                        err.span_label(self.span, format!("expected {}", expected));
                        let sp = self.sess.source_map().start_point(self.span);
                        if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
                            self.sess.expr_parentheses_needed(&mut err, *sp, None);
                        }
                        return Err(err);
                    }
                }
            }
        }

        let pat = P(Pat { node: pat, span: lo.to(self.prev_span), id: ast::DUMMY_NODE_ID });
        let pat = self.maybe_recover_from_bad_qpath(pat, true)?;

        if !allow_range_pat {
            match pat.node {
                PatKind::Range(
                    _, _, Spanned { node: RangeEnd::Included(RangeSyntax::DotDotDot), .. }
                ) => {},
                PatKind::Range(..) => {
                    let mut err = self.struct_span_err(
                        pat.span,
                        "the range pattern here has ambiguous interpretation",
                    );
                    err.span_suggestion(
                        pat.span,
                        "add parentheses to clarify the precedence",
                        format!("({})", pprust::pat_to_string(&pat)),
                        // "ambiguous interpretation" implies that we have to be guessing
                        Applicability::MaybeIncorrect
                    );
                    return Err(err);
                }
                _ => {}
            }
        }

        Ok(pat)
    }
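
    // Editor's note (illustration only): `allow_range_pat` is false for the
    // subpattern of `&`/`&&` and `box`, so a `..`/`..=` range there must be
    // parenthesized, e.g. `&(0..=9)` rather than `&0..=9`.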

    /// Parses `ident` or `ident @ pat`.
    /// Used by the copy foo and ref foo patterns to give a good
    /// error message when parsing mistakes like `ref foo(a, b)`.
    fn parse_pat_ident(&mut self,
                       binding_mode: ast::BindingMode)
                       -> PResult<'a, PatKind> {
        let ident = self.parse_ident()?;
        let sub = if self.eat(&token::At) {
            Some(self.parse_pat(Some("binding pattern"))?)
        } else {
            None
        };

        // just to be friendly, if they write something like
        //   ref Some(i)
        // we end up here with ( as the current token. This shortly
        // leads to a parse error. Note that if there is no explicit
        // binding mode then we do not end up here, because the lookahead
        // will direct us over to parse_enum_variant()
        if self.token == token::OpenDelim(token::Paren) {
            return Err(self.span_fatal(
                self.prev_span,
                "expected identifier, found enum pattern"))
        }

        Ok(PatKind::Ident(binding_mode, ident, sub))
    }
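
    // Editor's illustration (not part of the original source):
    //
    //     ref x @ Some(_)    // accepted here as `ident @ pat`
    //     ref Some(i)        // rejected: "expected identifier, found enum pattern"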

    /// Parses a local variable declaration.
    fn parse_local(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Local>> {
        let lo = self.prev_span;
        let pat = self.parse_top_level_pat()?;

        let (err, ty) = if self.eat(&token::Colon) {
            // Save the state of the parser before parsing type normally, in case there is a `:`
            // instead of an `=` typo.
            let parser_snapshot_before_type = self.clone();
            let colon_sp = self.prev_span;
            match self.parse_ty() {
                Ok(ty) => (None, Some(ty)),
                Err(mut err) => {
                    // Rewind to before attempting to parse the type and continue parsing
                    let parser_snapshot_after_type = self.clone();
                    mem::replace(self, parser_snapshot_before_type);

                    let snippet = self.sess.source_map().span_to_snippet(pat.span).unwrap();
                    err.span_label(pat.span, format!("while parsing the type for `{}`", snippet));
                    (Some((parser_snapshot_after_type, colon_sp, err)), None)
                }
            }
        } else {
            (None, None)
        };
        let init = match (self.parse_initializer(err.is_some()), err) {
            (Ok(init), None) => {  // init parsed, ty parsed
                init
            }
            (Ok(init), Some((_, colon_sp, mut err))) => {  // init parsed, ty error
                // Could parse the type as if it were the initializer, it is likely there was a
                // typo in the code: `:` instead of `=`. Add suggestion and emit the error.
                err.span_suggestion_short(
                    colon_sp,
                    "use `=` if you meant to assign",
                    "=".to_string(),
                    Applicability::MachineApplicable
                );
                err.emit();
                // As this was parsed successfully, continue as if the code has been fixed for the
                // rest of the file. It will still fail due to the emitted error, but we avoid
                // extra noise.
                init
            }
            (Err(mut init_err), Some((snapshot, _, ty_err))) => {  // init error, ty error
                init_err.cancel();
                // Couldn't parse the type nor the initializer, only raise the type error and
                // return to the parser state before parsing the type as the initializer.
                // let x: <parse_error>;
                mem::replace(self, snapshot);
                return Err(ty_err);
            }
            (Err(err), None) => {  // init error, ty parsed
                // Couldn't parse the initializer and we're not attempting to recover a failed
                // parse of the type, return the error.
                return Err(err);
            }
        };
        let hi = if self.token == token::Semi {
            self.span
        } else {
            self.prev_span
        };
        Ok(P(ast::Local {
            ty,
            pat,
            init,
            id: ast::DUMMY_NODE_ID,
            span: lo.to(hi),
            attrs,
            source: LocalSource::Normal,
        }))
    }
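
    // Editor's illustration (assumption, not part of the original source): the
    // snapshot/rewind above recovers `let` bindings where `:` was typed in place
    // of `=`, e.g. `let x: 5;` gets "use `=` if you meant to assign".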

    /// Parses a structure field.
    fn parse_name_and_ty(&mut self,
                         lo: Span,
                         vis: Visibility,
                         attrs: Vec<Attribute>)
                         -> PResult<'a, StructField> {
        let name = self.parse_ident()?;
        self.expect(&token::Colon)?;
        let ty = self.parse_ty()?;
        Ok(StructField {
            span: lo.to(self.prev_span),
            ident: Some(name),
            vis,
            id: ast::DUMMY_NODE_ID,
            ty,
            attrs,
        })
    }
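
    // Editor's illustration (not part of the original source): this parses one
    // `name: Ty` field of a struct declaration, e.g. the `x: i32` in
    // `struct S { x: i32 }`.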

    /// Emits an expected-item-after-attributes error.
    fn expected_item_err(&mut self, attrs: &[Attribute]) -> PResult<'a, ()> {
        let message = match attrs.last() {
            Some(&Attribute { is_sugared_doc: true, .. }) => "expected item after doc comment",
            _ => "expected item after attributes",
        };

        let mut err = self.diagnostic().struct_span_err(self.prev_span, message);
        if attrs.last().unwrap().is_sugared_doc {
            err.span_label(self.prev_span, "this doc comment doesn't document anything");
        }
        Err(err)
    }
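
    // Editor's illustration (assumption, not part of the original source): the
    // doc-comment variant fires on a trailing `/// ...` with no item after it,
    // labelling it "this doc comment doesn't document anything".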

    /// Parse a statement. This stops just before trailing semicolons on everything but items.
    /// e.g., a `StmtKind::Semi` parses to a `StmtKind::Expr`, leaving the trailing `;` unconsumed.
    pub fn parse_stmt(&mut self) -> PResult<'a, Option<Stmt>> {
        Ok(self.parse_stmt_(true))
    }

    fn parse_stmt_(&mut self, macro_legacy_warnings: bool) -> Option<Stmt> {
        self.parse_stmt_without_recovery(macro_legacy_warnings).unwrap_or_else(|mut e| {
            e.emit();
            self.recover_stmt_(SemiColonMode::Break, BlockMode::Ignore);
            None
        })
    }

    fn is_async_block(&self) -> bool {
        self.token.is_keyword(kw::Async) &&
        (
            ( // `async move {`
                self.look_ahead(1, |t| t.is_keyword(kw::Move)) &&
                self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))
            ) || ( // `async {`
                self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace))
            )
        )
    }
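
    // Editor's illustration (not part of the original source): this matches only
    // the block forms `async { .. }` and `async move { .. }`; `async fn` is
    // handled by `is_async_fn` below.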

    fn is_async_fn(&self) -> bool {
        self.token.is_keyword(kw::Async) &&
            self.look_ahead(1, |t| t.is_keyword(kw::Fn))
    }

    fn is_do_catch_block(&self) -> bool {
        self.token.is_keyword(kw::Do) &&
        self.look_ahead(1, |t| t.is_keyword(kw::Catch)) &&
        self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace)) &&
        !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
    }

    fn is_try_block(&self) -> bool {
        self.token.is_keyword(kw::Try) &&
        self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) &&
        self.span.rust_2018() &&
        // prevent `while try {} {}`, `if try {} {} else {}`, etc.
        !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
    }
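
    // Editor's note (illustration only): on the 2018 edition this recognizes
    // expressions such as `let x = try { do_thing()? };`; the restriction check
    // keeps `try` from being taken as a block in headers like `while try {} {}`.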

    fn is_union_item(&self) -> bool {
        self.token.is_keyword(kw::Union) &&
        self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident())
    }

    fn is_crate_vis(&self) -> bool {
        self.token.is_keyword(kw::Crate) && self.look_ahead(1, |t| t != &token::ModSep)
    }

    fn is_existential_type_decl(&self) -> bool {
        self.token.is_keyword(kw::Existential) &&
            self.look_ahead(1, |t| t.is_keyword(kw::Type))
    }

    fn is_auto_trait_item(&self) -> bool {
        // auto trait
        (self.token.is_keyword(kw::Auto)
            && self.look_ahead(1, |t| t.is_keyword(kw::Trait)))
        || // unsafe auto trait
        (self.token.is_keyword(kw::Unsafe) &&
         self.look_ahead(1, |t| t.is_keyword(kw::Auto)) &&
         self.look_ahead(2, |t| t.is_keyword(kw::Trait)))
    }
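
    // Editor's illustration (not part of the original source):
    //
    //     auto trait Marker {}           // first arm
    //     unsafe auto trait Marker {}    // second arm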

    fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility, lo: Span)
                     -> PResult<'a, Option<P<Item>>> {
        let token_lo = self.span;
        let (ident, def) = match self.token {
            token::Ident(ident, false) if ident.name == kw::Macro => {
                self.bump();
                let ident = self.parse_ident()?;
                let tokens = if self.check(&token::OpenDelim(token::Brace)) {
                    match self.parse_token_tree() {
                        TokenTree::Delimited(_, _, tts) => tts,
                        _ => unreachable!(),
                    }
                } else if self.check(&token::OpenDelim(token::Paren)) {
                    let args = self.parse_token_tree();
                    let body = if self.check(&token::OpenDelim(token::Brace)) {
                        self.parse_token_tree()
                    } else {
                        self.unexpected()?;
                        unreachable!()
                    };
                    TokenStream::new(vec![
                        args.into(),
                        TokenTree::Token(token_lo.to(self.prev_span), token::FatArrow).into(),
                        body.into(),
                    ])
                } else {
                    self.unexpected()?;
                    unreachable!()
                };

                (ident, ast::MacroDef { tokens: tokens.into(), legacy: false })
            }
            token::Ident(ident, _) if ident.name == sym::macro_rules &&
                                      self.look_ahead(1, |t| *t == token::Not) => {
                let prev_span = self.prev_span;
                self.complain_if_pub_macro(&vis.node, prev_span);
                self.bump();
                self.bump();

                let ident = self.parse_ident()?;
                let (delim, tokens) = self.expect_delimited_token_tree()?;
                if delim != MacDelimiter::Brace && !self.eat(&token::Semi) {
                    self.report_invalid_macro_expansion_item();
                }

                (ident, ast::MacroDef { tokens: tokens, legacy: true })
            }
            _ => return Ok(None),
        };

        let span = lo.to(self.prev_span);
        Ok(Some(self.mk_item(span, ident, ItemKind::MacroDef(def), vis.clone(), attrs.to_vec())))
    }
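
    // Editor's illustration (not part of the original source): the two forms of
    // macro definition recognized above:
    //
    //     macro m($e:expr) { $e }                    // `legacy: false`
    //     macro_rules! m { ($e:expr) => { $e }; }    // `legacy: true`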
|
|
|
|
|
|
2016-09-06 05:57:58 +00:00
|
|
|
|
fn parse_stmt_without_recovery(&mut self,
|
|
|
|
|
macro_legacy_warnings: bool)
|
|
|
|
|
-> PResult<'a, Option<Stmt>> {
|
2016-11-02 03:03:55 +00:00
|
|
|
|
maybe_whole!(self, NtStmt, |x| Some(x));
|
2012-08-01 21:34:35 +00:00
|
|
|
|
|
2016-03-23 03:01:37 +00:00
|
|
|
|
let attrs = self.parse_outer_attributes()?;
|
2017-03-15 00:22:48 +00:00
|
|
|
|
let lo = self.span;
|
2015-03-13 09:34:51 +00:00
|
|
|
|
|
2019-05-11 14:41:37 +00:00
|
|
|
|
Ok(Some(if self.eat_keyword(kw::Let) {
|
2016-06-17 02:30:01 +00:00
|
|
|
|
Stmt {
|
|
|
|
|
id: ast::DUMMY_NODE_ID,
|
2016-06-26 02:18:04 +00:00
|
|
|
|
node: StmtKind::Local(self.parse_local(attrs.into())?),
|
2017-03-15 00:22:48 +00:00
|
|
|
|
span: lo.to(self.prev_span),
|
2016-06-17 02:30:01 +00:00
|
|
|
|
}
|
2018-01-29 05:12:09 +00:00
|
|
|
|
} else if let Some(macro_def) = self.eat_macro_def(
|
|
|
|
|
&attrs,
|
2018-08-18 10:14:03 +00:00
|
|
|
|
&source_map::respan(lo, VisibilityKind::Inherited),
|
2018-01-29 05:12:09 +00:00
|
|
|
|
lo,
|
|
|
|
|
)? {
|
2017-03-05 05:15:58 +00:00
|
|
|
|
Stmt {
|
|
|
|
|
id: ast::DUMMY_NODE_ID,
|
|
|
|
|
node: StmtKind::Item(macro_def),
|
2017-03-15 00:22:48 +00:00
|
|
|
|
span: lo.to(self.prev_span),
|
2017-03-05 05:15:58 +00:00
|
|
|
|
}
|
2017-12-04 18:26:20 +00:00
|
|
|
|
// Starts like a simple path, being careful to avoid contextual keywords
|
|
|
|
|
// such as a union items, item with `crate` visibility or auto trait items.
|
2017-11-04 20:56:45 +00:00
|
|
|
|
// Our goal here is to parse an arbitrary path `a::b::c` but not something that starts
|
|
|
|
|
// like a path (1 token), but it fact not a path.
|
|
|
|
|
// `union::b::c` - path, `union U { ... }` - not a path.
|
|
|
|
|
// `crate::b::c` - path, `crate struct S;` - not a path.
|
2016-10-19 20:33:41 +00:00
|
|
|
|
} else if self.token.is_path_start() &&
|
|
|
|
|
!self.token.is_qpath_start() &&
|
2017-11-04 20:56:45 +00:00
|
|
|
|
!self.is_union_item() &&
|
2018-01-01 14:42:32 +00:00
|
|
|
|
!self.is_crate_vis() &&
|
2018-07-23 12:56:36 +00:00
|
|
|
|
!self.is_existential_type_decl() &&
|
2019-02-23 18:39:27 +00:00
|
|
|
|
!self.is_auto_trait_item() &&
|
|
|
|
|
!self.is_async_fn() {
|
2016-09-22 05:10:16 +00:00
|
|
|
|
let pth = self.parse_path(PathStyle::Expr)?;
|
2013-05-08 22:27:29 +00:00
|
|
|
|
|
2016-09-22 05:10:16 +00:00
|
|
|
|
if !self.eat(&token::Not) {
|
|
|
|
|
let expr = if self.check(&token::OpenDelim(token::Brace)) {
|
|
|
|
|
self.parse_struct_expr(lo, pth, ThinVec::new())?
|
|
|
|
|
} else {
|
2017-03-15 00:22:48 +00:00
|
|
|
|
let hi = self.prev_span;
|
|
|
|
|
self.mk_expr(lo.to(hi), ExprKind::Path(None, pth), ThinVec::new())
|
2016-09-22 05:10:16 +00:00
|
|
|
|
};
|
|
|
|
|
|
2017-09-08 19:08:01 +00:00
|
|
|
|
let expr = self.with_res(Restrictions::STMT_EXPR, |this| {
|
2016-09-22 05:10:16 +00:00
|
|
|
|
let expr = this.parse_dot_or_call_expr_with(expr, lo, attrs.into())?;
|
|
|
|
|
this.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(expr))
|
|
|
|
|
})?;
|
2012-11-14 04:45:25 +00:00
|
|
|
|
|
2016-09-22 05:10:16 +00:00
|
|
|
|
return Ok(Some(Stmt {
|
|
|
|
|
id: ast::DUMMY_NODE_ID,
|
|
|
|
|
node: StmtKind::Expr(expr),
|
2017-03-15 00:22:48 +00:00
|
|
|
|
span: lo.to(self.prev_span),
|
2016-09-22 05:10:16 +00:00
|
|
|
|
}));
|
|
|
|
|
}
|
2012-11-14 04:45:25 +00:00
|
|
|
|
|
2016-09-22 05:10:16 +00:00
|
|
|
|
// it's a macro invocation
|
2014-10-29 14:47:53 +00:00
|
|
|
|
let id = match self.token {
|
2019-05-11 16:08:09 +00:00
|
|
|
|
token::OpenDelim(_) => Ident::invalid(), // no special identifier
|
2016-03-23 03:01:37 +00:00
|
|
|
|
_ => self.parse_ident()?,
|
2012-11-14 04:45:25 +00:00
|
|
|
|
};
|
|
|
|
|
|
2014-02-23 13:53:59 +00:00
|
|
|
|
// check that we're pointing at delimiters (need to check
|
|
|
|
|
// again after the `if`, because of `parse_ident`
|
|
|
|
|
// consuming more tokens).
|
2018-05-22 15:01:21 +00:00
|
|
|
|
match self.token {
|
|
|
|
|
token::OpenDelim(_) => {}
|
2014-10-29 14:47:53 +00:00
|
|
|
|
_ => {
|
2014-05-28 16:24:28 +00:00
|
|
|
|
// we only expect an ident if we didn't parse one
|
|
|
|
|
// above.
|
2019-05-11 14:41:37 +00:00
|
|
|
|
let ident_str = if id.name == kw::Invalid {
|
2014-05-28 16:24:28 +00:00
|
|
|
|
"identifier, "
|
|
|
|
|
} else {
|
|
|
|
|
""
|
|
|
|
|
};
|
2018-10-28 23:05:07 +00:00
|
|
|
|
let tok_str = self.this_token_descr();
|
|
|
|
|
let mut err = self.fatal(&format!("expected {}`(` or `{{`, found {}",
|
2018-02-19 07:08:23 +00:00
|
|
|
|
ident_str,
|
|
|
|
|
tok_str));
|
|
|
|
|
err.span_label(self.span, format!("expected {}`(` or `{{`", ident_str));
|
|
|
|
|
return Err(err)
|
2014-10-29 14:47:53 +00:00
|
|
|
|
},
|
2018-05-22 15:01:21 +00:00
|
|
|
|
}
|
2014-02-23 13:53:59 +00:00
|
|
|
|
|
2018-05-22 15:01:21 +00:00
|
|
|
|
let (delim, tts) = self.expect_delimited_token_tree()?;
|
2017-03-15 00:22:48 +00:00
|
|
|
|
let hi = self.prev_span;
|
2012-11-14 04:45:25 +00:00
|
|
|
|
|
2018-05-22 15:01:21 +00:00
|
|
|
|
let style = if delim == MacDelimiter::Brace {
|
2016-02-09 10:56:59 +00:00
|
|
|
|
MacStmtStyle::Braces
|
2014-11-14 17:18:10 +00:00
|
|
|
|
} else {
|
2016-02-09 10:56:59 +00:00
|
|
|
|
MacStmtStyle::NoBraces
|
2014-11-14 17:18:10 +00:00
|
|
|
|
};

            if id.name == kw::Invalid {
                let mac = respan(lo.to(hi), Mac_ { path: pth, tts, delim });
                let node = if delim == MacDelimiter::Brace ||
                              self.token == token::Semi || self.token == token::Eof {
                    StmtKind::Mac(P((mac, style, attrs.into())))
                }
                // We used to incorrectly stop parsing macro-expanded statements here.
                // If the next token will be an error anyway but could have parsed with the
                // earlier behavior, stop parsing here and emit a warning to avoid breakage.
                else if macro_legacy_warnings && self.token.can_begin_expr() && match self.token {
                    // These can continue an expression, so we can't stop parsing and warn.
                    token::OpenDelim(token::Paren) | token::OpenDelim(token::Bracket) |
                    token::BinOp(token::Minus) | token::BinOp(token::Star) |
                    token::BinOp(token::And) | token::BinOp(token::Or) |
                    token::AndAnd | token::OrOr |
                    token::DotDot | token::DotDotDot | token::DotDotEq => false,
                    _ => true,
                } {
                    self.warn_missing_semicolon();
                    StmtKind::Mac(P((mac, style, attrs.into())))
                } else {
                    let e = self.mk_expr(mac.span, ExprKind::Mac(mac), ThinVec::new());
                    let e = self.maybe_recover_from_bad_qpath(e, true)?;
                    let e = self.parse_dot_or_call_expr_with(e, lo, attrs.into())?;
                    let e = self.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(e))?;
                    StmtKind::Expr(e)
                };
                Stmt {
                    id: ast::DUMMY_NODE_ID,
                    span: lo.to(hi),
                    node,
                }
            } else {
                // if it has a special ident, it's definitely an item
                //
                // Require a semicolon or braces.
                if style != MacStmtStyle::Braces && !self.eat(&token::Semi) {
                    self.report_invalid_macro_expansion_item();
                }
                let span = lo.to(hi);
                Stmt {
                    id: ast::DUMMY_NODE_ID,
                    span,
                    node: StmtKind::Item({
                        self.mk_item(
                            span, id /*id is good here*/,
                            ItemKind::Mac(respan(span, Mac_ { path: pth, tts, delim })),
                            respan(lo, VisibilityKind::Inherited),
                            attrs)
                    }),
                }
            }
        } else {
            // FIXME: Bad copy of attrs
            let old_directory_ownership =
                mem::replace(&mut self.directory.ownership, DirectoryOwnership::UnownedViaBlock);
            let item = self.parse_item_(attrs.clone(), false, true)?;
            self.directory.ownership = old_directory_ownership;

            match item {
                Some(i) => Stmt {
                    id: ast::DUMMY_NODE_ID,
                    span: lo.to(i.span),
                    node: StmtKind::Item(i),
                },
                None => {
                    let unused_attrs = |attrs: &[Attribute], s: &mut Self| {
                        if !attrs.is_empty() {
                            if s.prev_token_kind == PrevTokenKind::DocComment {
                                s.span_fatal_err(s.prev_span, Error::UselessDocComment).emit();
                            } else if attrs.iter().any(|a| a.style == AttrStyle::Outer) {
                                s.span_err(s.span, "expected statement after outer attribute");
                            }
                        }
                    };

                    // Do not attempt to parse an expression if we're done here.
                    if self.token == token::Semi {
                        unused_attrs(&attrs, self);
                        self.bump();
                        return Ok(None);
                    }

                    if self.token == token::CloseDelim(token::Brace) {
                        unused_attrs(&attrs, self);
                        return Ok(None);
                    }

                    // Remainder are line-expr stmts.
                    let e = self.parse_expr_res(
                        Restrictions::STMT_EXPR, Some(attrs.into()))?;
                    Stmt {
                        id: ast::DUMMY_NODE_ID,
                        span: lo.to(e.span),
                        node: StmtKind::Expr(e),
                    }
                }
            }
        }))
    }

    /// Checks if this expression is a successfully parsed statement.
    fn expr_is_complete(&self, e: &Expr) -> bool {
        self.restrictions.contains(Restrictions::STMT_EXPR) &&
            !classify::expr_requires_semi_to_be_stmt(e)
    }

    /// Parses a block. No inner attributes are allowed.
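    ///
    /// For example (illustrative), this accepts a plain block such as
    /// `{ let x = 1; x + 1 }`; inner attributes like `#![allow(unused)]` at the start of
    /// the block are not handled here (see `parse_inner_attrs_and_block`).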
    pub fn parse_block(&mut self) -> PResult<'a, P<Block>> {
        maybe_whole!(self, NtBlock, |x| x);

        let lo = self.span;

        if !self.eat(&token::OpenDelim(token::Brace)) {
            let sp = self.span;
            let tok = self.this_token_descr();
            let mut e = self.span_fatal(sp, &format!("expected `{{`, found {}", tok));
            let do_not_suggest_help =
                self.token.is_keyword(kw::In) || self.token == token::Colon;

            if self.token.is_ident_named("and") {
                e.span_suggestion_short(
                    self.span,
                    "use `&&` instead of `and` for the boolean operator",
                    "&&".to_string(),
                    Applicability::MaybeIncorrect,
                );
            }
            if self.token.is_ident_named("or") {
                e.span_suggestion_short(
                    self.span,
                    "use `||` instead of `or` for the boolean operator",
                    "||".to_string(),
                    Applicability::MaybeIncorrect,
                );
            }

            // Check to see if the user has written something like
            //
            //     if (cond)
            //       bar;
            //
            // Which is valid in other languages, but not Rust.
            match self.parse_stmt_without_recovery(false) {
                Ok(Some(stmt)) => {
                    if self.look_ahead(1, |t| t == &token::OpenDelim(token::Brace))
                        || do_not_suggest_help {
                        // if the next token is an open brace (e.g., `if a b {`), the place-
                        // inside-a-block suggestion would be more likely wrong than right
                        e.span_label(sp, "expected `{`");
                        return Err(e);
                    }
                    let mut stmt_span = stmt.span;
                    // expand the span to include the semicolon, if it exists
                    if self.eat(&token::Semi) {
                        stmt_span = stmt_span.with_hi(self.prev_span.hi());
                    }
                    let sugg = pprust::to_string(|s| {
                        use crate::print::pprust::{PrintState, INDENT_UNIT};
                        s.ibox(INDENT_UNIT)?;
                        s.bopen()?;
                        s.print_stmt(&stmt)?;
                        s.bclose_maybe_open(stmt.span, INDENT_UNIT, false)
                    });
                    e.span_suggestion(
                        stmt_span,
                        "try placing this code inside a block",
                        sugg,
                        // speculative, has been misleading in the past (closed Issue #46836)
                        Applicability::MaybeIncorrect
                    );
                }
                Err(mut e) => {
                    self.recover_stmt_(SemiColonMode::Break, BlockMode::Ignore);
                    self.cancel(&mut e);
                }
                _ => ()
            }
            e.span_label(sp, "expected `{`");
            return Err(e);
        }

        self.parse_block_tail(lo, BlockCheckMode::Default)
    }

    /// Parses a block. Inner attributes are allowed.
    fn parse_inner_attrs_and_block(&mut self) -> PResult<'a, (Vec<Attribute>, P<Block>)> {
        maybe_whole!(self, NtBlock, |x| (Vec::new(), x));

        let lo = self.span;
        self.expect(&token::OpenDelim(token::Brace))?;
        Ok((self.parse_inner_attributes()?,
            self.parse_block_tail(lo, BlockCheckMode::Default)?))
    }

    /// Parses the rest of a block expression or function body.
    /// Precondition: already parsed the '{'.
    fn parse_block_tail(&mut self, lo: Span, s: BlockCheckMode) -> PResult<'a, P<Block>> {
        let mut stmts = vec![];
        while !self.eat(&token::CloseDelim(token::Brace)) {
            let stmt = match self.parse_full_stmt(false) {
                Err(mut err) => {
                    err.emit();
                    self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore);
                    Some(Stmt {
                        id: ast::DUMMY_NODE_ID,
                        node: StmtKind::Expr(DummyResult::raw_expr(self.span, true)),
                        span: self.span,
                    })
                }
                Ok(stmt) => stmt,
            };
            if let Some(stmt) = stmt {
                stmts.push(stmt);
            } else if self.token == token::Eof {
                break;
            } else {
                // Found only `;` or `}`.
                continue;
            };
        }
        Ok(P(ast::Block {
            stmts,
            id: ast::DUMMY_NODE_ID,
            rules: s,
            span: lo.to(self.prev_span),
        }))
    }

    /// Parses a statement, including the trailing semicolon.
    crate fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> {
        // skip looking for a trailing semicolon when we have an interpolated statement
        maybe_whole!(self, NtStmt, |x| Some(x));

        let mut stmt = match self.parse_stmt_without_recovery(macro_legacy_warnings)? {
            Some(stmt) => stmt,
            None => return Ok(None),
        };

        match stmt.node {
            StmtKind::Expr(ref expr) if self.token != token::Eof => {
                // expression without semicolon
                if classify::expr_requires_semi_to_be_stmt(expr) {
                    // Just check for errors and recover; do not eat semicolon yet.
                    if let Err(mut e) =
                        self.expect_one_of(&[], &[token::Semi, token::CloseDelim(token::Brace)])
                    {
                        e.emit();
                        self.recover_stmt();
                    }
                }
            }
            StmtKind::Local(..) => {
                // We used to incorrectly allow a macro-expanded let statement to lack a semicolon.
                if macro_legacy_warnings && self.token != token::Semi {
                    self.warn_missing_semicolon();
                } else {
                    self.expect_one_of(&[], &[token::Semi])?;
                }
            }
            _ => {}
        }

        if self.eat(&token::Semi) {
            stmt = stmt.add_trailing_semicolon();
        }

        stmt.span = stmt.span.with_hi(self.prev_span.hi());
        Ok(Some(stmt))
    }

    fn warn_missing_semicolon(&self) {
        self.diagnostic().struct_span_warn(self.span, {
            &format!("expected `;`, found {}", self.this_token_descr())
        }).note({
            "This was erroneously allowed and will become a hard error in a future release"
        }).emit();
    }

    fn err_dotdotdot_syntax(&self, span: Span) {
        self.diagnostic().struct_span_err(span, {
            "unexpected token: `...`"
        }).span_suggestion(
            span, "use `..` for an exclusive range", "..".to_owned(),
            Applicability::MaybeIncorrect
        ).span_suggestion(
            span, "or `..=` for an inclusive range", "..=".to_owned(),
            Applicability::MaybeIncorrect
        ).emit();
    }

    /// Parses bounds of a type parameter `BOUND + BOUND + ...`, possibly with trailing `+`.
    ///
    /// ```
    /// BOUND = TY_BOUND | LT_BOUND
    /// LT_BOUND = LIFETIME (e.g., `'a`)
    /// TY_BOUND = TY_BOUND_NOPAREN | (TY_BOUND_NOPAREN)
    /// TY_BOUND_NOPAREN = [?] [for<LT_PARAM_DEFS>] SIMPLE_PATH (e.g., `?for<'a: 'b> m::Trait<'a>`)
    /// ```
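    ///
    /// For example (illustrative input; `m::Trait` and `'a` are placeholders), a bound
    /// list such as the following is accepted here, with each `+`-separated part parsed
    /// as one `GenericBound`:
    ///
    /// ```ignore (illustrative)
    /// 'static + ?Sized + for<'a> m::Trait<'a>
    /// ```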
    fn parse_generic_bounds_common(&mut self,
                                   allow_plus: bool,
                                   colon_span: Option<Span>) -> PResult<'a, GenericBounds> {
        let mut bounds = Vec::new();
        let mut negative_bounds = Vec::new();
        let mut last_plus_span = None;
        let mut was_negative = false;
        loop {
            // This needs to be synchronized with `Token::can_begin_bound`.
            let is_bound_start = self.check_path() || self.check_lifetime() ||
                                 self.check(&token::Not) || // used for error reporting only
                                 self.check(&token::Question) ||
                                 self.check_keyword(kw::For) ||
                                 self.check(&token::OpenDelim(token::Paren));
            if is_bound_start {
                let lo = self.span;
                let has_parens = self.eat(&token::OpenDelim(token::Paren));
                let inner_lo = self.span;
                let is_negative = self.eat(&token::Not);
                let question = if self.eat(&token::Question) { Some(self.prev_span) } else { None };
                if self.token.is_lifetime() {
                    if let Some(question_span) = question {
                        self.span_err(question_span,
                                      "`?` may only modify trait bounds, not lifetime bounds");
                    }
                    bounds.push(GenericBound::Outlives(self.expect_lifetime()));
                    if has_parens {
                        let inner_span = inner_lo.to(self.prev_span);
                        self.expect(&token::CloseDelim(token::Paren))?;
                        let mut err = self.struct_span_err(
                            lo.to(self.prev_span),
                            "parenthesized lifetime bounds are not supported"
                        );
                        if let Ok(snippet) = self.sess.source_map().span_to_snippet(inner_span) {
                            err.span_suggestion_short(
                                lo.to(self.prev_span),
                                "remove the parentheses",
                                snippet.to_owned(),
                                Applicability::MachineApplicable
                            );
                        }
                        err.emit();
                    }
                } else {
                    let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
                    let path = self.parse_path(PathStyle::Type)?;
                    if has_parens {
                        self.expect(&token::CloseDelim(token::Paren))?;
                    }
                    let poly_span = lo.to(self.prev_span);
                    if is_negative {
                        was_negative = true;
                        if let Some(sp) = last_plus_span.or(colon_span) {
                            negative_bounds.push(sp.to(poly_span));
                        }
                    } else {
                        let poly_trait = PolyTraitRef::new(lifetime_defs, path, poly_span);
                        let modifier = if question.is_some() {
                            TraitBoundModifier::Maybe
                        } else {
                            TraitBoundModifier::None
                        };
                        bounds.push(GenericBound::Trait(poly_trait, modifier));
                    }
                }
            } else {
                break
            }

            if !allow_plus || !self.eat_plus() {
                break
            } else {
                last_plus_span = Some(self.prev_span);
            }
        }

        if !negative_bounds.is_empty() || was_negative {
            let plural = negative_bounds.len() > 1;
            let last_span = negative_bounds.last().map(|sp| *sp);
            let mut err = self.struct_span_err(
                negative_bounds,
                "negative trait bounds are not supported",
            );
            if let Some(sp) = last_span {
                err.span_label(sp, "negative trait bounds are not supported");
            }
            if let Some(bound_list) = colon_span {
                let bound_list = bound_list.to(self.prev_span);
                let mut new_bound_list = String::new();
                if !bounds.is_empty() {
                    let mut snippets = bounds.iter().map(|bound| bound.span())
                        .map(|span| self.sess.source_map().span_to_snippet(span));
                    while let Some(Ok(snippet)) = snippets.next() {
                        new_bound_list.push_str(" + ");
                        new_bound_list.push_str(&snippet);
                    }
                    new_bound_list = new_bound_list.replacen(" +", ":", 1);
                }
                err.span_suggestion_hidden(
                    bound_list,
                    &format!("remove the trait bound{}", if plural { "s" } else { "" }),
                    new_bound_list,
                    Applicability::MachineApplicable,
                );
            }
            err.emit();
        }

        return Ok(bounds);
    }

    crate fn parse_generic_bounds(&mut self,
                                  colon_span: Option<Span>) -> PResult<'a, GenericBounds> {
        self.parse_generic_bounds_common(true, colon_span)
    }

    /// Parses bounds of a lifetime parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`.
    ///
    /// ```
    /// BOUND = LT_BOUND (e.g., `'a`)
    /// ```
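    ///
    /// For example (illustrative), this parses a list such as `'a + 'static`, as it
    /// appears after the colon in a declaration like `<'b: 'a + 'static>`.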
    fn parse_lt_param_bounds(&mut self) -> GenericBounds {
        let mut lifetimes = Vec::new();
        while self.check_lifetime() {
            lifetimes.push(ast::GenericBound::Outlives(self.expect_lifetime()));

            if !self.eat_plus() {
                break
            }
        }
        lifetimes
    }

    /// Matches `typaram = IDENT (`?` unbound)? optbounds ( EQ ty )?`.
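    ///
    /// For example (illustrative; `T` and the bounds are placeholders), this accepts a
    /// declaration such as `T: Clone + Default = String`, where both the bound list and
    /// the `= String` default are optional.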
    fn parse_ty_param(&mut self,
                      preceding_attrs: Vec<Attribute>)
                      -> PResult<'a, GenericParam> {
        let ident = self.parse_ident()?;

        // Parse optional colon and param bounds.
        let bounds = if self.eat(&token::Colon) {
            self.parse_generic_bounds(Some(self.prev_span))?
        } else {
            Vec::new()
        };

        let default = if self.eat(&token::Eq) {
            Some(self.parse_ty()?)
        } else {
            None
        };

        Ok(GenericParam {
            ident,
            id: ast::DUMMY_NODE_ID,
            attrs: preceding_attrs.into(),
            bounds,
            kind: GenericParamKind::Type {
                default,
            }
        })
    }

    /// Parses the following grammar:
    ///
    /// TraitItemAssocTy = Ident ["<"...">"] [":" [GenericBounds]] ["where" ...] ["=" Ty]
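    ///
    /// For example (illustrative; the `type` keyword itself is not part of this grammar),
    /// within a trait this covers the part after `type` in an item such as:
    ///
    /// ```ignore (illustrative)
    /// type Item: Clone + 'static = String;
    /// ```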
    fn parse_trait_item_assoc_ty(&mut self)
                                 -> PResult<'a, (Ident, TraitItemKind, ast::Generics)> {
        let ident = self.parse_ident()?;
        let mut generics = self.parse_generics()?;

        // Parse optional colon and param bounds.
        let bounds = if self.eat(&token::Colon) {
            self.parse_generic_bounds(None)?
        } else {
            Vec::new()
        };
        generics.where_clause = self.parse_where_clause()?;

        let default = if self.eat(&token::Eq) {
            Some(self.parse_ty()?)
        } else {
            None
        };
        self.expect(&token::Semi)?;

        Ok((ident, TraitItemKind::Type(bounds, default), generics))
    }

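    /// Parses a const generic parameter of the form `const IDENT: TY`, e.g. (illustrative)
    /// the `const N: usize` in `struct ArrayWrapper<T, const N: usize>`.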
    fn parse_const_param(&mut self, preceding_attrs: Vec<Attribute>) -> PResult<'a, GenericParam> {
        self.expect_keyword(kw::Const)?;
        let ident = self.parse_ident()?;
        self.expect(&token::Colon)?;
        let ty = self.parse_ty()?;

        Ok(GenericParam {
            ident,
            id: ast::DUMMY_NODE_ID,
            attrs: preceding_attrs.into(),
            bounds: Vec::new(),
            kind: GenericParamKind::Const {
                ty,
            }
        })
    }

    /// Parses a (possibly empty) list of lifetime and type parameters, possibly including
    /// a trailing comma and erroneous trailing attributes.
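    ///
    /// For example (illustrative; the names are placeholders), this accepts a list such as
    /// `'a, #[attr] T: Clone + 'a = u32, const N: usize,` (as it would appear between `<`
    /// and `>`), including the trailing comma; an attribute with no parameter after it is
    /// reported as an error.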
    crate fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> {
        let mut params = Vec::new();
        loop {
            let attrs = self.parse_outer_attributes()?;
            if self.check_lifetime() {
                let lifetime = self.expect_lifetime();
                // Parse lifetime parameter.
                let bounds = if self.eat(&token::Colon) {
                    self.parse_lt_param_bounds()
                } else {
                    Vec::new()
                };
                params.push(ast::GenericParam {
                    ident: lifetime.ident,
                    id: lifetime.id,
                    attrs: attrs.into(),
                    bounds,
                    kind: ast::GenericParamKind::Lifetime,
                });
            } else if self.check_keyword(kw::Const) {
                // Parse const parameter.
                params.push(self.parse_const_param(attrs)?);
            } else if self.check_ident() {
                // Parse type parameter.
                params.push(self.parse_ty_param(attrs)?);
            } else {
                // Check for trailing attributes and stop parsing.
                if !attrs.is_empty() {
                    if !params.is_empty() {
                        self.struct_span_err(
                            attrs[0].span,
                            &format!("trailing attribute after generic parameter"),
                        )
                        .span_label(attrs[0].span, "attributes must go before parameters")
                        .emit();
                    } else {
                        self.struct_span_err(
                            attrs[0].span,
                            &format!("attribute without generic parameters"),
                        )
                        .span_label(
                            attrs[0].span,
                            "attributes are only permitted when preceding parameters",
                        )
                        .emit();
                    }
                }
                break
            }

            if !self.eat(&token::Comma) {
                break
            }
        }
        Ok(params)
    }

    /// Parses a set of optional generic type parameter declarations. Where
    /// clauses are not parsed here, and must be added later via
    /// `parse_where_clause()`.
    ///
    /// matches generics = ( ) | ( < > ) | ( < typaramseq ( , )? > ) | ( < lifetimes ( , )? > )
    ///                  | ( < lifetimes , typaramseq ( , )? > )
    /// where   typaramseq = ( typaram ) | ( typaram , typaramseq )
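    ///
    /// For example (illustrative), both an absent list and lists such as `<>`, `<'a, T>`,
    /// or `<'a, T: Trait<'a>, const N: usize>` are accepted here, with the `where` clause
    /// left empty for the caller to fill in.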
    fn parse_generics(&mut self) -> PResult<'a, ast::Generics> {
        let span_lo = self.span;
        if self.eat_lt() {
            let params = self.parse_generic_params()?;
            self.expect_gt()?;
            Ok(ast::Generics {
                params,
                where_clause: WhereClause {
                    id: ast::DUMMY_NODE_ID,
                    predicates: Vec::new(),
                    span: DUMMY_SP,
                },
                span: span_lo.to(self.prev_span),
            })
        } else {
            Ok(ast::Generics::default())
        }
    }

    /// Parses generic args (within a path segment) with recovery for extra leading angle brackets.
    /// For the purposes of understanding the parsing logic of generic arguments, this function
    /// can be thought of being the same as just calling `self.parse_generic_args()` if the source
    /// had the correct amount of leading angle brackets.
    ///
    /// ```ignore (diagnostics)
    /// bar::<<<<T as Foo>::Output>();
    ///      ^^ help: remove extra angle brackets
    /// ```
    fn parse_generic_args_with_leaning_angle_bracket_recovery(
        &mut self,
        style: PathStyle,
        lo: Span,
    ) -> PResult<'a, (Vec<GenericArg>, Vec<TypeBinding>)> {
        // We need to detect whether there are extra leading left angle brackets and produce an
        // appropriate error and suggestion. This cannot be implemented by looking ahead at
        // upcoming tokens for a matching `>` character - if there are unmatched `<` tokens
        // then there won't be matching `>` tokens to find.
        //
        // To explain how this detection works, consider the following example:
        //
        // ```ignore (diagnostics)
        // bar::<<<<T as Foo>::Output>();
        //      ^^ help: remove extra angle brackets
        // ```
        //
        // Parsing of the left angle brackets starts in this function. We start by parsing the
        // `<` token (incrementing the counter of unmatched angle brackets on `Parser` via
        // `eat_lt`):
        //
        // *Upcoming tokens:* `<<<<T as Foo>::Output>;`
        // *Unmatched count:* 1
        // *`parse_path_segment` calls deep:* 0
        //
        // This has the effect of recursing as this function is called if a `<` character
        // is found within the expected generic arguments:
        //
        // *Upcoming tokens:* `<<<T as Foo>::Output>;`
        // *Unmatched count:* 2
        // *`parse_path_segment` calls deep:* 1
        //
        // Eventually we will have recursed until having consumed all of the `<` tokens and
        // this will be reflected in the count:
        //
        // *Upcoming tokens:* `T as Foo>::Output>;`
        // *Unmatched count:* 4
        // *`parse_path_segment` calls deep:* 3
        //
        // The parser will continue until reaching the first `>` - this will decrement the
        // unmatched angle bracket count and return to the parent invocation of this function
        // having succeeded in parsing:
        //
        // *Upcoming tokens:* `::Output>;`
        // *Unmatched count:* 3
        // *`parse_path_segment` calls deep:* 2
        //
        // This will continue until the next `>` character which will also return successfully
        // to the parent invocation of this function and decrement the count:
        //
        // *Upcoming tokens:* `;`
        // *Unmatched count:* 2
        // *`parse_path_segment` calls deep:* 1
        //
        // At this point, this function will expect to find another matching `>` character but
        // won't be able to and will return an error. This will continue all the way up the
        // call stack until the first invocation:
        //
        // *Upcoming tokens:* `;`
        // *Unmatched count:* 2
        // *`parse_path_segment` calls deep:* 0
        //
        // In doing this, we have managed to work out how many unmatched leading left angle
        // brackets there are, but we cannot recover as the unmatched angle brackets have
        // already been consumed. To remedy this, we keep a snapshot of the parser state
        // before we do the above. We can then inspect whether we ended up with a parsing error
        // and unmatched left angle brackets and if so, restore the parser state before we
        // consumed any `<` characters to emit an error and consume the erroneous tokens to
        // recover by attempting to parse again.
        //
        // In practice, the recursion of this function is indirect and there will be other
        // locations that consume some `<` characters - as long as we update the count when
        // this happens, it isn't an issue.

        let is_first_invocation = style == PathStyle::Expr;
        // Take a snapshot before attempting to parse - we can restore this later.
        let snapshot = if is_first_invocation {
            Some(self.clone())
        } else {
            None
        };

        debug!("parse_generic_args_with_leading_angle_bracket_recovery: (snapshotting)");
        match self.parse_generic_args() {
            Ok(value) => Ok(value),
            Err(ref mut e) if is_first_invocation && self.unmatched_angle_bracket_count > 0 => {
                // Cancel error from being unable to find `>`. We know the error
                // must have been this due to a non-zero unmatched angle bracket
                // count.
                e.cancel();

                // Swap `self` with our backup of the parser state before attempting to parse
                // generic arguments.
                let snapshot = mem::replace(self, snapshot.unwrap());

                debug!(
                    "parse_generic_args_with_leading_angle_bracket_recovery: (snapshot failure) \
                     snapshot.count={:?}",
                    snapshot.unmatched_angle_bracket_count,
                );

                // Eat the unmatched angle brackets.
                for _ in 0..snapshot.unmatched_angle_bracket_count {
                    self.eat_lt();
                }

                // Make a span over ${unmatched angle bracket count} characters.
                let span = lo.with_hi(
                    lo.lo() + BytePos(snapshot.unmatched_angle_bracket_count)
                );
                let plural = snapshot.unmatched_angle_bracket_count > 1;
                self.diagnostic()
                    .struct_span_err(
                        span,
                        &format!(
                            "unmatched angle bracket{}",
                            if plural { "s" } else { "" }
                        ),
                    )
                    .span_suggestion(
                        span,
                        &format!(
                            "remove extra angle bracket{}",
                            if plural { "s" } else { "" }
                        ),
                        String::new(),
                        Applicability::MachineApplicable,
                    )
                    .emit();

                // Try again without unmatched angle bracket characters.
                self.parse_generic_args()
            },
            Err(e) => Err(e),
        }
    }

    /// Parses (possibly empty) list of lifetime and type arguments and associated type bindings,
    /// possibly including trailing comma.
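    ///
    /// For example (illustrative; the names are placeholders), an argument list such as
    /// `'a, 8, Vec<u8>, Item = u32` is accepted here (as it would appear between `<` and
    /// `>`), with associated type bindings like `Item = u32` expected to come last.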
    fn parse_generic_args(&mut self) -> PResult<'a, (Vec<GenericArg>, Vec<TypeBinding>)> {
        let mut args = Vec::new();
        let mut bindings = Vec::new();
        let mut misplaced_assoc_ty_bindings: Vec<Span> = Vec::new();
        let mut assoc_ty_bindings: Vec<Span> = Vec::new();

        let args_lo = self.span;

        loop {
            if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
                // Parse lifetime argument.
                args.push(GenericArg::Lifetime(self.expect_lifetime()));
                misplaced_assoc_ty_bindings.append(&mut assoc_ty_bindings);
            } else if self.check_ident() && self.look_ahead(1, |t| t == &token::Eq) {
                // Parse associated type binding.
                let lo = self.span;
                let ident = self.parse_ident()?;
                self.bump();
                let ty = self.parse_ty()?;
                let span = lo.to(self.prev_span);
                bindings.push(TypeBinding {
                    id: ast::DUMMY_NODE_ID,
                    ident,
                    ty,
                    span,
                });
                assoc_ty_bindings.push(span);
            } else if self.check_const_arg() {
                // Parse const argument.
                let expr = if let token::OpenDelim(token::Brace) = self.token {
                    self.parse_block_expr(None, self.span, BlockCheckMode::Default, ThinVec::new())?
                } else if self.token.is_ident() {
                    // FIXME(const_generics): to distinguish between idents for types and consts,
                    // we should introduce a GenericArg::Ident in the AST and distinguish when
                    // lowering to the HIR. For now, idents for const args are not permitted.
                    return Err(
                        self.fatal("identifiers may currently not be used for const generics")
                    );
                } else {
                    self.parse_literal_maybe_minus()?
                };
                let value = AnonConst {
                    id: ast::DUMMY_NODE_ID,
                    value: expr,
                };
                args.push(GenericArg::Const(value));
                misplaced_assoc_ty_bindings.append(&mut assoc_ty_bindings);
            } else if self.check_type() {
                // Parse type argument.
                args.push(GenericArg::Type(self.parse_ty()?));
                misplaced_assoc_ty_bindings.append(&mut assoc_ty_bindings);
            } else {
                break
            }

            if !self.eat(&token::Comma) {
                break
            }
        }

        // FIXME: we would like to report this in ast_validation instead, but we currently do not
        // preserve ordering of generic parameters with respect to associated type binding, so we
        // lose that information after parsing.
        if misplaced_assoc_ty_bindings.len() > 0 {
            let mut err = self.struct_span_err(
                args_lo.to(self.prev_span),
                "associated type bindings must be declared after generic parameters",
            );
            for span in misplaced_assoc_ty_bindings {
                err.span_label(
                    span,
                    "this associated type binding should be moved after the generic parameters",
                );
            }
            err.emit();
        }

        Ok((args, bindings))
    }

    /// Parses an optional where-clause and places it in `generics`.
    ///
    /// ```ignore (only-for-syntax-highlight)
    /// where T : Trait<U, V> + 'b, 'a : 'b
    /// ```
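    ///
    /// For example (illustrative), a clause with a higher-ranked predicate and a trailing
    /// comma is also accepted:
    ///
    /// ```ignore (only-for-syntax-highlight)
    /// where for<'a> F: Fn(&'a u8), T: 'static,
    /// ```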
    fn parse_where_clause(&mut self) -> PResult<'a, WhereClause> {
        let mut where_clause = WhereClause {
            id: ast::DUMMY_NODE_ID,
            predicates: Vec::new(),
            span: DUMMY_SP,
        };

        if !self.eat_keyword(kw::Where) {
            return Ok(where_clause);
        }
        let lo = self.prev_span;

        // We are considering adding generics to the `where` keyword as an alternative higher-rank
        // parameter syntax (as in `where<'a>` or `where<T>`). To avoid that being a breaking
        // change we parse those generics now, but report an error.
        if self.choose_generics_over_qpath() {
            let generics = self.parse_generics()?;
            self.struct_span_err(
                generics.span,
                "generic parameters on `where` clauses are reserved for future use",
            )
            .span_label(generics.span, "currently unsupported")
            .emit();
        }

        loop {
            let lo = self.span;
            if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
                let lifetime = self.expect_lifetime();
                // Bounds starting with a colon are mandatory, but possibly empty.
                self.expect(&token::Colon)?;
                let bounds = self.parse_lt_param_bounds();
                where_clause.predicates.push(ast::WherePredicate::RegionPredicate(
                    ast::WhereRegionPredicate {
                        span: lo.to(self.prev_span),
                        lifetime,
                        bounds,
                    }
                ));
            } else if self.check_type() {
                // Parse optional `for<'a, 'b>`.
                // This `for` is parsed greedily and applies to the whole predicate,
                // the bounded type can have its own `for` applying only to it.
                // Example 1: for<'a> Trait1<'a>: Trait2<'a /*ok*/>
                // Example 2: (for<'a> Trait1<'a>): Trait2<'a /*not ok*/>
                // Example 3: for<'a> for<'b> Trait1<'a, 'b>: Trait2<'a /*ok*/, 'b /*not ok*/>
                let lifetime_defs = self.parse_late_bound_lifetime_defs()?;

                // Parse type with mandatory colon and (possibly empty) bounds,
                // or with mandatory equality sign and the second type.
                let ty = self.parse_ty()?;
                if self.eat(&token::Colon) {
                    let bounds = self.parse_generic_bounds(Some(self.prev_span))?;
                    where_clause.predicates.push(ast::WherePredicate::BoundPredicate(
                        ast::WhereBoundPredicate {
                            span: lo.to(self.prev_span),
                            bound_generic_params: lifetime_defs,
                            bounded_ty: ty,
                            bounds,
                        }
                    ));
                // FIXME: Decide what should be used here, `=` or `==`.
                // FIXME: We are just dropping the binders in lifetime_defs on the floor here.
                } else if self.eat(&token::Eq) || self.eat(&token::EqEq) {
                    let rhs_ty = self.parse_ty()?;
                    where_clause.predicates.push(ast::WherePredicate::EqPredicate(
                        ast::WhereEqPredicate {
                            span: lo.to(self.prev_span),
                            lhs_ty: ty,
                            rhs_ty,
                            id: ast::DUMMY_NODE_ID,
                        }
                    ));
                } else {
                    return self.unexpected();
                }
            } else {
                break
            }

            if !self.eat(&token::Comma) {
                break
            }
        }

        where_clause.span = lo.to(self.prev_span);
        Ok(where_clause)
    }

    fn parse_fn_args(&mut self, named_args: bool, allow_c_variadic: bool)
                     -> PResult<'a, (Vec<Arg> , bool)> {
        self.expect(&token::OpenDelim(token::Paren))?;

        let sp = self.span;
        let mut c_variadic = false;
        let (args, recovered): (Vec<Option<Arg>>, bool) =
            self.parse_seq_to_before_end(
                &token::CloseDelim(token::Paren),
                SeqSep::trailing_allowed(token::Comma),
                |p| {
                    // If the argument is a C-variadic argument we should not
                    // enforce named arguments.
                    let enforce_named_args = if p.token == token::DotDotDot {
                        false
                    } else {
                        named_args
                    };
                    match p.parse_arg_general(enforce_named_args, false,
                                              allow_c_variadic) {
                        Ok(arg) => {
                            if let TyKind::CVarArgs = arg.ty.node {
                                c_variadic = true;
                                if p.token != token::CloseDelim(token::Paren) {
                                    let span = p.span;
                                    p.span_err(span,
                                        "`...` must be the last argument of a C-variadic function");
                                    Ok(None)
                                } else {
                                    Ok(Some(arg))
                                }
                            } else {
                                Ok(Some(arg))
                            }
                        },
                        Err(mut e) => {
                            e.emit();
                            let lo = p.prev_span;
                            // Skip every token until next possible arg or end.
                            p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(token::Paren)]);
                            // Create a placeholder argument for proper arg count (issue #34264).
                            let span = lo.to(p.prev_span);
                            Ok(Some(dummy_arg(span)))
                        }
                    }
                }
            )?;

        if !recovered {
            self.eat(&token::CloseDelim(token::Paren));
        }

        let args: Vec<_> = args.into_iter().filter_map(|x| x).collect();

        if c_variadic && args.is_empty() {
            self.span_err(sp,
                          "C-variadic function must be declared with at least one named argument");
        }

        Ok((args, c_variadic))
    }

    /// Parses the argument list and result type of a function declaration.
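    ///
    /// For example (illustrative), this parses the `(x: u32, ys: &[u8]) -> bool` part of a
    /// function signature; when `allow_c_variadic` is set, a trailing `...` argument is
    /// also accepted, as in `(fmt: *const u8, ...)`.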
    fn parse_fn_decl(&mut self, allow_c_variadic: bool) -> PResult<'a, P<FnDecl>> {

        let (args, c_variadic) = self.parse_fn_args(true, allow_c_variadic)?;
        let ret_ty = self.parse_ret_ty(true)?;

        Ok(P(FnDecl {
            inputs: args,
            output: ret_ty,
            c_variadic,
        }))
    }

    /// Returns the parsed optional self argument and whether a self shortcut was used.
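    ///
    /// For example (illustrative), the forms recognized here include `self`, `mut self`,
    /// `&self`, `&mut self`, `&'a self`, `&'a mut self`, and the explicitly typed
    /// `self: TYPE` / `mut self: TYPE`; `*self` forms are rejected with an error.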
    fn parse_self_arg(&mut self) -> PResult<'a, Option<Arg>> {
        let expect_ident = |this: &mut Self| match this.token {
            // Preserve hygienic context.
            token::Ident(ident, _) =>
                { let span = this.span; this.bump(); Ident::new(ident.name, span) }
            _ => unreachable!()
        };
        let isolated_self = |this: &mut Self, n| {
            this.look_ahead(n, |t| t.is_keyword(kw::SelfLower)) &&
            this.look_ahead(n + 1, |t| t != &token::ModSep)
        };

        // Parse optional self parameter of a method.
        // Only a limited set of initial token sequences is considered self parameters, anything
        // else is parsed as a normal function parameter list, so some lookahead is required.
        let eself_lo = self.span;
        let (eself, eself_ident, eself_hi) = match self.token {
            token::BinOp(token::And) => {
                // &self
                // &mut self
                // &'lt self
                // &'lt mut self
                // &not_self
                (if isolated_self(self, 1) {
                    self.bump();
                    SelfKind::Region(None, Mutability::Immutable)
                } else if self.look_ahead(1, |t| t.is_keyword(kw::Mut)) &&
                          isolated_self(self, 2) {
                    self.bump();
                    self.bump();
                    SelfKind::Region(None, Mutability::Mutable)
                } else if self.look_ahead(1, |t| t.is_lifetime()) &&
                          isolated_self(self, 2) {
                    self.bump();
                    let lt = self.expect_lifetime();
                    SelfKind::Region(Some(lt), Mutability::Immutable)
                } else if self.look_ahead(1, |t| t.is_lifetime()) &&
                          self.look_ahead(2, |t| t.is_keyword(kw::Mut)) &&
                          isolated_self(self, 3) {
                    self.bump();
                    let lt = self.expect_lifetime();
                    self.bump();
                    SelfKind::Region(Some(lt), Mutability::Mutable)
                } else {
                    return Ok(None);
                }, expect_ident(self), self.prev_span)
            }
            token::BinOp(token::Star) => {
                // *self
                // *const self
                // *mut self
                // *not_self
                // Emit special error for `self` cases.
                let msg = "cannot pass `self` by raw pointer";
                (if isolated_self(self, 1) {
                    self.bump();
                    self.struct_span_err(self.span, msg)
                        .span_label(self.span, msg)
                        .emit();
                    SelfKind::Value(Mutability::Immutable)
                } else if self.look_ahead(1, |t| t.is_mutability()) &&
                          isolated_self(self, 2) {
                    self.bump();
                    self.bump();
                    self.struct_span_err(self.span, msg)
                        .span_label(self.span, msg)
                        .emit();
                    SelfKind::Value(Mutability::Immutable)
                } else {
                    return Ok(None);
                }, expect_ident(self), self.prev_span)
            }
            token::Ident(..) => {
                if isolated_self(self, 0) {
                    // self
                    // self: TYPE
                    let eself_ident = expect_ident(self);
                    let eself_hi = self.prev_span;
                    (if self.eat(&token::Colon) {
                        let ty = self.parse_ty()?;
                        SelfKind::Explicit(ty, Mutability::Immutable)
                    } else {
                        SelfKind::Value(Mutability::Immutable)
                    }, eself_ident, eself_hi)
                } else if self.token.is_keyword(kw::Mut) &&
                          isolated_self(self, 1) {
                    // mut self
                    // mut self: TYPE
                    self.bump();
                    let eself_ident = expect_ident(self);
                    let eself_hi = self.prev_span;
                    (if self.eat(&token::Colon) {
                        let ty = self.parse_ty()?;
                        SelfKind::Explicit(ty, Mutability::Mutable)
                    } else {
                        SelfKind::Value(Mutability::Mutable)
|
|
|
|
|
}, eself_ident, eself_hi)
|
2014-05-06 23:37:32 +00:00
|
|
|
|
} else {
|
2016-03-06 12:54:44 +00:00
|
|
|
|
return Ok(None);
|
2014-05-06 23:37:32 +00:00
|
|
|
|
}
|
2014-01-12 00:25:51 +00:00
|
|
|
|
}
|
2016-03-06 12:54:44 +00:00
|
|
|
|
_ => return Ok(None),
|
2012-08-17 22:25:35 +00:00
|
|
|
|
};
|
2016-03-06 12:54:44 +00:00
|
|
|
|
|
2018-08-18 10:14:03 +00:00
|
|
|
|
let eself = source_map::respan(eself_lo.to(eself_hi), eself);
|
2016-03-06 12:54:44 +00:00
|
|
|
|
Ok(Some(Arg::from_self(eself, eself_ident)))
|
2016-03-06 12:54:44 +00:00
|
|
|
|
}
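
    // Illustrative (not an exhaustive list): the receiver forms recognized above include
    //
    //     fn by_value(self)
    //     fn by_mut_value(mut self)
    //     fn by_ref(&self)
    //     fn by_ref_mut(&'a mut self)
    //     fn by_explicit_type(self: TYPE)
    //
    // while a raw-pointer receiver such as `fn f(*const self)` is parsed but rejected with
    // "cannot pass `self` by raw pointer".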

    /// Parses the parameter list and result type of a function that may have a `self` parameter.
    fn parse_fn_decl_with_self<F>(&mut self, parse_arg_fn: F) -> PResult<'a, P<FnDecl>>
        where F: FnMut(&mut Parser<'a>) -> PResult<'a, Arg>,
    {
        self.expect(&token::OpenDelim(token::Paren))?;

        // Parse optional self argument
        let self_arg = self.parse_self_arg()?;

        // Parse the rest of the function parameter list.
        let sep = SeqSep::trailing_allowed(token::Comma);
        let (fn_inputs, recovered) = if let Some(self_arg) = self_arg {
            if self.check(&token::CloseDelim(token::Paren)) {
                (vec![self_arg], false)
            } else if self.eat(&token::Comma) {
                let mut fn_inputs = vec![self_arg];
                let (mut input, recovered) = self.parse_seq_to_before_end(
                    &token::CloseDelim(token::Paren), sep, parse_arg_fn)?;
                fn_inputs.append(&mut input);
                (fn_inputs, recovered)
            } else {
                match self.expect_one_of(&[], &[]) {
                    Err(err) => return Err(err),
                    Ok(recovered) => (vec![self_arg], recovered),
                }
            }
        } else {
            self.parse_seq_to_before_end(&token::CloseDelim(token::Paren), sep, parse_arg_fn)?
        };

        if !recovered {
            // Parse closing paren and return type.
            self.expect(&token::CloseDelim(token::Paren))?;
        }
        Ok(P(FnDecl {
            inputs: fn_inputs,
            output: self.parse_ret_ty(true)?,
            c_variadic: false
        }))
    }
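
    // Illustrative: this handles headers such as `(&self)`, `(&mut self, x: usize)`, or a
    // plain parameter list with no receiver at all; a trailing comma after `self` is
    // accepted because the sequence parser above allows trailing separators.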

    /// Parses the `|arg, arg|` header of a closure.
    fn parse_fn_block_decl(&mut self) -> PResult<'a, P<FnDecl>> {
        let inputs_captures = {
            if self.eat(&token::OrOr) {
                Vec::new()
            } else {
                self.expect(&token::BinOp(token::Or))?;
                let args = self.parse_seq_to_before_tokens(
                    &[&token::BinOp(token::Or), &token::OrOr],
                    SeqSep::trailing_allowed(token::Comma),
                    TokenExpectType::NoExpect,
                    |p| p.parse_fn_block_arg()
                )?.0;
                self.expect_or()?;
                args
            }
        };
        let output = self.parse_ret_ty(true)?;

        Ok(P(FnDecl {
            inputs: inputs_captures,
            output,
            c_variadic: false
        }))
    }
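
    // Illustrative: closure headers accepted above include `||`, `|x|`, and `|x, y: u32|`;
    // an optional `-> Ty` return type is picked up by `parse_ret_ty` right after the
    // closing `|`.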

    /// Parses the name and optional generic types of a function header.
    fn parse_fn_header(&mut self) -> PResult<'a, (Ident, ast::Generics)> {
        let id = self.parse_ident()?;
        let generics = self.parse_generics()?;
        Ok((id, generics))
    }

    fn mk_item(&self, span: Span, ident: Ident, node: ItemKind, vis: Visibility,
               attrs: Vec<Attribute>) -> P<Item> {
        P(Item {
            ident,
            attrs,
            id: ast::DUMMY_NODE_ID,
            node,
            vis,
            span,
            tokens: None,
        })
    }

    /// Parses an item-position function declaration.
    fn parse_item_fn(&mut self,
                     unsafety: Unsafety,
                     mut asyncness: Spanned<IsAsync>,
                     constness: Spanned<Constness>,
                     abi: Abi)
                     -> PResult<'a, ItemInfo> {
        let (ident, mut generics) = self.parse_fn_header()?;
        let allow_c_variadic = abi == Abi::C && unsafety == Unsafety::Unsafe;
        let mut decl = self.parse_fn_decl(allow_c_variadic)?;
        generics.where_clause = self.parse_where_clause()?;
        let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
        self.construct_async_arguments(&mut asyncness, &mut decl);
        let header = FnHeader { unsafety, asyncness, constness, abi };
        Ok((ident, ItemKind::Fn(decl, header, generics, body), Some(inner_attrs)))
    }
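
    // Note (illustrative): `allow_c_variadic` above means a `...` parameter is only accepted
    // when the item is both `unsafe` and `extern "C"`, e.g. something like
    // `unsafe extern "C" fn takes_varargs(x: u8, ...)`.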

    /// Returns `true` if we are looking at `const ID`
    /// (returns `false` for things like `const fn`, etc.).
    fn is_const_item(&self) -> bool {
        self.token.is_keyword(kw::Const) &&
            !self.look_ahead(1, |t| t.is_keyword(kw::Fn)) &&
            !self.look_ahead(1, |t| t.is_keyword(kw::Unsafe))
    }
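
    // Illustrative: `const MAX: u32 = 10;` starts with `const ID` and is treated as a const
    // item, while `const fn f()` and `const unsafe fn g()` fall through to function parsing.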

    /// Parses all the "front matter" for a `fn` declaration, up to
    /// and including the `fn` keyword:
    ///
    /// - `const fn`
    /// - `unsafe fn`
    /// - `const unsafe fn`
    /// - `extern fn`
    /// - etc.
    fn parse_fn_front_matter(&mut self)
        -> PResult<'a, (
            Spanned<Constness>,
            Unsafety,
            Spanned<IsAsync>,
            Abi
        )>
    {
        let is_const_fn = self.eat_keyword(kw::Const);
        let const_span = self.prev_span;
        let unsafety = self.parse_unsafety();
        let asyncness = self.parse_asyncness();
        let asyncness = respan(self.prev_span, asyncness);
        let (constness, unsafety, abi) = if is_const_fn {
            (respan(const_span, Constness::Const), unsafety, Abi::Rust)
        } else {
            let abi = if self.eat_keyword(kw::Extern) {
                self.parse_opt_abi()?.unwrap_or(Abi::C)
            } else {
                Abi::Rust
            };
            (respan(self.prev_span, Constness::NotConst), unsafety, abi)
        };
        if !self.eat_keyword(kw::Fn) {
            // It is possible for `expect_one_of` to recover given the contents of
            // `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't
            // account for this.
            if !self.expect_one_of(&[], &[])? { unreachable!() }
        }
        Ok((constness, unsafety, asyncness, abi))
    }
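
    // Illustrative front matter accepted above (everything up to and including `fn`):
    //
    //     fn
    //     const fn
    //     unsafe fn
    //     const unsafe fn
    //     extern "C" fn
    //     async fn
    //
    // Note that the `const` branch above always uses `Abi::Rust`.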

    /// Parses an impl item.
    pub fn parse_impl_item(&mut self, at_end: &mut bool) -> PResult<'a, ImplItem> {
        maybe_whole!(self, NtImplItem, |x| x);
        let attrs = self.parse_outer_attributes()?;
        let mut unclosed_delims = vec![];
        let (mut item, tokens) = self.collect_tokens(|this| {
            let item = this.parse_impl_item_(at_end, attrs);
            unclosed_delims.append(&mut this.unclosed_delims);
            item
        })?;
        self.unclosed_delims.append(&mut unclosed_delims);

        // See `parse_item` for why this clause is here.
        if !item.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) {
            item.tokens = Some(tokens);
        }
        Ok(item)
    }

    fn parse_impl_item_(&mut self,
                        at_end: &mut bool,
                        mut attrs: Vec<Attribute>) -> PResult<'a, ImplItem> {
        let lo = self.span;
        let vis = self.parse_visibility(false)?;
        let defaultness = self.parse_defaultness();
        let (name, node, generics) = if let Some(type_) = self.eat_type() {
            let (name, alias, generics) = type_?;
            let kind = match alias {
                AliasKind::Weak(typ) => ast::ImplItemKind::Type(typ),
                AliasKind::Existential(bounds) => ast::ImplItemKind::Existential(bounds),
            };
            (name, kind, generics)
        } else if self.is_const_item() {
            // This parses the grammar:
            // ImplItemConst = "const" Ident ":" Ty "=" Expr ";"
            self.expect_keyword(kw::Const)?;
            let name = self.parse_ident()?;
            self.expect(&token::Colon)?;
            let typ = self.parse_ty()?;
            self.expect(&token::Eq)?;
            let expr = self.parse_expr()?;
            self.expect(&token::Semi)?;
            (name, ast::ImplItemKind::Const(typ, expr), ast::Generics::default())
        } else {
            let (name, inner_attrs, generics, node) = self.parse_impl_method(&vis, at_end)?;
            attrs.extend(inner_attrs);
            (name, node, generics)
        };

        Ok(ImplItem {
            id: ast::DUMMY_NODE_ID,
            span: lo.to(self.prev_span),
            ident: name,
            vis,
            defaultness,
            attrs,
            generics,
            node,
            tokens: None,
        })
    }
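
    // Illustrative: an associated const matching the `ImplItemConst` grammar noted above, e.g.
    //
    //     impl Foo {
    //         const LEN: usize = 16;
    //     }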

    fn complain_if_pub_macro(&self, vis: &VisibilityKind, sp: Span) {
        match *vis {
            VisibilityKind::Inherited => {}
            _ => {
                let is_macro_rules: bool = match self.token {
                    token::Ident(sid, _) => sid.name == Symbol::intern("macro_rules"),
                    _ => false,
                };
                let mut err = if is_macro_rules {
                    let mut err = self.diagnostic()
                        .struct_span_err(sp, "can't qualify macro_rules invocation with `pub`");
                    err.span_suggestion(
                        sp,
                        "try exporting the macro",
                        "#[macro_export]".to_owned(),
                        Applicability::MaybeIncorrect // speculative
                    );
                    err
                } else {
                    let mut err = self.diagnostic()
                        .struct_span_err(sp, "can't qualify macro invocation with `pub`");
                    err.help("try adjusting the macro to put `pub` inside the invocation");
                    err
                };
                err.emit();
            }
        }
    }
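
    // Illustrative: writing `pub macro_rules! m { () => {} }` hits the `macro_rules` branch
    // above, which suggests `#[macro_export]` instead (marked `MaybeIncorrect` because the
    // rewrite is speculative).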

    fn missing_assoc_item_kind_err(&self, item_type: &str, prev_span: Span)
                                   -> DiagnosticBuilder<'a>
    {
        let expected_kinds = if item_type == "extern" {
            "missing `fn`, `type`, or `static`"
        } else {
            "missing `fn`, `type`, or `const`"
        };

        // Given this code `path(`, it seems like this is not
        // setting the visibility of a macro invocation, but rather
        // a mistyped method declaration.
        // Create a diagnostic pointing out that `fn` is missing.
        //
        //     x |     pub path(&self) {
        //       |        ^ missing `fn`, `type`, or `const`
        //     pub  path(
        //        ^^ `sp` below will point to this
        let sp = prev_span.between(self.prev_span);
        let mut err = self.diagnostic().struct_span_err(
            sp,
            &format!("{} for {}-item declaration",
                     expected_kinds, item_type));
        err.span_label(sp, expected_kinds);
        err
    }

    /// Parse a method or a macro invocation in a trait impl.
    fn parse_impl_method(&mut self, vis: &Visibility, at_end: &mut bool)
                         -> PResult<'a, (Ident, Vec<Attribute>, ast::Generics,
                                         ast::ImplItemKind)> {
        // code copied from parse_macro_use_or_failure... abstraction!
        if let Some(mac) = self.parse_assoc_macro_invoc("impl", Some(vis), at_end)? {
            // method macro
            Ok((Ident::invalid(), vec![], ast::Generics::default(),
                ast::ImplItemKind::Macro(mac)))
        } else {
            let (constness, unsafety, mut asyncness, abi) = self.parse_fn_front_matter()?;
            let ident = self.parse_ident()?;
            let mut generics = self.parse_generics()?;
            let mut decl = self.parse_fn_decl_with_self(|p| p.parse_arg())?;
            generics.where_clause = self.parse_where_clause()?;
            self.construct_async_arguments(&mut asyncness, &mut decl);
            *at_end = true;
            let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
            let header = ast::FnHeader { abi, unsafety, constness, asyncness };
            Ok((ident, inner_attrs, generics, ast::ImplItemKind::Method(
                ast::MethodSig { header, decl },
                body
            )))
        }
    }

    /// Parses `trait Foo { ... }` or `trait Foo = Bar;`.
    fn parse_item_trait(&mut self, is_auto: IsAuto, unsafety: Unsafety) -> PResult<'a, ItemInfo> {
        let ident = self.parse_ident()?;
        let mut tps = self.parse_generics()?;

        // Parse optional colon and supertrait bounds.
        let bounds = if self.eat(&token::Colon) {
            self.parse_generic_bounds(Some(self.prev_span))?
        } else {
            Vec::new()
        };

        if self.eat(&token::Eq) {
            // it's a trait alias
            let bounds = self.parse_generic_bounds(None)?;
            tps.where_clause = self.parse_where_clause()?;
            self.expect(&token::Semi)?;
            if is_auto == IsAuto::Yes {
                let msg = "trait aliases cannot be `auto`";
                self.struct_span_err(self.prev_span, msg)
                    .span_label(self.prev_span, msg)
                    .emit();
            }
            if unsafety != Unsafety::Normal {
                let msg = "trait aliases cannot be `unsafe`";
                self.struct_span_err(self.prev_span, msg)
                    .span_label(self.prev_span, msg)
                    .emit();
            }
            Ok((ident, ItemKind::TraitAlias(tps, bounds), None))
        } else {
            // it's a normal trait
            tps.where_clause = self.parse_where_clause()?;
            self.expect(&token::OpenDelim(token::Brace))?;
            let mut trait_items = vec![];
            while !self.eat(&token::CloseDelim(token::Brace)) {
                if let token::DocComment(_) = self.token {
                    if self.look_ahead(1,
                        |tok| tok == &token::Token::CloseDelim(token::Brace)) {
                        let mut err = self.diagnostic().struct_span_err_with_code(
                            self.span,
                            "found a documentation comment that doesn't document anything",
                            DiagnosticId::Error("E0584".into()),
                        );
                        err.help("doc comments must come before what they document, maybe a \
                            comment was intended with `//`?",
                        );
                        err.emit();
                        self.bump();
                        continue;
                    }
                }
                let mut at_end = false;
                match self.parse_trait_item(&mut at_end) {
                    Ok(item) => trait_items.push(item),
                    Err(mut e) => {
                        e.emit();
                        if !at_end {
                            self.recover_stmt_(SemiColonMode::Break, BlockMode::Break);
                        }
                    }
                }
            }
            Ok((ident, ItemKind::Trait(is_auto, unsafety, tps, bounds, trait_items), None))
        }
    }
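
    // Illustrative inputs for the two paths above:
    //
    //     trait Foo: Bar { fn f(&self); }    // normal trait with supertrait bounds
    //     trait Alias = Bar + Send;          // trait alias (the `=` branch)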

    fn choose_generics_over_qpath(&self) -> bool {
        // There's an ambiguity between generic parameters and qualified paths in impls.
        // If we see `<` it may start both, so we have to inspect some following tokens.
        // The following combinations can only start generics,
        // but not qualified paths (with one exception):
        //     `<` `>` - empty generic parameters
        //     `<` `#` - generic parameters with attributes
        //     `<` (LIFETIME|IDENT) `>` - single generic parameter
        //     `<` (LIFETIME|IDENT) `,` - first generic parameter in a list
        //     `<` (LIFETIME|IDENT) `:` - generic parameter with bounds
        //     `<` (LIFETIME|IDENT) `=` - generic parameter with a default
        //     `<` const                - generic const parameter
        // The only truly ambiguous case is
        //     `<` IDENT `>` `::` IDENT ...
        // We disambiguate it in favor of generics (`impl<T> ::absolute::Path<T> { ... }`)
        // because this is what is almost always expected in practice; qualified paths in impls
        // (`impl <Type>::AssocTy { ... }`) aren't even allowed by the type checker at the moment.
        self.token == token::Lt &&
            (self.look_ahead(1, |t| t == &token::Pound || t == &token::Gt) ||
             self.look_ahead(1, |t| t.is_lifetime() || t.is_ident()) &&
             self.look_ahead(2, |t| t == &token::Gt || t == &token::Comma ||
                                    t == &token::Colon || t == &token::Eq) ||
             self.look_ahead(1, |t| t.is_keyword(kw::Const)))
    }
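
    // Illustrative: `impl<T> Foo<T> { ... }` matches the `<` IDENT `>` prefix and is read as
    // generics, whereas a qualified path like `impl <Foo as Trait>::Assoc { ... }` does not
    // match any of the generics-only prefixes listed above.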

    fn parse_impl_body(&mut self) -> PResult<'a, (Vec<ImplItem>, Vec<Attribute>)> {
        self.expect(&token::OpenDelim(token::Brace))?;
        let attrs = self.parse_inner_attributes()?;

        let mut impl_items = Vec::new();
        while !self.eat(&token::CloseDelim(token::Brace)) {
            let mut at_end = false;
            match self.parse_impl_item(&mut at_end) {
                Ok(impl_item) => impl_items.push(impl_item),
                Err(mut err) => {
                    err.emit();
                    if !at_end {
                        self.recover_stmt_(SemiColonMode::Break, BlockMode::Break);
                    }
                }
            }
        }
        Ok((impl_items, attrs))
    }

    /// Parses an implementation item, `impl` keyword is already parsed.
    ///
    ///    impl<'a, T> TYPE { /* impl items */ }
    ///    impl<'a, T> TRAIT for TYPE { /* impl items */ }
    ///    impl<'a, T> !TRAIT for TYPE { /* impl items */ }
    ///
    /// We actually parse slightly more relaxed grammar for better error reporting and recovery.
    ///     `impl` GENERICS `!`? TYPE `for`? (TYPE | `..`) (`where` PREDICATES)? `{` BODY `}`
    ///     `impl` GENERICS `!`? TYPE (`where` PREDICATES)? `{` BODY `}`
    fn parse_item_impl(&mut self, unsafety: Unsafety, defaultness: Defaultness)
                       -> PResult<'a, ItemInfo> {
        // First, parse generic parameters if necessary.
        let mut generics = if self.choose_generics_over_qpath() {
            self.parse_generics()?
        } else {
            ast::Generics::default()
        };

        // Disambiguate `impl !Trait for Type { ... }` and `impl ! { ... }` for the never type.
        let polarity = if self.check(&token::Not) && self.look_ahead(1, |t| t.can_begin_type()) {
            self.bump(); // `!`
            ast::ImplPolarity::Negative
        } else {
            ast::ImplPolarity::Positive
        };

        // Parse both types and traits as a type, then reinterpret if necessary.
        let err_path = |span| ast::Path::from_ident(Ident::new(kw::Invalid, span));
        let ty_first = if self.token.is_keyword(kw::For) &&
                          self.look_ahead(1, |t| t != &token::Lt) {
            let span = self.prev_span.between(self.span);
            self.struct_span_err(span, "missing trait in a trait impl").emit();
            P(Ty { node: TyKind::Path(None, err_path(span)), span, id: ast::DUMMY_NODE_ID })
        } else {
            self.parse_ty()?
        };

        // If `for` is missing we try to recover.
        let has_for = self.eat_keyword(kw::For);
        let missing_for_span = self.prev_span.between(self.span);

        let ty_second = if self.token == token::DotDot {
            // We need to report this error after `cfg` expansion for compatibility reasons
            self.bump(); // `..`, do not add it to expected tokens
            Some(DummyResult::raw_ty(self.prev_span, true))
        } else if has_for || self.token.can_begin_type() {
            Some(self.parse_ty()?)
        } else {
            None
        };

        generics.where_clause = self.parse_where_clause()?;

        let (impl_items, attrs) = self.parse_impl_body()?;

        let item_kind = match ty_second {
            Some(ty_second) => {
                // impl Trait for Type
                if !has_for {
                    self.struct_span_err(missing_for_span, "missing `for` in a trait impl")
                        .span_suggestion_short(
                            missing_for_span,
                            "add `for` here",
                            " for ".to_string(),
                            Applicability::MachineApplicable,
                        ).emit();
                }

                let ty_first = ty_first.into_inner();
                let path = match ty_first.node {
                    // This notably includes paths passed through `ty` macro fragments (#46438).
                    TyKind::Path(None, path) => path,
                    _ => {
                        self.span_err(ty_first.span, "expected a trait, found type");
                        err_path(ty_first.span)
                    }
                };
                let trait_ref = TraitRef { path, ref_id: ty_first.id };

                ItemKind::Impl(unsafety, polarity, defaultness,
                               generics, Some(trait_ref), ty_second, impl_items)
            }
            None => {
                // impl Type
                ItemKind::Impl(unsafety, polarity, defaultness,
                               generics, None, ty_first, impl_items)
            }
        };

        Ok((Ident::invalid(), item_kind, Some(attrs)))
    }
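
    // Illustrative recovery: `impl Debug MyType { ... }` (with the `for` left out) is still
    // parsed here and reported as "missing `for` in a trait impl", with a machine-applicable
    // suggestion to insert ` for `, as emitted above.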

    fn parse_late_bound_lifetime_defs(&mut self) -> PResult<'a, Vec<GenericParam>> {
        if self.eat_keyword(kw::For) {
            self.expect_lt()?;
            let params = self.parse_generic_params()?;
            self.expect_gt()?;
            // We rely on AST validation to rule out invalid cases: There must not be type
            // parameters, and the lifetime parameters must not have bounds.
            Ok(params)
        } else {
            Ok(Vec::new())
        }
    }
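
    // Illustrative: this consumes the `for<'a, 'b>` prefix of higher-ranked bounds such as
    // `for<'a> Fn(&'a u8) -> &'a u8`; rejecting type parameters or bounds inside the `<...>`
    // is left to AST validation, as noted above.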

    /// Parses `struct Foo { ... }`.
    fn parse_item_struct(&mut self) -> PResult<'a, ItemInfo> {
        let class_name = self.parse_ident()?;

        let mut generics = self.parse_generics()?;

        // There is a special case worth noting here, as reported in issue #17904.
        // If we are parsing a tuple struct it is the case that the where clause
        // should follow the field list. Like so:
        //
        // struct Foo<T>(T) where T: Copy;
        //
        // If we are parsing a normal record-style struct it is the case
        // that the where clause comes before the body, and after the generics.
        // So if we look ahead and see a brace or a where-clause we begin
        // parsing a record style struct.
        //
        // Otherwise if we look ahead and see a paren we parse a tuple-style
        // struct.

        let vdata = if self.token.is_keyword(kw::Where) {
            generics.where_clause = self.parse_where_clause()?;
            if self.eat(&token::Semi) {
                // If we see a: `struct Foo<T> where T: Copy;` style decl.
                VariantData::Unit(ast::DUMMY_NODE_ID)
            } else {
                // If we see: `struct Foo<T> where T: Copy { ... }`
                let (fields, recovered) = self.parse_record_struct_body()?;
                VariantData::Struct(fields, recovered)
            }
        // No `where` so: `struct Foo<T>;`
        } else if self.eat(&token::Semi) {
            VariantData::Unit(ast::DUMMY_NODE_ID)
        // Record-style struct definition
        } else if self.token == token::OpenDelim(token::Brace) {
            let (fields, recovered) = self.parse_record_struct_body()?;
            VariantData::Struct(fields, recovered)
        // Tuple-style struct definition with optional where-clause.
        } else if self.token == token::OpenDelim(token::Paren) {
            let body = VariantData::Tuple(self.parse_tuple_struct_body()?, ast::DUMMY_NODE_ID);
            generics.where_clause = self.parse_where_clause()?;
            self.expect(&token::Semi)?;
            body
        } else {
            let token_str = self.this_token_descr();
            let mut err = self.fatal(&format!(
                "expected `where`, `{{`, `(`, or `;` after struct name, found {}",
                token_str
            ));
            err.span_label(self.span, "expected `where`, `{`, `(`, or `;` after struct name");
            return Err(err);
        };

        Ok((class_name, ItemKind::Struct(vdata, generics), None))
    }
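
    // Illustrative inputs for the branches above:
    //
    //     struct Unit;
    //     struct Tuple<T>(T) where T: Copy;             // `where` after the field list
    //     struct Record<T> where T: Copy { field: T }   // `where` before the body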

    /// Parses `union Foo { ... }`.
    fn parse_item_union(&mut self) -> PResult<'a, ItemInfo> {
        let class_name = self.parse_ident()?;

        let mut generics = self.parse_generics()?;

        let vdata = if self.token.is_keyword(kw::Where) {
            generics.where_clause = self.parse_where_clause()?;
            let (fields, recovered) = self.parse_record_struct_body()?;
            VariantData::Struct(fields, recovered)
        } else if self.token == token::OpenDelim(token::Brace) {
            let (fields, recovered) = self.parse_record_struct_body()?;
            VariantData::Struct(fields, recovered)
        } else {
            let token_str = self.this_token_descr();
            let mut err = self.fatal(&format!(
                "expected `where` or `{{` after union name, found {}", token_str));
            err.span_label(self.span, "expected `where` or `{` after union name");
            return Err(err);
        };

        Ok((class_name, ItemKind::Union(vdata, generics), None))
    }

    fn parse_record_struct_body(
        &mut self,
    ) -> PResult<'a, (Vec<StructField>, /* recovered */ bool)> {
        let mut fields = Vec::new();
        let mut recovered = false;
        if self.eat(&token::OpenDelim(token::Brace)) {
            while self.token != token::CloseDelim(token::Brace) {
                let field = self.parse_struct_decl_field().map_err(|e| {
                    self.recover_stmt();
                    recovered = true;
                    e
                });
                match field {
                    Ok(field) => fields.push(field),
                    Err(mut err) => {
                        err.emit();
                    }
                }
            }
            self.eat(&token::CloseDelim(token::Brace));
        } else {
            let token_str = self.this_token_descr();
            let mut err = self.fatal(&format!(
                "expected `where`, or `{{` after struct name, found {}", token_str));
            err.span_label(self.span, "expected `where`, or `{` after struct name");
            return Err(err);
        }

        Ok((fields, recovered))
    }

    fn parse_tuple_struct_body(&mut self) -> PResult<'a, Vec<StructField>> {
        // This is the case where we find `struct Foo<T>(T) where T: Copy;`
        // Unit like structs are handled in parse_item_struct function
        let fields = self.parse_unspanned_seq(
            &token::OpenDelim(token::Paren),
            &token::CloseDelim(token::Paren),
            SeqSep::trailing_allowed(token::Comma),
            |p| {
                let attrs = p.parse_outer_attributes()?;
                let lo = p.span;
                let vis = p.parse_visibility(true)?;
                let ty = p.parse_ty()?;
                Ok(StructField {
                    span: lo.to(ty.span),
                    vis,
                    ident: None,
                    id: ast::DUMMY_NODE_ID,
                    ty,
                    attrs,
                })
            })?;

        Ok(fields)
    }

    /// Parses a structure field declaration.
    fn parse_single_struct_field(&mut self,
                                 lo: Span,
                                 vis: Visibility,
                                 attrs: Vec<Attribute>)
                                 -> PResult<'a, StructField> {
        let mut seen_comma: bool = false;
        let a_var = self.parse_name_and_ty(lo, vis, attrs)?;
        if self.token == token::Comma {
            seen_comma = true;
        }
        match self.token {
            token::Comma => {
                self.bump();
            }
            token::CloseDelim(token::Brace) => {}
            token::DocComment(_) => {
                let previous_span = self.prev_span;
                let mut err = self.span_fatal_err(self.span, Error::UselessDocComment);
                self.bump(); // consume the doc comment
                let comma_after_doc_seen = self.eat(&token::Comma);
                // `seen_comma` is always false here, because we are inside a doc comment arm;
                // the condition is spelled out to make the code more readable.
                if seen_comma == false && comma_after_doc_seen == true {
                    seen_comma = true;
                }
                if comma_after_doc_seen || self.token == token::CloseDelim(token::Brace) {
                    err.emit();
                } else {
                    if seen_comma == false {
                        let sp = self.sess.source_map().next_point(previous_span);
                        err.span_suggestion(
                            sp,
                            "missing comma here",
                            ",".into(),
                            Applicability::MachineApplicable
                        );
                    }
                    return Err(err);
                }
            }
            _ => {
                let sp = self.sess.source_map().next_point(self.prev_span);
                let mut err = self.struct_span_err(sp, &format!("expected `,`, or `}}`, found {}",
                                                                self.this_token_descr()));
                if self.token.is_ident() {
                    // This is likely another field; emit the diagnostic and keep going
                    err.span_suggestion(
                        sp,
                        "try adding a comma",
                        ",".into(),
                        Applicability::MachineApplicable,
                    );
                    err.emit();
                } else {
                    return Err(err)
                }
            }
        }
        Ok(a_var)
    }

    /// Parses an element of a struct declaration.
    fn parse_struct_decl_field(&mut self) -> PResult<'a, StructField> {
        let attrs = self.parse_outer_attributes()?;
        let lo = self.span;
        let vis = self.parse_visibility(false)?;
        self.parse_single_struct_field(lo, vis, attrs)
    }

    /// Parses `pub`, `pub(crate)` and `pub(in path)` plus shortcuts `crate` for `pub(crate)`,
    /// `pub(self)` for `pub(in self)` and `pub(super)` for `pub(in super)`.
    /// If the following element can't be a tuple (i.e., it's a function definition), then
    /// it's not a tuple struct field, and the contents within the parentheses aren't valid,
    /// so emit a proper diagnostic.
    pub fn parse_visibility(&mut self, can_take_tuple: bool) -> PResult<'a, Visibility> {
        maybe_whole!(self, NtVis, |x| x);

        self.expected_tokens.push(TokenType::Keyword(kw::Crate));
        if self.is_crate_vis() {
            self.bump(); // `crate`
            return Ok(respan(self.prev_span, VisibilityKind::Crate(CrateSugar::JustCrate)));
        }

        if !self.eat_keyword(kw::Pub) {
            // We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
            // keyword to grab a span from for inherited visibility; an empty span at the
            // beginning of the current token would seem to be the "Schelling span".
            return Ok(respan(self.span.shrink_to_lo(), VisibilityKind::Inherited))
        }
        let lo = self.prev_span;

        if self.check(&token::OpenDelim(token::Paren)) {
            // We don't `self.bump()` the `(` yet because this might be a struct definition where
            // `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
            // Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
            // by the following tokens.
            if self.look_ahead(1, |t| t.is_keyword(kw::Crate)) &&
                self.look_ahead(2, |t| t != &token::ModSep) // account for `pub(crate::foo)`
            {
                // `pub(crate)`
                self.bump(); // `(`
                self.bump(); // `crate`
                self.expect(&token::CloseDelim(token::Paren))?; // `)`
                let vis = respan(
                    lo.to(self.prev_span),
                    VisibilityKind::Crate(CrateSugar::PubCrate),
                );
                return Ok(vis)
            } else if self.look_ahead(1, |t| t.is_keyword(kw::In)) {
                // `pub(in path)`
                self.bump(); // `(`
                self.bump(); // `in`
                let path = self.parse_path(PathStyle::Mod)?; // `path`
                self.expect(&token::CloseDelim(token::Paren))?; // `)`
                let vis = respan(lo.to(self.prev_span), VisibilityKind::Restricted {
                    path: P(path),
                    id: ast::DUMMY_NODE_ID,
                });
                return Ok(vis)
            } else if self.look_ahead(2, |t| t == &token::CloseDelim(token::Paren)) &&
                      self.look_ahead(1, |t| t.is_keyword(kw::Super) ||
                                             t.is_keyword(kw::SelfLower))
            {
                // `pub(self)` or `pub(super)`
                self.bump(); // `(`
                let path = self.parse_path(PathStyle::Mod)?; // `super`/`self`
                self.expect(&token::CloseDelim(token::Paren))?; // `)`
                let vis = respan(lo.to(self.prev_span), VisibilityKind::Restricted {
                    path: P(path),
                    id: ast::DUMMY_NODE_ID,
                });
                return Ok(vis)
            } else if !can_take_tuple { // Provide this diagnostic if this is not a tuple struct
                // `pub(something) fn ...` or `struct X { pub(something) y: Z }`
                self.bump(); // `(`
                let msg = "incorrect visibility restriction";
                let suggestion = r##"some possible visibility restrictions are:
`pub(crate)`: visible only on the current crate
`pub(super)`: visible only in the current module's parent
`pub(in path::to::module)`: visible only on the specified path"##;
                let path = self.parse_path(PathStyle::Mod)?;
                let sp = path.span;
                let help_msg = format!("make this visible only to module `{}` with `in`", path);
                self.expect(&token::CloseDelim(token::Paren))?; // `)`
                let mut err = struct_span_err!(self.sess.span_diagnostic, sp, E0704, "{}", msg);
                err.help(suggestion);
                err.span_suggestion(
                    sp, &help_msg, format!("in {}", path), Applicability::MachineApplicable
                );
                err.emit(); // emit diagnostic, but continue with public visibility
            }
        }

        Ok(respan(lo, VisibilityKind::Public))
    }
|
2013-03-22 19:56:10 +00:00
|
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
|
/// Parses defaultness (i.e., `default` or nothing).
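///
/// An illustrative sketch of the surface syntax this distinguishes (the
/// item names are placeholders, and `default fn` assumes the
/// `specialization` feature):
///
/// ```ignore (illustrative sketch)
/// impl Foo for Bar {
///     default fn baz() {}   // parsed as `Defaultness::Default`
///     fn quux() {}          // parsed as `Defaultness::Final`
/// }
/// ```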
|
2017-12-02 19:15:03 +00:00
|
|
|
|
fn parse_defaultness(&mut self) -> Defaultness {
|
|
|
|
|
// `pub` is included for better error messages
|
2019-05-11 14:41:37 +00:00
|
|
|
|
if self.check_keyword(kw::Default) &&
|
|
|
|
|
self.look_ahead(1, |t| t.is_keyword(kw::Impl) ||
|
|
|
|
|
t.is_keyword(kw::Const) ||
|
|
|
|
|
t.is_keyword(kw::Fn) ||
|
|
|
|
|
t.is_keyword(kw::Unsafe) ||
|
|
|
|
|
t.is_keyword(kw::Extern) ||
|
|
|
|
|
t.is_keyword(kw::Type) ||
|
|
|
|
|
t.is_keyword(kw::Pub)) {
|
2017-12-02 19:15:03 +00:00
|
|
|
|
self.bump(); // `default`
|
|
|
|
|
Defaultness::Default
|
2015-12-18 22:38:28 +00:00
|
|
|
|
} else {
|
2017-12-02 19:15:03 +00:00
|
|
|
|
Defaultness::Final
|
2015-12-18 22:38:28 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
|
/// Given a termination token, parses all of the items in a module.
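///
/// For illustration (module and item names are placeholders): when parsing
/// the body of an inline `mod`, the terminator is the closing brace, while
/// for a whole source file it is end-of-file.
///
/// ```ignore (illustrative sketch)
/// mod m {
///     fn foo() {}
///     struct Bar;
/// }   // <- items are collected until this terminating token
/// ```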
|
2017-03-15 00:22:48 +00:00
|
|
|
|
fn parse_mod_items(&mut self, term: &token::Token, inner_lo: Span) -> PResult<'a, Mod> {
|
2014-12-23 13:07:30 +00:00
|
|
|
|
let mut items = vec![];
|
2016-03-23 03:01:37 +00:00
|
|
|
|
while let Some(item) = self.parse_item()? {
|
2015-03-13 09:34:51 +00:00
|
|
|
|
items.push(item);
|
2019-01-14 01:29:32 +00:00
|
|
|
|
self.maybe_consume_incorrect_semicolon(&items);
|
2012-05-23 22:06:11 +00:00
|
|
|
|
}
|
2010-09-23 20:15:51 +00:00
|
|
|
|
|
2015-12-30 23:11:53 +00:00
|
|
|
|
if !self.eat(term) {
|
2018-10-28 23:05:07 +00:00
|
|
|
|
let token_str = self.this_token_descr();
|
2019-01-14 01:29:32 +00:00
|
|
|
|
if !self.maybe_consume_incorrect_semicolon(&items) {
|
|
|
|
|
let mut err = self.fatal(&format!("expected item, found {}", token_str));
|
2018-02-19 07:08:23 +00:00
|
|
|
|
err.span_label(self.span, "expected item");
|
2019-01-14 01:29:32 +00:00
|
|
|
|
return Err(err);
|
2017-11-25 15:38:30 +00:00
|
|
|
|
}
|
2012-05-23 22:06:11 +00:00
|
|
|
|
}
|
2011-02-04 16:10:04 +00:00
|
|
|
|
|
2018-06-24 22:00:21 +00:00
|
|
|
|
let hi = if self.span.is_dummy() {
|
2015-07-06 02:13:19 +00:00
|
|
|
|
inner_lo
|
|
|
|
|
} else {
|
2017-03-15 00:22:48 +00:00
|
|
|
|
self.prev_span
|
2015-07-06 02:13:19 +00:00
|
|
|
|
};
|
|
|
|
|
|
2015-03-28 21:58:51 +00:00
|
|
|
|
Ok(ast::Mod {
|
2017-03-15 00:22:48 +00:00
|
|
|
|
inner: inner_lo.to(hi),
|
2017-08-07 05:54:09 +00:00
|
|
|
|
items,
|
2018-07-11 13:19:32 +00:00
|
|
|
|
inline: true
|
2015-03-28 21:58:51 +00:00
|
|
|
|
})
|
2012-05-23 22:06:11 +00:00
|
|
|
|
}
|
|
|
|
|
|
2015-12-20 21:00:43 +00:00
|
|
|
|
fn parse_item_const(&mut self, m: Option<Mutability>) -> PResult<'a, ItemInfo> {
|
2018-11-30 19:34:24 +00:00
|
|
|
|
let id = if m.is_none() { self.parse_ident_or_underscore() } else { self.parse_ident() }?;
|
2016-03-23 03:01:37 +00:00
|
|
|
|
self.expect(&token::Colon)?;
|
2017-01-16 23:13:41 +00:00
|
|
|
|
let ty = self.parse_ty()?;
|
2016-03-23 03:01:37 +00:00
|
|
|
|
self.expect(&token::Eq)?;
|
|
|
|
|
let e = self.parse_expr()?;
|
2016-07-01 23:40:45 +00:00
|
|
|
|
self.expect(&token::Semi)?;
|
2014-10-06 15:17:01 +00:00
|
|
|
|
let item = match m {
|
2016-02-09 10:36:51 +00:00
|
|
|
|
Some(m) => ItemKind::Static(ty, m, e),
|
|
|
|
|
None => ItemKind::Const(ty, e),
|
2014-10-06 15:17:01 +00:00
|
|
|
|
};
|
2015-03-28 21:58:51 +00:00
|
|
|
|
Ok((id, item, None))
|
2012-05-23 22:06:11 +00:00
|
|
|
|
}
|
|
|
|
|
|
2014-06-09 20:12:30 +00:00
|
|
|
|
/// Parses a `mod <foo> { ... }` or `mod <foo>;` item.
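///
/// A sketch of the two accepted forms (module names are placeholders):
///
/// ```ignore (illustrative sketch)
/// mod inline_module { fn f() {} }   // body parsed in place
/// mod out_of_line_module;           // body loaded from a separate file
/// ```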
|
2015-12-20 21:00:43 +00:00
|
|
|
|
fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> PResult<'a, ItemInfo> {
|
2016-09-14 22:36:42 +00:00
|
|
|
|
let (in_cfg, outer_attrs) = {
|
2019-02-06 17:33:01 +00:00
|
|
|
|
let mut strip_unconfigured = crate::config::StripUnconfigured {
|
2016-09-14 22:36:42 +00:00
|
|
|
|
sess: self.sess,
|
|
|
|
|
features: None, // don't perform gated feature checking
|
|
|
|
|
};
|
2019-02-05 04:20:55 +00:00
|
|
|
|
let mut outer_attrs = outer_attrs.to_owned();
|
|
|
|
|
strip_unconfigured.process_cfg_attrs(&mut outer_attrs);
|
2017-01-18 00:13:36 +00:00
|
|
|
|
(!self.cfg_mods || strip_unconfigured.in_cfg(&outer_attrs), outer_attrs)
|
2016-09-14 22:36:42 +00:00
|
|
|
|
};
|
2016-06-29 09:28:50 +00:00
|
|
|
|
|
2013-12-30 23:17:53 +00:00
|
|
|
|
let id_span = self.span;
|
2016-03-23 03:01:37 +00:00
|
|
|
|
let id = self.parse_ident()?;
|
2018-09-02 06:13:29 +00:00
|
|
|
|
if self.eat(&token::Semi) {
|
2017-05-17 22:37:24 +00:00
|
|
|
|
if in_cfg && self.recurse_into_file_modules {
|
2016-09-14 22:36:42 +00:00
|
|
|
|
// This mod is in an external file. Let's go get it!
|
2016-11-14 09:31:03 +00:00
|
|
|
|
let ModulePathSuccess { path, directory_ownership, warn } =
|
2016-11-05 04:16:26 +00:00
|
|
|
|
self.submod_path(id, &outer_attrs, id_span)?;
|
2016-11-14 09:31:03 +00:00
|
|
|
|
let (module, mut attrs) =
|
2016-11-05 04:16:26 +00:00
|
|
|
|
self.eval_src_mod(path, directory_ownership, id.to_string(), id_span)?;
|
2018-07-11 13:19:32 +00:00
|
|
|
|
// Record that we fetched the mod from an external file
|
2016-11-14 09:31:03 +00:00
|
|
|
|
if warn {
|
2017-12-02 19:15:03 +00:00
|
|
|
|
let attr = Attribute {
|
2016-11-14 09:31:03 +00:00
|
|
|
|
id: attr::mk_attr_id(),
|
|
|
|
|
style: ast::AttrStyle::Outer,
|
2019-05-17 08:37:53 +00:00
|
|
|
|
path: ast::Path::from_ident(
|
|
|
|
|
Ident::with_empty_ctxt(sym::warn_directory_ownership)),
|
2017-03-03 09:23:59 +00:00
|
|
|
|
tokens: TokenStream::empty(),
|
2016-11-14 09:31:03 +00:00
|
|
|
|
is_sugared_doc: false,
|
2019-05-22 00:47:23 +00:00
|
|
|
|
span: DUMMY_SP,
|
2016-11-14 09:31:03 +00:00
|
|
|
|
};
|
|
|
|
|
attr::mark_known(&attr);
|
|
|
|
|
attrs.push(attr);
|
|
|
|
|
}
|
2018-07-11 13:19:32 +00:00
|
|
|
|
Ok((id, ItemKind::Mod(module), Some(attrs)))
|
2016-09-14 22:36:42 +00:00
|
|
|
|
} else {
|
2018-07-11 13:19:32 +00:00
|
|
|
|
let placeholder = ast::Mod {
|
2019-05-22 00:47:23 +00:00
|
|
|
|
inner: DUMMY_SP,
|
2018-07-11 13:19:32 +00:00
|
|
|
|
items: Vec::new(),
|
|
|
|
|
inline: false
|
|
|
|
|
};
|
2016-09-14 22:36:42 +00:00
|
|
|
|
Ok((id, ItemKind::Mod(placeholder), None))
|
|
|
|
|
}
|
2012-11-10 00:31:44 +00:00
|
|
|
|
} else {
|
2016-11-05 04:16:26 +00:00
|
|
|
|
let old_directory = self.directory.clone();
|
|
|
|
|
self.push_directory(id, &outer_attrs);
|
2017-05-17 22:37:24 +00:00
|
|
|
|
|
2016-03-23 03:01:37 +00:00
|
|
|
|
self.expect(&token::OpenDelim(token::Brace))?;
|
2017-03-15 00:22:48 +00:00
|
|
|
|
let mod_inner_lo = self.span;
|
2016-03-23 03:01:37 +00:00
|
|
|
|
let attrs = self.parse_inner_attributes()?;
|
2016-11-05 04:16:26 +00:00
|
|
|
|
let module = self.parse_mod_items(&token::CloseDelim(token::Brace), mod_inner_lo)?;
|
2017-05-17 22:37:24 +00:00
|
|
|
|
|
2016-11-05 04:16:26 +00:00
|
|
|
|
self.directory = old_directory;
|
|
|
|
|
Ok((id, ItemKind::Mod(module), Some(attrs)))
|
2012-11-10 00:31:44 +00:00
|
|
|
|
}
|
2012-05-23 22:06:11 +00:00
|
|
|
|
}
|
|
|
|
|
|
2016-11-05 04:16:26 +00:00
|
|
|
|
fn push_directory(&mut self, id: Ident, attrs: &[Attribute]) {
|
2019-05-08 03:21:18 +00:00
|
|
|
|
if let Some(path) = attr::first_attr_value_str_by_name(attrs, sym::path) {
|
2018-05-18 06:19:35 +00:00
|
|
|
|
self.directory.path.to_mut().push(&path.as_str());
|
2017-11-28 02:14:24 +00:00
|
|
|
|
self.directory.ownership = DirectoryOwnership::Owned { relative: None };
|
2016-09-27 21:14:45 +00:00
|
|
|
|
} else {
|
2018-10-19 00:11:51 +00:00
|
|
|
|
// We have to push on the current module name in the case of relative
|
|
|
|
|
// paths in order to ensure that any additional module paths from inline
|
|
|
|
|
// `mod x { ... }` come after the relative extension.
|
|
|
|
|
//
|
|
|
|
|
// For example, a `mod z { ... }` inside `x/y.rs` should set the current
|
|
|
|
|
// directory path to `/x/y/z`, not `/x/z` with a relative offset of `y`.
|
|
|
|
|
if let DirectoryOwnership::Owned { relative } = &mut self.directory.ownership {
|
|
|
|
|
if let Some(ident) = relative.take() { // remove the relative offset
|
|
|
|
|
self.directory.path.to_mut().push(ident.as_str());
|
|
|
|
|
}
|
|
|
|
|
}
|
2018-05-26 12:12:38 +00:00
|
|
|
|
self.directory.path.to_mut().push(&id.as_str());
|
2016-09-27 21:14:45 +00:00
|
|
|
|
}
|
2012-12-11 20:20:27 +00:00
|
|
|
|
}
|
|
|
|
|
|
2017-12-02 19:15:03 +00:00
|
|
|
|
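/// Computes the on-disk path of an out-of-line module from a `#[path]`
/// attribute, if one is present. An illustrative sketch (the file name
/// below is a placeholder, not anything this crate relies on):
///
/// ```ignore (illustrative sketch)
/// #[path = "generated/bindings.rs"]
/// mod bindings;   // resolved relative to the current directory path
/// ```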
pub fn submod_path_from_attr(attrs: &[Attribute], dir_path: &Path) -> Option<PathBuf> {
|
2019-05-08 03:21:18 +00:00
|
|
|
|
if let Some(s) = attr::first_attr_value_str_by_name(attrs, sym::path) {
|
2018-06-01 16:56:33 +00:00
|
|
|
|
let s = s.as_str();
|
2018-06-06 20:20:47 +00:00
|
|
|
|
|
|
|
|
|
// On windows, the base path might have the form
|
|
|
|
|
// `\\?\foo\bar` in which case it does not tolerate
|
|
|
|
|
// mixed `/` and `\` separators, so canonicalize
|
|
|
|
|
// `/` to `\`.
|
2018-06-01 16:56:33 +00:00
|
|
|
|
#[cfg(windows)]
|
|
|
|
|
let s = s.replace("/", "\\");
|
|
|
|
|
Some(dir_path.join(s))
|
|
|
|
|
} else {
|
|
|
|
|
None
|
|
|
|
|
}
|
2015-07-03 00:50:18 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
|
/// Returns a path to a module.
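///
/// An illustrative sketch of the candidate lookup (the paths below are
/// placeholders):
///
/// ```ignore (illustrative sketch)
/// // `mod foo;` inside `src/lib.rs` or a `mod.rs` file (`relative` is `None`):
/// //     src/foo.rs
/// //     src/foo/mod.rs
/// // `mod foo;` inside `src/bar.rs` (`relative` is `Some(bar)`):
/// //     src/bar/foo.rs
/// //     src/bar/foo/mod.rs
/// ```
///
/// Exactly one candidate may exist; zero or two candidates produce an error result.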
|
2017-11-28 02:14:24 +00:00
|
|
|
|
pub fn default_submod_path(
|
|
|
|
|
id: ast::Ident,
|
|
|
|
|
relative: Option<ast::Ident>,
|
|
|
|
|
dir_path: &Path,
|
2018-08-18 10:14:14 +00:00
|
|
|
|
source_map: &SourceMap) -> ModulePath
|
2017-11-28 02:14:24 +00:00
|
|
|
|
{
|
|
|
|
|
// If we're in a foo.rs file instead of a mod.rs file,
|
|
|
|
|
// we need to look for submodules in
|
|
|
|
|
// `./foo/<id>.rs` and `./foo/<id>/mod.rs` rather than
|
|
|
|
|
// `./<id>.rs` and `./<id>/mod.rs`.
|
|
|
|
|
let relative_prefix_string;
|
|
|
|
|
let relative_prefix = if let Some(ident) = relative {
|
2018-05-26 12:12:38 +00:00
|
|
|
|
relative_prefix_string = format!("{}{}", ident.as_str(), path::MAIN_SEPARATOR);
|
2017-11-28 02:14:24 +00:00
|
|
|
|
&relative_prefix_string
|
|
|
|
|
} else {
|
|
|
|
|
""
|
|
|
|
|
};
|
|
|
|
|
|
2015-07-28 16:07:20 +00:00
|
|
|
|
let mod_name = id.to_string();
|
2017-11-28 02:14:24 +00:00
|
|
|
|
let default_path_str = format!("{}{}.rs", relative_prefix, mod_name);
|
|
|
|
|
let secondary_path_str = format!("{}{}{}mod.rs",
|
|
|
|
|
relative_prefix, mod_name, path::MAIN_SEPARATOR);
|
2015-07-03 00:50:18 +00:00
|
|
|
|
let default_path = dir_path.join(&default_path_str);
|
|
|
|
|
let secondary_path = dir_path.join(&secondary_path_str);
|
2018-08-18 10:14:14 +00:00
|
|
|
|
let default_exists = source_map.file_exists(&default_path);
|
|
|
|
|
let secondary_exists = source_map.file_exists(&secondary_path);
|
2015-07-03 00:50:18 +00:00
|
|
|
|
|
|
|
|
|
let result = match (default_exists, secondary_exists) {
|
2016-11-05 04:16:26 +00:00
|
|
|
|
(true, false) => Ok(ModulePathSuccess {
|
|
|
|
|
path: default_path,
|
2017-11-28 02:14:24 +00:00
|
|
|
|
directory_ownership: DirectoryOwnership::Owned {
|
|
|
|
|
relative: Some(id),
|
|
|
|
|
},
|
2016-11-14 09:31:03 +00:00
|
|
|
|
warn: false,
|
2016-11-05 04:16:26 +00:00
|
|
|
|
}),
|
|
|
|
|
(false, true) => Ok(ModulePathSuccess {
|
|
|
|
|
path: secondary_path,
|
2017-11-28 02:14:24 +00:00
|
|
|
|
directory_ownership: DirectoryOwnership::Owned {
|
|
|
|
|
relative: None,
|
|
|
|
|
},
|
2016-11-14 09:31:03 +00:00
|
|
|
|
warn: false,
|
2016-11-05 04:16:26 +00:00
|
|
|
|
}),
|
2017-02-12 14:18:41 +00:00
|
|
|
|
(false, false) => Err(Error::FileNotFoundForModule {
|
2017-02-12 13:33:17 +00:00
|
|
|
|
mod_name: mod_name.clone(),
|
|
|
|
|
default_path: default_path_str,
|
|
|
|
|
secondary_path: secondary_path_str,
|
2018-07-27 09:11:18 +00:00
|
|
|
|
dir_path: dir_path.display().to_string(),
|
2015-07-03 00:50:18 +00:00
|
|
|
|
}),
|
2017-02-12 14:18:41 +00:00
|
|
|
|
(true, true) => Err(Error::DuplicatePaths {
|
2017-02-12 13:33:17 +00:00
|
|
|
|
mod_name: mod_name.clone(),
|
|
|
|
|
default_path: default_path_str,
|
|
|
|
|
secondary_path: secondary_path_str,
|
2015-07-03 00:50:18 +00:00
|
|
|
|
}),
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
ModulePath {
|
|
|
|
|
name: mod_name,
|
|
|
|
|
path_exists: default_exists || secondary_exists,
|
2017-08-07 05:54:09 +00:00
|
|
|
|
result,
|
2015-07-03 00:50:18 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn submod_path(&mut self,
|
|
|
|
|
id: ast::Ident,
|
2017-12-02 19:15:03 +00:00
|
|
|
|
outer_attrs: &[Attribute],
|
2017-05-17 22:37:24 +00:00
|
|
|
|
id_sp: Span)
|
|
|
|
|
-> PResult<'a, ModulePathSuccess> {
|
2016-11-05 04:16:26 +00:00
|
|
|
|
if let Some(path) = Parser::submod_path_from_attr(outer_attrs, &self.directory.path) {
|
|
|
|
|
return Ok(ModulePathSuccess {
|
|
|
|
|
directory_ownership: match path.file_name().and_then(|s| s.to_str()) {
|
2018-01-09 18:54:13 +00:00
|
|
|
|
// All `#[path]` files are treated as though they are a `mod.rs` file.
|
|
|
|
|
// This means that `mod foo;` declarations inside `#[path]`-included
|
|
|
|
|
// files are siblings.
|
|
|
|
|
//
|
|
|
|
|
// Note that this will produce weirdness when a file named `foo.rs` is
|
|
|
|
|
// `#[path]` included and contains a `mod foo;` declaration.
|
|
|
|
|
// If you encounter this, it's your own darn fault :P
|
|
|
|
|
Some(_) => DirectoryOwnership::Owned { relative: None },
|
2016-11-14 09:31:03 +00:00
|
|
|
|
_ => DirectoryOwnership::UnownedViaMod(true),
|
2016-11-05 04:16:26 +00:00
|
|
|
|
},
|
2017-08-07 05:54:09 +00:00
|
|
|
|
path,
|
2016-11-14 09:31:03 +00:00
|
|
|
|
warn: false,
|
2016-11-05 04:16:26 +00:00
|
|
|
|
});
|
2015-07-03 00:50:18 +00:00
|
|
|
|
}
|
|
|
|
|
|
2017-11-28 02:14:24 +00:00
|
|
|
|
let relative = match self.directory.ownership {
|
2018-10-16 01:43:57 +00:00
|
|
|
|
DirectoryOwnership::Owned { relative } => relative,
|
2017-11-28 02:14:24 +00:00
|
|
|
|
DirectoryOwnership::UnownedViaBlock |
|
|
|
|
|
DirectoryOwnership::UnownedViaMod(_) => None,
|
|
|
|
|
};
|
|
|
|
|
let paths = Parser::default_submod_path(
|
2018-08-18 10:14:09 +00:00
|
|
|
|
id, relative, &self.directory.path, self.sess.source_map());
|
2015-07-03 00:50:18 +00:00
|
|
|
|
|
2017-11-28 02:14:24 +00:00
|
|
|
|
match self.directory.ownership {
|
|
|
|
|
DirectoryOwnership::Owned { .. } => {
|
|
|
|
|
paths.result.map_err(|err| self.span_fatal_err(id_sp, err))
|
|
|
|
|
},
|
|
|
|
|
DirectoryOwnership::UnownedViaBlock => {
|
|
|
|
|
let msg =
|
|
|
|
|
"Cannot declare a non-inline module inside a block \
|
|
|
|
|
unless it has a path attribute";
|
|
|
|
|
let mut err = self.diagnostic().struct_span_err(id_sp, msg);
|
|
|
|
|
if paths.path_exists {
|
|
|
|
|
let msg = format!("Maybe `use` the module `{}` instead of redeclaring it",
|
|
|
|
|
paths.name);
|
|
|
|
|
err.span_note(id_sp, &msg);
|
2016-11-14 09:31:03 +00:00
|
|
|
|
}
|
2017-11-28 02:14:24 +00:00
|
|
|
|
Err(err)
|
2016-11-14 09:31:03 +00:00
|
|
|
|
}
|
2017-11-28 02:14:24 +00:00
|
|
|
|
DirectoryOwnership::UnownedViaMod(warn) => {
|
|
|
|
|
if warn {
|
|
|
|
|
if let Ok(result) = paths.result {
|
|
|
|
|
return Ok(ModulePathSuccess { warn: true, ..result });
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
let mut err = self.diagnostic().struct_span_err(id_sp,
|
|
|
|
|
"cannot declare a new module at this location");
|
2018-06-24 22:00:21 +00:00
|
|
|
|
if !id_sp.is_dummy() {
|
2018-08-18 10:14:09 +00:00
|
|
|
|
let src_path = self.sess.source_map().span_to_filename(id_sp);
|
2017-11-28 02:14:24 +00:00
|
|
|
|
if let FileName::Real(src_path) = src_path {
|
|
|
|
|
if let Some(stem) = src_path.file_stem() {
|
|
|
|
|
let mut dest_path = src_path.clone();
|
|
|
|
|
dest_path.set_file_name(stem);
|
|
|
|
|
dest_path.push("mod.rs");
|
|
|
|
|
err.span_note(id_sp,
|
2017-12-14 07:09:19 +00:00
|
|
|
|
&format!("maybe move this module `{}` to its own \
|
|
|
|
|
directory via `{}`", src_path.display(),
|
|
|
|
|
dest_path.display()));
|
2017-11-28 02:14:24 +00:00
|
|
|
|
}
|
2017-12-14 07:09:19 +00:00
|
|
|
|
}
|
2017-01-03 11:19:13 +00:00
|
|
|
|
}
|
2017-11-28 02:14:24 +00:00
|
|
|
|
if paths.path_exists {
|
|
|
|
|
err.span_note(id_sp,
|
|
|
|
|
&format!("... or maybe `use` the module `{}` instead \
|
|
|
|
|
of possibly redeclaring it",
|
|
|
|
|
paths.name));
|
|
|
|
|
}
|
|
|
|
|
Err(err)
|
2016-12-09 00:12:38 +00:00
|
|
|
|
}
|
2015-07-03 00:50:18 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2012-11-19 01:56:50 +00:00
|
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
|
/// Reads a module from a source file.
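///
/// A sketch of the circular-inclusion case this guards against (file names
/// and `#[path]` values are placeholders):
///
/// ```ignore (illustrative sketch)
/// // a.rs:  #[path = "b.rs"] mod b;
/// // b.rs:  #[path = "a.rs"] mod a;   // rejected with a "circular modules" error
/// ```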
|
2015-07-03 00:50:18 +00:00
|
|
|
|
fn eval_src_mod(&mut self,
|
2016-11-05 04:16:26 +00:00
|
|
|
|
path: PathBuf,
|
|
|
|
|
directory_ownership: DirectoryOwnership,
|
|
|
|
|
name: String,
|
2015-07-03 00:50:18 +00:00
|
|
|
|
id_sp: Span)
|
2018-07-11 13:19:32 +00:00
|
|
|
|
-> PResult<'a, (ast::Mod, Vec<Attribute> )> {
|
2014-03-20 22:05:37 +00:00
|
|
|
|
let mut included_mod_stack = self.sess.included_mod_stack.borrow_mut();
|
2016-06-14 05:43:30 +00:00
|
|
|
|
if let Some(i) = included_mod_stack.iter().position(|p| *p == path) {
|
|
|
|
|
let mut err = String::from("circular modules: ");
|
|
|
|
|
let len = included_mod_stack.len();
|
|
|
|
|
for p in &included_mod_stack[i.. len] {
|
|
|
|
|
err.push_str(&p.to_string_lossy());
|
|
|
|
|
err.push_str(" -> ");
|
2013-07-04 17:51:11 +00:00
|
|
|
|
}
|
2016-06-14 05:43:30 +00:00
|
|
|
|
err.push_str(&path.to_string_lossy());
|
|
|
|
|
return Err(self.span_fatal(id_sp, &err[..]));
|
2013-07-04 17:51:11 +00:00
|
|
|
|
}
|
2014-03-20 22:05:37 +00:00
|
|
|
|
included_mod_stack.push(path.clone());
|
|
|
|
|
drop(included_mod_stack);
|
2013-07-04 17:51:11 +00:00
|
|
|
|
|
2016-11-05 04:16:26 +00:00
|
|
|
|
let mut p0 =
|
|
|
|
|
new_sub_parser_from_file(self.sess, &path, directory_ownership, Some(name), id_sp);
|
2017-01-18 00:13:36 +00:00
|
|
|
|
p0.cfg_mods = self.cfg_mods;
|
2017-03-15 00:22:48 +00:00
|
|
|
|
let mod_inner_lo = p0.span;
|
2016-03-23 03:01:37 +00:00
|
|
|
|
let mod_attrs = p0.parse_inner_attributes()?;
|
2018-07-12 21:35:40 +00:00
|
|
|
|
let mut m0 = p0.parse_mod_items(&token::Eof, mod_inner_lo)?;
|
|
|
|
|
m0.inline = false;
|
2014-03-20 22:05:37 +00:00
|
|
|
|
self.sess.included_mod_stack.borrow_mut().pop();
|
2018-07-11 13:19:32 +00:00
|
|
|
|
Ok((m0, mod_attrs))
|
2012-05-23 22:06:11 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
|
/// Parses a function declaration from a foreign module.
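///
/// A sketch of the kind of declaration this handles, shown inside its
/// surrounding `extern` block for context (the name and signature are
/// placeholders):
///
/// ```ignore (illustrative sketch)
/// extern "C" {
///     fn memcmp(a: *const u8, b: *const u8, n: usize) -> i32;
/// }
/// ```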
|
2017-03-15 00:22:48 +00:00
|
|
|
|
fn parse_item_foreign_fn(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
|
|
|
|
|
-> PResult<'a, ForeignItem> {
|
2019-05-11 14:41:37 +00:00
|
|
|
|
self.expect_keyword(kw::Fn)?;
|
2013-08-02 21:30:00 +00:00
|
|
|
|
|
2016-03-23 03:01:37 +00:00
|
|
|
|
let (ident, mut generics) = self.parse_fn_header()?;
|
|
|
|
|
let decl = self.parse_fn_decl(true)?;
|
|
|
|
|
generics.where_clause = self.parse_where_clause()?;
|
2017-03-15 00:22:48 +00:00
|
|
|
|
let hi = self.span;
|
2016-03-23 03:01:37 +00:00
|
|
|
|
self.expect(&token::Semi)?;
|
2016-02-11 20:33:09 +00:00
|
|
|
|
Ok(ast::ForeignItem {
|
2017-08-07 05:54:09 +00:00
|
|
|
|
ident,
|
|
|
|
|
attrs,
|
2016-02-09 10:31:19 +00:00
|
|
|
|
node: ForeignItemKind::Fn(decl, generics),
|
2014-09-13 16:06:01 +00:00
|
|
|
|
id: ast::DUMMY_NODE_ID,
|
2017-03-15 00:22:48 +00:00
|
|
|
|
span: lo.to(hi),
|
2017-08-07 05:54:09 +00:00
|
|
|
|
vis,
|
2016-02-11 20:33:09 +00:00
|
|
|
|
})
|
2012-05-23 22:06:11 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
|
/// Parses a static item from a foreign module.
|
2017-08-09 22:43:06 +00:00
|
|
|
|
/// Assumes that the `static` keyword is already parsed.
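///
/// A sketch, shown inside its surrounding `extern` block, of the
/// declarations this handles (names and types are placeholders):
///
/// ```ignore (illustrative sketch)
/// extern "C" {
///     static VERSION: u32;
///     static mut COUNTER: u32;
/// }
/// ```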
|
2017-03-15 00:22:48 +00:00
|
|
|
|
fn parse_item_foreign_static(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
|
|
|
|
|
-> PResult<'a, ForeignItem> {
|
2019-04-21 12:29:58 +00:00
|
|
|
|
let mutbl = self.parse_mutability();
|
2016-03-23 03:01:37 +00:00
|
|
|
|
let ident = self.parse_ident()?;
|
|
|
|
|
self.expect(&token::Colon)?;
|
2017-01-16 23:13:41 +00:00
|
|
|
|
let ty = self.parse_ty()?;
|
2017-03-15 00:22:48 +00:00
|
|
|
|
let hi = self.span;
|
2016-03-23 03:01:37 +00:00
|
|
|
|
self.expect(&token::Semi)?;
|
2016-02-11 20:33:09 +00:00
|
|
|
|
Ok(ForeignItem {
|
2017-08-07 05:54:09 +00:00
|
|
|
|
ident,
|
|
|
|
|
attrs,
|
2016-02-09 10:31:19 +00:00
|
|
|
|
node: ForeignItemKind::Static(ty, mutbl),
|
2014-05-16 17:45:16 +00:00
|
|
|
|
id: ast::DUMMY_NODE_ID,
|
2017-03-15 00:22:48 +00:00
|
|
|
|
span: lo.to(hi),
|
2017-08-07 05:54:09 +00:00
|
|
|
|
vis,
|
2016-02-11 20:33:09 +00:00
|
|
|
|
})
|
2012-08-25 22:09:33 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
|
/// Parses a type from a foreign module.
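///
/// A sketch, shown inside its surrounding `extern` block, of an extern
/// type declaration (the name is a placeholder):
///
/// ```ignore (illustrative sketch)
/// extern "C" {
///     type OpaqueHandle;
/// }
/// ```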
|
2017-09-03 18:53:58 +00:00
|
|
|
|
fn parse_item_foreign_type(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
|
|
|
|
|
-> PResult<'a, ForeignItem> {
|
2019-05-11 14:41:37 +00:00
|
|
|
|
self.expect_keyword(kw::Type)?;
|
2017-09-03 18:53:58 +00:00
|
|
|
|
|
|
|
|
|
let ident = self.parse_ident()?;
|
|
|
|
|
let hi = self.span;
|
|
|
|
|
self.expect(&token::Semi)?;
|
|
|
|
|
Ok(ast::ForeignItem {
|
|
|
|
|
ident: ident,
|
|
|
|
|
attrs: attrs,
|
|
|
|
|
node: ForeignItemKind::Ty,
|
|
|
|
|
id: ast::DUMMY_NODE_ID,
|
|
|
|
|
span: lo.to(hi),
|
|
|
|
|
vis: vis
|
|
|
|
|
})
|
|
|
|
|
}
|
|
|
|
|
|
2018-07-27 20:11:48 +00:00
|
|
|
|
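/// Parses a crate name, recovering from the common mistake of writing it
/// with dashes. An illustrative sketch (the crate names are placeholders):
///
/// ```ignore (illustrative sketch)
/// extern crate my-crate;   // error emitted, recovered as `my_crate`
/// extern crate my_crate;   // accepted as-is
/// ```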
fn parse_crate_name_with_dashes(&mut self) -> PResult<'a, ast::Ident> {
|
|
|
|
|
let error_msg = "crate name using dashes are not valid in `extern crate` statements";
|
|
|
|
|
let suggestion_msg = "if the original crate name uses dashes you need to use underscores \
|
|
|
|
|
in the code";
|
2019-05-11 14:41:37 +00:00
|
|
|
|
let mut ident = if self.token.is_keyword(kw::SelfLower) {
|
2018-10-25 22:39:47 +00:00
|
|
|
|
self.parse_path_segment_ident()
|
|
|
|
|
} else {
|
|
|
|
|
self.parse_ident()
|
|
|
|
|
}?;
|
2018-07-26 05:18:47 +00:00
|
|
|
|
let mut idents = vec![];
|
|
|
|
|
let mut replacement = vec![];
|
|
|
|
|
let mut fixed_crate_name = false;
|
|
|
|
|
// Accept `extern crate name-like-this` for better diagnostics
|
|
|
|
|
let dash = token::Token::BinOp(token::BinOpToken::Minus);
|
|
|
|
|
if self.token == dash { // Do not include `-` as part of the expected tokens list
|
|
|
|
|
while self.eat(&dash) {
|
|
|
|
|
fixed_crate_name = true;
|
|
|
|
|
replacement.push((self.prev_span, "_".to_string()));
|
|
|
|
|
idents.push(self.parse_ident()?);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
if fixed_crate_name {
|
|
|
|
|
let fixed_name_sp = ident.span.to(idents.last().unwrap().span);
|
|
|
|
|
let mut fixed_name = format!("{}", ident.name);
|
|
|
|
|
for part in idents {
|
|
|
|
|
fixed_name.push_str(&format!("_{}", part.name));
|
|
|
|
|
}
|
|
|
|
|
ident = Ident::from_str(&fixed_name).with_span_pos(fixed_name_sp);
|
|
|
|
|
|
|
|
|
|
let mut err = self.struct_span_err(fixed_name_sp, error_msg);
|
|
|
|
|
err.span_label(fixed_name_sp, "dash-separated idents are not valid");
|
2019-01-25 21:03:27 +00:00
|
|
|
|
err.multipart_suggestion(
|
2019-01-17 15:18:56 +00:00
|
|
|
|
suggestion_msg,
|
|
|
|
|
replacement,
|
|
|
|
|
Applicability::MachineApplicable,
|
|
|
|
|
);
|
2018-07-26 05:18:47 +00:00
|
|
|
|
err.emit();
|
|
|
|
|
}
|
|
|
|
|
Ok(ident)
|
|
|
|
|
}
|
|
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
|
/// Parses `extern crate` links.
|
2014-02-02 22:52:06 +00:00
|
|
|
|
///
|
2015-03-12 01:11:40 +00:00
|
|
|
|
/// # Examples
|
2014-02-02 22:52:06 +00:00
|
|
|
|
///
|
2019-02-08 13:53:55 +00:00
|
|
|
|
/// ```
|
2015-03-27 00:35:13 +00:00
|
|
|
|
/// extern crate foo;
|
|
|
|
|
/// extern crate bar as foo;
|
2019-02-08 13:53:55 +00:00
|
|
|
|
/// ```
|
2014-02-02 22:52:06 +00:00
|
|
|
|
fn parse_item_extern_crate(&mut self,
|
2017-03-15 00:22:48 +00:00
|
|
|
|
lo: Span,
|
2015-03-27 00:35:13 +00:00
|
|
|
|
visibility: Visibility,
|
|
|
|
|
attrs: Vec<Attribute>)
|
2018-03-09 15:51:48 +00:00
|
|
|
|
-> PResult<'a, P<Item>> {
|
2018-07-26 05:18:47 +00:00
|
|
|
|
// Accept `extern crate name-like-this` for better diagnostics
|
2018-07-27 20:11:48 +00:00
|
|
|
|
let orig_name = self.parse_crate_name_with_dashes()?;
|
2018-03-09 15:51:48 +00:00
|
|
|
|
let (item_name, orig_name) = if let Some(rename) = self.parse_rename()? {
|
|
|
|
|
(rename, Some(orig_name.name))
|
2015-03-27 00:35:13 +00:00
|
|
|
|
} else {
|
2018-03-09 15:51:48 +00:00
|
|
|
|
(orig_name, None)
|
2012-08-29 19:22:05 +00:00
|
|
|
|
};
|
2016-03-23 03:01:37 +00:00
|
|
|
|
self.expect(&token::Semi)?;
|
2012-08-14 18:07:41 +00:00
|
|
|
|
|
2018-03-09 15:51:48 +00:00
|
|
|
|
let span = lo.to(self.prev_span);
|
|
|
|
|
Ok(self.mk_item(span, item_name, ItemKind::ExternCrate(orig_name), visibility, attrs))
|
2014-02-02 22:52:06 +00:00
|
|
|
|
}
|
2012-11-28 01:25:55 +00:00
|
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
|
/// Parses `extern` for foreign ABI modules.
|
2014-02-02 22:52:06 +00:00
|
|
|
|
///
|
|
|
|
|
/// `extern` is expected to have been
|
2019-02-08 13:53:55 +00:00
|
|
|
|
/// consumed before calling this method.
|
2014-02-02 22:52:06 +00:00
|
|
|
|
///
|
2019-02-08 13:53:55 +00:00
|
|
|
|
/// # Examples
|
2014-02-02 22:52:06 +00:00
|
|
|
|
///
|
2019-02-08 13:53:55 +00:00
|
|
|
|
/// ```ignore (only-for-syntax-highlight)
|
2014-02-02 22:52:06 +00:00
|
|
|
|
/// extern "C" {}
|
|
|
|
|
/// extern {}
|
2019-02-08 13:53:55 +00:00
|
|
|
|
/// ```
|
2014-02-02 22:52:06 +00:00
|
|
|
|
fn parse_item_foreign_mod(&mut self,
|
2017-03-15 00:22:48 +00:00
|
|
|
|
lo: Span,
|
2017-12-02 19:15:03 +00:00
|
|
|
|
opt_abi: Option<Abi>,
|
2014-02-02 22:52:06 +00:00
|
|
|
|
visibility: Visibility,
|
2015-03-13 09:34:51 +00:00
|
|
|
|
mut attrs: Vec<Attribute>)
|
2015-12-20 21:00:43 +00:00
|
|
|
|
-> PResult<'a, P<Item>> {
|
2016-03-23 03:01:37 +00:00
|
|
|
|
self.expect(&token::OpenDelim(token::Brace))?;
|
2012-08-14 18:07:41 +00:00
|
|
|
|
|
2016-02-05 12:13:36 +00:00
|
|
|
|
let abi = opt_abi.unwrap_or(Abi::C);
|
2012-11-28 01:25:55 +00:00
|
|
|
|
|
2016-03-23 03:01:37 +00:00
|
|
|
|
attrs.extend(self.parse_inner_attributes()?);
|
2015-03-13 09:34:51 +00:00
|
|
|
|
|
|
|
|
|
let mut foreign_items = vec![];
|
2018-10-03 21:24:31 +00:00
|
|
|
|
while !self.eat(&token::CloseDelim(token::Brace)) {
|
|
|
|
|
foreign_items.push(self.parse_foreign_item()?);
|
2015-03-13 09:34:51 +00:00
|
|
|
|
}
|
2013-11-26 22:54:32 +00:00
|
|
|
|
|
2016-09-21 02:09:22 +00:00
|
|
|
|
let prev_span = self.prev_span;
|
2015-03-13 09:34:51 +00:00
|
|
|
|
let m = ast::ForeignMod {
|
2017-08-07 05:54:09 +00:00
|
|
|
|
abi,
|
2015-03-13 09:34:51 +00:00
|
|
|
|
items: foreign_items
|
|
|
|
|
};
|
2019-05-11 16:08:09 +00:00
|
|
|
|
let invalid = Ident::invalid();
|
2017-03-15 00:22:48 +00:00
|
|
|
|
Ok(self.mk_item(lo.to(prev_span), invalid, ItemKind::ForeignMod(m), visibility, attrs))
|
2012-05-23 22:06:11 +00:00
|
|
|
|
}
|
2011-02-01 18:40:04 +00:00
|
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
|
/// Parses `type Foo = Bar;`
|
2018-07-03 17:38:14 +00:00
|
|
|
|
/// or
|
2018-10-26 22:13:12 +00:00
|
|
|
|
/// `existential type Foo: Bar;`
|
2018-07-03 17:38:14 +00:00
|
|
|
|
/// or
|
2019-02-08 13:53:55 +00:00
|
|
|
|
/// returns `None`
|
|
|
|
|
/// without modifying the parser state.
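///
/// A sketch of the two alias forms recognized here (names and bounds are
/// placeholders; `existential type` assumes the corresponding feature gate):
///
/// ```ignore (illustrative sketch)
/// type Bytes = Vec<u8>;                        // `AliasKind::Weak`
/// existential type Opaque: std::fmt::Debug;    // `AliasKind::Existential`
/// ```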
|
2018-07-03 17:38:14 +00:00
|
|
|
|
fn eat_type(&mut self) -> Option<PResult<'a, (Ident, AliasKind, ast::Generics)>> {
|
|
|
|
|
// This parses the grammar:
|
|
|
|
|
// Ident ["<"...">"] ["where" ...] ("=" | ":") Ty ";"
|
2019-05-11 14:41:37 +00:00
|
|
|
|
if self.check_keyword(kw::Type) ||
|
|
|
|
|
self.check_keyword(kw::Existential) &&
|
|
|
|
|
self.look_ahead(1, |t| t.is_keyword(kw::Type)) {
|
|
|
|
|
let existential = self.eat_keyword(kw::Existential);
|
|
|
|
|
assert!(self.eat_keyword(kw::Type));
|
2018-07-03 17:38:14 +00:00
|
|
|
|
Some(self.parse_existential_or_alias(existential))
|
|
|
|
|
} else {
|
|
|
|
|
None
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
|
/// Parses a type alias or existential type.
|
2018-07-03 17:38:14 +00:00
|
|
|
|
fn parse_existential_or_alias(
|
|
|
|
|
&mut self,
|
|
|
|
|
existential: bool,
|
|
|
|
|
) -> PResult<'a, (Ident, AliasKind, ast::Generics)> {
|
2016-03-23 03:01:37 +00:00
|
|
|
|
let ident = self.parse_ident()?;
|
|
|
|
|
let mut tps = self.parse_generics()?;
|
|
|
|
|
tps.where_clause = self.parse_where_clause()?;
|
2018-07-03 17:38:14 +00:00
|
|
|
|
let alias = if existential {
|
|
|
|
|
self.expect(&token::Colon)?;
|
2019-03-01 22:42:39 +00:00
|
|
|
|
let bounds = self.parse_generic_bounds(Some(self.prev_span))?;
|
2018-07-03 17:38:14 +00:00
|
|
|
|
AliasKind::Existential(bounds)
|
|
|
|
|
} else {
|
|
|
|
|
self.expect(&token::Eq)?;
|
|
|
|
|
let ty = self.parse_ty()?;
|
|
|
|
|
AliasKind::Weak(ty)
|
|
|
|
|
};
|
2016-03-23 03:01:37 +00:00
|
|
|
|
self.expect(&token::Semi)?;
|
2018-07-03 17:38:14 +00:00
|
|
|
|
Ok((ident, alias, tps))
|
2012-05-23 22:06:11 +00:00
|
|
|
|
}
|
2012-04-19 04:26:25 +00:00
|
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
|
/// Parses the part of an enum declaration following the `{`.
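///
/// A sketch of the variant forms handled here (names and values are
/// placeholders):
///
/// ```ignore (illustrative sketch)
/// enum Sketch {
///     Unit,                   // unit variant
///     Tuple(u8, u8),          // tuple variant
///     Struct { field: u8 },   // struct variant
///     WithDiscriminant = 1,   // unit variant with an explicit discriminant
/// }
/// ```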
|
2015-12-20 21:00:43 +00:00
|
|
|
|
fn parse_enum_def(&mut self, _generics: &ast::Generics) -> PResult<'a, EnumDef> {
|
2014-02-28 21:09:09 +00:00
|
|
|
|
let mut variants = Vec::new();
|
2019-01-12 07:12:29 +00:00
|
|
|
|
let mut any_disr = vec![];
|
2014-10-29 10:37:54 +00:00
|
|
|
|
while self.token != token::CloseDelim(token::Brace) {
|
2016-03-23 03:01:37 +00:00
|
|
|
|
let variant_attrs = self.parse_outer_attributes()?;
|
2017-03-15 00:22:48 +00:00
|
|
|
|
let vlo = self.span;
|
2012-08-09 02:51:19 +00:00
|
|
|
|
|
2015-10-01 15:47:27 +00:00
|
|
|
|
let struct_def;
|
2013-06-05 04:43:41 +00:00
|
|
|
|
let mut disr_expr = None;
|
2019-03-08 23:12:51 +00:00
|
|
|
|
self.eat_bad_pub();
|
2016-03-23 03:01:37 +00:00
|
|
|
|
let ident = self.parse_ident()?;
|
2015-11-09 15:43:32 +00:00
|
|
|
|
if self.check(&token::OpenDelim(token::Brace)) {
|
2013-03-28 18:29:21 +00:00
|
|
|
|
// Parse a struct variant.
|
2019-03-18 03:09:53 +00:00
|
|
|
|
let (fields, recovered) = self.parse_record_struct_body()?;
|
2019-03-21 22:38:50 +00:00
|
|
|
|
struct_def = VariantData::Struct(fields, recovered);
|
2014-12-03 09:47:53 +00:00
|
|
|
|
} else if self.check(&token::OpenDelim(token::Paren)) {
|
2019-03-18 03:09:53 +00:00
|
|
|
|
struct_def = VariantData::Tuple(
|
|
|
|
|
self.parse_tuple_struct_body()?,
|
|
|
|
|
ast::DUMMY_NODE_ID,
|
|
|
|
|
);
|
2015-12-30 23:11:53 +00:00
|
|
|
|
} else if self.eat(&token::Eq) {
|
2018-05-17 18:28:50 +00:00
|
|
|
|
disr_expr = Some(AnonConst {
|
|
|
|
|
id: ast::DUMMY_NODE_ID,
|
|
|
|
|
value: self.parse_expr()?,
|
|
|
|
|
});
|
2019-01-12 07:12:29 +00:00
|
|
|
|
if let Some(sp) = disr_expr.as_ref().map(|c| c.value.span) {
|
|
|
|
|
any_disr.push(sp);
|
|
|
|
|
}
|
2015-11-09 15:43:32 +00:00
|
|
|
|
struct_def = VariantData::Unit(ast::DUMMY_NODE_ID);
|
2013-03-28 18:29:21 +00:00
|
|
|
|
} else {
|
2015-11-09 15:43:32 +00:00
|
|
|
|
struct_def = VariantData::Unit(ast::DUMMY_NODE_ID);
|
2012-05-23 22:06:11 +00:00
|
|
|
|
}
|
2012-01-25 13:10:33 +00:00
|
|
|
|
|
2014-01-09 13:05:33 +00:00
|
|
|
|
let vr = ast::Variant_ {
|
2018-03-18 22:21:30 +00:00
|
|
|
|
ident,
|
2019-03-21 22:38:50 +00:00
|
|
|
|
id: ast::DUMMY_NODE_ID,
|
2013-01-16 00:05:20 +00:00
|
|
|
|
attrs: variant_attrs,
|
2015-10-08 00:20:57 +00:00
|
|
|
|
data: struct_def,
|
2017-08-07 05:54:09 +00:00
|
|
|
|
disr_expr,
|
2013-01-16 00:05:20 +00:00
|
|
|
|
};
|
2017-03-15 00:22:48 +00:00
|
|
|
|
variants.push(respan(vlo.to(self.prev_span), vr));
|
2012-01-25 13:10:33 +00:00
|
|
|
|
|
2019-03-08 22:35:38 +00:00
|
|
|
|
if !self.eat(&token::Comma) {
|
2019-03-10 22:04:43 +00:00
|
|
|
|
if self.token.is_ident() && !self.token.is_reserved_ident() {
|
2019-03-08 22:35:38 +00:00
|
|
|
|
let sp = self.sess.source_map().next_point(self.prev_span);
|
|
|
|
|
let mut err = self.struct_span_err(sp, "missing comma");
|
|
|
|
|
err.span_suggestion_short(
|
|
|
|
|
sp,
|
|
|
|
|
"missing comma",
|
|
|
|
|
",".to_owned(),
|
|
|
|
|
Applicability::MaybeIncorrect,
|
|
|
|
|
);
|
|
|
|
|
err.emit();
|
|
|
|
|
} else {
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
2012-05-23 22:06:11 +00:00
|
|
|
|
}
|
2016-03-23 03:01:37 +00:00
|
|
|
|
self.expect(&token::CloseDelim(token::Brace))?;
|
2019-05-22 20:56:51 +00:00
|
|
|
|
self.maybe_report_invalid_custom_discriminants(any_disr, &variants);
|
2012-08-08 21:17:52 +00:00
|
|
|
|
|
2018-11-06 20:05:44 +00:00
|
|
|
|
Ok(ast::EnumDef { variants })
|
2012-08-08 21:17:52 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
|
/// Parses an enum declaration.
|
2015-12-20 21:00:43 +00:00
|
|
|
|
fn parse_item_enum(&mut self) -> PResult<'a, ItemInfo> {
|
2016-03-23 03:01:37 +00:00
|
|
|
|
let id = self.parse_ident()?;
|
|
|
|
|
let mut generics = self.parse_generics()?;
|
|
|
|
|
generics.where_clause = self.parse_where_clause()?;
|
|
|
|
|
self.expect(&token::OpenDelim(token::Brace))?;
|
2012-08-08 21:17:52 +00:00
|
|
|
|
|
2016-10-18 04:47:58 +00:00
|
|
|
|
let enum_definition = self.parse_enum_def(&generics).map_err(|e| {
|
|
|
|
|
self.recover_stmt();
|
|
|
|
|
self.eat(&token::CloseDelim(token::Brace));
|
|
|
|
|
e
|
|
|
|
|
})?;
|
2016-02-09 10:36:51 +00:00
|
|
|
|
Ok((id, ItemKind::Enum(enum_definition, generics), None))
|
2012-01-10 21:50:40 +00:00
|
|
|
|
}
|
2010-11-24 19:36:35 +00:00
|
|
|
|
|
2014-06-09 20:12:30 +00:00
|
|
|
|
/// Parses a string as an ABI spec on an extern type or module. The
|
|
|
|
|
/// `extern` keyword is expected to have been consumed before calling this method.
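///
/// A sketch of where the ABI string appears (the ABI names are just common
/// examples):
///
/// ```ignore (illustrative sketch)
/// extern "C" {}             // known ABI string: accepted
/// extern "system" fn f() {} // known ABI string: accepted
/// extern "bogus" {}         // unknown ABI string: E0703, returns `None`
/// ```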
|
2017-12-02 19:15:03 +00:00
|
|
|
|
fn parse_opt_abi(&mut self) -> PResult<'a, Option<Abi>> {
|
2013-12-30 23:09:41 +00:00
|
|
|
|
match self.token {
|
2019-05-18 22:04:26 +00:00
|
|
|
|
token::Literal(token::Lit { kind: token::Str, symbol, suffix }) |
|
|
|
|
|
token::Literal(token::Lit { kind: token::StrRaw(..), symbol, suffix }) => {
|
2014-11-19 04:48:38 +00:00
|
|
|
|
let sp = self.span;
|
2019-05-18 22:04:26 +00:00
|
|
|
|
self.expect_no_suffix(sp, "an ABI spec", suffix);
|
2015-12-30 23:11:53 +00:00
|
|
|
|
self.bump();
|
2019-05-18 22:04:26 +00:00
|
|
|
|
match abi::lookup(&symbol.as_str()) {
|
2015-03-28 21:58:51 +00:00
|
|
|
|
Some(abi) => Ok(Some(abi)),
|
2014-04-02 08:19:41 +00:00
|
|
|
|
None => {
|
2016-09-21 02:09:22 +00:00
|
|
|
|
let prev_span = self.prev_span;
|
2018-06-09 22:44:32 +00:00
|
|
|
|
let mut err = struct_span_err!(
|
|
|
|
|
self.sess.span_diagnostic,
|
2016-09-21 02:09:22 +00:00
|
|
|
|
prev_span,
|
2018-06-19 23:22:37 +00:00
|
|
|
|
E0703,
|
2018-06-09 22:44:32 +00:00
|
|
|
|
"invalid ABI: found `{}`",
|
2019-05-18 22:04:26 +00:00
|
|
|
|
symbol);
|
2018-06-09 22:44:32 +00:00
|
|
|
|
err.span_label(prev_span, "invalid ABI");
|
|
|
|
|
err.help(&format!("valid ABIs: {}", abi::all_names().join(", ")));
|
|
|
|
|
err.emit();
|
2015-03-28 21:58:51 +00:00
|
|
|
|
Ok(None)
|
2014-04-02 08:19:41 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2013-03-14 02:25:28 +00:00
|
|
|
|
}
|
|
|
|
|
|
2015-03-28 21:58:51 +00:00
|
|
|
|
_ => Ok(None),
|
2014-04-02 08:19:41 +00:00
|
|
|
|
}
|
2013-03-14 02:25:28 +00:00
|
|
|
|
}
|
|
|
|
|
|
2017-10-07 14:36:28 +00:00
|
|
|
|
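/// Distinguishes a `static` item from a `static` closure. An illustrative
/// sketch (names are placeholders):
///
/// ```ignore (illustrative sketch)
/// static GLOBAL: u32 = 0;       // `static` item: returns true
/// let g = static || ();         // `static` closure/generator: returns false
/// let m = static move || ();    // `static move` closure: returns false
/// ```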
fn is_static_global(&mut self) -> bool {
|
2019-05-11 14:41:37 +00:00
|
|
|
|
if self.check_keyword(kw::Static) {
|
2017-10-07 14:36:28 +00:00
|
|
|
|
// Check if this could be a closure
|
|
|
|
|
!self.look_ahead(1, |token| {
|
2019-05-11 14:41:37 +00:00
|
|
|
|
if token.is_keyword(kw::Move) {
|
2017-10-07 14:36:28 +00:00
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
match *token {
|
|
|
|
|
token::BinOp(token::Or) | token::OrOr => true,
|
|
|
|
|
_ => false,
|
|
|
|
|
}
|
|
|
|
|
})
|
|
|
|
|
} else {
|
|
|
|
|
false
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2018-07-22 15:48:29 +00:00
|
|
|
|
fn parse_item_(
|
|
|
|
|
&mut self,
|
|
|
|
|
attrs: Vec<Attribute>,
|
|
|
|
|
macros_allowed: bool,
|
|
|
|
|
attributes_allowed: bool,
|
|
|
|
|
) -> PResult<'a, Option<P<Item>>> {
|
2019-03-03 20:45:49 +00:00
|
|
|
|
let mut unclosed_delims = vec![];
|
2018-07-22 15:48:29 +00:00
|
|
|
|
let (ret, tokens) = self.collect_tokens(|this| {
|
2019-03-03 20:14:25 +00:00
|
|
|
|
let item = this.parse_item_implementation(attrs, macros_allowed, attributes_allowed);
|
2019-03-03 20:45:49 +00:00
|
|
|
|
unclosed_delims.append(&mut this.unclosed_delims);
|
2019-03-03 20:14:25 +00:00
|
|
|
|
item
|
2018-07-22 15:48:29 +00:00
|
|
|
|
})?;
|
2019-03-03 20:45:49 +00:00
|
|
|
|
self.unclosed_delims.append(&mut unclosed_delims);
|
2018-07-22 15:48:29 +00:00
|
|
|
|
|
|
|
|
|
// Once we've parsed an item and recorded the tokens we got while
|
|
|
|
|
// parsing we may want to store `tokens` into the item we're about to
|
|
|
|
|
// return. Note, though, that we specifically didn't capture tokens
|
|
|
|
|
// related to outer attributes. The `tokens` field here may later be
|
|
|
|
|
// used with procedural macros to convert this item back into a token
|
|
|
|
|
// stream, but during expansion we may be removing attributes as we go
|
|
|
|
|
// along.
|
|
|
|
|
//
|
|
|
|
|
// If we've got inner attributes then the `tokens` we've got above holds
|
|
|
|
|
// these inner attributes. If an inner attribute is expanded we won't
|
|
|
|
|
// actually remove it from the token stream, so we'll just keep yielding
|
|
|
|
|
// it (bad!). To work around this case for now we just avoid recording
|
|
|
|
|
// `tokens` if we detect any inner attributes. This should help keep
|
|
|
|
|
// expansion correct, but we should fix this bug one day!
|
|
|
|
|
Ok(ret.map(|item| {
|
|
|
|
|
item.map(|mut i| {
|
|
|
|
|
if !i.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) {
|
|
|
|
|
i.tokens = Some(tokens);
|
|
|
|
|
}
|
|
|
|
|
i
|
|
|
|
|
})
|
|
|
|
|
}))
|
|
|
|
|
}
|
|
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
|
/// Parses one of the items allowed by the flags.
|
2018-07-22 15:48:29 +00:00
|
|
|
|
fn parse_item_implementation(
|
|
|
|
|
&mut self,
|
|
|
|
|
attrs: Vec<Attribute>,
|
|
|
|
|
macros_allowed: bool,
|
|
|
|
|
attributes_allowed: bool,
|
|
|
|
|
) -> PResult<'a, Option<P<Item>>> {
|
2016-11-02 03:03:55 +00:00
|
|
|
|
maybe_whole!(self, NtItem, |item| {
|
2017-12-16 23:21:29 +00:00
|
|
|
|
let mut item = item.into_inner();
|
2016-07-03 21:38:37 +00:00
|
|
|
|
let mut attrs = attrs;
|
|
|
|
|
mem::swap(&mut item.attrs, &mut attrs);
|
|
|
|
|
item.attrs.extend(attrs);
|
2016-11-02 03:03:55 +00:00
|
|
|
|
Some(P(item))
|
|
|
|
|
});
|
2013-08-08 17:28:06 +00:00
|
|
|
|
|
2017-03-15 00:22:48 +00:00
|
|
|
|
let lo = self.span;
|
2012-08-03 20:58:14 +00:00
|
|
|
|
|
2017-03-18 04:13:00 +00:00
|
|
|
|
let visibility = self.parse_visibility(false)?;
|
2013-04-01 22:50:58 +00:00
|
|
|
|
|
2019-05-11 14:41:37 +00:00
|
|
|
|
if self.eat_keyword(kw::Use) {
|
2014-12-23 13:07:30 +00:00
|
|
|
|
// USE ITEM
|
2018-03-09 23:02:39 +00:00
|
|
|
|
let item_ = ItemKind::Use(P(self.parse_use_tree()?));
|
2016-03-23 03:01:37 +00:00
|
|
|
|
self.expect(&token::Semi)?;
|
2014-12-23 13:07:30 +00:00
|
|
|
|
|
2018-03-10 15:44:44 +00:00
|
|
|
|
let span = lo.to(self.prev_span);
|
2019-05-11 14:41:37 +00:00
|
|
|
|
let item =
|
2019-05-11 16:08:09 +00:00
|
|
|
|
self.mk_item(span, Ident::invalid(), item_, visibility, attrs);
|
2015-03-28 21:58:51 +00:00
|
|
|
|
return Ok(Some(item));
|
2012-08-03 20:58:14 +00:00
|
|
|
|
}
|
2014-12-23 13:07:30 +00:00
|
|
|
|
|
2019-05-11 14:41:37 +00:00
|
|
|
|
if self.eat_keyword(kw::Extern) {
|
|
|
|
|
if self.eat_keyword(kw::Crate) {
|
2016-03-23 03:01:37 +00:00
|
|
|
|
return Ok(Some(self.parse_item_extern_crate(lo, visibility, attrs)?));
|
2014-02-02 22:52:06 +00:00
|
|
|
|
}
|
|
|
|
|
|
2016-03-23 03:01:37 +00:00
|
|
|
|
let opt_abi = self.parse_opt_abi()?;
|
2012-08-03 20:58:14 +00:00
|
|
|
|
|
2019-05-11 14:41:37 +00:00
|
|
|
|
if self.eat_keyword(kw::Fn) {
|
2013-04-01 22:50:58 +00:00
|
|
|
|
// EXTERN FUNCTION ITEM
|
2016-09-21 02:09:22 +00:00
|
|
|
|
let fn_span = self.prev_span;
|
2016-02-05 12:13:36 +00:00
|
|
|
|
let abi = opt_abi.unwrap_or(Abi::C);
|
2013-04-01 22:50:58 +00:00
|
|
|
|
let (ident, item_, extra_attrs) =
|
2016-08-10 23:20:12 +00:00
|
|
|
|
self.parse_item_fn(Unsafety::Normal,
|
2019-02-23 18:39:27 +00:00
|
|
|
|
respan(fn_span, IsAsync::NotAsync),
|
2016-08-10 23:20:12 +00:00
|
|
|
|
respan(fn_span, Constness::NotConst),
|
|
|
|
|
abi)?;
|
2016-09-21 02:09:22 +00:00
|
|
|
|
let prev_span = self.prev_span;
|
2017-03-15 00:22:48 +00:00
|
|
|
|
let item = self.mk_item(lo.to(prev_span),
|
2013-12-30 22:04:00 +00:00
|
|
|
|
ident,
|
|
|
|
|
item_,
|
|
|
|
|
visibility,
|
|
|
|
|
maybe_append(attrs, extra_attrs));
|
2015-03-28 21:58:51 +00:00
|
|
|
|
return Ok(Some(item));
|
2014-12-03 09:47:53 +00:00
|
|
|
|
} else if self.check(&token::OpenDelim(token::Brace)) {
|
2016-03-23 03:01:37 +00:00
|
|
|
|
return Ok(Some(self.parse_item_foreign_mod(lo, opt_abi, visibility, attrs)?));
|
2013-04-01 22:50:58 +00:00
|
|
|
|
}
|
2014-02-02 22:52:06 +00:00
|
|
|
|
|
2016-03-23 03:01:37 +00:00
|
|
|
|
self.unexpected()?;
|
2013-04-01 22:50:58 +00:00
|
|
|
|
}
|
2014-02-02 22:52:06 +00:00
|
|
|
|
|
2017-10-07 14:36:28 +00:00
|
|
|
|
if self.is_static_global() {
|
|
|
|
|
self.bump();
|
2013-10-03 09:53:46 +00:00
|
|
|
|
// STATIC ITEM
|
2019-05-11 14:41:37 +00:00
|
|
|
|
let m = if self.eat_keyword(kw::Mut) {
|
2016-02-09 16:44:47 +00:00
|
|
|
|
Mutability::Mutable
|
|
|
|
|
} else {
|
|
|
|
|
Mutability::Immutable
|
|
|
|
|
};
|
2016-03-23 03:01:37 +00:00
|
|
|
|
let (ident, item_, extra_attrs) = self.parse_item_const(Some(m))?;
|
2016-09-21 02:09:22 +00:00
|
|
|
|
let prev_span = self.prev_span;
|
2017-03-15 00:22:48 +00:00
|
|
|
|
let item = self.mk_item(lo.to(prev_span),
|
2014-10-02 22:06:08 +00:00
|
|
|
|
ident,
|
|
|
|
|
item_,
|
|
|
|
|
visibility,
|
|
|
|
|
maybe_append(attrs, extra_attrs));
|
2015-03-28 21:58:51 +00:00
|
|
|
|
return Ok(Some(item));
|
2014-10-02 22:06:08 +00:00
|
|
|
|
}
|
2019-05-11 14:41:37 +00:00
|
|
|
|
if self.eat_keyword(kw::Const) {
|
2016-09-21 02:09:22 +00:00
|
|
|
|
let const_span = self.prev_span;
|
2019-05-11 14:41:37 +00:00
|
|
|
|
if self.check_keyword(kw::Fn)
|
|
|
|
|
|| (self.check_keyword(kw::Unsafe)
|
|
|
|
|
&& self.look_ahead(1, |t| t.is_keyword(kw::Fn))) {
|
2015-02-25 20:05:07 +00:00
|
|
|
|
// CONST FUNCTION ITEM
|
2017-12-02 19:15:03 +00:00
|
|
|
|
let unsafety = self.parse_unsafety();
|
2015-12-30 23:11:53 +00:00
|
|
|
|
self.bump();
|
2015-02-25 20:05:07 +00:00
|
|
|
|
let (ident, item_, extra_attrs) =
|
2016-08-10 23:20:12 +00:00
|
|
|
|
self.parse_item_fn(unsafety,
|
2019-02-23 18:39:27 +00:00
|
|
|
|
respan(const_span, IsAsync::NotAsync),
|
2016-08-10 23:20:12 +00:00
|
|
|
|
respan(const_span, Constness::Const),
|
|
|
|
|
Abi::Rust)?;
|
2016-09-21 02:09:22 +00:00
|
|
|
|
let prev_span = self.prev_span;
|
2017-03-15 00:22:48 +00:00
|
|
|
|
let item = self.mk_item(lo.to(prev_span),
|
2015-02-25 20:05:07 +00:00
|
|
|
|
ident,
|
|
|
|
|
item_,
|
|
|
|
|
visibility,
|
|
|
|
|
maybe_append(attrs, extra_attrs));
|
2015-05-05 12:47:04 +00:00
|
|
|
|
return Ok(Some(item));
|
2015-02-25 20:05:07 +00:00
|
|
|
|
}
|
|
|
|
|
|
2014-10-02 22:06:08 +00:00
|
|
|
|
// CONST ITEM
|
2019-05-11 14:41:37 +00:00
|
|
|
|
if self.eat_keyword(kw::Mut) {
|
2016-09-21 02:09:22 +00:00
|
|
|
|
let prev_span = self.prev_span;
|
2019-01-21 03:37:38 +00:00
|
|
|
|
let mut err = self.diagnostic()
|
|
|
|
|
.struct_span_err(prev_span, "const globals cannot be mutable");
|
|
|
|
|
err.span_label(prev_span, "cannot be mutable");
|
2019-01-25 21:03:27 +00:00
|
|
|
|
err.span_suggestion(
|
2019-01-21 03:37:38 +00:00
|
|
|
|
const_span,
|
|
|
|
|
"you might want to declare a static instead",
|
|
|
|
|
"static".to_owned(),
|
|
|
|
|
Applicability::MaybeIncorrect,
|
|
|
|
|
);
|
|
|
|
|
err.emit();
|
2014-10-02 22:06:08 +00:00
|
|
|
|
}
|
2016-03-23 03:01:37 +00:00
|
|
|
|
let (ident, item_, extra_attrs) = self.parse_item_const(None)?;
|
2016-09-21 02:09:22 +00:00
|
|
|
|
let prev_span = self.prev_span;
|
2017-03-15 00:22:48 +00:00
|
|
|
|
let item = self.mk_item(lo.to(prev_span),
|
2013-12-30 22:04:00 +00:00
|
|
|
|
ident,
|
|
|
|
|
item_,
|
|
|
|
|
visibility,
|
|
|
|
|
maybe_append(attrs, extra_attrs));
|
2015-03-28 21:58:51 +00:00
|
|
|
|
return Ok(Some(item));
|
2013-03-20 01:00:18 +00:00
|
|
|
|
}
|
2018-06-18 23:49:34 +00:00
|
|
|
|
|
|
|
|
|
// `unsafe async fn` or `async fn`
|
|
|
|
|
if (
|
2019-05-11 14:41:37 +00:00
|
|
|
|
self.check_keyword(kw::Unsafe) &&
|
|
|
|
|
self.look_ahead(1, |t| t.is_keyword(kw::Async))
|
2018-06-18 23:49:34 +00:00
|
|
|
|
) || (
|
2019-05-11 14:41:37 +00:00
|
|
|
|
self.check_keyword(kw::Async) &&
|
|
|
|
|
self.look_ahead(1, |t| t.is_keyword(kw::Fn))
|
2018-06-18 23:49:34 +00:00
|
|
|
|
)
|
2018-06-06 22:50:59 +00:00
|
|
|
|
{
|
2018-05-17 05:55:18 +00:00
|
|
|
|
// ASYNC FUNCTION ITEM
|
|
|
|
|
let unsafety = self.parse_unsafety();
|
2019-05-11 14:41:37 +00:00
|
|
|
|
self.expect_keyword(kw::Async)?;
|
2019-02-23 18:39:27 +00:00
|
|
|
|
let async_span = self.prev_span;
|
2019-05-11 14:41:37 +00:00
|
|
|
|
self.expect_keyword(kw::Fn)?;
|
2018-05-17 05:55:18 +00:00
|
|
|
|
let fn_span = self.prev_span;
|
|
|
|
|
let (ident, item_, extra_attrs) =
|
|
|
|
|
self.parse_item_fn(unsafety,
|
2019-02-23 18:39:27 +00:00
|
|
|
|
respan(async_span, IsAsync::Async {
|
2018-06-26 09:56:24 +00:00
|
|
|
|
closure_id: ast::DUMMY_NODE_ID,
|
|
|
|
|
return_impl_trait_id: ast::DUMMY_NODE_ID,
|
2019-03-12 16:00:20 +00:00
|
|
|
|
arguments: Vec::new(),
|
2019-02-23 18:39:27 +00:00
|
|
|
|
}),
|
2018-05-17 05:55:18 +00:00
|
|
|
|
respan(fn_span, Constness::NotConst),
|
|
|
|
|
Abi::Rust)?;
|
|
|
|
|
let prev_span = self.prev_span;
|
|
|
|
|
let item = self.mk_item(lo.to(prev_span),
|
|
|
|
|
ident,
|
|
|
|
|
item_,
|
|
|
|
|
visibility,
|
|
|
|
|
maybe_append(attrs, extra_attrs));
|
2019-02-23 18:39:27 +00:00
|
|
|
|
if self.span.rust_2015() {
|
|
|
|
|
self.diagnostic().struct_span_err_with_code(
|
|
|
|
|
async_span,
|
|
|
|
|
"`async fn` is not permitted in the 2015 edition",
|
|
|
|
|
DiagnosticId::Error("E0670".into())
|
|
|
|
|
).emit();
|
|
|
|
|
}
|
2018-05-17 05:55:18 +00:00
|
|
|
|
return Ok(Some(item));
|
|
|
|
|
}
|
2019-05-11 14:41:37 +00:00
|
|
|
|
if self.check_keyword(kw::Unsafe) &&
|
|
|
|
|
(self.look_ahead(1, |t| t.is_keyword(kw::Trait)) ||
|
|
|
|
|
self.look_ahead(1, |t| t.is_keyword(kw::Auto)))
|
2014-12-10 00:59:20 +00:00
|
|
|
|
{
|
|
|
|
|
// UNSAFE TRAIT ITEM
|
2017-12-02 19:15:03 +00:00
|
|
|
|
self.bump(); // `unsafe`
|
2019-05-11 14:41:37 +00:00
|
|
|
|
let is_auto = if self.eat_keyword(kw::Trait) {
|
2017-10-15 18:03:03 +00:00
|
|
|
|
IsAuto::No
|
|
|
|
|
} else {
|
2019-05-11 14:41:37 +00:00
|
|
|
|
self.expect_keyword(kw::Auto)?;
|
|
|
|
|
self.expect_keyword(kw::Trait)?;
|
2017-10-15 18:03:03 +00:00
|
|
|
|
IsAuto::Yes
|
|
|
|
|
};
|
2014-12-10 00:59:20 +00:00
|
|
|
|
let (ident, item_, extra_attrs) =
|
2017-12-02 19:15:03 +00:00
|
|
|
|
self.parse_item_trait(is_auto, Unsafety::Unsafe)?;
|
2017-10-12 22:00:30 +00:00
|
|
|
|
let prev_span = self.prev_span;
|
|
|
|
|
let item = self.mk_item(lo.to(prev_span),
|
|
|
|
|
ident,
|
|
|
|
|
item_,
|
|
|
|
|
visibility,
|
|
|
|
|
maybe_append(attrs, extra_attrs));
|
|
|
|
|
return Ok(Some(item));
|
|
|
|
|
}
|
2019-05-11 14:41:37 +00:00
|
|
|
|
if self.check_keyword(kw::Impl) ||
|
|
|
|
|
self.check_keyword(kw::Unsafe) &&
|
|
|
|
|
self.look_ahead(1, |t| t.is_keyword(kw::Impl)) ||
|
|
|
|
|
self.check_keyword(kw::Default) &&
|
|
|
|
|
self.look_ahead(1, |t| t.is_keyword(kw::Impl)) ||
|
|
|
|
|
self.check_keyword(kw::Default) &&
|
|
|
|
|
self.look_ahead(1, |t| t.is_keyword(kw::Unsafe)) {
|
2014-12-10 11:15:06 +00:00
|
|
|
|
// IMPL ITEM
|
2017-12-02 19:15:03 +00:00
|
|
|
|
let defaultness = self.parse_defaultness();
|
|
|
|
|
let unsafety = self.parse_unsafety();
|
2019-05-11 14:41:37 +00:00
|
|
|
|
self.expect_keyword(kw::Impl)?;
|
2017-12-02 19:15:03 +00:00
|
|
|
|
let (ident, item, extra_attrs) = self.parse_item_impl(unsafety, defaultness)?;
|
|
|
|
|
let span = lo.to(self.prev_span);
|
|
|
|
|
return Ok(Some(self.mk_item(span, ident, item, visibility,
|
|
|
|
|
maybe_append(attrs, extra_attrs))));
|
2014-12-10 11:15:06 +00:00
|
|
|
|
}
|
2019-05-11 14:41:37 +00:00
|
|
|
|
if self.check_keyword(kw::Fn) {
|
2013-03-29 17:35:23 +00:00
|
|
|
|
// FUNCTION ITEM
|
2015-12-30 23:11:53 +00:00
|
|
|
|
self.bump();
|
2016-09-21 02:09:22 +00:00
|
|
|
|
let fn_span = self.prev_span;
|
2013-03-14 02:25:28 +00:00
|
|
|
|
let (ident, item_, extra_attrs) =
|
2016-08-10 23:20:12 +00:00
|
|
|
|
self.parse_item_fn(Unsafety::Normal,
|
2019-02-23 18:39:27 +00:00
|
|
|
|
respan(fn_span, IsAsync::NotAsync),
|
2016-08-10 23:20:12 +00:00
|
|
|
|
respan(fn_span, Constness::NotConst),
|
|
|
|
|
Abi::Rust)?;
|
2016-09-21 02:09:22 +00:00
|
|
|
|
let prev_span = self.prev_span;
|
2017-03-15 00:22:48 +00:00
|
|
|
|
let item = self.mk_item(lo.to(prev_span),
|
2013-12-30 22:04:00 +00:00
|
|
|
|
ident,
|
|
|
|
|
item_,
|
|
|
|
|
visibility,
|
|
|
|
|
maybe_append(attrs, extra_attrs));
|
2015-03-28 21:58:51 +00:00
|
|
|
|
return Ok(Some(item));
|
2013-03-20 01:00:18 +00:00
|
|
|
|
}
        if self.check_keyword(kw::Unsafe)
            && self.look_ahead(1, |t| *t != token::OpenDelim(token::Brace)) {
            // UNSAFE FUNCTION ITEM
            self.bump(); // `unsafe`
            // `{` is also expected after `unsafe`; record it here so that, in case of error,
            // it is included in the diagnostic.
            self.check(&token::OpenDelim(token::Brace));
            let abi = if self.eat_keyword(kw::Extern) {
                self.parse_opt_abi()?.unwrap_or(Abi::C)
            } else {
                Abi::Rust
            };
            self.expect_keyword(kw::Fn)?;
            let fn_span = self.prev_span;
            let (ident, item_, extra_attrs) =
                self.parse_item_fn(Unsafety::Unsafe,
                                   respan(fn_span, IsAsync::NotAsync),
                                   respan(fn_span, Constness::NotConst),
                                   abi)?;
            let prev_span = self.prev_span;
            let item = self.mk_item(lo.to(prev_span),
                                    ident,
                                    item_,
                                    visibility,
                                    maybe_append(attrs, extra_attrs));
            return Ok(Some(item));
        }
        if self.eat_keyword(kw::Mod) {
            // MODULE ITEM
            let (ident, item_, extra_attrs) =
                self.parse_item_mod(&attrs[..])?;
            let prev_span = self.prev_span;
            let item = self.mk_item(lo.to(prev_span),
                                    ident,
                                    item_,
                                    visibility,
                                    maybe_append(attrs, extra_attrs));
            return Ok(Some(item));
        }
        if let Some(type_) = self.eat_type() {
            let (ident, alias, generics) = type_?;
            // TYPE ITEM
            let item_ = match alias {
                AliasKind::Weak(ty) => ItemKind::Ty(ty, generics),
                AliasKind::Existential(bounds) => ItemKind::Existential(bounds, generics),
            };
            let prev_span = self.prev_span;
            let item = self.mk_item(lo.to(prev_span),
                                    ident,
                                    item_,
                                    visibility,
                                    attrs);
            return Ok(Some(item));
        }
        if self.eat_keyword(kw::Enum) {
            // ENUM ITEM
            let (ident, item_, extra_attrs) = self.parse_item_enum()?;
            let prev_span = self.prev_span;
            let item = self.mk_item(lo.to(prev_span),
                                    ident,
                                    item_,
                                    visibility,
                                    maybe_append(attrs, extra_attrs));
            return Ok(Some(item));
        }
        if self.check_keyword(kw::Trait)
            || (self.check_keyword(kw::Auto)
                && self.look_ahead(1, |t| t.is_keyword(kw::Trait)))
        {
            let is_auto = if self.eat_keyword(kw::Trait) {
                IsAuto::No
            } else {
                self.expect_keyword(kw::Auto)?;
                self.expect_keyword(kw::Trait)?;
                IsAuto::Yes
            };
            // TRAIT ITEM
            let (ident, item_, extra_attrs) =
                self.parse_item_trait(is_auto, Unsafety::Normal)?;
            let prev_span = self.prev_span;
            let item = self.mk_item(lo.to(prev_span),
                                    ident,
                                    item_,
                                    visibility,
                                    maybe_append(attrs, extra_attrs));
            return Ok(Some(item));
        }
        if self.eat_keyword(kw::Struct) {
            // STRUCT ITEM
            let (ident, item_, extra_attrs) = self.parse_item_struct()?;
            let prev_span = self.prev_span;
            let item = self.mk_item(lo.to(prev_span),
                                    ident,
                                    item_,
                                    visibility,
                                    maybe_append(attrs, extra_attrs));
            return Ok(Some(item));
        }
        if self.is_union_item() {
            // UNION ITEM
            self.bump();
            let (ident, item_, extra_attrs) = self.parse_item_union()?;
            let prev_span = self.prev_span;
            let item = self.mk_item(lo.to(prev_span),
                                    ident,
                                    item_,
                                    visibility,
                                    maybe_append(attrs, extra_attrs));
            return Ok(Some(item));
        }
        if let Some(macro_def) = self.eat_macro_def(&attrs, &visibility, lo)? {
            return Ok(Some(macro_def));
        }
        // Verify whether we have encountered a struct or method definition where the user
        // forgot to add the `struct` or `fn` keyword after writing `pub`: `pub S {}`.
        if visibility.node.is_pub() &&
            self.check_ident() &&
            self.look_ahead(1, |t| *t != token::Not)
        {
            // Space between `pub` keyword and the identifier
            //
            // pub S {}
            // ^^^ `sp` points here
            let sp = self.prev_span.between(self.span);
            let full_sp = self.prev_span.to(self.span);
            let ident_sp = self.span;
            if self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) {
                // possible public struct definition where `struct` was forgotten
                let ident = self.parse_ident().unwrap();
                let msg = format!("add `struct` here to parse `{}` as a public struct",
                                  ident);
                let mut err = self.diagnostic()
                    .struct_span_err(sp, "missing `struct` for struct definition");
                err.span_suggestion_short(
                    sp, &msg, " struct ".into(), Applicability::MaybeIncorrect // speculative
                );
                return Err(err);
            } else if self.look_ahead(1, |t| *t == token::OpenDelim(token::Paren)) {
                let ident = self.parse_ident().unwrap();
                self.bump(); // `(`
                let kw_name = if let Ok(Some(_)) = self.parse_self_arg() {
                    "method"
                } else {
                    "function"
                };
                self.consume_block(token::Paren);
                let (kw, kw_name, ambiguous) = if self.check(&token::RArrow) {
                    self.eat_to_tokens(&[&token::OpenDelim(token::Brace)]);
                    self.bump(); // `{`
                    ("fn", kw_name, false)
                } else if self.check(&token::OpenDelim(token::Brace)) {
                    self.bump(); // `{`
                    ("fn", kw_name, false)
                } else if self.check(&token::Colon) {
                    let kw = "struct";
                    (kw, kw, false)
                } else {
                    ("fn` or `struct", "function or struct", true)
                };

                let msg = format!("missing `{}` for {} definition", kw, kw_name);
                let mut err = self.diagnostic().struct_span_err(sp, &msg);
                if !ambiguous {
                    self.consume_block(token::Brace);
                    let suggestion = format!("add `{}` here to parse `{}` as a public {}",
                                             kw,
                                             ident,
                                             kw_name);
                    err.span_suggestion_short(
                        sp, &suggestion, format!(" {} ", kw), Applicability::MachineApplicable
                    );
                } else {
                    if let Ok(snippet) = self.sess.source_map().span_to_snippet(ident_sp) {
                        err.span_suggestion(
                            full_sp,
                            "if you meant to call a macro, try",
                            format!("{}!", snippet),
                            // this is the `ambiguous` conditional branch
                            Applicability::MaybeIncorrect
                        );
                    } else {
                        err.help("if you meant to call a macro, remove the `pub` \
                                  and add a trailing `!` after the identifier");
                    }
                }
                return Err(err);
            } else if self.look_ahead(1, |t| *t == token::Lt) {
                let ident = self.parse_ident().unwrap();
                self.eat_to_tokens(&[&token::Gt]);
                self.bump(); // `>`
                let (kw, kw_name, ambiguous) = if self.eat(&token::OpenDelim(token::Paren)) {
                    if let Ok(Some(_)) = self.parse_self_arg() {
                        ("fn", "method", false)
                    } else {
                        ("fn", "function", false)
                    }
                } else if self.check(&token::OpenDelim(token::Brace)) {
                    ("struct", "struct", false)
                } else {
                    ("fn` or `struct", "function or struct", true)
                };
                let msg = format!("missing `{}` for {} definition", kw, kw_name);
                let mut err = self.diagnostic().struct_span_err(sp, &msg);
                if !ambiguous {
                    err.span_suggestion_short(
                        sp,
                        &format!("add `{}` here to parse `{}` as a public {}", kw, ident, kw_name),
                        format!(" {} ", kw),
                        Applicability::MachineApplicable,
                    );
                }
                return Err(err);
            }
        }
        self.parse_macro_use_or_failure(attrs, macros_allowed, attributes_allowed, lo, visibility)
    }
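    // For illustration: the `pub` recovery above turns input such as
    //
    //     pub S { x: u32 }
    //     pub foo(x: u32) -> u32 { x }
    //
    // (illustrative names) into "missing `struct` for struct definition" and
    // "missing `fn` for function definition" errors, each suggesting that the missing keyword
    // be inserted between `pub` and the identifier (machine-applicable when the following
    // tokens make the intent unambiguous).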

    /// Parses a foreign item.
    crate fn parse_foreign_item(&mut self) -> PResult<'a, ForeignItem> {
        maybe_whole!(self, NtForeignItem, |ni| ni);

        let attrs = self.parse_outer_attributes()?;
        let lo = self.span;
        let visibility = self.parse_visibility(false)?;

        // FOREIGN STATIC ITEM
        // Treat `const` as `static` for error recovery, but don't add it to expected tokens.
        if self.check_keyword(kw::Static) || self.token.is_keyword(kw::Const) {
            if self.token.is_keyword(kw::Const) {
                self.diagnostic()
                    .struct_span_err(self.span, "extern items cannot be `const`")
                    .span_suggestion(
                        self.span,
                        "try using a static value",
                        "static".to_owned(),
                        Applicability::MachineApplicable
                    ).emit();
            }
            self.bump(); // `static` or `const`
            return Ok(self.parse_item_foreign_static(visibility, lo, attrs)?);
        }
        // FOREIGN FUNCTION ITEM
        if self.check_keyword(kw::Fn) {
            return Ok(self.parse_item_foreign_fn(visibility, lo, attrs)?);
        }
        // FOREIGN TYPE ITEM
        if self.check_keyword(kw::Type) {
            return Ok(self.parse_item_foreign_type(visibility, lo, attrs)?);
        }

        match self.parse_assoc_macro_invoc("extern", Some(&visibility), &mut false)? {
            Some(mac) => {
                Ok(
                    ForeignItem {
                        ident: Ident::invalid(),
                        span: lo.to(self.prev_span),
                        id: ast::DUMMY_NODE_ID,
                        attrs,
                        vis: visibility,
                        node: ForeignItemKind::Macro(mac),
                    }
                )
            }
            None => {
                if !attrs.is_empty() {
                    self.expected_item_err(&attrs)?;
                }

                self.unexpected()
            }
        }
    }
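    // For illustration (item names are stand-ins): inside an `extern` block this parses items
    // such as
    //
    //     extern "C" {
    //         static ERRNO: i32;
    //         fn write(fd: i32, buf: *const u8, count: usize) -> isize;
    //         type Opaque;
    //     }
    //
    // and recovers from `const ERRNO: i32;` by emitting "extern items cannot be `const`"
    // together with a machine-applicable suggestion to write `static` instead.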

    /// This is the fall-through for parsing items.
    fn parse_macro_use_or_failure(
        &mut self,
        attrs: Vec<Attribute>,
        macros_allowed: bool,
        attributes_allowed: bool,
        lo: Span,
        visibility: Visibility
    ) -> PResult<'a, Option<P<Item>>> {
        if macros_allowed && self.token.is_path_start() &&
                !(self.is_async_fn() && self.span.rust_2015()) {
            // MACRO INVOCATION ITEM

            let prev_span = self.prev_span;
            self.complain_if_pub_macro(&visibility.node, prev_span);

            let mac_lo = self.span;

            // item macro.
            let pth = self.parse_path(PathStyle::Mod)?;
            self.expect(&token::Not)?;

            // a 'special' identifier (like what `macro_rules!` uses)
            // is optional. We should eventually unify invoc syntax
            // and remove this.
            let id = if self.token.is_ident() {
                self.parse_ident()?
            } else {
                Ident::invalid() // no special identifier
            };
            // eat a matched-delimiter token tree:
            let (delim, tts) = self.expect_delimited_token_tree()?;
            if delim != MacDelimiter::Brace && !self.eat(&token::Semi) {
                self.report_invalid_macro_expansion_item();
            }

            let hi = self.prev_span;
            let mac = respan(mac_lo.to(hi), Mac_ { path: pth, tts, delim });
            let item = self.mk_item(lo.to(hi), id, ItemKind::Mac(mac), visibility, attrs);
            return Ok(Some(item));
        }

        // FAILURE TO PARSE ITEM
        match visibility.node {
            VisibilityKind::Inherited => {}
            _ => {
                return Err(self.span_fatal(self.prev_span, "unmatched visibility `pub`"));
            }
        }

        if !attributes_allowed && !attrs.is_empty() {
            self.expected_item_err(&attrs)?;
        }
        Ok(None)
    }
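    // For illustration (`my_macro` is a stand-in name): the macro-invocation arm above accepts
    // item-position invocations such as
    //
    //     my_macro! { /* items */ }
    //     my_macro!(args);    // non-brace delimiters must be followed by `;`
    //
    // while a `pub` (or other visibility) that is not followed by a parsable item is reported
    // as "unmatched visibility `pub`".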

    /// Parses a macro invocation inside a `trait`, `impl` or `extern` block.
    fn parse_assoc_macro_invoc(&mut self, item_kind: &str, vis: Option<&Visibility>,
                               at_end: &mut bool) -> PResult<'a, Option<Mac>>
    {
        if self.token.is_path_start() &&
                !(self.is_async_fn() && self.span.rust_2015()) {
            let prev_span = self.prev_span;
            let lo = self.span;
            let pth = self.parse_path(PathStyle::Mod)?;

            if pth.segments.len() == 1 {
                if !self.eat(&token::Not) {
                    return Err(self.missing_assoc_item_kind_err(item_kind, prev_span));
                }
            } else {
                self.expect(&token::Not)?;
            }

            if let Some(vis) = vis {
                self.complain_if_pub_macro(&vis.node, prev_span);
            }

            *at_end = true;

            // eat a matched-delimiter token tree:
            let (delim, tts) = self.expect_delimited_token_tree()?;
            if delim != MacDelimiter::Brace {
                self.expect(&token::Semi)?;
            }

            Ok(Some(respan(lo.to(self.prev_span), Mac_ { path: pth, tts, delim })))
        } else {
            Ok(None)
        }
    }
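    // For illustration (`m` is a stand-in name): in associated-item position this accepts
    //
    //     impl Foo {
    //         m!(a, b);     // non-brace delimiters require the trailing `;`
    //         m! { a, b }   // brace-delimited invocations do not
    //     }
    //
    // and, for a single-segment path with no `!`, it reports the missing associated item kind
    // for the surrounding `trait`, `impl`, or `extern` block instead.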

    fn collect_tokens<F, R>(&mut self, f: F) -> PResult<'a, (R, TokenStream)>
        where F: FnOnce(&mut Self) -> PResult<'a, R>
    {
        // Record all tokens we parse when parsing this item.
        let mut tokens = Vec::new();
        let prev_collecting = match self.token_cursor.frame.last_token {
            LastToken::Collecting(ref mut list) => {
                Some(mem::replace(list, Vec::new()))
            }
            LastToken::Was(ref mut last) => {
                tokens.extend(last.take());
                None
            }
        };
        self.token_cursor.frame.last_token = LastToken::Collecting(tokens);
        let prev = self.token_cursor.stack.len();
        let ret = f(self);
        let last_token = if self.token_cursor.stack.len() == prev {
            &mut self.token_cursor.frame.last_token
        } else {
            &mut self.token_cursor.stack[prev].last_token
        };

        // Pull out the tokens that we've collected from the call to `f` above.
        let mut collected_tokens = match *last_token {
            LastToken::Collecting(ref mut v) => mem::replace(v, Vec::new()),
            LastToken::Was(_) => panic!("our vector went away?"),
        };

        // If we're not at EOF our current token wasn't actually consumed by
        // `f`, but it'll still be in our list that we pulled out. In that case
        // put it back.
        let extra_token = if self.token != token::Eof {
            collected_tokens.pop()
        } else {
            None
        };

        // If we were previously collecting tokens, then this was a recursive
        // call. In that case we need to record all the tokens we collected in
        // our parent list as well. To do that we push a clone of our stream
        // onto the previous list.
        match prev_collecting {
            Some(mut list) => {
                list.extend(collected_tokens.iter().cloned());
                list.extend(extra_token);
                *last_token = LastToken::Collecting(list);
            }
            None => {
                *last_token = LastToken::Was(extra_token);
            }
        }

        Ok((ret?, TokenStream::new(collected_tokens)))
    }
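    // For illustration (hypothetical caller): wrapping a parse callback in `collect_tokens`
    // yields both the parsed value and the exact tokens that the callback consumed, e.g.
    //
    //     let (item, tokens) = self.collect_tokens(|this| this.parse_item())?;
    //
    // which is how an item's token stream can be captured for later reuse.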

    pub fn parse_item(&mut self) -> PResult<'a, Option<P<Item>>> {
        let attrs = self.parse_outer_attributes()?;
        self.parse_item_(attrs, true, false)
    }

    /// `::{` or `::*`
    fn is_import_coupler(&mut self) -> bool {
        self.check(&token::ModSep) &&
            self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace) ||
                                   *t == token::BinOp(token::Star))
    }

    /// Parses a `UseTree`.
    ///
    /// ```
    /// USE_TREE = [`::`] `*` |
    ///            [`::`] `{` USE_TREE_LIST `}` |
    ///            PATH `::` `*` |
    ///            PATH `::` `{` USE_TREE_LIST `}` |
    ///            PATH [`as` IDENT]
    /// ```
    fn parse_use_tree(&mut self) -> PResult<'a, UseTree> {
        let lo = self.span;

        let mut prefix = ast::Path { segments: Vec::new(), span: lo.shrink_to_lo() };
        let kind = if self.check(&token::OpenDelim(token::Brace)) ||
                      self.check(&token::BinOp(token::Star)) ||
                      self.is_import_coupler() {
            // `use *;` or `use ::*;` or `use {...};` or `use ::{...};`
            let mod_sep_ctxt = self.span.ctxt();
            if self.eat(&token::ModSep) {
                prefix.segments.push(
                    PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt))
                );
            }

            if self.eat(&token::BinOp(token::Star)) {
                UseTreeKind::Glob
            } else {
                UseTreeKind::Nested(self.parse_use_tree_list()?)
            }
        } else {
            // `use path::*;` or `use path::{...};` or `use path;` or `use path as bar;`
            prefix = self.parse_path(PathStyle::Mod)?;

            if self.eat(&token::ModSep) {
                if self.eat(&token::BinOp(token::Star)) {
                    UseTreeKind::Glob
                } else {
                    UseTreeKind::Nested(self.parse_use_tree_list()?)
                }
            } else {
                UseTreeKind::Simple(self.parse_rename()?, ast::DUMMY_NODE_ID, ast::DUMMY_NODE_ID)
            }
        };

        Ok(UseTree { prefix, kind, span: lo.to(self.prev_span) })
    }
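    // For illustration: the grammar above covers forms such as
    //
    //     use std::io;                        // PATH                 -> UseTreeKind::Simple
    //     use std::io::Read as R;             // PATH `as` IDENT      -> Simple with a rename
    //     use std::collections::*;            // PATH `::` `*`        -> UseTreeKind::Glob
    //     use std::{io, fmt::{self, Debug}};  // PATH `::` `{ ... }`  -> UseTreeKind::Nested
    //     use ::{serde, serde_json};          // [`::`] `{ ... }`     -> Nested from the root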

    /// Parses a `UseTreeKind::Nested(list)`.
    ///
    /// ```
    /// USE_TREE_LIST = Ø | (USE_TREE `,`)* USE_TREE [`,`]
    /// ```
    fn parse_use_tree_list(&mut self) -> PResult<'a, Vec<(UseTree, ast::NodeId)>> {
        self.parse_unspanned_seq(&token::OpenDelim(token::Brace),
                                 &token::CloseDelim(token::Brace),
                                 SeqSep::trailing_allowed(token::Comma), |this| {
            Ok((this.parse_use_tree()?, ast::DUMMY_NODE_ID))
        })
    }

    fn parse_rename(&mut self) -> PResult<'a, Option<Ident>> {
        if self.eat_keyword(kw::As) {
            self.parse_ident_or_underscore().map(Some)
        } else {
            Ok(None)
        }
    }
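    // For illustration: the rename parsed here is the `as` clause of a use tree, e.g.
    //
    //     use std::io::Result as IoResult;
    //     use std::fmt::Display as _;   // an underscore is accepted in place of an identifier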

    /// Parses a source module as a crate. This is the main entry point for the parser.
    pub fn parse_crate_mod(&mut self) -> PResult<'a, Crate> {
        let lo = self.span;
        let krate = Ok(ast::Crate {
            attrs: self.parse_inner_attributes()?,
            module: self.parse_mod_items(&token::Eof, lo)?,
            span: lo.to(self.span),
        });
        krate
    }

    pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> {
        let ret = match self.token {
            token::Literal(token::Lit { kind: token::Str, symbol, suffix }) =>
                (symbol, ast::StrStyle::Cooked, suffix),
            token::Literal(token::Lit { kind: token::StrRaw(n), symbol, suffix }) =>
                (symbol, ast::StrStyle::Raw(n), suffix),
            _ => return None
        };
        self.bump();
        Some(ret)
    }

    pub fn parse_str(&mut self) -> PResult<'a, (Symbol, StrStyle)> {
        match self.parse_optional_str() {
            Some((s, style, suf)) => {
                let sp = self.prev_span;
                self.expect_no_suffix(sp, "a string literal", suf);
                Ok((s, style))
            }
            _ => {
                let msg = "expected string literal";
                let mut err = self.fatal(msg);
                err.span_label(self.span, msg);
                Err(err)
            }
        }
    }
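    // For illustration: `parse_optional_str` maps the string-literal token forms
    //
    //     "hello"          -> StrStyle::Cooked
    //     r##"hello"##     -> StrStyle::Raw(2), where 2 is the number of `#`s
    //     "hello"suffix    -> the suffix is returned separately and then checked by
    //                         `expect_no_suffix` in `parse_str`
    //
    // while any other token makes `parse_str` fail with "expected string literal".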

    fn report_invalid_macro_expansion_item(&self) {
        self.struct_span_err(
            self.prev_span,
            "macros that expand to items must be delimited with braces or followed by a semicolon",
        ).multipart_suggestion(
            "change the delimiters to curly braces",
            vec![
                (self.prev_span.with_hi(self.prev_span.lo() + BytePos(1)), String::from(" {")),
                (self.prev_span.with_lo(self.prev_span.hi() - BytePos(1)), '}'.to_string()),
            ],
            Applicability::MaybeIncorrect,
        ).span_suggestion(
            self.sess.source_map.next_point(self.prev_span),
            "add a semicolon",
            ';'.to_string(),
            Applicability::MaybeIncorrect,
        ).emit();
    }
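    // For illustration (`my_macro` is a stand-in name): this error fires on item-position
    // invocations such as
    //
    //     my_macro!(foo)
    //
    // and suggests either switching to curly braces, `my_macro! {foo}`, or appending a
    // semicolon, `my_macro!(foo);`.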

    /// When lowering an `async fn` to the HIR, we need to move all of the arguments of the
    /// function into the generated closure so that they are dropped when the future is polled
    /// and not when it is created.
    ///
    /// The arguments of the function are replaced in HIR lowering with the arguments created by
    /// this function and the statements created here are inserted at the top of the closure body.
    fn construct_async_arguments(&mut self, asyncness: &mut Spanned<IsAsync>, decl: &mut FnDecl) {
        // FIXME(davidtwco): This function should really live in the HIR lowering but because
        // the types constructed here need to be used in parts of resolve so that the correct
        // locals are considered upvars, it is currently easier for it to live here in the parser,
        // where it can be constructed once.
        if let IsAsync::Async { ref mut arguments, .. } = asyncness.node {
            for (index, input) in decl.inputs.iter_mut().enumerate() {
                let id = ast::DUMMY_NODE_ID;
                let span = input.pat.span;
                let desugared_span = self.sess.source_map()
                    .mark_span_with_reason(CompilerDesugaringKind::Async, span, None);

                // Construct a name for our temporary argument.
                let name = format!("__arg{}", index);
                let ident = Ident::from_str(&name).gensym();

                // Check if this is an ident pattern; if so, we can optimize and avoid adding a
                // `let <pat> = __argN;` statement, instead just adding a `let <pat> = <pat>;`
                // statement.
                let (binding_mode, ident, is_simple_pattern) = match input.pat.node {
                    PatKind::Ident(binding_mode @ BindingMode::ByValue(_), ident, _) => {
                        // Simple patterns like this don't have a generated argument, but they are
                        // moved into the closure with a statement, so any `mut` bindings on the
                        // argument will be unused. This binding mode can't be removed, because
                        // this would affect the input to procedural macros, but they can have
                        // their span marked as being the result of a compiler desugaring so
                        // that they aren't linted against.
                        input.pat.span = desugared_span;

                        (binding_mode, ident, true)
                    }
                    _ => (BindingMode::ByValue(Mutability::Mutable), ident, false),
                };

                // Construct an argument representing `__argN: <ty>` to replace the argument of the
                // async function if it isn't a simple pattern.
                let arg = if is_simple_pattern {
                    None
                } else {
                    Some(Arg {
                        ty: input.ty.clone(),
                        id,
                        pat: P(Pat {
                            id,
                            node: PatKind::Ident(
                                BindingMode::ByValue(Mutability::Immutable), ident, None,
                            ),
                            span: desugared_span,
                        }),
                        source: ArgSource::AsyncFn(input.pat.clone()),
                    })
                };

                // Construct a `let __argN = __argN;` statement to insert at the top of the
                // async closure. This makes sure that the argument is captured by the closure and
                // that the drop order is correct.
                let move_local = Local {
                    pat: P(Pat {
                        id,
                        node: PatKind::Ident(binding_mode, ident, None),
                        span: desugared_span,
                    }),
                    // We explicitly do not specify the type for this statement. When the user's
                    // argument type is `impl Trait` then this would require the
                    // `impl_trait_in_bindings` feature to also be present for that same type to
                    // be valid in this binding. At the time of writing (13 Mar 19),
                    // `impl_trait_in_bindings` is not stable.
                    ty: None,
                    init: Some(P(Expr {
                        id,
                        node: ExprKind::Path(None, ast::Path {
                            span,
                            segments: vec![PathSegment { ident, id, args: None }],
                        }),
                        span,
                        attrs: ThinVec::new(),
                    })),
                    id,
                    span,
                    attrs: ThinVec::new(),
                    source: LocalSource::AsyncFn,
                };

                // Construct a `let <pat> = __argN;` statement to insert at the top of the
                // async closure if this isn't a simple pattern.
                let pat_stmt = if is_simple_pattern {
                    None
                } else {
                    Some(Stmt {
                        id,
                        node: StmtKind::Local(P(Local {
                            pat: input.pat.clone(),
                            ..move_local.clone()
                        })),
                        span,
                    })
                };

                let move_stmt = Stmt { id, node: StmtKind::Local(P(move_local)), span };
                arguments.push(AsyncArgument { ident, arg, pat_stmt, move_stmt });
            }
        }
    }
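    // For illustration: given an `async fn` such as
    //
    //     async fn f(x: u32, (a, b): (u32, u32)) { /* .. */ }
    //
    // the simple binding `x` only gets a `let x = x;` statement moved into the generated
    // closure, while the tuple pattern is replaced by a fresh `__arg1: (u32, u32)` argument
    // plus `let __arg1 = __arg1;` and `let (a, b) = __arg1;` statements, so that every
    // argument is captured by the closure and dropped when the future is polled rather than
    // when it is created.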
}

pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, handler: &errors::Handler) {
    for unmatched in unclosed_delims.iter() {
        let mut err = handler.struct_span_err(unmatched.found_span, &format!(
            "incorrect close delimiter: `{}`",
            pprust::token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),
        ));
        err.span_label(unmatched.found_span, "incorrect close delimiter");
        if let Some(sp) = unmatched.candidate_span {
            err.span_label(sp, "close delimiter possibly meant for this");
        }
        if let Some(sp) = unmatched.unclosed_span {
            err.span_label(sp, "un-closed delimiter");
        }
        err.emit();
    }
    unclosed_delims.clear();
}
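// For illustration: given source with a mismatched closing delimiter, e.g.
//
//     fn main() { (1 + 2]; }
//
// each recorded `UnmatchedBrace` is reported as "incorrect close delimiter: `]`", with labels
// on the unexpected `]`, on the delimiter it was possibly meant for, and on the still
// unclosed opening delimiter when those spans are known.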