mirror of https://github.com/rust-lang/rust.git
Require passing an AttrWrapper to collect_tokens_trailing_token
This is a pure refactoring split out from #80689. It represents the most invasive part of that PR, requiring changes in every caller of `parse_outer_attributes`.

In order to eagerly expand `#[cfg]` attributes while preserving the original `TokenStream`, we need to know the range of tokens that corresponds to every attribute target. This is accomplished by making `parse_outer_attributes` return an opaque `AttrWrapper` struct. An `AttrWrapper` must be converted to a plain `AttrVec` by passing it to `collect_tokens_trailing_token`. This makes it difficult to accidentally construct an AST node with attributes without calling `collect_tokens_trailing_token`, since AST nodes store an `AttrVec`, not an `AttrWrapper`.

As a result, we now call `collect_tokens_trailing_token` for attribute targets which only support inert attributes, such as generic arguments and struct fields. Currently, the constructed `LazyTokenStream` is simply discarded. Future PRs will record the token range corresponding to the attribute target, allowing those tokens to be removed from an enclosing `collect_tokens_trailing_token` call if necessary.
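To make the new flow concrete, here is a small, self-contained sketch of the calling convention. It only models the idea: `Attribute`, `AttrWrapper`, `TrailingToken`, `Parser`, and `Param` below are simplified stand-ins invented for this illustration, not the real rustc_ast/rustc_parse definitions, and the real `collect_tokens_trailing_token` also takes a `ForceCollect` argument and builds a `LazyTokenStream`.

    // Toy model of the AttrWrapper flow introduced by this commit (not rustc code).
    #[derive(Debug, Clone)]
    struct Attribute(String);

    // Opaque wrapper: in rustc the `attrs` field is private to the parser,
    // so other parsing code cannot extract the attributes directly.
    struct AttrWrapper {
        attrs: Vec<Attribute>,
    }

    enum TrailingToken {
        None,
        MaybeComma,
    }

    struct Parser {
        token_ranges_recorded: usize,
    }

    impl Parser {
        // Stand-in for the real `parse_outer_attributes`: returns an opaque wrapper.
        fn parse_outer_attributes(&mut self) -> AttrWrapper {
            AttrWrapper { attrs: vec![Attribute("cfg(unix)".to_string())] }
        }

        // The only way to get the plain attribute list back: the closure receives it,
        // and the parser gets a chance to record the token range of the attribute target.
        fn collect_tokens_trailing_token<R>(
            &mut self,
            attrs: AttrWrapper,
            f: impl FnOnce(&mut Self, Vec<Attribute>) -> (R, TrailingToken),
        ) -> R {
            self.token_ranges_recorded += 1; // stand-in for building a LazyTokenStream
            let (res, _trailing) = f(self, attrs.attrs);
            res
        }
    }

    // AST nodes keep storing a plain attribute list, not an AttrWrapper.
    #[derive(Debug)]
    struct Param {
        attrs: Vec<Attribute>,
        name: String,
    }

    fn main() {
        let mut p = Parser { token_ranges_recorded: 0 };
        let attrs = p.parse_outer_attributes();
        // Building the Param therefore has to go through collect_tokens_trailing_token.
        let param = p.collect_tokens_trailing_token(attrs, |_this, attrs| {
            (Param { attrs, name: "x".to_string() }, TrailingToken::MaybeComma)
        });
        println!("{:?} ({} token range(s) recorded)", param, p.token_ranges_recorded);
    }

Because AST nodes want a plain attribute list while `parse_outer_attributes` now hands back only the opaque wrapper, any caller that forgets to route the wrapper through `collect_tokens_trailing_token` simply fails to type-check, which is what keeps the token-collection bookkeeping from being skipped by accident.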
This commit is contained in:
parent 7e0241c637
commit 0b411f56e1
@@ -2975,3 +2975,18 @@ macro_rules! derive_has_tokens {
 derive_has_tokens! {
     Item, Expr, Ty, AttrItem, Visibility, Path, Block, Pat
 }
+
+macro_rules! derive_has_attrs_no_tokens {
+    ($($ty:path),*) => { $(
+        impl HasTokens for $ty {
+            fn finalize_tokens(&mut self, _tokens: LazyTokenStream) {}
+        }
+    )* }
+}
+
+// These ast nodes only support inert attributes, so they don't
+// store tokens (since nothing can observe them)
+derive_has_attrs_no_tokens! {
+    StructField, Arm,
+    Field, FieldPat, Variant, Param, GenericParam
+}
@@ -1,4 +1,4 @@
-use super::{Parser, PathStyle};
+use super::{AttrWrapper, Parser, PathStyle};
 use rustc_ast as ast;
 use rustc_ast::attr;
 use rustc_ast::token::{self, Nonterminal};
@@ -26,7 +26,7 @@ pub(super) const DEFAULT_INNER_ATTR_FORBIDDEN: InnerAttrPolicy<'_> = InnerAttrPo
 
 impl<'a> Parser<'a> {
     /// Parses attributes that appear before an item.
-    pub(super) fn parse_outer_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
+    pub(super) fn parse_outer_attributes(&mut self) -> PResult<'a, AttrWrapper> {
         let mut attrs: Vec<ast::Attribute> = Vec::new();
         let mut just_parsed_doc_comment = false;
         loop {
@@ -74,7 +74,7 @@ impl<'a> Parser<'a> {
                 break;
             }
         }
-        Ok(attrs)
+        Ok(AttrWrapper { attrs })
    }
 
    /// Matches `attribute = # ! [ meta_item ]`.
@@ -89,7 +89,8 @@ impl<'a> Parser<'a> {
             inner_parse_policy, self.token
         );
         let lo = self.token.span;
-        self.collect_tokens(|this| {
+        // Attributes can't have attributes of their own
+        self.collect_tokens_no_attrs(|this| {
             if this.eat(&token::Pound) {
                 let style = if this.eat(&token::Not) {
                     ast::AttrStyle::Inner
@@ -163,7 +164,8 @@ impl<'a> Parser<'a> {
             let args = this.parse_attr_args()?;
             Ok(ast::AttrItem { path, args, tokens: None })
         };
-        if capture_tokens { self.collect_tokens(do_parse) } else { do_parse(self) }?
+        // Attr items don't have attributes
+        if capture_tokens { self.collect_tokens_no_attrs(do_parse) } else { do_parse(self) }?
        })
    }
 
@@ -1,6 +1,8 @@
 use super::pat::{GateOr, RecoverComma, PARAM_EXPECTED};
 use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
-use super::{BlockMode, Parser, PathStyle, Restrictions, TokenType};
+use super::{
+    AttrWrapper, BlockMode, ForceCollect, Parser, PathStyle, Restrictions, TokenType, TrailingToken,
+};
 use super::{SemiColonMode, SeqSep, TokenExpectType};
 use crate::maybe_recover_from_interpolated_ty_qpath;
 
@@ -62,16 +64,16 @@ macro_rules! maybe_whole_expr {
 #[derive(Debug)]
 pub(super) enum LhsExpr {
     NotYetParsed,
-    AttributesParsed(AttrVec),
+    AttributesParsed(AttrWrapper),
     AlreadyParsed(P<Expr>),
 }
 
-impl From<Option<AttrVec>> for LhsExpr {
+impl From<Option<AttrWrapper>> for LhsExpr {
     /// Converts `Some(attrs)` into `LhsExpr::AttributesParsed(attrs)`
     /// and `None` into `LhsExpr::NotYetParsed`.
     ///
     /// This conversion does not allocate.
-    fn from(o: Option<AttrVec>) -> Self {
+    fn from(o: Option<AttrWrapper>) -> Self {
         if let Some(attrs) = o { LhsExpr::AttributesParsed(attrs) } else { LhsExpr::NotYetParsed }
     }
 }
@@ -123,7 +125,7 @@ impl<'a> Parser<'a> {
     pub(super) fn parse_expr_res(
         &mut self,
         r: Restrictions,
-        already_parsed_attrs: Option<AttrVec>,
+        already_parsed_attrs: Option<AttrWrapper>,
     ) -> PResult<'a, P<Expr>> {
         self.with_res(r, |this| this.parse_assoc_expr(already_parsed_attrs))
     }
@@ -133,7 +135,10 @@ impl<'a> Parser<'a> {
     /// This parses an expression accounting for associativity and precedence of the operators in
     /// the expression.
     #[inline]
-    fn parse_assoc_expr(&mut self, already_parsed_attrs: Option<AttrVec>) -> PResult<'a, P<Expr>> {
+    fn parse_assoc_expr(
+        &mut self,
+        already_parsed_attrs: Option<AttrWrapper>,
+    ) -> PResult<'a, P<Expr>> {
         self.parse_assoc_expr_with(0, already_parsed_attrs.into())
     }
 
@@ -439,7 +444,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses prefix-forms of range notation: `..expr`, `..`, `..=expr`.
-    fn parse_prefix_range_expr(&mut self, attrs: Option<AttrVec>) -> PResult<'a, P<Expr>> {
+    fn parse_prefix_range_expr(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
         // Check for deprecated `...` syntax.
         if self.token == token::DotDotDot {
             self.err_dotdotdot_syntax(self.token.span);
@@ -457,44 +462,68 @@ impl<'a> Parser<'a> {
         };
         let op = AssocOp::from_token(&self.token);
         let attrs = self.parse_or_use_outer_attributes(attrs)?;
-        let lo = self.token.span;
-        self.bump();
-        let (span, opt_end) = if self.is_at_start_of_range_notation_rhs() {
+        // RESOLVED: It looks like we only have non-empty attributes here when
+        // this is used as a statement:
+        // `#[my_attr] 25..;`
+        // We should still investigate `parse_or_use_outer_attributes`, since we haven't
+        // yet eaten the '..'
+        //
+        // FIXME - does this code ever have attributes? `let a = #[attr] ..` doesn't even parse
+        // // We try to parse attributes *before* bumping the token, so this can only
+        // ever succeed if the `attrs` parameter is `Some`
+        self.collect_tokens_for_expr(attrs, |this, attrs| {
+            let lo = this.token.span;
+            this.bump();
+            let (span, opt_end) = if this.is_at_start_of_range_notation_rhs() {
                 // RHS must be parsed with more associativity than the dots.
-            self.parse_assoc_expr_with(op.unwrap().precedence() + 1, LhsExpr::NotYetParsed)
+                this.parse_assoc_expr_with(op.unwrap().precedence() + 1, LhsExpr::NotYetParsed)
                     .map(|x| (lo.to(x.span), Some(x)))?
             } else {
                 (lo, None)
             };
-        Ok(self.mk_expr(span, self.mk_range(None, opt_end, limits)?, attrs))
+            Ok(this.mk_expr(span, this.mk_range(None, opt_end, limits)?, attrs.into()))
+        })
     }
 
     /// Parses a prefix-unary-operator expr.
-    fn parse_prefix_expr(&mut self, attrs: Option<AttrVec>) -> PResult<'a, P<Expr>> {
+    fn parse_prefix_expr(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
         let attrs = self.parse_or_use_outer_attributes(attrs)?;
-        // FIXME: Use super::attr::maybe_needs_tokens(&attrs) once we come up
-        // with a good way of passing `force_tokens` through from `parse_nonterminal`.
-        // Checking !attrs.is_empty() is correct, but will cause us to unnecessarily
-        // capture tokens in some circumstances.
-        let needs_tokens = !attrs.is_empty();
-        let do_parse = |this: &mut Parser<'a>| {
-            let lo = this.token.span;
-            // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr()
-            let (hi, ex) = match this.token.uninterpolate().kind {
-                token::Not => this.parse_unary_expr(lo, UnOp::Not), // `!expr`
-                token::Tilde => this.recover_tilde_expr(lo), // `~expr`
-                token::BinOp(token::Minus) => this.parse_unary_expr(lo, UnOp::Neg), // `-expr`
-                token::BinOp(token::Star) => this.parse_unary_expr(lo, UnOp::Deref), // `*expr`
-                token::BinOp(token::And) | token::AndAnd => this.parse_borrow_expr(lo),
-                token::Ident(..) if this.token.is_keyword(kw::Box) => this.parse_box_expr(lo),
-                token::Ident(..) if this.is_mistaken_not_ident_negation() => {
-                    this.recover_not_expr(lo)
-                }
-                _ => return this.parse_dot_or_call_expr(Some(attrs)),
-            }?;
-            Ok(this.mk_expr(lo.to(hi), ex, attrs))
-        };
-        if needs_tokens { self.collect_tokens(do_parse) } else { do_parse(self) }
+        let lo = self.token.span;
+
+        macro_rules! make_it {
+            ($this:ident, $attrs:expr, |this, _| $body:expr) => {
+                $this.collect_tokens_for_expr($attrs, |$this, attrs| {
+                    let (hi, ex) = $body?;
+                    Ok($this.mk_expr(lo.to(hi), ex, attrs.into()))
+                })
+            };
+        }
+
+        let this = self;
+
+        // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr()
+        match this.token.uninterpolate().kind {
+            token::Not => make_it!(this, attrs, |this, _| this.parse_unary_expr(lo, UnOp::Not)), // `!expr`
+            token::Tilde => make_it!(this, attrs, |this, _| this.recover_tilde_expr(lo)), // `~expr`
+            token::BinOp(token::Minus) => {
+                make_it!(this, attrs, |this, _| this.parse_unary_expr(lo, UnOp::Neg))
+            } // `-expr`
+            token::BinOp(token::Star) => {
+                make_it!(this, attrs, |this, _| this.parse_unary_expr(lo, UnOp::Deref))
+            } // `*expr`
+            token::BinOp(token::And) | token::AndAnd => {
+                make_it!(this, attrs, |this, _| this.parse_borrow_expr(lo))
+            }
+            token::Ident(..) if this.token.is_keyword(kw::Box) => {
+                make_it!(this, attrs, |this, _| this.parse_box_expr(lo))
+            }
+            token::Ident(..) if this.is_mistaken_not_ident_negation() => {
+                // FIXME - what is our policy for handling tokens during recovery?
+                // Should we ever invoke a proc-macro with these tokens?
+                make_it!(this, attrs, |this, _| this.recover_not_expr(lo))
+            }
+            _ => return this.parse_dot_or_call_expr(Some(attrs.into())),
+        }
     }
 
     fn parse_prefix_expr_common(&mut self, lo: Span) -> PResult<'a, (Span, P<Expr>)> {
@@ -805,18 +834,20 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses `a.b` or `a(13)` or `a[4]` or just `a`.
-    fn parse_dot_or_call_expr(&mut self, attrs: Option<AttrVec>) -> PResult<'a, P<Expr>> {
+    fn parse_dot_or_call_expr(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
         let attrs = self.parse_or_use_outer_attributes(attrs)?;
-        let base = self.parse_bottom_expr();
-        let (span, base) = self.interpolated_or_expr_span(base)?;
-        self.parse_dot_or_call_expr_with(base, span, attrs)
+        self.collect_tokens_for_expr(attrs, |this, attrs| {
+            let base = this.parse_bottom_expr();
+            let (span, base) = this.interpolated_or_expr_span(base)?;
+            this.parse_dot_or_call_expr_with(base, span, attrs)
+        })
     }
 
     pub(super) fn parse_dot_or_call_expr_with(
         &mut self,
         e0: P<Expr>,
         lo: Span,
-        mut attrs: AttrVec,
+        mut attrs: Vec<ast::Attribute>,
     ) -> PResult<'a, P<Expr>> {
         // Stitch the list of outer attributes onto the return value.
         // A little bit ugly, but the best way given the current code
@@ -824,7 +855,7 @@ impl<'a> Parser<'a> {
         self.parse_dot_or_call_expr_with_(e0, lo).map(|expr| {
             expr.map(|mut expr| {
                 attrs.extend::<Vec<_>>(expr.attrs.into());
-                expr.attrs = attrs;
+                expr.attrs = attrs.into();
                 expr
             })
         })
@@ -1703,19 +1734,25 @@ impl<'a> Parser<'a> {
     fn parse_fn_block_param(&mut self) -> PResult<'a, Param> {
         let lo = self.token.span;
         let attrs = self.parse_outer_attributes()?;
-        let pat = self.parse_pat(PARAM_EXPECTED)?;
-        let ty = if self.eat(&token::Colon) {
-            self.parse_ty()?
+        self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
+            let pat = this.parse_pat(PARAM_EXPECTED)?;
+            let ty = if this.eat(&token::Colon) {
+                this.parse_ty()?
             } else {
-            self.mk_ty(self.prev_token.span, TyKind::Infer)
+                this.mk_ty(this.prev_token.span, TyKind::Infer)
             };
-        Ok(Param {
+            Ok((
+                Param {
                     attrs: attrs.into(),
                     ty,
                     pat,
-            span: lo.to(self.token.span),
+                    span: lo.to(this.token.span),
                     id: DUMMY_NODE_ID,
                     is_placeholder: false,
+                },
+                TrailingToken::MaybeComma,
+            ))
         })
     }
 
@@ -1731,7 +1768,7 @@ impl<'a> Parser<'a> {
         let thn = if self.eat_keyword(kw::Else) || !cond.returns() {
             self.error_missing_if_cond(lo, cond.span)
         } else {
-            let attrs = self.parse_outer_attributes()?; // For recovery.
+            let attrs = self.parse_outer_attributes()?.take_for_recovery(); // For recovery.
             let not_block = self.token != token::OpenDelim(token::Brace);
             let block = self.parse_block().map_err(|mut err| {
                 if not_block {
@@ -1788,7 +1825,7 @@ impl<'a> Parser<'a> {
     /// Parses an `else { ... }` expression (`else` token already eaten).
     fn parse_else_expr(&mut self) -> PResult<'a, P<Expr>> {
         let ctx_span = self.prev_token.span; // `else`
-        let attrs = self.parse_outer_attributes()?; // For recovery.
+        let attrs = self.parse_outer_attributes()?.take_for_recovery(); // For recovery.
         let expr = if self.eat_keyword(kw::If) {
             self.parse_if_expr(AttrVec::new())?
         } else {
@@ -1947,44 +1984,46 @@ impl<'a> Parser<'a> {
 
     pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> {
         let attrs = self.parse_outer_attributes()?;
-        let lo = self.token.span;
-        let pat = self.parse_top_pat(GateOr::No, RecoverComma::Yes)?;
-        let guard = if self.eat_keyword(kw::If) {
-            let if_span = self.prev_token.span;
-            let cond = self.parse_expr()?;
+        self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
+            let lo = this.token.span;
+            let pat = this.parse_top_pat(GateOr::No, RecoverComma::Yes)?;
+            let guard = if this.eat_keyword(kw::If) {
+                let if_span = this.prev_token.span;
+                let cond = this.parse_expr()?;
                 if let ExprKind::Let(..) = cond.kind {
                     // Remove the last feature gating of a `let` expression since it's stable.
-                self.sess.gated_spans.ungate_last(sym::let_chains, cond.span);
+                    this.sess.gated_spans.ungate_last(sym::let_chains, cond.span);
                     let span = if_span.to(cond.span);
-                self.sess.gated_spans.gate(sym::if_let_guard, span);
+                    this.sess.gated_spans.gate(sym::if_let_guard, span);
                 }
                 Some(cond)
             } else {
                 None
             };
-        let arrow_span = self.token.span;
-        self.expect(&token::FatArrow)?;
-        let arm_start_span = self.token.span;
+            let arrow_span = this.token.span;
+            this.expect(&token::FatArrow)?;
+            let arm_start_span = this.token.span;
 
-        let expr = self.parse_expr_res(Restrictions::STMT_EXPR, None).map_err(|mut err| {
+            let expr = this.parse_expr_res(Restrictions::STMT_EXPR, None).map_err(|mut err| {
                 err.span_label(arrow_span, "while parsing the `match` arm starting here");
                 err
             })?;
 
             let require_comma = classify::expr_requires_semi_to_be_stmt(&expr)
-            && self.token != token::CloseDelim(token::Brace);
+                && this.token != token::CloseDelim(token::Brace);
 
-        let hi = self.prev_token.span;
+            let hi = this.prev_token.span;
 
             if require_comma {
-            let sm = self.sess.source_map();
-            self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)]).map_err(
+                let sm = this.sess.source_map();
+                this.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)]).map_err(
                     |mut err| {
                         match (sm.span_to_lines(expr.span), sm.span_to_lines(arm_start_span)) {
                             (Ok(ref expr_lines), Ok(ref arm_start_lines))
-                            if arm_start_lines.lines[0].end_col == expr_lines.lines[0].end_col
+                                if arm_start_lines.lines[0].end_col
+                                    == expr_lines.lines[0].end_col
                                     && expr_lines.lines.len() == 2
-                                && self.token == token::FatArrow =>
+                                    && this.token == token::FatArrow =>
                             {
                                 // We check whether there's any trailing code in the parse span,
                                 // if there isn't, we very likely have the following:
@@ -2015,10 +2054,11 @@ impl<'a> Parser<'a> {
                     },
                 )?;
             } else {
-            self.eat(&token::Comma);
+                this.eat(&token::Comma);
             }
 
-        Ok(ast::Arm {
+            Ok((
+                ast::Arm {
                     attrs,
                     pat,
                     guard,
@@ -2026,6 +2066,9 @@ impl<'a> Parser<'a> {
                     span: lo.to(hi),
                     id: DUMMY_NODE_ID,
                     is_placeholder: false,
+                },
+                TrailingToken::None,
+            ))
         })
     }
 
@@ -2274,30 +2317,36 @@ impl<'a> Parser<'a> {
 
     /// Parses `ident (COLON expr)?`.
     fn parse_field(&mut self) -> PResult<'a, Field> {
-        let attrs = self.parse_outer_attributes()?.into();
-        let lo = self.token.span;
+        let attrs = self.parse_outer_attributes()?;
+        self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
+            let lo = this.token.span;
 
             // Check if a colon exists one ahead. This means we're parsing a fieldname.
-        let is_shorthand = !self.look_ahead(1, |t| t == &token::Colon || t == &token::Eq);
+            let is_shorthand = !this.look_ahead(1, |t| t == &token::Colon || t == &token::Eq);
             let (ident, expr) = if is_shorthand {
                 // Mimic `x: x` for the `x` field shorthand.
-            let ident = self.parse_ident_common(false)?;
+                let ident = this.parse_ident_common(false)?;
                 let path = ast::Path::from_ident(ident);
-            (ident, self.mk_expr(ident.span, ExprKind::Path(None, path), AttrVec::new()))
+                (ident, this.mk_expr(ident.span, ExprKind::Path(None, path), AttrVec::new()))
             } else {
-            let ident = self.parse_field_name()?;
-            self.error_on_eq_field_init(ident);
-            self.bump(); // `:`
-            (ident, self.parse_expr()?)
+                let ident = this.parse_field_name()?;
+                this.error_on_eq_field_init(ident);
+                this.bump(); // `:`
+                (ident, this.parse_expr()?)
             };
-        Ok(ast::Field {
+            Ok((
+                ast::Field {
                     ident,
                     span: lo.to(expr.span),
                     expr,
                     is_shorthand,
-            attrs,
+                    attrs: attrs.into(),
                     id: DUMMY_NODE_ID,
                     is_placeholder: false,
+                },
+                TrailingToken::MaybeComma,
+            ))
         })
     }
 
@@ -2405,4 +2454,27 @@ impl<'a> Parser<'a> {
             .map_or(lhs_span, |a| a.span)
             .to(rhs_span)
     }
+
+    fn collect_tokens_for_expr(
+        &mut self,
+        attrs: AttrWrapper,
+        f: impl FnOnce(&mut Self, Vec<ast::Attribute>) -> PResult<'a, P<Expr>>,
+    ) -> PResult<'a, P<Expr>> {
+        // FIXME - come up with a nice way to properly forward `ForceCollect` from
+        // the nonterminal parsing code. This approach is correct, but will cause
+        // us to unnecessarily capture tokens for exprs that have only builtin
+        // attributes. Revisit this before #![feature(stmt_expr_attributes)] is stabilized
+        let force_collect = if attrs.is_empty() { ForceCollect::No } else { ForceCollect::Yes };
+        self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
+            let res = f(this, attrs)?;
+            let trailing = if this.restrictions.contains(Restrictions::STMT_EXPR)
+                && this.token.kind == token::Semi
+            {
+                TrailingToken::Semi
+            } else {
+                TrailingToken::None
+            };
+            Ok((res, trailing))
+        })
+    }
 }
@@ -1,4 +1,4 @@
-use super::Parser;
+use super::{ForceCollect, Parser, TrailingToken};
 
 use rustc_ast::token;
 use rustc_ast::{
@@ -84,57 +84,69 @@ impl<'a> Parser<'a> {
     /// a trailing comma and erroneous trailing attributes.
     pub(super) fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> {
         let mut params = Vec::new();
-        loop {
+        let mut done = false;
+        while !done {
             let attrs = self.parse_outer_attributes()?;
-            if self.check_lifetime() {
-                let lifetime = self.expect_lifetime();
+            let param =
+                self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
+                    let param = if this.check_lifetime() {
+                        let lifetime = this.expect_lifetime();
                         // Parse lifetime parameter.
-                let bounds =
-                    if self.eat(&token::Colon) { self.parse_lt_param_bounds() } else { Vec::new() };
-                params.push(ast::GenericParam {
+                        let bounds = if this.eat(&token::Colon) {
+                            this.parse_lt_param_bounds()
+                        } else {
+                            Vec::new()
+                        };
+                        Some(ast::GenericParam {
                             ident: lifetime.ident,
                             id: lifetime.id,
                             attrs: attrs.into(),
                             bounds,
                             kind: ast::GenericParamKind::Lifetime,
                             is_placeholder: false,
-                });
-            } else if self.check_keyword(kw::Const) {
+                        })
+                    } else if this.check_keyword(kw::Const) {
                         // Parse const parameter.
-                params.push(self.parse_const_param(attrs)?);
-            } else if self.check_ident() {
+                        Some(this.parse_const_param(attrs)?)
+                    } else if this.check_ident() {
                         // Parse type parameter.
-                params.push(self.parse_ty_param(attrs)?);
-            } else if self.token.can_begin_type() {
+                        Some(this.parse_ty_param(attrs)?)
+                    } else if this.token.can_begin_type() {
                         // Trying to write an associated type bound? (#26271)
-                let snapshot = self.clone();
-                match self.parse_ty_where_predicate() {
+                        let snapshot = this.clone();
+                        match this.parse_ty_where_predicate() {
                             Ok(where_predicate) => {
-                        self.struct_span_err(
+                                this.struct_span_err(
                                     where_predicate.span(),
                                     "bounds on associated types do not belong here",
                                 )
                                 .span_label(where_predicate.span(), "belongs in `where` clause")
                                 .emit();
+                                // FIXME - try to continue parsing other generics?
+                                return Ok((None, TrailingToken::None));
                             }
                             Err(mut err) => {
                                 err.cancel();
-                        *self = snapshot;
-                        break;
+                                // FIXME - maybe we should overwrite 'self' outside of `collect_tokens`?
+                                *this = snapshot;
+                                return Ok((None, TrailingToken::None));
                             }
                         }
                     } else {
                         // Check for trailing attributes and stop parsing.
                         if !attrs.is_empty() {
                             if !params.is_empty() {
-                        self.struct_span_err(
+                                this.struct_span_err(
                                     attrs[0].span,
                                     "trailing attribute after generic parameter",
                                 )
                                 .span_label(attrs[0].span, "attributes must go before parameters")
                                 .emit();
                             } else {
-                        self.struct_span_err(attrs[0].span, "attribute without generic parameters")
+                                this.struct_span_err(
+                                    attrs[0].span,
+                                    "attribute without generic parameters",
+                                )
                                 .span_label(
                                     attrs[0].span,
                                     "attributes are only permitted when preceding parameters",
@@ -142,10 +154,19 @@ impl<'a> Parser<'a> {
                                 .emit();
                             }
                         }
-                break;
-            }
+                        return Ok((None, TrailingToken::None));
+                    };
 
-            if !self.eat(&token::Comma) {
+                    if !this.eat(&token::Comma) {
+                        done = true;
+                    }
+                    // We just ate the comma, so no need to use `TrailingToken`
+                    Ok((param, TrailingToken::None))
+                })?;
+
+            if let Some(param) = param {
+                params.push(param);
+            } else {
                 break;
             }
         }
@@ -1,8 +1,6 @@
 use super::diagnostics::{dummy_arg, ConsumeClosingDelim, Error};
 use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
-use super::{FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken};
+use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken};
 
-use crate::{maybe_collect_tokens, maybe_whole};
-
 use rustc_ast::ast::*;
 use rustc_ast::ptr::P;
@@ -108,21 +106,36 @@ impl<'a> Parser<'a> {
 
     pub(super) fn parse_item_common(
         &mut self,
-        mut attrs: Vec<Attribute>,
+        attrs: AttrWrapper,
         mac_allowed: bool,
         attrs_allowed: bool,
         req_name: ReqName,
         force_collect: ForceCollect,
     ) -> PResult<'a, Option<Item>> {
-        maybe_whole!(self, NtItem, |item| {
+        // Don't use `maybe_whole` so that we have precise control
+        // over when we bump the parser
+        if let token::Interpolated(nt) = &self.token.kind {
+            if let token::NtItem(item) = &**nt {
+                let item = item.clone();
+
+                return self.collect_tokens_trailing_token(
+                    attrs,
+                    force_collect,
+                    |this, mut attrs| {
                         let mut item = item;
                         mem::swap(&mut item.attrs, &mut attrs);
                         item.attrs.extend(attrs);
-            Some(item.into_inner())
-        });
+                        // Bump the parser so that we capture the token::Interpolated
+                        this.bump();
+                        Ok((Some(item.into_inner()), TrailingToken::None))
+                    },
+                );
+            }
+        };
 
         let mut unclosed_delims = vec![];
-        let item = maybe_collect_tokens!(self, force_collect, &attrs, |this: &mut Self| {
+        let item =
+            self.collect_tokens_trailing_token(attrs, force_collect, |this: &mut Self, attrs| {
                 let item = this.parse_item_common_(attrs, mac_allowed, attrs_allowed, req_name);
                 unclosed_delims.append(&mut this.unclosed_delims);
                 Ok((item?, TrailingToken::None))
@@ -1109,26 +1122,30 @@ impl<'a> Parser<'a> {
 
     fn parse_enum_variant(&mut self) -> PResult<'a, Option<Variant>> {
         let variant_attrs = self.parse_outer_attributes()?;
-        let vlo = self.token.span;
+        self.collect_tokens_trailing_token(
+            variant_attrs,
+            ForceCollect::No,
+            |this, variant_attrs| {
+                let vlo = this.token.span;
 
-        let vis = self.parse_visibility(FollowedByType::No)?;
-        if !self.recover_nested_adt_item(kw::Enum)? {
-            return Ok(None);
+                let vis = this.parse_visibility(FollowedByType::No)?;
+                if !this.recover_nested_adt_item(kw::Enum)? {
+                    return Ok((None, TrailingToken::None));
                 }
-        let ident = self.parse_ident()?;
+                let ident = this.parse_ident()?;
 
-        let struct_def = if self.check(&token::OpenDelim(token::Brace)) {
+                let struct_def = if this.check(&token::OpenDelim(token::Brace)) {
                     // Parse a struct variant.
-            let (fields, recovered) = self.parse_record_struct_body()?;
+                    let (fields, recovered) = this.parse_record_struct_body()?;
                     VariantData::Struct(fields, recovered)
-        } else if self.check(&token::OpenDelim(token::Paren)) {
-            VariantData::Tuple(self.parse_tuple_struct_body()?, DUMMY_NODE_ID)
+                } else if this.check(&token::OpenDelim(token::Paren)) {
+                    VariantData::Tuple(this.parse_tuple_struct_body()?, DUMMY_NODE_ID)
                 } else {
                     VariantData::Unit(DUMMY_NODE_ID)
                 };
 
                 let disr_expr =
-            if self.eat(&token::Eq) { Some(self.parse_anon_const_expr()?) } else { None };
+                    if this.eat(&token::Eq) { Some(this.parse_anon_const_expr()?) } else { None };
 
                 let vr = ast::Variant {
                     ident,
@@ -1137,11 +1154,13 @@ impl<'a> Parser<'a> {
                     attrs: variant_attrs,
                     data: struct_def,
                     disr_expr,
-            span: vlo.to(self.prev_token.span),
+                    span: vlo.to(this.prev_token.span),
                     is_placeholder: false,
                 };
 
-        Ok(Some(vr))
+                Ok((Some(vr), TrailingToken::MaybeComma))
+            },
+        )
     }
 
     /// Parses `struct Foo { ... }`.
@@ -1262,10 +1281,13 @@ impl<'a> Parser<'a> {
         // Unit like structs are handled in parse_item_struct function
         self.parse_paren_comma_seq(|p| {
             let attrs = p.parse_outer_attributes()?;
+            p.collect_tokens_trailing_token(attrs, ForceCollect::No, |p, attrs| {
                 let lo = p.token.span;
                 let vis = p.parse_visibility(FollowedByType::Yes)?;
                 let ty = p.parse_ty()?;
-            Ok(StructField {
+                Ok((
+                    StructField {
                         span: lo.to(ty.span),
                         vis,
                         ident: None,
@@ -1273,6 +1295,9 @@ impl<'a> Parser<'a> {
                         ty,
                         attrs,
                         is_placeholder: false,
+                    },
+                    TrailingToken::MaybeComma,
+                ))
             })
         })
         .map(|(r, _)| r)
@@ -1281,9 +1306,11 @@ impl<'a> Parser<'a> {
     /// Parses an element of a struct declaration.
     fn parse_struct_decl_field(&mut self) -> PResult<'a, StructField> {
         let attrs = self.parse_outer_attributes()?;
-        let lo = self.token.span;
-        let vis = self.parse_visibility(FollowedByType::No)?;
-        self.parse_single_struct_field(lo, vis, attrs)
+        self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
+            let lo = this.token.span;
+            let vis = this.parse_visibility(FollowedByType::No)?;
+            Ok((this.parse_single_struct_field(lo, vis, attrs)?, TrailingToken::None))
+        })
     }
 
     /// Parses a structure field declaration.
@@ -1736,74 +1763,79 @@ impl<'a> Parser<'a> {
     fn parse_param_general(&mut self, req_name: ReqName, first_param: bool) -> PResult<'a, Param> {
         let lo = self.token.span;
         let attrs = self.parse_outer_attributes()?;
+        self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
             // Possibly parse `self`. Recover if we parsed it and it wasn't allowed here.
-        if let Some(mut param) = self.parse_self_param()? {
+            if let Some(mut param) = this.parse_self_param()? {
                 param.attrs = attrs.into();
-            return if first_param { Ok(param) } else { self.recover_bad_self_param(param) };
+                let res = if first_param { Ok(param) } else { this.recover_bad_self_param(param) };
+                return Ok((res?, TrailingToken::None));
             }
 
-        let is_name_required = match self.token.kind {
+            let is_name_required = match this.token.kind {
                 token::DotDotDot => false,
-            _ => req_name(self.token.span.edition()),
+                _ => req_name(this.token.span.edition()),
             };
-        let (pat, ty) = if is_name_required || self.is_named_param() {
+            let (pat, ty) = if is_name_required || this.is_named_param() {
                 debug!("parse_param_general parse_pat (is_name_required:{})", is_name_required);
 
-            let pat = self.parse_fn_param_pat()?;
-            if let Err(mut err) = self.expect(&token::Colon) {
+                let pat = this.parse_fn_param_pat()?;
+                if let Err(mut err) = this.expect(&token::Colon) {
                     return if let Some(ident) =
-                    self.parameter_without_type(&mut err, pat, is_name_required, first_param)
+                        this.parameter_without_type(&mut err, pat, is_name_required, first_param)
                     {
                         err.emit();
-                    Ok(dummy_arg(ident))
+                        Ok((dummy_arg(ident), TrailingToken::None))
                     } else {
                         Err(err)
                     };
                 }
 
-            self.eat_incorrect_doc_comment_for_param_type();
-            (pat, self.parse_ty_for_param()?)
+                this.eat_incorrect_doc_comment_for_param_type();
+                (pat, this.parse_ty_for_param()?)
             } else {
                 debug!("parse_param_general ident_to_pat");
-            let parser_snapshot_before_ty = self.clone();
-            self.eat_incorrect_doc_comment_for_param_type();
-            let mut ty = self.parse_ty_for_param();
+                let parser_snapshot_before_ty = this.clone();
+                this.eat_incorrect_doc_comment_for_param_type();
+                let mut ty = this.parse_ty_for_param();
                 if ty.is_ok()
-                && self.token != token::Comma
-                && self.token != token::CloseDelim(token::Paren)
+                    && this.token != token::Comma
+                    && this.token != token::CloseDelim(token::Paren)
                 {
                     // This wasn't actually a type, but a pattern looking like a type,
                     // so we are going to rollback and re-parse for recovery.
-                ty = self.unexpected();
+                    ty = this.unexpected();
                 }
                 match ty {
                     Ok(ty) => {
-                    let ident = Ident::new(kw::Empty, self.prev_token.span);
+                        let ident = Ident::new(kw::Empty, this.prev_token.span);
                         let bm = BindingMode::ByValue(Mutability::Not);
-                    let pat = self.mk_pat_ident(ty.span, bm, ident);
+                        let pat = this.mk_pat_ident(ty.span, bm, ident);
                         (pat, ty)
                     }
                     // If this is a C-variadic argument and we hit an error, return the error.
-                Err(err) if self.token == token::DotDotDot => return Err(err),
+                    Err(err) if this.token == token::DotDotDot => return Err(err),
                     // Recover from attempting to parse the argument as a type without pattern.
                     Err(mut err) => {
                         err.cancel();
-                    *self = parser_snapshot_before_ty;
-                    self.recover_arg_parse()?
+                        *this = parser_snapshot_before_ty;
+                        this.recover_arg_parse()?
                     }
                 }
             };
 
-        let span = lo.until(self.token.span);
+            let span = lo.until(this.token.span);
 
-        Ok(Param {
+            Ok((
+                Param {
                     attrs: attrs.into(),
                     id: ast::DUMMY_NODE_ID,
                     is_placeholder: false,
                     pat,
                     span,
                     ty,
+                },
+                TrailingToken::None,
+            ))
         })
     }
 
@@ -64,6 +64,24 @@ pub enum ForceCollect {
 pub enum TrailingToken {
     None,
     Semi,
+    /// If the trailing token is a comma, then capture it
+    /// Otherwise, ignore the trailing token
+    MaybeComma,
+}
+
+#[derive(Debug, Clone)]
+pub struct AttrWrapper {
+    attrs: Vec<ast::Attribute>,
+}
+
+impl AttrWrapper {
+    // FIXME: Delay span bug here?
+    fn take_for_recovery(self) -> Vec<ast::Attribute> {
+        self.attrs
+    }
+    fn is_empty(&self) -> bool {
+        self.attrs.is_empty()
+    }
 }
 
 /// Like `maybe_whole_expr`, but for things other than expressions.
@@ -1004,12 +1022,12 @@ impl<'a> Parser<'a> {
 
     fn parse_or_use_outer_attributes(
         &mut self,
-        already_parsed_attrs: Option<AttrVec>,
-    ) -> PResult<'a, AttrVec> {
+        already_parsed_attrs: Option<AttrWrapper>,
+    ) -> PResult<'a, AttrWrapper> {
         if let Some(attrs) = already_parsed_attrs {
             Ok(attrs)
         } else {
-            self.parse_outer_attributes().map(|a| a.into())
+            self.parse_outer_attributes()
        }
    }
 
@@ -1226,11 +1244,17 @@ impl<'a> Parser<'a> {
        }
    }
 
-    pub fn collect_tokens<R: HasTokens>(
+    pub fn collect_tokens_no_attrs<R: HasTokens>(
         &mut self,
         f: impl FnOnce(&mut Self) -> PResult<'a, R>,
     ) -> PResult<'a, R> {
-        self.collect_tokens_trailing_token(|this| Ok((f(this)?, TrailingToken::None)))
+        // The only reason to call `collect_tokens_no_attrs` is if you want tokens, so use
+        // `ForceCollect::Yes`
+        self.collect_tokens_trailing_token(
+            AttrWrapper { attrs: Vec::new() },
+            ForceCollect::Yes,
+            |this, _attrs| Ok((f(this)?, TrailingToken::None)),
+        )
     }
 
     /// Records all tokens consumed by the provided callback,
@@ -1251,12 +1275,17 @@ impl<'a> Parser<'a> {
     /// a parsed AST item, which always has matching delimiters.
     pub fn collect_tokens_trailing_token<R: HasTokens>(
         &mut self,
-        f: impl FnOnce(&mut Self) -> PResult<'a, (R, TrailingToken)>,
+        attrs: AttrWrapper,
+        force_collect: ForceCollect,
+        f: impl FnOnce(&mut Self, Vec<ast::Attribute>) -> PResult<'a, (R, TrailingToken)>,
     ) -> PResult<'a, R> {
+        if matches!(force_collect, ForceCollect::No) && !attr::maybe_needs_tokens(&attrs.attrs) {
+            return Ok(f(self, attrs.attrs)?.0);
+        }
         let start_token = (self.token.clone(), self.token_spacing);
         let cursor_snapshot = self.token_cursor.clone();
 
-        let (mut ret, trailing_token) = f(self)?;
+        let (mut ret, trailing_token) = f(self, attrs.attrs)?;
 
         // Produces a `TokenStream` on-demand. Using `cursor_snapshot`
         // and `num_calls`, we can reconstruct the `TokenStream` seen
@@ -1306,6 +1335,11 @@ impl<'a> Parser<'a> {
                 assert_eq!(self.token.kind, token::Semi);
                 num_calls += 1;
             }
+            TrailingToken::MaybeComma => {
+                if self.token.kind == token::Comma {
+                    num_calls += 1;
+                }
+            }
         }
 
         let lazy_impl = LazyTokenStreamImpl {
@@ -1409,16 +1443,3 @@ fn make_token_stream(
     assert!(stack.is_empty(), "Stack should be empty: final_buf={:?} stack={:?}", final_buf, stack);
     TokenStream::new(final_buf.inner)
 }
-
-#[macro_export]
-macro_rules! maybe_collect_tokens {
-    ($self:ident, $force_collect:expr, $attrs:expr, $f:expr) => {
-        if matches!($force_collect, ForceCollect::Yes)
-            || $crate::parser::attr::maybe_needs_tokens($attrs)
-        {
-            $self.collect_tokens_trailing_token($f)
-        } else {
-            Ok($f($self)?.0)
-        }
-    };
-}
@@ -108,7 +108,9 @@ impl<'a> Parser<'a> {
                 }
             },
             NonterminalKind::Block => {
-                token::NtBlock(self.collect_tokens(|this| this.parse_block())?)
+                // While a block *expression* may have attributes (e.g. `#[my_attr] { ... }`),
+                // the ':block' matcher does not support them
+                token::NtBlock(self.collect_tokens_no_attrs(|this| this.parse_block())?)
             }
             NonterminalKind::Stmt => match self.parse_stmt(ForceCollect::Yes)? {
                 Some(s) => token::NtStmt(s),
@@ -117,7 +119,7 @@ impl<'a> Parser<'a> {
                 }
             },
             NonterminalKind::Pat2018 { .. } | NonterminalKind::Pat2021 { .. } => {
-                token::NtPat(self.collect_tokens(|this| match kind {
+                token::NtPat(self.collect_tokens_no_attrs(|this| match kind {
                     NonterminalKind::Pat2018 { .. } => this.parse_pat(None),
                     NonterminalKind::Pat2021 { .. } => {
                         this.parse_top_pat(GateOr::Yes, RecoverComma::No)
@@ -125,11 +127,33 @@ impl<'a> Parser<'a> {
                     _ => unreachable!(),
                 })?)
             }
-            NonterminalKind::Expr => token::NtExpr(self.collect_tokens(|this| this.parse_expr())?),
-            NonterminalKind::Literal => {
-                token::NtLiteral(self.collect_tokens(|this| this.parse_literal_maybe_minus())?)
+            // If there are attributes present, then `parse_expr` will end up collecting tokens,
+            // turning the outer `collect_tokens_no_attrs` into a no-op due to the already present
+            // tokens. If there are *not* attributes present, then the outer
+            // `collect_tokens_no_attrs` will ensure that we will end up collecting tokens for the
+            // expressions.
+            //
+            // This is less efficient than it could be, since the outer `collect_tokens_no_attrs`
+            // still needs to snapshot the `TokenCursor` before calling `parse_expr`, even when
+            // `parse_expr` will end up collecting tokens. Ideally, this would work more like
+            // `parse_item`, and take in a `ForceCollect` parameter. However, this would require
+            // adding a `ForceCollect` parameter in a bunch of places in expression parsing
+            // for little gain. If the perf impact from this turns out to be noticeable, we should
+            // revisit this approach.
+            NonterminalKind::Expr => {
+                token::NtExpr(self.collect_tokens_no_attrs(|this| this.parse_expr())?)
+            }
+            NonterminalKind::Literal => {
+                // The `:literal` matcher does not support attributes
+                token::NtLiteral(
+                    self.collect_tokens_no_attrs(|this| this.parse_literal_maybe_minus())?,
+                )
+            }
+
+            NonterminalKind::Ty => {
+                token::NtTy(self.collect_tokens_no_attrs(|this| this.parse_ty())?)
             }
-            NonterminalKind::Ty => token::NtTy(self.collect_tokens(|this| this.parse_ty())?),
             // this could be handled like a token, since it is one
             NonterminalKind::Ident => {
                 if let Some((ident, is_raw)) = get_macro_ident(&self.token) {
@@ -141,15 +165,15 @@ impl<'a> Parser<'a> {
                     return Err(self.struct_span_err(self.token.span, msg));
                 }
             }
-            NonterminalKind::Path => {
-                token::NtPath(self.collect_tokens(|this| this.parse_path(PathStyle::Type))?)
-            }
+            NonterminalKind::Path => token::NtPath(
+                self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?,
+            ),
             NonterminalKind::Meta => {
-                token::NtMeta(P(self.collect_tokens(|this| this.parse_attr_item(false))?))
+                token::NtMeta(P(self.collect_tokens_no_attrs(|this| this.parse_attr_item(false))?))
             }
             NonterminalKind::TT => token::NtTT(self.parse_token_tree()),
             NonterminalKind::Vis => token::NtVis(
-                self.collect_tokens(|this| this.parse_visibility(FollowedByType::Yes))?,
+                self.collect_tokens_no_attrs(|this| this.parse_visibility(FollowedByType::Yes))?,
             ),
             NonterminalKind::Lifetime => {
                 if self.check_lifetime() {
@@ -1,4 +1,4 @@
-use super::{Parser, PathStyle};
+use super::{ForceCollect, Parser, PathStyle, TrailingToken};
 use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
 use rustc_ast::mut_visit::{noop_visit_pat, MutVisitor};
 use rustc_ast::ptr::P;
@@ -938,16 +938,24 @@ impl<'a> Parser<'a> {
                 }
             }
 
-            fields.push(match self.parse_pat_field(lo, attrs) {
-                Ok(field) => field,
+            let field =
+                self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
+                    let field = match this.parse_pat_field(lo, attrs) {
+                        Ok(field) => Ok(field),
                         Err(err) => {
-                    if let Some(mut delayed_err) = delayed_err {
+                            if let Some(mut delayed_err) = delayed_err.take() {
                                 delayed_err.emit();
                             }
                             return Err(err);
                         }
-            });
-            ate_comma = self.eat(&token::Comma);
+                    }?;
+                    ate_comma = this.eat(&token::Comma);
+                    // We just ate a comma, so there's no need to use
+                    // `TrailingToken::Comma`
+                    Ok((field, TrailingToken::None))
+                })?;
+
+            fields.push(field)
         }
 
         if let Some(mut err) = delayed_err {
@@ -3,8 +3,10 @@ use super::diagnostics::{AttemptLocalParseRecovery, Error};
 use super::expr::LhsExpr;
 use super::pat::{GateOr, RecoverComma};
 use super::path::PathStyle;
-use super::{BlockMode, ForceCollect, Parser, Restrictions, SemiColonMode, TrailingToken};
-use crate::{maybe_collect_tokens, maybe_whole};
+use super::{
+    AttrWrapper, BlockMode, ForceCollect, Parser, Restrictions, SemiColonMode, TrailingToken,
+};
+use crate::maybe_whole;
 
 use rustc_ast as ast;
 use rustc_ast::attr::HasAttrs;
@@ -38,30 +40,47 @@ impl<'a> Parser<'a> {
         capture_semi: bool,
         force_collect: ForceCollect,
     ) -> PResult<'a, Option<Stmt>> {
-        let mut attrs = self.parse_outer_attributes()?;
+        let attrs = self.parse_outer_attributes()?;
         let lo = self.token.span;
 
-        maybe_whole!(self, NtStmt, |stmt| {
-            let mut stmt = stmt;
+        // Don't use `maybe_whole` so that we have precise control
+        // over when we bump the parser
+        if let token::Interpolated(nt) = &self.token.kind {
+            if let token::NtStmt(stmt) = &**nt {
+                let mut stmt = stmt.clone();
+                return self.collect_tokens_trailing_token(
+                    attrs,
+                    force_collect,
+                    |this, mut attrs| {
                         stmt.visit_attrs(|stmt_attrs| {
                             mem::swap(stmt_attrs, &mut attrs);
                             stmt_attrs.extend(attrs);
                         });
-            Some(stmt)
-        });
+                        // Make sure we capture the token::Interpolated
+                        this.bump();
+                        Ok((Some(stmt), TrailingToken::None))
+                    },
+                );
+            }
+        }
 
         Ok(Some(if self.token.is_keyword(kw::Let) {
-            self.parse_local_mk(lo, attrs.into(), capture_semi, force_collect)?
+            self.parse_local_mk(lo, attrs, capture_semi, force_collect)?
         } else if self.is_kw_followed_by_ident(kw::Mut) {
-            self.recover_stmt_local(lo, attrs.into(), "missing keyword", "let mut")?
+            self.recover_stmt_local(
+                lo,
+                attrs.take_for_recovery().into(),
+                "missing keyword",
+                "let mut",
+            )?
         } else if self.is_kw_followed_by_ident(kw::Auto) {
             self.bump(); // `auto`
             let msg = "write `let` instead of `auto` to introduce a new variable";
-            self.recover_stmt_local(lo, attrs.into(), msg, "let")?
+            self.recover_stmt_local(lo, attrs.take_for_recovery().into(), msg, "let")?
         } else if self.is_kw_followed_by_ident(sym::var) {
             self.bump(); // `var`
             let msg = "write `let` instead of `var` to introduce a new variable";
-            self.recover_stmt_local(lo, attrs.into(), msg, "let")?
+            self.recover_stmt_local(lo, attrs.take_for_recovery().into(), msg, "let")?
         } else if self.check_path() && !self.token.is_qpath_start() && !self.is_path_start_item() {
             // We have avoided contextual keywords like `union`, items with `crate` visibility,
             // or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
@@ -75,14 +94,14 @@ impl<'a> Parser<'a> {
             self.mk_stmt(lo.to(item.span), StmtKind::Item(P(item)))
         } else if self.eat(&token::Semi) {
             // Do not attempt to parse an expression if we're done here.
-            self.error_outer_attrs(&attrs);
+            self.error_outer_attrs(&attrs.take_for_recovery());
             self.mk_stmt(lo, StmtKind::Empty)
         } else if self.token != token::CloseDelim(token::Brace) {
             // Remainder are line-expr stmts.
             let e = self.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs.into()))?;
             self.mk_stmt(lo.to(e.span), StmtKind::Expr(e))
         } else {
-            self.error_outer_attrs(&attrs);
+            self.error_outer_attrs(&attrs.take_for_recovery());
             return Ok(None);
         }))
     }
@@ -90,10 +109,10 @@ impl<'a> Parser<'a> {
     fn parse_stmt_path_start(
         &mut self,
         lo: Span,
-        attrs: Vec<Attribute>,
+        attrs: AttrWrapper,
         force_collect: ForceCollect,
     ) -> PResult<'a, Stmt> {
-        maybe_collect_tokens!(self, force_collect, &attrs, |this: &mut Parser<'a>| {
+        self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
             let path = this.parse_path(PathStyle::Expr)?;
 
             if this.eat(&token::Not) {
@@ -142,7 +161,7 @@ impl<'a> Parser<'a> {
             // Since none of the above applied, this is an expression statement macro.
             let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac), AttrVec::new());
             let e = self.maybe_recover_from_bad_qpath(e, true)?;
-            let e = self.parse_dot_or_call_expr_with(e, lo, attrs)?;
+            let e = self.parse_dot_or_call_expr_with(e, lo, attrs.into())?;
             let e = self.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(e))?;
             StmtKind::Expr(e)
         };
@@ -178,11 +197,11 @@ impl<'a> Parser<'a> {
     fn parse_local_mk(
         &mut self,
         lo: Span,
-        attrs: AttrVec,
+        attrs: AttrWrapper,
         capture_semi: bool,
         force_collect: ForceCollect,
     ) -> PResult<'a, Stmt> {
-        maybe_collect_tokens!(self, force_collect, &attrs, |this: &mut Parser<'a>| {
+        self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
             this.expect_keyword(kw::Let)?;
             let local = this.parse_local(attrs.into())?;
             let trailing = if capture_semi && this.token.kind == token::Semi {
Loading…