mirror of
https://github.com/rust-lang/rust.git
synced 2024-11-01 15:01:51 +00:00
Require passing an AttrWrapper
to collect_tokens_trailing_token
This is a pure refactoring split out from #80689. It represents the most invasive part of that PR, requiring changes in every caller of `parse_outer_attributes`. In order to eagerly expand `#[cfg]` attributes while preserving the original `TokenStream`, we need to know the range of tokens that corresponds to every attribute target. This is accomplished by making `parse_outer_attributes` return an opaque `AttrWrapper` struct. An `AttrWrapper` must be converted to a plain `AttrVec` by passing it to `collect_tokens_trailing_token`. This makes it difficult to accidentally construct an AST node with attributes without calling `collect_tokens_trailing_token`, since AST nodes store an `AttrVec`, not an `AttrWrapper`. As a result, we now call `collect_tokens_trailing_token` for attribute targets which only support inert attributes, such as generic arguments and struct fields. Currently, the constructed `LazyTokenStream` is simply discarded. Future PRs will record the token range corresponding to the attribute target, allowing those tokens to be removed from an enclosing `collect_tokens_trailing_token` call if necessary.
This commit is contained in:
parent
7e0241c637
commit
0b411f56e1
@ -2975,3 +2975,18 @@ macro_rules! derive_has_tokens {
|
||||
derive_has_tokens! {
|
||||
Item, Expr, Ty, AttrItem, Visibility, Path, Block, Pat
|
||||
}
|
||||
|
||||
macro_rules! derive_has_attrs_no_tokens {
|
||||
($($ty:path),*) => { $(
|
||||
impl HasTokens for $ty {
|
||||
fn finalize_tokens(&mut self, _tokens: LazyTokenStream) {}
|
||||
}
|
||||
)* }
|
||||
}
|
||||
|
||||
// These ast nodes only support inert attributes, so they don't
|
||||
// store tokens (since nothing can observe them)
|
||||
derive_has_attrs_no_tokens! {
|
||||
StructField, Arm,
|
||||
Field, FieldPat, Variant, Param, GenericParam
|
||||
}
|
||||
|
@ -1,4 +1,4 @@
|
||||
use super::{Parser, PathStyle};
|
||||
use super::{AttrWrapper, Parser, PathStyle};
|
||||
use rustc_ast as ast;
|
||||
use rustc_ast::attr;
|
||||
use rustc_ast::token::{self, Nonterminal};
|
||||
@ -26,7 +26,7 @@ pub(super) const DEFAULT_INNER_ATTR_FORBIDDEN: InnerAttrPolicy<'_> = InnerAttrPo
|
||||
|
||||
impl<'a> Parser<'a> {
|
||||
/// Parses attributes that appear before an item.
|
||||
pub(super) fn parse_outer_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
|
||||
pub(super) fn parse_outer_attributes(&mut self) -> PResult<'a, AttrWrapper> {
|
||||
let mut attrs: Vec<ast::Attribute> = Vec::new();
|
||||
let mut just_parsed_doc_comment = false;
|
||||
loop {
|
||||
@ -74,7 +74,7 @@ impl<'a> Parser<'a> {
|
||||
break;
|
||||
}
|
||||
}
|
||||
Ok(attrs)
|
||||
Ok(AttrWrapper { attrs })
|
||||
}
|
||||
|
||||
/// Matches `attribute = # ! [ meta_item ]`.
|
||||
@ -89,7 +89,8 @@ impl<'a> Parser<'a> {
|
||||
inner_parse_policy, self.token
|
||||
);
|
||||
let lo = self.token.span;
|
||||
self.collect_tokens(|this| {
|
||||
// Attributes can't have attributes of their own
|
||||
self.collect_tokens_no_attrs(|this| {
|
||||
if this.eat(&token::Pound) {
|
||||
let style = if this.eat(&token::Not) {
|
||||
ast::AttrStyle::Inner
|
||||
@ -163,7 +164,8 @@ impl<'a> Parser<'a> {
|
||||
let args = this.parse_attr_args()?;
|
||||
Ok(ast::AttrItem { path, args, tokens: None })
|
||||
};
|
||||
if capture_tokens { self.collect_tokens(do_parse) } else { do_parse(self) }?
|
||||
// Attr items don't have attributes
|
||||
if capture_tokens { self.collect_tokens_no_attrs(do_parse) } else { do_parse(self) }?
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -1,6 +1,8 @@
|
||||
use super::pat::{GateOr, RecoverComma, PARAM_EXPECTED};
|
||||
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
|
||||
use super::{BlockMode, Parser, PathStyle, Restrictions, TokenType};
|
||||
use super::{
|
||||
AttrWrapper, BlockMode, ForceCollect, Parser, PathStyle, Restrictions, TokenType, TrailingToken,
|
||||
};
|
||||
use super::{SemiColonMode, SeqSep, TokenExpectType};
|
||||
use crate::maybe_recover_from_interpolated_ty_qpath;
|
||||
|
||||
@ -62,16 +64,16 @@ macro_rules! maybe_whole_expr {
|
||||
#[derive(Debug)]
|
||||
pub(super) enum LhsExpr {
|
||||
NotYetParsed,
|
||||
AttributesParsed(AttrVec),
|
||||
AttributesParsed(AttrWrapper),
|
||||
AlreadyParsed(P<Expr>),
|
||||
}
|
||||
|
||||
impl From<Option<AttrVec>> for LhsExpr {
|
||||
impl From<Option<AttrWrapper>> for LhsExpr {
|
||||
/// Converts `Some(attrs)` into `LhsExpr::AttributesParsed(attrs)`
|
||||
/// and `None` into `LhsExpr::NotYetParsed`.
|
||||
///
|
||||
/// This conversion does not allocate.
|
||||
fn from(o: Option<AttrVec>) -> Self {
|
||||
fn from(o: Option<AttrWrapper>) -> Self {
|
||||
if let Some(attrs) = o { LhsExpr::AttributesParsed(attrs) } else { LhsExpr::NotYetParsed }
|
||||
}
|
||||
}
|
||||
@ -123,7 +125,7 @@ impl<'a> Parser<'a> {
|
||||
pub(super) fn parse_expr_res(
|
||||
&mut self,
|
||||
r: Restrictions,
|
||||
already_parsed_attrs: Option<AttrVec>,
|
||||
already_parsed_attrs: Option<AttrWrapper>,
|
||||
) -> PResult<'a, P<Expr>> {
|
||||
self.with_res(r, |this| this.parse_assoc_expr(already_parsed_attrs))
|
||||
}
|
||||
@ -133,7 +135,10 @@ impl<'a> Parser<'a> {
|
||||
/// This parses an expression accounting for associativity and precedence of the operators in
|
||||
/// the expression.
|
||||
#[inline]
|
||||
fn parse_assoc_expr(&mut self, already_parsed_attrs: Option<AttrVec>) -> PResult<'a, P<Expr>> {
|
||||
fn parse_assoc_expr(
|
||||
&mut self,
|
||||
already_parsed_attrs: Option<AttrWrapper>,
|
||||
) -> PResult<'a, P<Expr>> {
|
||||
self.parse_assoc_expr_with(0, already_parsed_attrs.into())
|
||||
}
|
||||
|
||||
@ -439,7 +444,7 @@ impl<'a> Parser<'a> {
|
||||
}
|
||||
|
||||
/// Parses prefix-forms of range notation: `..expr`, `..`, `..=expr`.
|
||||
fn parse_prefix_range_expr(&mut self, attrs: Option<AttrVec>) -> PResult<'a, P<Expr>> {
|
||||
fn parse_prefix_range_expr(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
|
||||
// Check for deprecated `...` syntax.
|
||||
if self.token == token::DotDotDot {
|
||||
self.err_dotdotdot_syntax(self.token.span);
|
||||
@ -457,44 +462,68 @@ impl<'a> Parser<'a> {
|
||||
};
|
||||
let op = AssocOp::from_token(&self.token);
|
||||
let attrs = self.parse_or_use_outer_attributes(attrs)?;
|
||||
let lo = self.token.span;
|
||||
self.bump();
|
||||
let (span, opt_end) = if self.is_at_start_of_range_notation_rhs() {
|
||||
// RHS must be parsed with more associativity than the dots.
|
||||
self.parse_assoc_expr_with(op.unwrap().precedence() + 1, LhsExpr::NotYetParsed)
|
||||
.map(|x| (lo.to(x.span), Some(x)))?
|
||||
} else {
|
||||
(lo, None)
|
||||
};
|
||||
Ok(self.mk_expr(span, self.mk_range(None, opt_end, limits)?, attrs))
|
||||
// RESOLVED: It looks like we only have non-empty attributes here when
|
||||
// this is used as a statement:
|
||||
// `#[my_attr] 25..;`
|
||||
// We should still investigate `parse_or_use_outer_attributes`, since we haven't
|
||||
// yet eaten the '..'
|
||||
//
|
||||
// FIXME - does this code ever have attributes? `let a = #[attr] ..` doesn't even parse
|
||||
// // We try to parse attributes *before* bumping the token, so this can only
|
||||
// ever succeed if the `attrs` parameter is `Some`
|
||||
self.collect_tokens_for_expr(attrs, |this, attrs| {
|
||||
let lo = this.token.span;
|
||||
this.bump();
|
||||
let (span, opt_end) = if this.is_at_start_of_range_notation_rhs() {
|
||||
// RHS must be parsed with more associativity than the dots.
|
||||
this.parse_assoc_expr_with(op.unwrap().precedence() + 1, LhsExpr::NotYetParsed)
|
||||
.map(|x| (lo.to(x.span), Some(x)))?
|
||||
} else {
|
||||
(lo, None)
|
||||
};
|
||||
Ok(this.mk_expr(span, this.mk_range(None, opt_end, limits)?, attrs.into()))
|
||||
})
|
||||
}
|
||||
|
||||
/// Parses a prefix-unary-operator expr.
|
||||
fn parse_prefix_expr(&mut self, attrs: Option<AttrVec>) -> PResult<'a, P<Expr>> {
|
||||
fn parse_prefix_expr(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
|
||||
let attrs = self.parse_or_use_outer_attributes(attrs)?;
|
||||
// FIXME: Use super::attr::maybe_needs_tokens(&attrs) once we come up
|
||||
// with a good way of passing `force_tokens` through from `parse_nonterminal`.
|
||||
// Checking !attrs.is_empty() is correct, but will cause us to unnecessarily
|
||||
// capture tokens in some circumstances.
|
||||
let needs_tokens = !attrs.is_empty();
|
||||
let do_parse = |this: &mut Parser<'a>| {
|
||||
let lo = this.token.span;
|
||||
// Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr()
|
||||
let (hi, ex) = match this.token.uninterpolate().kind {
|
||||
token::Not => this.parse_unary_expr(lo, UnOp::Not), // `!expr`
|
||||
token::Tilde => this.recover_tilde_expr(lo), // `~expr`
|
||||
token::BinOp(token::Minus) => this.parse_unary_expr(lo, UnOp::Neg), // `-expr`
|
||||
token::BinOp(token::Star) => this.parse_unary_expr(lo, UnOp::Deref), // `*expr`
|
||||
token::BinOp(token::And) | token::AndAnd => this.parse_borrow_expr(lo),
|
||||
token::Ident(..) if this.token.is_keyword(kw::Box) => this.parse_box_expr(lo),
|
||||
token::Ident(..) if this.is_mistaken_not_ident_negation() => {
|
||||
this.recover_not_expr(lo)
|
||||
}
|
||||
_ => return this.parse_dot_or_call_expr(Some(attrs)),
|
||||
}?;
|
||||
Ok(this.mk_expr(lo.to(hi), ex, attrs))
|
||||
};
|
||||
if needs_tokens { self.collect_tokens(do_parse) } else { do_parse(self) }
|
||||
let lo = self.token.span;
|
||||
|
||||
macro_rules! make_it {
|
||||
($this:ident, $attrs:expr, |this, _| $body:expr) => {
|
||||
$this.collect_tokens_for_expr($attrs, |$this, attrs| {
|
||||
let (hi, ex) = $body?;
|
||||
Ok($this.mk_expr(lo.to(hi), ex, attrs.into()))
|
||||
})
|
||||
};
|
||||
}
|
||||
|
||||
let this = self;
|
||||
|
||||
// Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr()
|
||||
match this.token.uninterpolate().kind {
|
||||
token::Not => make_it!(this, attrs, |this, _| this.parse_unary_expr(lo, UnOp::Not)), // `!expr`
|
||||
token::Tilde => make_it!(this, attrs, |this, _| this.recover_tilde_expr(lo)), // `~expr`
|
||||
token::BinOp(token::Minus) => {
|
||||
make_it!(this, attrs, |this, _| this.parse_unary_expr(lo, UnOp::Neg))
|
||||
} // `-expr`
|
||||
token::BinOp(token::Star) => {
|
||||
make_it!(this, attrs, |this, _| this.parse_unary_expr(lo, UnOp::Deref))
|
||||
} // `*expr`
|
||||
token::BinOp(token::And) | token::AndAnd => {
|
||||
make_it!(this, attrs, |this, _| this.parse_borrow_expr(lo))
|
||||
}
|
||||
token::Ident(..) if this.token.is_keyword(kw::Box) => {
|
||||
make_it!(this, attrs, |this, _| this.parse_box_expr(lo))
|
||||
}
|
||||
token::Ident(..) if this.is_mistaken_not_ident_negation() => {
|
||||
// FIXME - what is our policy for handling tokens during recovery?
|
||||
// Should we ever invoke a proc-macro with these tokens?
|
||||
make_it!(this, attrs, |this, _| this.recover_not_expr(lo))
|
||||
}
|
||||
_ => return this.parse_dot_or_call_expr(Some(attrs.into())),
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_prefix_expr_common(&mut self, lo: Span) -> PResult<'a, (Span, P<Expr>)> {
|
||||
@ -805,18 +834,20 @@ impl<'a> Parser<'a> {
|
||||
}
|
||||
|
||||
/// Parses `a.b` or `a(13)` or `a[4]` or just `a`.
|
||||
fn parse_dot_or_call_expr(&mut self, attrs: Option<AttrVec>) -> PResult<'a, P<Expr>> {
|
||||
fn parse_dot_or_call_expr(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
|
||||
let attrs = self.parse_or_use_outer_attributes(attrs)?;
|
||||
let base = self.parse_bottom_expr();
|
||||
let (span, base) = self.interpolated_or_expr_span(base)?;
|
||||
self.parse_dot_or_call_expr_with(base, span, attrs)
|
||||
self.collect_tokens_for_expr(attrs, |this, attrs| {
|
||||
let base = this.parse_bottom_expr();
|
||||
let (span, base) = this.interpolated_or_expr_span(base)?;
|
||||
this.parse_dot_or_call_expr_with(base, span, attrs)
|
||||
})
|
||||
}
|
||||
|
||||
pub(super) fn parse_dot_or_call_expr_with(
|
||||
&mut self,
|
||||
e0: P<Expr>,
|
||||
lo: Span,
|
||||
mut attrs: AttrVec,
|
||||
mut attrs: Vec<ast::Attribute>,
|
||||
) -> PResult<'a, P<Expr>> {
|
||||
// Stitch the list of outer attributes onto the return value.
|
||||
// A little bit ugly, but the best way given the current code
|
||||
@ -824,7 +855,7 @@ impl<'a> Parser<'a> {
|
||||
self.parse_dot_or_call_expr_with_(e0, lo).map(|expr| {
|
||||
expr.map(|mut expr| {
|
||||
attrs.extend::<Vec<_>>(expr.attrs.into());
|
||||
expr.attrs = attrs;
|
||||
expr.attrs = attrs.into();
|
||||
expr
|
||||
})
|
||||
})
|
||||
@ -1703,19 +1734,25 @@ impl<'a> Parser<'a> {
|
||||
fn parse_fn_block_param(&mut self) -> PResult<'a, Param> {
|
||||
let lo = self.token.span;
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
let pat = self.parse_pat(PARAM_EXPECTED)?;
|
||||
let ty = if self.eat(&token::Colon) {
|
||||
self.parse_ty()?
|
||||
} else {
|
||||
self.mk_ty(self.prev_token.span, TyKind::Infer)
|
||||
};
|
||||
Ok(Param {
|
||||
attrs: attrs.into(),
|
||||
ty,
|
||||
pat,
|
||||
span: lo.to(self.token.span),
|
||||
id: DUMMY_NODE_ID,
|
||||
is_placeholder: false,
|
||||
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
|
||||
let pat = this.parse_pat(PARAM_EXPECTED)?;
|
||||
let ty = if this.eat(&token::Colon) {
|
||||
this.parse_ty()?
|
||||
} else {
|
||||
this.mk_ty(this.prev_token.span, TyKind::Infer)
|
||||
};
|
||||
|
||||
Ok((
|
||||
Param {
|
||||
attrs: attrs.into(),
|
||||
ty,
|
||||
pat,
|
||||
span: lo.to(this.token.span),
|
||||
id: DUMMY_NODE_ID,
|
||||
is_placeholder: false,
|
||||
},
|
||||
TrailingToken::MaybeComma,
|
||||
))
|
||||
})
|
||||
}
|
||||
|
||||
@ -1731,7 +1768,7 @@ impl<'a> Parser<'a> {
|
||||
let thn = if self.eat_keyword(kw::Else) || !cond.returns() {
|
||||
self.error_missing_if_cond(lo, cond.span)
|
||||
} else {
|
||||
let attrs = self.parse_outer_attributes()?; // For recovery.
|
||||
let attrs = self.parse_outer_attributes()?.take_for_recovery(); // For recovery.
|
||||
let not_block = self.token != token::OpenDelim(token::Brace);
|
||||
let block = self.parse_block().map_err(|mut err| {
|
||||
if not_block {
|
||||
@ -1788,7 +1825,7 @@ impl<'a> Parser<'a> {
|
||||
/// Parses an `else { ... }` expression (`else` token already eaten).
|
||||
fn parse_else_expr(&mut self) -> PResult<'a, P<Expr>> {
|
||||
let ctx_span = self.prev_token.span; // `else`
|
||||
let attrs = self.parse_outer_attributes()?; // For recovery.
|
||||
let attrs = self.parse_outer_attributes()?.take_for_recovery(); // For recovery.
|
||||
let expr = if self.eat_keyword(kw::If) {
|
||||
self.parse_if_expr(AttrVec::new())?
|
||||
} else {
|
||||
@ -1947,85 +1984,91 @@ impl<'a> Parser<'a> {
|
||||
|
||||
pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> {
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
let lo = self.token.span;
|
||||
let pat = self.parse_top_pat(GateOr::No, RecoverComma::Yes)?;
|
||||
let guard = if self.eat_keyword(kw::If) {
|
||||
let if_span = self.prev_token.span;
|
||||
let cond = self.parse_expr()?;
|
||||
if let ExprKind::Let(..) = cond.kind {
|
||||
// Remove the last feature gating of a `let` expression since it's stable.
|
||||
self.sess.gated_spans.ungate_last(sym::let_chains, cond.span);
|
||||
let span = if_span.to(cond.span);
|
||||
self.sess.gated_spans.gate(sym::if_let_guard, span);
|
||||
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
|
||||
let lo = this.token.span;
|
||||
let pat = this.parse_top_pat(GateOr::No, RecoverComma::Yes)?;
|
||||
let guard = if this.eat_keyword(kw::If) {
|
||||
let if_span = this.prev_token.span;
|
||||
let cond = this.parse_expr()?;
|
||||
if let ExprKind::Let(..) = cond.kind {
|
||||
// Remove the last feature gating of a `let` expression since it's stable.
|
||||
this.sess.gated_spans.ungate_last(sym::let_chains, cond.span);
|
||||
let span = if_span.to(cond.span);
|
||||
this.sess.gated_spans.gate(sym::if_let_guard, span);
|
||||
}
|
||||
Some(cond)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let arrow_span = this.token.span;
|
||||
this.expect(&token::FatArrow)?;
|
||||
let arm_start_span = this.token.span;
|
||||
|
||||
let expr = this.parse_expr_res(Restrictions::STMT_EXPR, None).map_err(|mut err| {
|
||||
err.span_label(arrow_span, "while parsing the `match` arm starting here");
|
||||
err
|
||||
})?;
|
||||
|
||||
let require_comma = classify::expr_requires_semi_to_be_stmt(&expr)
|
||||
&& this.token != token::CloseDelim(token::Brace);
|
||||
|
||||
let hi = this.prev_token.span;
|
||||
|
||||
if require_comma {
|
||||
let sm = this.sess.source_map();
|
||||
this.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)]).map_err(
|
||||
|mut err| {
|
||||
match (sm.span_to_lines(expr.span), sm.span_to_lines(arm_start_span)) {
|
||||
(Ok(ref expr_lines), Ok(ref arm_start_lines))
|
||||
if arm_start_lines.lines[0].end_col
|
||||
== expr_lines.lines[0].end_col
|
||||
&& expr_lines.lines.len() == 2
|
||||
&& this.token == token::FatArrow =>
|
||||
{
|
||||
// We check whether there's any trailing code in the parse span,
|
||||
// if there isn't, we very likely have the following:
|
||||
//
|
||||
// X | &Y => "y"
|
||||
// | -- - missing comma
|
||||
// | |
|
||||
// | arrow_span
|
||||
// X | &X => "x"
|
||||
// | - ^^ self.token.span
|
||||
// | |
|
||||
// | parsed until here as `"y" & X`
|
||||
err.span_suggestion_short(
|
||||
arm_start_span.shrink_to_hi(),
|
||||
"missing a comma here to end this `match` arm",
|
||||
",".to_owned(),
|
||||
Applicability::MachineApplicable,
|
||||
);
|
||||
}
|
||||
_ => {
|
||||
err.span_label(
|
||||
arrow_span,
|
||||
"while parsing the `match` arm starting here",
|
||||
);
|
||||
}
|
||||
}
|
||||
err
|
||||
},
|
||||
)?;
|
||||
} else {
|
||||
this.eat(&token::Comma);
|
||||
}
|
||||
Some(cond)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let arrow_span = self.token.span;
|
||||
self.expect(&token::FatArrow)?;
|
||||
let arm_start_span = self.token.span;
|
||||
|
||||
let expr = self.parse_expr_res(Restrictions::STMT_EXPR, None).map_err(|mut err| {
|
||||
err.span_label(arrow_span, "while parsing the `match` arm starting here");
|
||||
err
|
||||
})?;
|
||||
|
||||
let require_comma = classify::expr_requires_semi_to_be_stmt(&expr)
|
||||
&& self.token != token::CloseDelim(token::Brace);
|
||||
|
||||
let hi = self.prev_token.span;
|
||||
|
||||
if require_comma {
|
||||
let sm = self.sess.source_map();
|
||||
self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)]).map_err(
|
||||
|mut err| {
|
||||
match (sm.span_to_lines(expr.span), sm.span_to_lines(arm_start_span)) {
|
||||
(Ok(ref expr_lines), Ok(ref arm_start_lines))
|
||||
if arm_start_lines.lines[0].end_col == expr_lines.lines[0].end_col
|
||||
&& expr_lines.lines.len() == 2
|
||||
&& self.token == token::FatArrow =>
|
||||
{
|
||||
// We check whether there's any trailing code in the parse span,
|
||||
// if there isn't, we very likely have the following:
|
||||
//
|
||||
// X | &Y => "y"
|
||||
// | -- - missing comma
|
||||
// | |
|
||||
// | arrow_span
|
||||
// X | &X => "x"
|
||||
// | - ^^ self.token.span
|
||||
// | |
|
||||
// | parsed until here as `"y" & X`
|
||||
err.span_suggestion_short(
|
||||
arm_start_span.shrink_to_hi(),
|
||||
"missing a comma here to end this `match` arm",
|
||||
",".to_owned(),
|
||||
Applicability::MachineApplicable,
|
||||
);
|
||||
}
|
||||
_ => {
|
||||
err.span_label(
|
||||
arrow_span,
|
||||
"while parsing the `match` arm starting here",
|
||||
);
|
||||
}
|
||||
}
|
||||
err
|
||||
Ok((
|
||||
ast::Arm {
|
||||
attrs,
|
||||
pat,
|
||||
guard,
|
||||
body: expr,
|
||||
span: lo.to(hi),
|
||||
id: DUMMY_NODE_ID,
|
||||
is_placeholder: false,
|
||||
},
|
||||
)?;
|
||||
} else {
|
||||
self.eat(&token::Comma);
|
||||
}
|
||||
|
||||
Ok(ast::Arm {
|
||||
attrs,
|
||||
pat,
|
||||
guard,
|
||||
body: expr,
|
||||
span: lo.to(hi),
|
||||
id: DUMMY_NODE_ID,
|
||||
is_placeholder: false,
|
||||
TrailingToken::None,
|
||||
))
|
||||
})
|
||||
}
|
||||
|
||||
@ -2274,30 +2317,36 @@ impl<'a> Parser<'a> {
|
||||
|
||||
/// Parses `ident (COLON expr)?`.
|
||||
fn parse_field(&mut self) -> PResult<'a, Field> {
|
||||
let attrs = self.parse_outer_attributes()?.into();
|
||||
let lo = self.token.span;
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
|
||||
let lo = this.token.span;
|
||||
|
||||
// Check if a colon exists one ahead. This means we're parsing a fieldname.
|
||||
let is_shorthand = !self.look_ahead(1, |t| t == &token::Colon || t == &token::Eq);
|
||||
let (ident, expr) = if is_shorthand {
|
||||
// Mimic `x: x` for the `x` field shorthand.
|
||||
let ident = self.parse_ident_common(false)?;
|
||||
let path = ast::Path::from_ident(ident);
|
||||
(ident, self.mk_expr(ident.span, ExprKind::Path(None, path), AttrVec::new()))
|
||||
} else {
|
||||
let ident = self.parse_field_name()?;
|
||||
self.error_on_eq_field_init(ident);
|
||||
self.bump(); // `:`
|
||||
(ident, self.parse_expr()?)
|
||||
};
|
||||
Ok(ast::Field {
|
||||
ident,
|
||||
span: lo.to(expr.span),
|
||||
expr,
|
||||
is_shorthand,
|
||||
attrs,
|
||||
id: DUMMY_NODE_ID,
|
||||
is_placeholder: false,
|
||||
// Check if a colon exists one ahead. This means we're parsing a fieldname.
|
||||
let is_shorthand = !this.look_ahead(1, |t| t == &token::Colon || t == &token::Eq);
|
||||
let (ident, expr) = if is_shorthand {
|
||||
// Mimic `x: x` for the `x` field shorthand.
|
||||
let ident = this.parse_ident_common(false)?;
|
||||
let path = ast::Path::from_ident(ident);
|
||||
(ident, this.mk_expr(ident.span, ExprKind::Path(None, path), AttrVec::new()))
|
||||
} else {
|
||||
let ident = this.parse_field_name()?;
|
||||
this.error_on_eq_field_init(ident);
|
||||
this.bump(); // `:`
|
||||
(ident, this.parse_expr()?)
|
||||
};
|
||||
|
||||
Ok((
|
||||
ast::Field {
|
||||
ident,
|
||||
span: lo.to(expr.span),
|
||||
expr,
|
||||
is_shorthand,
|
||||
attrs: attrs.into(),
|
||||
id: DUMMY_NODE_ID,
|
||||
is_placeholder: false,
|
||||
},
|
||||
TrailingToken::MaybeComma,
|
||||
))
|
||||
})
|
||||
}
|
||||
|
||||
@ -2405,4 +2454,27 @@ impl<'a> Parser<'a> {
|
||||
.map_or(lhs_span, |a| a.span)
|
||||
.to(rhs_span)
|
||||
}
|
||||
|
||||
fn collect_tokens_for_expr(
|
||||
&mut self,
|
||||
attrs: AttrWrapper,
|
||||
f: impl FnOnce(&mut Self, Vec<ast::Attribute>) -> PResult<'a, P<Expr>>,
|
||||
) -> PResult<'a, P<Expr>> {
|
||||
// FIXME - come up with a nice way to properly forward `ForceCollect` from
|
||||
// the nonterminal parsing code. This approach is correct, but will cause
|
||||
// us to unnecessarily capture tokens for exprs that have only builtin
|
||||
// attributes. Revisit this before #![feature(stmt_expr_attributes)] is stabilized
|
||||
let force_collect = if attrs.is_empty() { ForceCollect::No } else { ForceCollect::Yes };
|
||||
self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
|
||||
let res = f(this, attrs)?;
|
||||
let trailing = if this.restrictions.contains(Restrictions::STMT_EXPR)
|
||||
&& this.token.kind == token::Semi
|
||||
{
|
||||
TrailingToken::Semi
|
||||
} else {
|
||||
TrailingToken::None
|
||||
};
|
||||
Ok((res, trailing))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
@ -1,4 +1,4 @@
|
||||
use super::Parser;
|
||||
use super::{ForceCollect, Parser, TrailingToken};
|
||||
|
||||
use rustc_ast::token;
|
||||
use rustc_ast::{
|
||||
@ -84,68 +84,89 @@ impl<'a> Parser<'a> {
|
||||
/// a trailing comma and erroneous trailing attributes.
|
||||
pub(super) fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> {
|
||||
let mut params = Vec::new();
|
||||
loop {
|
||||
let mut done = false;
|
||||
while !done {
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
if self.check_lifetime() {
|
||||
let lifetime = self.expect_lifetime();
|
||||
// Parse lifetime parameter.
|
||||
let bounds =
|
||||
if self.eat(&token::Colon) { self.parse_lt_param_bounds() } else { Vec::new() };
|
||||
params.push(ast::GenericParam {
|
||||
ident: lifetime.ident,
|
||||
id: lifetime.id,
|
||||
attrs: attrs.into(),
|
||||
bounds,
|
||||
kind: ast::GenericParamKind::Lifetime,
|
||||
is_placeholder: false,
|
||||
});
|
||||
} else if self.check_keyword(kw::Const) {
|
||||
// Parse const parameter.
|
||||
params.push(self.parse_const_param(attrs)?);
|
||||
} else if self.check_ident() {
|
||||
// Parse type parameter.
|
||||
params.push(self.parse_ty_param(attrs)?);
|
||||
} else if self.token.can_begin_type() {
|
||||
// Trying to write an associated type bound? (#26271)
|
||||
let snapshot = self.clone();
|
||||
match self.parse_ty_where_predicate() {
|
||||
Ok(where_predicate) => {
|
||||
self.struct_span_err(
|
||||
where_predicate.span(),
|
||||
"bounds on associated types do not belong here",
|
||||
)
|
||||
.span_label(where_predicate.span(), "belongs in `where` clause")
|
||||
.emit();
|
||||
}
|
||||
Err(mut err) => {
|
||||
err.cancel();
|
||||
*self = snapshot;
|
||||
break;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Check for trailing attributes and stop parsing.
|
||||
if !attrs.is_empty() {
|
||||
if !params.is_empty() {
|
||||
self.struct_span_err(
|
||||
attrs[0].span,
|
||||
"trailing attribute after generic parameter",
|
||||
)
|
||||
.span_label(attrs[0].span, "attributes must go before parameters")
|
||||
.emit();
|
||||
let param =
|
||||
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
|
||||
let param = if this.check_lifetime() {
|
||||
let lifetime = this.expect_lifetime();
|
||||
// Parse lifetime parameter.
|
||||
let bounds = if this.eat(&token::Colon) {
|
||||
this.parse_lt_param_bounds()
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
Some(ast::GenericParam {
|
||||
ident: lifetime.ident,
|
||||
id: lifetime.id,
|
||||
attrs: attrs.into(),
|
||||
bounds,
|
||||
kind: ast::GenericParamKind::Lifetime,
|
||||
is_placeholder: false,
|
||||
})
|
||||
} else if this.check_keyword(kw::Const) {
|
||||
// Parse const parameter.
|
||||
Some(this.parse_const_param(attrs)?)
|
||||
} else if this.check_ident() {
|
||||
// Parse type parameter.
|
||||
Some(this.parse_ty_param(attrs)?)
|
||||
} else if this.token.can_begin_type() {
|
||||
// Trying to write an associated type bound? (#26271)
|
||||
let snapshot = this.clone();
|
||||
match this.parse_ty_where_predicate() {
|
||||
Ok(where_predicate) => {
|
||||
this.struct_span_err(
|
||||
where_predicate.span(),
|
||||
"bounds on associated types do not belong here",
|
||||
)
|
||||
.span_label(where_predicate.span(), "belongs in `where` clause")
|
||||
.emit();
|
||||
// FIXME - try to continue parsing other generics?
|
||||
return Ok((None, TrailingToken::None));
|
||||
}
|
||||
Err(mut err) => {
|
||||
err.cancel();
|
||||
// FIXME - maybe we should overwrite 'self' outside of `collect_tokens`?
|
||||
*this = snapshot;
|
||||
return Ok((None, TrailingToken::None));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
self.struct_span_err(attrs[0].span, "attribute without generic parameters")
|
||||
.span_label(
|
||||
attrs[0].span,
|
||||
"attributes are only permitted when preceding parameters",
|
||||
)
|
||||
.emit();
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
// Check for trailing attributes and stop parsing.
|
||||
if !attrs.is_empty() {
|
||||
if !params.is_empty() {
|
||||
this.struct_span_err(
|
||||
attrs[0].span,
|
||||
"trailing attribute after generic parameter",
|
||||
)
|
||||
.span_label(attrs[0].span, "attributes must go before parameters")
|
||||
.emit();
|
||||
} else {
|
||||
this.struct_span_err(
|
||||
attrs[0].span,
|
||||
"attribute without generic parameters",
|
||||
)
|
||||
.span_label(
|
||||
attrs[0].span,
|
||||
"attributes are only permitted when preceding parameters",
|
||||
)
|
||||
.emit();
|
||||
}
|
||||
}
|
||||
return Ok((None, TrailingToken::None));
|
||||
};
|
||||
|
||||
if !self.eat(&token::Comma) {
|
||||
if !this.eat(&token::Comma) {
|
||||
done = true;
|
||||
}
|
||||
// We just ate the comma, so no need to use `TrailingToken`
|
||||
Ok((param, TrailingToken::None))
|
||||
})?;
|
||||
|
||||
if let Some(param) = param {
|
||||
params.push(param);
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
@ -1,8 +1,6 @@
|
||||
use super::diagnostics::{dummy_arg, ConsumeClosingDelim, Error};
|
||||
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
|
||||
use super::{FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken};
|
||||
|
||||
use crate::{maybe_collect_tokens, maybe_whole};
|
||||
use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken};
|
||||
|
||||
use rustc_ast::ast::*;
|
||||
use rustc_ast::ptr::P;
|
||||
@ -108,25 +106,40 @@ impl<'a> Parser<'a> {
|
||||
|
||||
pub(super) fn parse_item_common(
|
||||
&mut self,
|
||||
mut attrs: Vec<Attribute>,
|
||||
attrs: AttrWrapper,
|
||||
mac_allowed: bool,
|
||||
attrs_allowed: bool,
|
||||
req_name: ReqName,
|
||||
force_collect: ForceCollect,
|
||||
) -> PResult<'a, Option<Item>> {
|
||||
maybe_whole!(self, NtItem, |item| {
|
||||
let mut item = item;
|
||||
mem::swap(&mut item.attrs, &mut attrs);
|
||||
item.attrs.extend(attrs);
|
||||
Some(item.into_inner())
|
||||
});
|
||||
// Don't use `maybe_whole` so that we have precise control
|
||||
// over when we bump the parser
|
||||
if let token::Interpolated(nt) = &self.token.kind {
|
||||
if let token::NtItem(item) = &**nt {
|
||||
let item = item.clone();
|
||||
|
||||
return self.collect_tokens_trailing_token(
|
||||
attrs,
|
||||
force_collect,
|
||||
|this, mut attrs| {
|
||||
let mut item = item;
|
||||
mem::swap(&mut item.attrs, &mut attrs);
|
||||
item.attrs.extend(attrs);
|
||||
// Bump the parser so that we capture the token::Interpolated
|
||||
this.bump();
|
||||
Ok((Some(item.into_inner()), TrailingToken::None))
|
||||
},
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
let mut unclosed_delims = vec![];
|
||||
let item = maybe_collect_tokens!(self, force_collect, &attrs, |this: &mut Self| {
|
||||
let item = this.parse_item_common_(attrs, mac_allowed, attrs_allowed, req_name);
|
||||
unclosed_delims.append(&mut this.unclosed_delims);
|
||||
Ok((item?, TrailingToken::None))
|
||||
})?;
|
||||
let item =
|
||||
self.collect_tokens_trailing_token(attrs, force_collect, |this: &mut Self, attrs| {
|
||||
let item = this.parse_item_common_(attrs, mac_allowed, attrs_allowed, req_name);
|
||||
unclosed_delims.append(&mut this.unclosed_delims);
|
||||
Ok((item?, TrailingToken::None))
|
||||
})?;
|
||||
|
||||
self.unclosed_delims.append(&mut unclosed_delims);
|
||||
Ok(item)
|
||||
@ -1109,39 +1122,45 @@ impl<'a> Parser<'a> {
|
||||
|
||||
fn parse_enum_variant(&mut self) -> PResult<'a, Option<Variant>> {
|
||||
let variant_attrs = self.parse_outer_attributes()?;
|
||||
let vlo = self.token.span;
|
||||
self.collect_tokens_trailing_token(
|
||||
variant_attrs,
|
||||
ForceCollect::No,
|
||||
|this, variant_attrs| {
|
||||
let vlo = this.token.span;
|
||||
|
||||
let vis = self.parse_visibility(FollowedByType::No)?;
|
||||
if !self.recover_nested_adt_item(kw::Enum)? {
|
||||
return Ok(None);
|
||||
}
|
||||
let ident = self.parse_ident()?;
|
||||
let vis = this.parse_visibility(FollowedByType::No)?;
|
||||
if !this.recover_nested_adt_item(kw::Enum)? {
|
||||
return Ok((None, TrailingToken::None));
|
||||
}
|
||||
let ident = this.parse_ident()?;
|
||||
|
||||
let struct_def = if self.check(&token::OpenDelim(token::Brace)) {
|
||||
// Parse a struct variant.
|
||||
let (fields, recovered) = self.parse_record_struct_body()?;
|
||||
VariantData::Struct(fields, recovered)
|
||||
} else if self.check(&token::OpenDelim(token::Paren)) {
|
||||
VariantData::Tuple(self.parse_tuple_struct_body()?, DUMMY_NODE_ID)
|
||||
} else {
|
||||
VariantData::Unit(DUMMY_NODE_ID)
|
||||
};
|
||||
let struct_def = if this.check(&token::OpenDelim(token::Brace)) {
|
||||
// Parse a struct variant.
|
||||
let (fields, recovered) = this.parse_record_struct_body()?;
|
||||
VariantData::Struct(fields, recovered)
|
||||
} else if this.check(&token::OpenDelim(token::Paren)) {
|
||||
VariantData::Tuple(this.parse_tuple_struct_body()?, DUMMY_NODE_ID)
|
||||
} else {
|
||||
VariantData::Unit(DUMMY_NODE_ID)
|
||||
};
|
||||
|
||||
let disr_expr =
|
||||
if self.eat(&token::Eq) { Some(self.parse_anon_const_expr()?) } else { None };
|
||||
let disr_expr =
|
||||
if this.eat(&token::Eq) { Some(this.parse_anon_const_expr()?) } else { None };
|
||||
|
||||
let vr = ast::Variant {
|
||||
ident,
|
||||
vis,
|
||||
id: DUMMY_NODE_ID,
|
||||
attrs: variant_attrs,
|
||||
data: struct_def,
|
||||
disr_expr,
|
||||
span: vlo.to(self.prev_token.span),
|
||||
is_placeholder: false,
|
||||
};
|
||||
let vr = ast::Variant {
|
||||
ident,
|
||||
vis,
|
||||
id: DUMMY_NODE_ID,
|
||||
attrs: variant_attrs,
|
||||
data: struct_def,
|
||||
disr_expr,
|
||||
span: vlo.to(this.prev_token.span),
|
||||
is_placeholder: false,
|
||||
};
|
||||
|
||||
Ok(Some(vr))
|
||||
Ok((Some(vr), TrailingToken::MaybeComma))
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
/// Parses `struct Foo { ... }`.
|
||||
@ -1262,17 +1281,23 @@ impl<'a> Parser<'a> {
|
||||
// Unit like structs are handled in parse_item_struct function
|
||||
self.parse_paren_comma_seq(|p| {
|
||||
let attrs = p.parse_outer_attributes()?;
|
||||
let lo = p.token.span;
|
||||
let vis = p.parse_visibility(FollowedByType::Yes)?;
|
||||
let ty = p.parse_ty()?;
|
||||
Ok(StructField {
|
||||
span: lo.to(ty.span),
|
||||
vis,
|
||||
ident: None,
|
||||
id: DUMMY_NODE_ID,
|
||||
ty,
|
||||
attrs,
|
||||
is_placeholder: false,
|
||||
p.collect_tokens_trailing_token(attrs, ForceCollect::No, |p, attrs| {
|
||||
let lo = p.token.span;
|
||||
let vis = p.parse_visibility(FollowedByType::Yes)?;
|
||||
let ty = p.parse_ty()?;
|
||||
|
||||
Ok((
|
||||
StructField {
|
||||
span: lo.to(ty.span),
|
||||
vis,
|
||||
ident: None,
|
||||
id: DUMMY_NODE_ID,
|
||||
ty,
|
||||
attrs,
|
||||
is_placeholder: false,
|
||||
},
|
||||
TrailingToken::MaybeComma,
|
||||
))
|
||||
})
|
||||
})
|
||||
.map(|(r, _)| r)
|
||||
@ -1281,9 +1306,11 @@ impl<'a> Parser<'a> {
|
||||
/// Parses an element of a struct declaration.
|
||||
fn parse_struct_decl_field(&mut self) -> PResult<'a, StructField> {
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
let lo = self.token.span;
|
||||
let vis = self.parse_visibility(FollowedByType::No)?;
|
||||
self.parse_single_struct_field(lo, vis, attrs)
|
||||
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
|
||||
let lo = this.token.span;
|
||||
let vis = this.parse_visibility(FollowedByType::No)?;
|
||||
Ok((this.parse_single_struct_field(lo, vis, attrs)?, TrailingToken::None))
|
||||
})
|
||||
}
|
||||
|
||||
/// Parses a structure field declaration.
|
||||
@ -1736,74 +1763,79 @@ impl<'a> Parser<'a> {
|
||||
fn parse_param_general(&mut self, req_name: ReqName, first_param: bool) -> PResult<'a, Param> {
|
||||
let lo = self.token.span;
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
|
||||
// Possibly parse `self`. Recover if we parsed it and it wasn't allowed here.
|
||||
if let Some(mut param) = this.parse_self_param()? {
|
||||
param.attrs = attrs.into();
|
||||
let res = if first_param { Ok(param) } else { this.recover_bad_self_param(param) };
|
||||
return Ok((res?, TrailingToken::None));
|
||||
}
|
||||
|
||||
// Possibly parse `self`. Recover if we parsed it and it wasn't allowed here.
|
||||
if let Some(mut param) = self.parse_self_param()? {
|
||||
param.attrs = attrs.into();
|
||||
return if first_param { Ok(param) } else { self.recover_bad_self_param(param) };
|
||||
}
|
||||
let is_name_required = match this.token.kind {
|
||||
token::DotDotDot => false,
|
||||
_ => req_name(this.token.span.edition()),
|
||||
};
|
||||
let (pat, ty) = if is_name_required || this.is_named_param() {
|
||||
debug!("parse_param_general parse_pat (is_name_required:{})", is_name_required);
|
||||
|
||||
let is_name_required = match self.token.kind {
|
||||
token::DotDotDot => false,
|
||||
_ => req_name(self.token.span.edition()),
|
||||
};
|
||||
let (pat, ty) = if is_name_required || self.is_named_param() {
|
||||
debug!("parse_param_general parse_pat (is_name_required:{})", is_name_required);
|
||||
let pat = this.parse_fn_param_pat()?;
|
||||
if let Err(mut err) = this.expect(&token::Colon) {
|
||||
return if let Some(ident) =
|
||||
this.parameter_without_type(&mut err, pat, is_name_required, first_param)
|
||||
{
|
||||
err.emit();
|
||||
Ok((dummy_arg(ident), TrailingToken::None))
|
||||
} else {
|
||||
Err(err)
|
||||
};
|
||||
}
|
||||
|
||||
let pat = self.parse_fn_param_pat()?;
|
||||
if let Err(mut err) = self.expect(&token::Colon) {
|
||||
return if let Some(ident) =
|
||||
self.parameter_without_type(&mut err, pat, is_name_required, first_param)
|
||||
this.eat_incorrect_doc_comment_for_param_type();
|
||||
(pat, this.parse_ty_for_param()?)
|
||||
} else {
|
||||
debug!("parse_param_general ident_to_pat");
|
||||
let parser_snapshot_before_ty = this.clone();
|
||||
this.eat_incorrect_doc_comment_for_param_type();
|
||||
let mut ty = this.parse_ty_for_param();
|
||||
if ty.is_ok()
|
||||
&& this.token != token::Comma
|
||||
&& this.token != token::CloseDelim(token::Paren)
|
||||
{
|
||||
err.emit();
|
||||
Ok(dummy_arg(ident))
|
||||
} else {
|
||||
Err(err)
|
||||
};
|
||||
}
|
||||
|
||||
self.eat_incorrect_doc_comment_for_param_type();
|
||||
(pat, self.parse_ty_for_param()?)
|
||||
} else {
|
||||
debug!("parse_param_general ident_to_pat");
|
||||
let parser_snapshot_before_ty = self.clone();
|
||||
self.eat_incorrect_doc_comment_for_param_type();
|
||||
let mut ty = self.parse_ty_for_param();
|
||||
if ty.is_ok()
|
||||
&& self.token != token::Comma
|
||||
&& self.token != token::CloseDelim(token::Paren)
|
||||
{
|
||||
// This wasn't actually a type, but a pattern looking like a type,
|
||||
// so we are going to rollback and re-parse for recovery.
|
||||
ty = self.unexpected();
|
||||
}
|
||||
match ty {
|
||||
Ok(ty) => {
|
||||
let ident = Ident::new(kw::Empty, self.prev_token.span);
|
||||
let bm = BindingMode::ByValue(Mutability::Not);
|
||||
let pat = self.mk_pat_ident(ty.span, bm, ident);
|
||||
(pat, ty)
|
||||
// This wasn't actually a type, but a pattern looking like a type,
|
||||
// so we are going to rollback and re-parse for recovery.
|
||||
ty = this.unexpected();
|
||||
}
|
||||
// If this is a C-variadic argument and we hit an error, return the error.
|
||||
Err(err) if self.token == token::DotDotDot => return Err(err),
|
||||
// Recover from attempting to parse the argument as a type without pattern.
|
||||
Err(mut err) => {
|
||||
err.cancel();
|
||||
*self = parser_snapshot_before_ty;
|
||||
self.recover_arg_parse()?
|
||||
match ty {
|
||||
Ok(ty) => {
|
||||
let ident = Ident::new(kw::Empty, this.prev_token.span);
|
||||
let bm = BindingMode::ByValue(Mutability::Not);
|
||||
let pat = this.mk_pat_ident(ty.span, bm, ident);
|
||||
(pat, ty)
|
||||
}
|
||||
// If this is a C-variadic argument and we hit an error, return the error.
|
||||
Err(err) if this.token == token::DotDotDot => return Err(err),
|
||||
// Recover from attempting to parse the argument as a type without pattern.
|
||||
Err(mut err) => {
|
||||
err.cancel();
|
||||
*this = parser_snapshot_before_ty;
|
||||
this.recover_arg_parse()?
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
let span = lo.until(self.token.span);
|
||||
let span = lo.until(this.token.span);
|
||||
|
||||
Ok(Param {
|
||||
attrs: attrs.into(),
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
is_placeholder: false,
|
||||
pat,
|
||||
span,
|
||||
ty,
|
||||
Ok((
|
||||
Param {
|
||||
attrs: attrs.into(),
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
is_placeholder: false,
|
||||
pat,
|
||||
span,
|
||||
ty,
|
||||
},
|
||||
TrailingToken::None,
|
||||
))
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -64,6 +64,24 @@ pub enum ForceCollect {
|
||||
pub enum TrailingToken {
|
||||
None,
|
||||
Semi,
|
||||
/// If the trailing token is a comma, then capture it
|
||||
/// Otherwise, ignore the trailing token
|
||||
MaybeComma,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct AttrWrapper {
|
||||
attrs: Vec<ast::Attribute>,
|
||||
}
|
||||
|
||||
impl AttrWrapper {
|
||||
// FIXME: Delay span bug here?
|
||||
fn take_for_recovery(self) -> Vec<ast::Attribute> {
|
||||
self.attrs
|
||||
}
|
||||
fn is_empty(&self) -> bool {
|
||||
self.attrs.is_empty()
|
||||
}
|
||||
}
|
||||
|
||||
/// Like `maybe_whole_expr`, but for things other than expressions.
|
||||
@ -1004,12 +1022,12 @@ impl<'a> Parser<'a> {
|
||||
|
||||
fn parse_or_use_outer_attributes(
|
||||
&mut self,
|
||||
already_parsed_attrs: Option<AttrVec>,
|
||||
) -> PResult<'a, AttrVec> {
|
||||
already_parsed_attrs: Option<AttrWrapper>,
|
||||
) -> PResult<'a, AttrWrapper> {
|
||||
if let Some(attrs) = already_parsed_attrs {
|
||||
Ok(attrs)
|
||||
} else {
|
||||
self.parse_outer_attributes().map(|a| a.into())
|
||||
self.parse_outer_attributes()
|
||||
}
|
||||
}
|
||||
|
||||
@ -1226,11 +1244,17 @@ impl<'a> Parser<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn collect_tokens<R: HasTokens>(
|
||||
pub fn collect_tokens_no_attrs<R: HasTokens>(
|
||||
&mut self,
|
||||
f: impl FnOnce(&mut Self) -> PResult<'a, R>,
|
||||
) -> PResult<'a, R> {
|
||||
self.collect_tokens_trailing_token(|this| Ok((f(this)?, TrailingToken::None)))
|
||||
// The only reason to call `collect_tokens_no_attrs` is if you want tokens, so use
|
||||
// `ForceCollect::Yes`
|
||||
self.collect_tokens_trailing_token(
|
||||
AttrWrapper { attrs: Vec::new() },
|
||||
ForceCollect::Yes,
|
||||
|this, _attrs| Ok((f(this)?, TrailingToken::None)),
|
||||
)
|
||||
}
|
||||
|
||||
/// Records all tokens consumed by the provided callback,
|
||||
@ -1251,12 +1275,17 @@ impl<'a> Parser<'a> {
|
||||
/// a parsed AST item, which always has matching delimiters.
|
||||
pub fn collect_tokens_trailing_token<R: HasTokens>(
|
||||
&mut self,
|
||||
f: impl FnOnce(&mut Self) -> PResult<'a, (R, TrailingToken)>,
|
||||
attrs: AttrWrapper,
|
||||
force_collect: ForceCollect,
|
||||
f: impl FnOnce(&mut Self, Vec<ast::Attribute>) -> PResult<'a, (R, TrailingToken)>,
|
||||
) -> PResult<'a, R> {
|
||||
if matches!(force_collect, ForceCollect::No) && !attr::maybe_needs_tokens(&attrs.attrs) {
|
||||
return Ok(f(self, attrs.attrs)?.0);
|
||||
}
|
||||
let start_token = (self.token.clone(), self.token_spacing);
|
||||
let cursor_snapshot = self.token_cursor.clone();
|
||||
|
||||
let (mut ret, trailing_token) = f(self)?;
|
||||
let (mut ret, trailing_token) = f(self, attrs.attrs)?;
|
||||
|
||||
// Produces a `TokenStream` on-demand. Using `cursor_snapshot`
|
||||
// and `num_calls`, we can reconstruct the `TokenStream` seen
|
||||
@ -1306,6 +1335,11 @@ impl<'a> Parser<'a> {
|
||||
assert_eq!(self.token.kind, token::Semi);
|
||||
num_calls += 1;
|
||||
}
|
||||
TrailingToken::MaybeComma => {
|
||||
if self.token.kind == token::Comma {
|
||||
num_calls += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let lazy_impl = LazyTokenStreamImpl {
|
||||
@ -1409,16 +1443,3 @@ fn make_token_stream(
|
||||
assert!(stack.is_empty(), "Stack should be empty: final_buf={:?} stack={:?}", final_buf, stack);
|
||||
TokenStream::new(final_buf.inner)
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! maybe_collect_tokens {
|
||||
($self:ident, $force_collect:expr, $attrs:expr, $f:expr) => {
|
||||
if matches!($force_collect, ForceCollect::Yes)
|
||||
|| $crate::parser::attr::maybe_needs_tokens($attrs)
|
||||
{
|
||||
$self.collect_tokens_trailing_token($f)
|
||||
} else {
|
||||
Ok($f($self)?.0)
|
||||
}
|
||||
};
|
||||
}
|
||||
|
@ -108,7 +108,9 @@ impl<'a> Parser<'a> {
|
||||
}
|
||||
},
|
||||
NonterminalKind::Block => {
|
||||
token::NtBlock(self.collect_tokens(|this| this.parse_block())?)
|
||||
// While an block *expression* may have attributes (e.g. `#[my_attr] { ... }`),
|
||||
// the ':block' matcher does not support them
|
||||
token::NtBlock(self.collect_tokens_no_attrs(|this| this.parse_block())?)
|
||||
}
|
||||
NonterminalKind::Stmt => match self.parse_stmt(ForceCollect::Yes)? {
|
||||
Some(s) => token::NtStmt(s),
|
||||
@ -117,7 +119,7 @@ impl<'a> Parser<'a> {
|
||||
}
|
||||
},
|
||||
NonterminalKind::Pat2018 { .. } | NonterminalKind::Pat2021 { .. } => {
|
||||
token::NtPat(self.collect_tokens(|this| match kind {
|
||||
token::NtPat(self.collect_tokens_no_attrs(|this| match kind {
|
||||
NonterminalKind::Pat2018 { .. } => this.parse_pat(None),
|
||||
NonterminalKind::Pat2021 { .. } => {
|
||||
this.parse_top_pat(GateOr::Yes, RecoverComma::No)
|
||||
@ -125,11 +127,33 @@ impl<'a> Parser<'a> {
|
||||
_ => unreachable!(),
|
||||
})?)
|
||||
}
|
||||
NonterminalKind::Expr => token::NtExpr(self.collect_tokens(|this| this.parse_expr())?),
|
||||
NonterminalKind::Literal => {
|
||||
token::NtLiteral(self.collect_tokens(|this| this.parse_literal_maybe_minus())?)
|
||||
|
||||
// If there are attributes present, then `parse_expr` will end up collecting tokens,
|
||||
// turning the outer `collect_tokens_no_attrs` into a no-op due to the already present
|
||||
// tokens. If there are *not* attributes present, then the outer
|
||||
// `collect_tokens_no_attrs` will ensure that we will end up collecting tokens for the
|
||||
// expressions.
|
||||
//
|
||||
// This is less efficient than it could be, since the outer `collect_tokens_no_attrs`
|
||||
// still needs to snapshot the `TokenCursor` before calling `parse_expr`, even when
|
||||
// `parse_expr` will end up collecting tokens. Ideally, this would work more like
|
||||
// `parse_item`, and take in a `ForceCollect` parameter. However, this would require
|
||||
// adding a `ForceCollect` parameter in a bunch of places in expression parsing
|
||||
// for little gain. If the perf impact from this turns out to be noticeable, we should
|
||||
// revisit this apporach.
|
||||
NonterminalKind::Expr => {
|
||||
token::NtExpr(self.collect_tokens_no_attrs(|this| this.parse_expr())?)
|
||||
}
|
||||
NonterminalKind::Literal => {
|
||||
// The `:literal` matcher does not support attributes
|
||||
token::NtLiteral(
|
||||
self.collect_tokens_no_attrs(|this| this.parse_literal_maybe_minus())?,
|
||||
)
|
||||
}
|
||||
|
||||
NonterminalKind::Ty => {
|
||||
token::NtTy(self.collect_tokens_no_attrs(|this| this.parse_ty())?)
|
||||
}
|
||||
NonterminalKind::Ty => token::NtTy(self.collect_tokens(|this| this.parse_ty())?),
|
||||
// this could be handled like a token, since it is one
|
||||
NonterminalKind::Ident => {
|
||||
if let Some((ident, is_raw)) = get_macro_ident(&self.token) {
|
||||
@ -141,15 +165,15 @@ impl<'a> Parser<'a> {
|
||||
return Err(self.struct_span_err(self.token.span, msg));
|
||||
}
|
||||
}
|
||||
NonterminalKind::Path => {
|
||||
token::NtPath(self.collect_tokens(|this| this.parse_path(PathStyle::Type))?)
|
||||
}
|
||||
NonterminalKind::Path => token::NtPath(
|
||||
self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?,
|
||||
),
|
||||
NonterminalKind::Meta => {
|
||||
token::NtMeta(P(self.collect_tokens(|this| this.parse_attr_item(false))?))
|
||||
token::NtMeta(P(self.collect_tokens_no_attrs(|this| this.parse_attr_item(false))?))
|
||||
}
|
||||
NonterminalKind::TT => token::NtTT(self.parse_token_tree()),
|
||||
NonterminalKind::Vis => token::NtVis(
|
||||
self.collect_tokens(|this| this.parse_visibility(FollowedByType::Yes))?,
|
||||
self.collect_tokens_no_attrs(|this| this.parse_visibility(FollowedByType::Yes))?,
|
||||
),
|
||||
NonterminalKind::Lifetime => {
|
||||
if self.check_lifetime() {
|
||||
|
@ -1,4 +1,4 @@
|
||||
use super::{Parser, PathStyle};
|
||||
use super::{ForceCollect, Parser, PathStyle, TrailingToken};
|
||||
use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
|
||||
use rustc_ast::mut_visit::{noop_visit_pat, MutVisitor};
|
||||
use rustc_ast::ptr::P;
|
||||
@ -938,16 +938,24 @@ impl<'a> Parser<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
fields.push(match self.parse_pat_field(lo, attrs) {
|
||||
Ok(field) => field,
|
||||
Err(err) => {
|
||||
if let Some(mut delayed_err) = delayed_err {
|
||||
delayed_err.emit();
|
||||
}
|
||||
return Err(err);
|
||||
}
|
||||
});
|
||||
ate_comma = self.eat(&token::Comma);
|
||||
let field =
|
||||
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
|
||||
let field = match this.parse_pat_field(lo, attrs) {
|
||||
Ok(field) => Ok(field),
|
||||
Err(err) => {
|
||||
if let Some(mut delayed_err) = delayed_err.take() {
|
||||
delayed_err.emit();
|
||||
}
|
||||
return Err(err);
|
||||
}
|
||||
}?;
|
||||
ate_comma = this.eat(&token::Comma);
|
||||
// We just ate a comma, so there's no need to use
|
||||
// `TrailingToken::Comma`
|
||||
Ok((field, TrailingToken::None))
|
||||
})?;
|
||||
|
||||
fields.push(field)
|
||||
}
|
||||
|
||||
if let Some(mut err) = delayed_err {
|
||||
|
@ -3,8 +3,10 @@ use super::diagnostics::{AttemptLocalParseRecovery, Error};
|
||||
use super::expr::LhsExpr;
|
||||
use super::pat::{GateOr, RecoverComma};
|
||||
use super::path::PathStyle;
|
||||
use super::{BlockMode, ForceCollect, Parser, Restrictions, SemiColonMode, TrailingToken};
|
||||
use crate::{maybe_collect_tokens, maybe_whole};
|
||||
use super::{
|
||||
AttrWrapper, BlockMode, ForceCollect, Parser, Restrictions, SemiColonMode, TrailingToken,
|
||||
};
|
||||
use crate::maybe_whole;
|
||||
|
||||
use rustc_ast as ast;
|
||||
use rustc_ast::attr::HasAttrs;
|
||||
@ -38,30 +40,47 @@ impl<'a> Parser<'a> {
|
||||
capture_semi: bool,
|
||||
force_collect: ForceCollect,
|
||||
) -> PResult<'a, Option<Stmt>> {
|
||||
let mut attrs = self.parse_outer_attributes()?;
|
||||
let attrs = self.parse_outer_attributes()?;
|
||||
let lo = self.token.span;
|
||||
|
||||
maybe_whole!(self, NtStmt, |stmt| {
|
||||
let mut stmt = stmt;
|
||||
stmt.visit_attrs(|stmt_attrs| {
|
||||
mem::swap(stmt_attrs, &mut attrs);
|
||||
stmt_attrs.extend(attrs);
|
||||
});
|
||||
Some(stmt)
|
||||
});
|
||||
// Don't use `maybe_whole` so that we have precise control
|
||||
// over when we bump the parser
|
||||
if let token::Interpolated(nt) = &self.token.kind {
|
||||
if let token::NtStmt(stmt) = &**nt {
|
||||
let mut stmt = stmt.clone();
|
||||
return self.collect_tokens_trailing_token(
|
||||
attrs,
|
||||
force_collect,
|
||||
|this, mut attrs| {
|
||||
stmt.visit_attrs(|stmt_attrs| {
|
||||
mem::swap(stmt_attrs, &mut attrs);
|
||||
stmt_attrs.extend(attrs);
|
||||
});
|
||||
// Make sure we capture the token::Interpolated
|
||||
this.bump();
|
||||
Ok((Some(stmt), TrailingToken::None))
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(Some(if self.token.is_keyword(kw::Let) {
|
||||
self.parse_local_mk(lo, attrs.into(), capture_semi, force_collect)?
|
||||
self.parse_local_mk(lo, attrs, capture_semi, force_collect)?
|
||||
} else if self.is_kw_followed_by_ident(kw::Mut) {
|
||||
self.recover_stmt_local(lo, attrs.into(), "missing keyword", "let mut")?
|
||||
self.recover_stmt_local(
|
||||
lo,
|
||||
attrs.take_for_recovery().into(),
|
||||
"missing keyword",
|
||||
"let mut",
|
||||
)?
|
||||
} else if self.is_kw_followed_by_ident(kw::Auto) {
|
||||
self.bump(); // `auto`
|
||||
let msg = "write `let` instead of `auto` to introduce a new variable";
|
||||
self.recover_stmt_local(lo, attrs.into(), msg, "let")?
|
||||
self.recover_stmt_local(lo, attrs.take_for_recovery().into(), msg, "let")?
|
||||
} else if self.is_kw_followed_by_ident(sym::var) {
|
||||
self.bump(); // `var`
|
||||
let msg = "write `let` instead of `var` to introduce a new variable";
|
||||
self.recover_stmt_local(lo, attrs.into(), msg, "let")?
|
||||
self.recover_stmt_local(lo, attrs.take_for_recovery().into(), msg, "let")?
|
||||
} else if self.check_path() && !self.token.is_qpath_start() && !self.is_path_start_item() {
|
||||
// We have avoided contextual keywords like `union`, items with `crate` visibility,
|
||||
// or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
|
||||
@ -75,14 +94,14 @@ impl<'a> Parser<'a> {
|
||||
self.mk_stmt(lo.to(item.span), StmtKind::Item(P(item)))
|
||||
} else if self.eat(&token::Semi) {
|
||||
// Do not attempt to parse an expression if we're done here.
|
||||
self.error_outer_attrs(&attrs);
|
||||
self.error_outer_attrs(&attrs.take_for_recovery());
|
||||
self.mk_stmt(lo, StmtKind::Empty)
|
||||
} else if self.token != token::CloseDelim(token::Brace) {
|
||||
// Remainder are line-expr stmts.
|
||||
let e = self.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs.into()))?;
|
||||
self.mk_stmt(lo.to(e.span), StmtKind::Expr(e))
|
||||
} else {
|
||||
self.error_outer_attrs(&attrs);
|
||||
self.error_outer_attrs(&attrs.take_for_recovery());
|
||||
return Ok(None);
|
||||
}))
|
||||
}
|
||||
@ -90,10 +109,10 @@ impl<'a> Parser<'a> {
|
||||
fn parse_stmt_path_start(
|
||||
&mut self,
|
||||
lo: Span,
|
||||
attrs: Vec<Attribute>,
|
||||
attrs: AttrWrapper,
|
||||
force_collect: ForceCollect,
|
||||
) -> PResult<'a, Stmt> {
|
||||
maybe_collect_tokens!(self, force_collect, &attrs, |this: &mut Parser<'a>| {
|
||||
self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
|
||||
let path = this.parse_path(PathStyle::Expr)?;
|
||||
|
||||
if this.eat(&token::Not) {
|
||||
@ -142,7 +161,7 @@ impl<'a> Parser<'a> {
|
||||
// Since none of the above applied, this is an expression statement macro.
|
||||
let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac), AttrVec::new());
|
||||
let e = self.maybe_recover_from_bad_qpath(e, true)?;
|
||||
let e = self.parse_dot_or_call_expr_with(e, lo, attrs)?;
|
||||
let e = self.parse_dot_or_call_expr_with(e, lo, attrs.into())?;
|
||||
let e = self.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(e))?;
|
||||
StmtKind::Expr(e)
|
||||
};
|
||||
@ -178,11 +197,11 @@ impl<'a> Parser<'a> {
|
||||
fn parse_local_mk(
|
||||
&mut self,
|
||||
lo: Span,
|
||||
attrs: AttrVec,
|
||||
attrs: AttrWrapper,
|
||||
capture_semi: bool,
|
||||
force_collect: ForceCollect,
|
||||
) -> PResult<'a, Stmt> {
|
||||
maybe_collect_tokens!(self, force_collect, &attrs, |this: &mut Parser<'a>| {
|
||||
self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
|
||||
this.expect_keyword(kw::Let)?;
|
||||
let local = this.parse_local(attrs.into())?;
|
||||
let trailing = if capture_semi && this.token.kind == token::Semi {
|
||||
|
Loading…
Reference in New Issue
Block a user