Mirror of https://github.com/rust-lang/rust.git, synced 2025-04-28 02:57:37 +00:00
Auto merge of #137517 - nnethercote:rm-NtPat-NtItem-NtStmt, r=petrochenkov

Remove `NtPat`, `NtMeta`, and `NtPath`. Another part of #124141. r? `@petrochenkov`

Commit aa3c2d73ef
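For context, the user-visible side of this change shows up mainly in diagnostics: a matched fragment forwarded to another macro-by-example is now reported by its fragment kind rather than by its pretty-printed contents. A small illustration in the spirit of tests/ui/macros/nonterminal-matching.rs further down (the macro names are invented for this note, and the program is expected to fail to compile):

macro_rules! inner {
    (path a::b::c) => {};
}
macro_rules! outer {
    // `$x` is forwarded as an opaque `path` metavariable, so `inner` cannot match it
    // against the literal tokens `a::b::c`. Before this change the error read
    // "no rules expected path `a::b::c`"; with it, "no rules expected `path` metavariable".
    (path $x:path) => { inner!(path $x); };
}

fn main() {
    outer!(path a::b::c);
}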
@@ -202,9 +202,6 @@ impl HasTokens for Nonterminal {
Nonterminal::NtItem(item) => item.tokens(),
Nonterminal::NtStmt(stmt) => stmt.tokens(),
Nonterminal::NtExpr(expr) | Nonterminal::NtLiteral(expr) => expr.tokens(),
Nonterminal::NtPat(pat) => pat.tokens(),
Nonterminal::NtMeta(attr_item) => attr_item.tokens(),
Nonterminal::NtPath(path) => path.tokens(),
Nonterminal::NtBlock(block) => block.tokens(),
}
}
@@ -213,9 +210,6 @@ impl HasTokens for Nonterminal {
Nonterminal::NtItem(item) => item.tokens_mut(),
Nonterminal::NtStmt(stmt) => stmt.tokens_mut(),
Nonterminal::NtExpr(expr) | Nonterminal::NtLiteral(expr) => expr.tokens_mut(),
Nonterminal::NtPat(pat) => pat.tokens_mut(),
Nonterminal::NtMeta(attr_item) => attr_item.tokens_mut(),
Nonterminal::NtPath(path) => path.tokens_mut(),
Nonterminal::NtBlock(block) => block.tokens_mut(),
}
}
@@ -14,7 +14,7 @@ use crate::ast::{
PathSegment, Safety,
};
use crate::ptr::P;
use crate::token::{self, CommentKind, Delimiter, Token};
use crate::token::{self, CommentKind, Delimiter, InvisibleOrigin, MetaVarKind, Token};
use crate::tokenstream::{
DelimSpan, LazyAttrTokenStream, Spacing, TokenStream, TokenStreamIter, TokenTree,
};
@@ -405,11 +405,17 @@ impl MetaItem {
let span = span.with_hi(segments.last().unwrap().ident.span.hi());
Path { span, segments, tokens: None }
}
Some(TokenTree::Token(Token { kind: token::Interpolated(nt), .. }, _)) => match &**nt {
token::Nonterminal::NtMeta(item) => return item.meta(item.path.span),
token::Nonterminal::NtPath(path) => (**path).clone(),
_ => return None,
},
Some(TokenTree::Delimited(
_span,
_spacing,
Delimiter::Invisible(InvisibleOrigin::MetaVar(
MetaVarKind::Meta { .. } | MetaVarKind::Path,
)),
_stream,
)) => {
// This path is currently unreachable in the test suite.
unreachable!()
}
Some(TokenTree::Token(
Token { kind: token::OpenDelim(_) | token::CloseDelim(_), .. },
_,
@@ -905,16 +905,8 @@ fn visit_nonterminal<T: MutVisitor>(vis: &mut T, nt: &mut token::Nonterminal) {
vis.flat_map_stmt(stmt).expect_one("expected visitor to produce exactly one item")
})
}),
token::NtPat(pat) => vis.visit_pat(pat),
token::NtExpr(expr) => vis.visit_expr(expr),
token::NtLiteral(expr) => vis.visit_expr(expr),
token::NtMeta(item) => {
let AttrItem { unsafety: _, path, args, tokens } = item.deref_mut();
vis.visit_path(path);
visit_attr_args(vis, args);
visit_lazy_tts(vis, tokens);
}
token::NtPath(path) => vis.visit_path(path),
}
}
@@ -90,7 +90,10 @@ pub enum MetaVarKind {
Ident,
Lifetime,
Literal,
Meta,
Meta {
/// Will `AttrItem::meta` succeed on this, if reparsed?
has_meta_form: bool,
},
Path,
Vis,
TT,
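A note on the new `has_meta_form` field above: a `$m:meta` matcher accepts anything attribute-shaped, but only some of those inputs can later be reparsed by `AttrItem::meta` into an `ast::MetaItem`. A rough, self-contained illustration (the macro and inputs are invented for this note; the classification in the comments is an informal reading of the doc comment above, not something stated in the commit):

// Each invocation matches `$m:meta` and expands to nothing, so this compiles.
macro_rules! takes_meta {
    ($m:meta) => {};
}

takes_meta!(derive(Debug));              // reparses as a list meta item: has a meta form
takes_meta!(feature = "std");            // reparses as a name-value meta item: has a meta form
takes_meta!(an(arbitrary token stream)); // attribute-shaped, but not a reparsable meta item

fn main() {}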
@@ -110,7 +113,7 @@ impl fmt::Display for MetaVarKind {
MetaVarKind::Ident => sym::ident,
MetaVarKind::Lifetime => sym::lifetime,
MetaVarKind::Literal => sym::literal,
MetaVarKind::Meta => sym::meta,
MetaVarKind::Meta { .. } => sym::meta,
MetaVarKind::Path => sym::path,
MetaVarKind::Vis => sym::vis,
MetaVarKind::TT => sym::tt,
@@ -621,8 +624,7 @@ impl Token {
matches!(&**nt,
NtBlock(..) |
NtExpr(..) |
NtLiteral(..) |
NtPath(..)
NtLiteral(..)
),
OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
MetaVarKind::Block |
@@ -658,14 +660,11 @@ impl Token {
matches!(&**nt,
| NtExpr(..)
| NtLiteral(..)
| NtMeta(..)
| NtPat(..)
| NtPath(..)
),
OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
MetaVarKind::Expr { .. } |
MetaVarKind::Literal |
MetaVarKind::Meta |
MetaVarKind::Meta { .. } |
MetaVarKind::Pat(_) |
MetaVarKind::Path |
MetaVarKind::Ty { .. }
@@ -689,7 +688,6 @@ impl Token {
Lifetime(..) | // lifetime bound in trait object
Lt | BinOp(Shl) | // associated path
PathSep => true, // global path
Interpolated(ref nt) => matches!(&**nt, NtPath(..)),
OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
MetaVarKind::Ty { .. } |
MetaVarKind::Path
@@ -848,27 +846,16 @@ impl Token {
self.ident().is_some_and(|(ident, _)| ident.name == name)
}

/// Returns `true` if the token is an interpolated path.
fn is_whole_path(&self) -> bool {
if let Interpolated(nt) = &self.kind
&& let NtPath(..) = &**nt
{
return true;
}

false
}

/// Is this a pre-parsed expression dropped into the token stream
/// (which happens while parsing the result of macro expansion)?
pub fn is_whole_expr(&self) -> bool {
if let Interpolated(nt) = &self.kind
&& let NtExpr(_) | NtLiteral(_) | NtPath(_) | NtBlock(_) = &**nt
&& let NtExpr(_) | NtLiteral(_) | NtBlock(_) = &**nt
{
return true;
true
} else {
matches!(self.is_metavar_seq(), Some(MetaVarKind::Path))
}

false
}

/// Is the token an interpolated block (`$b:block`)?
@@ -894,7 +881,7 @@ impl Token {
pub fn is_path_start(&self) -> bool {
self == &PathSep
|| self.is_qpath_start()
|| self.is_whole_path()
|| matches!(self.is_metavar_seq(), Some(MetaVarKind::Path))
|| self.is_path_segment_keyword()
|| self.is_ident() && !self.is_reserved_ident()
}
@@ -1075,12 +1062,8 @@ pub enum Nonterminal {
NtItem(P<ast::Item>),
NtBlock(P<ast::Block>),
NtStmt(P<ast::Stmt>),
NtPat(P<ast::Pat>),
NtExpr(P<ast::Expr>),
NtLiteral(P<ast::Expr>),
/// Stuff inside brackets for attributes
NtMeta(P<ast::AttrItem>),
NtPath(P<ast::Path>),
}

#[derive(Debug, Copy, Clone, PartialEq, Eq, Encodable, Decodable, Hash, HashStable_Generic)]
@@ -1172,10 +1155,7 @@ impl Nonterminal {
NtItem(item) => item.span,
NtBlock(block) => block.span,
NtStmt(stmt) => stmt.span,
NtPat(pat) => pat.span,
NtExpr(expr) | NtLiteral(expr) => expr.span,
NtMeta(attr_item) => attr_item.span(),
NtPath(path) => path.span,
}
}

@@ -1184,11 +1164,8 @@
NtItem(..) => "item",
NtBlock(..) => "block",
NtStmt(..) => "statement",
NtPat(..) => "pattern",
NtExpr(..) => "expression",
NtLiteral(..) => "literal",
NtMeta(..) => "attribute",
NtPath(..) => "path",
}
}
}
@@ -1209,11 +1186,8 @@ impl fmt::Debug for Nonterminal {
NtItem(..) => f.pad("NtItem(..)"),
NtBlock(..) => f.pad("NtBlock(..)"),
NtStmt(..) => f.pad("NtStmt(..)"),
NtPat(..) => f.pad("NtPat(..)"),
NtExpr(..) => f.pad("NtExpr(..)"),
NtLiteral(..) => f.pad("NtLiteral(..)"),
NtMeta(..) => f.pad("NtMeta(..)"),
NtPath(..) => f.pad("NtPath(..)"),
}
}
}
@@ -468,9 +468,6 @@ impl TokenStream {
TokenStream::token_alone(token::Semi, stmt.span)
}
Nonterminal::NtStmt(stmt) => TokenStream::from_ast(stmt),
Nonterminal::NtPat(pat) => TokenStream::from_ast(pat),
Nonterminal::NtMeta(attr) => TokenStream::from_ast(attr),
Nonterminal::NtPath(path) => TokenStream::from_ast(path),
Nonterminal::NtExpr(expr) | Nonterminal::NtLiteral(expr) => TokenStream::from_ast(expr),
}
}
@@ -477,41 +477,21 @@ impl<'a> MetaItemListParserContext<'a> {

// or a path.
let path =
if let Some(TokenTree::Token(Token { kind: token::Interpolated(nt), span, .. }, _)) =
if let Some(TokenTree::Token(Token { kind: token::Interpolated(_), span, .. }, _)) =
self.inside_delimiters.peek()
{
match &**nt {
// or maybe a full nt meta including the path but we return immediately
token::Nonterminal::NtMeta(item) => {
self.inside_delimiters.next();
self.inside_delimiters.next();
// We go into this path if an expr ended up in an attribute that
// expansion did not turn into a literal. Say, `#[repr(align(macro!()))]`
// where the macro didn't expand to a literal. An error is already given
// for this at this point, and then we do continue. This makes this path
// reachable...
let e = self.dcx.span_delayed_bug(
*span,
"expr in place where literal is expected (builtin attr parsing)",
);

return Some(MetaItemOrLitParser::MetaItemParser(MetaItemParser {
path: PathParser::Ast(&item.path),
args: ArgParser::from_attr_args(&item.args, self.dcx),
}));
}
// an already interpolated path from a macro expansion is a path, no need to parse
// one from tokens
token::Nonterminal::NtPath(path) => {
self.inside_delimiters.next();

AttrPath::from_ast(path)
}
_ => {
self.inside_delimiters.next();
// we go into this path if an expr ended up in an attribute that
// expansion did not turn into a literal. Say, `#[repr(align(macro!()))]`
// where the macro didn't expand to a literal. An error is already given
// for this at this point, and then we do continue. This makes this path
// reachable...
let e = self.dcx.span_delayed_bug(
*span,
"expr in place where literal is expected (builtin attr parsing)",
);

return Some(MetaItemOrLitParser::Err(*span, e));
}
}
return Some(MetaItemOrLitParser::Err(*span, e));
} else {
self.next_path()?
};
@@ -279,9 +279,9 @@ pub(super) fn transcribe<'a>(
if let Some(cur_matched) = lookup_cur_matched(ident, interp, &repeats) {
// We wrap the tokens in invisible delimiters, unless they are already wrapped
// in invisible delimiters with the same `MetaVarKind`. Because some proc
// macros can't multiple layers of invisible delimiters of the same
// macros can't handle multiple layers of invisible delimiters of the same
// `MetaVarKind`. This loses some span info, though it hopefully won't matter.
let mut mk_delimited = |mv_kind, mut stream: TokenStream| {
let mut mk_delimited = |mk_span, mv_kind, mut stream: TokenStream| {
if stream.len() == 1 {
let tree = stream.iter().next().unwrap();
if let TokenTree::Delimited(_, _, delim, inner) = tree
@@ -295,6 +295,7 @@ pub(super) fn transcribe<'a>(
// Emit as a token stream within `Delimiter::Invisible` to maintain
// parsing priorities.
marker.visit_span(&mut sp);
with_metavar_spans(|mspans| mspans.insert(mk_span, sp));
// Both the open delim and close delim get the same span, which covers the
// `$foo` in the decl macro RHS.
TokenTree::Delimited(
@@ -322,12 +323,32 @@ pub(super) fn transcribe<'a>(
let kind = token::NtLifetime(*ident, *is_raw);
TokenTree::token_alone(kind, sp)
}
MatchedSingle(ParseNtResult::Pat(pat, pat_kind)) => mk_delimited(
pat.span,
MetaVarKind::Pat(*pat_kind),
TokenStream::from_ast(pat),
),
MatchedSingle(ParseNtResult::Ty(ty)) => {
let is_path = matches!(&ty.kind, TyKind::Path(None, _path));
mk_delimited(MetaVarKind::Ty { is_path }, TokenStream::from_ast(ty))
mk_delimited(
ty.span,
MetaVarKind::Ty { is_path },
TokenStream::from_ast(ty),
)
}
MatchedSingle(ParseNtResult::Meta(attr_item)) => {
let has_meta_form = attr_item.meta_kind().is_some();
mk_delimited(
attr_item.span(),
MetaVarKind::Meta { has_meta_form },
TokenStream::from_ast(attr_item),
)
}
MatchedSingle(ParseNtResult::Path(path)) => {
mk_delimited(path.span, MetaVarKind::Path, TokenStream::from_ast(path))
}
MatchedSingle(ParseNtResult::Vis(vis)) => {
mk_delimited(MetaVarKind::Vis, TokenStream::from_ast(vis))
mk_delimited(vis.span, MetaVarKind::Vis, TokenStream::from_ast(vis))
}
MatchedSingle(ParseNtResult::Nt(nt)) => {
// Other variables are emitted into the output stream as groups with
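A note on the "already wrapped in invisible delimiters" check in the transcriber hunk above: the double wrapping it avoids can arise when a matched fragment is forwarded through a second macro-by-example layer and transcribed again. A minimal illustration (the macros are invented for this note, and the wrapping comment is a hedged reading of the code above, not an exact trace):

macro_rules! inner {
    ($t:ty) => { const _: $t = 0; };
}
macro_rules! outer {
    // `$t` is transcribed here (wrapped once as a `ty` metavariable sequence), then
    // matched and transcribed again inside `inner`; without the check above, the same
    // tokens could end up under two invisible delimiters of the same kind.
    ($t:ty) => { inner!($t); };
}

outer!(i32); // expands to `const _: i32 = 0;`

fn main() {}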
@@ -424,7 +424,7 @@ parse_invalid_logical_operator = `{$incorrect}` is not a logical operator
.use_amp_amp_for_conjunction = use `&&` to perform logical conjunction
.use_pipe_pipe_for_disjunction = use `||` to perform logical disjunction

parse_invalid_meta_item = expected unsuffixed literal, found `{$token}`
parse_invalid_meta_item = expected unsuffixed literal, found {$descr}
.quote_ident_sugg = surround the identifier with quotation marks to make it into a string literal

parse_invalid_offset_of = offset_of expects dot-separated field and variant names
@@ -1024,7 +1024,7 @@ pub(crate) struct SuffixedLiteralInAttribute {
pub(crate) struct InvalidMetaItem {
#[primary_span]
pub span: Span,
pub token: Token,
pub descr: String,
#[subdiagnostic]
pub quote_ident_sugg: Option<InvalidMetaItemQuoteIdentSugg>,
}
@@ -1,4 +1,6 @@
use rustc_ast::{self as ast, Attribute, attr, token};
use rustc_ast as ast;
use rustc_ast::token::{self, MetaVarKind};
use rustc_ast::{Attribute, attr};
use rustc_errors::codes::*;
use rustc_errors::{Diag, PResult};
use rustc_span::{BytePos, Span};
@@ -9,7 +11,7 @@ use super::{
AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, ParserRange, PathStyle, Trailing,
UsePreAttrPos,
};
use crate::{errors, exp, fluent_generated as fluent, maybe_whole};
use crate::{errors, exp, fluent_generated as fluent};

// Public for rustfmt usage
#[derive(Debug)]
@@ -269,7 +271,12 @@ impl<'a> Parser<'a> {
/// PATH `=` UNSUFFIXED_LIT
/// The delimiters or `=` are still put into the resulting token stream.
pub fn parse_attr_item(&mut self, force_collect: ForceCollect) -> PResult<'a, ast::AttrItem> {
maybe_whole!(self, NtMeta, |attr| attr.into_inner());
if let Some(item) = self.eat_metavar_seq_with_matcher(
|mv_kind| matches!(mv_kind, MetaVarKind::Meta { .. }),
|this| this.parse_attr_item(force_collect),
) {
return Ok(item);
}

// Attr items don't have attributes.
self.collect_tokens(None, AttrWrapper::empty(), force_collect, |this, _empty_attrs| {
@@ -396,18 +403,17 @@ impl<'a> Parser<'a> {
&mut self,
unsafe_allowed: AllowLeadingUnsafe,
) -> PResult<'a, ast::MetaItem> {
// We can't use `maybe_whole` here because it would bump in the `None`
// case, which we don't want.
if let token::Interpolated(nt) = &self.token.kind
&& let token::NtMeta(attr_item) = &**nt
{
match attr_item.meta(attr_item.path.span) {
Some(meta) => {
self.bump();
return Ok(meta);
}
None => self.unexpected()?,
}
if let Some(MetaVarKind::Meta { has_meta_form }) = self.token.is_metavar_seq() {
return if has_meta_form {
let attr_item = self
.eat_metavar_seq(MetaVarKind::Meta { has_meta_form: true }, |this| {
this.parse_attr_item(ForceCollect::No)
})
.unwrap();
Ok(attr_item.meta(attr_item.path.span).unwrap())
} else {
self.unexpected_any()
};
}

let lo = self.token.span;
@@ -464,7 +470,7 @@ impl<'a> Parser<'a> {

let mut err = errors::InvalidMetaItem {
span: self.token.span,
token: self.token.clone(),
descr: super::token_descr(&self.token),
quote_ident_sugg: None,
};
@@ -1,17 +1,15 @@
use std::mem::take;
use std::ops::{Deref, DerefMut};
use std::sync::Arc;

use ast::token::IdentIsRaw;
use rustc_ast as ast;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Lit, LitKind, Token, TokenKind};
use rustc_ast::tokenstream::AttrTokenTree;
use rustc_ast::util::parser::AssocOp;
use rustc_ast::{
AngleBracketedArg, AngleBracketedArgs, AnonConst, AttrVec, BinOpKind, BindingMode, Block,
BlockCheckMode, Expr, ExprKind, GenericArg, Generics, HasTokens, Item, ItemKind, Param, Pat,
PatKind, Path, PathSegment, QSelf, Recovered, Ty, TyKind,
BlockCheckMode, Expr, ExprKind, GenericArg, Generics, Item, ItemKind, Param, Pat, PatKind,
Path, PathSegment, QSelf, Recovered, Ty, TyKind,
};
use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashSet;
@@ -2406,52 +2404,6 @@ impl<'a> Parser<'a> {
err.subdiagnostic(ExprParenthesesNeeded::surrounding(*sp));
}
err.span_label(span, "expected expression");

// Walk the chain of macro expansions for the current token to point at how the original
// code was interpreted. This helps the user realize when a macro argument of one type is
// later reinterpreted as a different type, like `$x:expr` being reinterpreted as `$x:pat`
// in a subsequent macro invocation (#71039).
let mut tok = self.token.clone();
let mut labels = vec![];
while let TokenKind::Interpolated(nt) = &tok.kind {
let tokens = nt.tokens();
labels.push(Arc::clone(nt));
if let Some(tokens) = tokens
&& let tokens = tokens.to_attr_token_stream()
&& let tokens = tokens.0.deref()
&& let [AttrTokenTree::Token(token, _)] = &tokens[..]
{
tok = token.clone();
} else {
break;
}
}
let mut iter = labels.into_iter().peekable();
let mut show_link = false;
while let Some(nt) = iter.next() {
let descr = nt.descr();
if let Some(next) = iter.peek() {
let next_descr = next.descr();
if next_descr != descr {
err.span_label(next.use_span(), format!("this is expected to be {next_descr}"));
err.span_label(
nt.use_span(),
format!(
"this is interpreted as {}, but it is expected to be {}",
next_descr, descr,
),
);
show_link = true;
}
}
}
if show_link {
err.note(
"when forwarding a matched fragment to another macro-by-example, matchers in the \
second macro will see an opaque AST of the fragment type, not the underlying \
tokens",
);
}
err
}

@@ -4,7 +4,7 @@ use core::mem;
use core::ops::{Bound, ControlFlow};

use ast::mut_visit::{self, MutVisitor};
use ast::token::IdentIsRaw;
use ast::token::{IdentIsRaw, MetaVarKind};
use ast::{CoroutineKind, ForLoopKind, GenBlockKind, MatchKind, Pat, Path, PathSegment, Recovered};
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
@@ -1344,6 +1344,7 @@ impl<'a> Parser<'a> {
fn parse_expr_bottom(&mut self) -> PResult<'a, P<Expr>> {
maybe_recover_from_interpolated_ty_qpath!(self, true);

let span = self.token.span;
if let token::Interpolated(nt) = &self.token.kind {
match &**nt {
token::NtExpr(e) | token::NtLiteral(e) => {
@@ -1351,11 +1352,6 @@ impl<'a> Parser<'a> {
self.bump();
return Ok(e);
}
token::NtPath(path) => {
let path = (**path).clone();
self.bump();
return Ok(self.mk_expr(self.prev_token.span, ExprKind::Path(None, path)));
}
token::NtBlock(block) => {
let block = block.clone();
self.bump();
@@ -1363,6 +1359,10 @@ impl<'a> Parser<'a> {
}
_ => {}
};
} else if let Some(path) = self.eat_metavar_seq(MetaVarKind::Path, |this| {
this.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))
}) {
return Ok(self.mk_expr(span, ExprKind::Path(None, path)));
}

// Outer attributes are already parsed and will be
@@ -4,7 +4,7 @@ use std::mem;
use ast::token::IdentIsRaw;
use rustc_ast::ast::*;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, TokenKind};
use rustc_ast::token::{self, Delimiter, InvisibleOrigin, MetaVarKind, TokenKind};
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
use rustc_ast::util::case::Case;
use rustc_ast::{self as ast};
@@ -3071,8 +3071,10 @@ impl<'a> Parser<'a> {

fn is_named_param(&self) -> bool {
let offset = match &self.token.kind {
token::Interpolated(nt) => match &**nt {
token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
token::OpenDelim(Delimiter::Invisible(origin)) => match origin {
InvisibleOrigin::MetaVar(MetaVarKind::Pat(_)) => {
return self.check_noexpect_past_close_delim(&token::Colon);
}
_ => 0,
},
token::BinOp(token::And) | token::AndAnd => 1,
@@ -24,7 +24,8 @@ pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
use path::PathStyle;
use rustc_ast::ptr::P;
use rustc_ast::token::{
self, Delimiter, IdentIsRaw, InvisibleOrigin, MetaVarKind, Nonterminal, Token, TokenKind,
self, Delimiter, IdentIsRaw, InvisibleOrigin, MetaVarKind, Nonterminal, NtPatKind, Token,
TokenKind,
};
use rustc_ast::tokenstream::{AttrsTarget, Spacing, TokenStream, TokenTree};
use rustc_ast::util::case::Case;
@@ -1745,7 +1746,10 @@ pub enum ParseNtResult {
Tt(TokenTree),
Ident(Ident, IdentIsRaw),
Lifetime(Ident, IdentIsRaw),
Pat(P<ast::Pat>, NtPatKind),
Ty(P<ast::Ty>),
Meta(P<ast::AttrItem>),
Path(P<ast::Path>),
Vis(P<ast::Visibility>),

/// This variant will eventually be removed, along with `Token::Interpolate`.
@@ -32,7 +32,7 @@ impl<'a> Parser<'a> {
| MetaVarKind::Expr { .. }
| MetaVarKind::Ty { .. }
| MetaVarKind::Literal // `true`, `false`
| MetaVarKind::Meta
| MetaVarKind::Meta { .. }
| MetaVarKind::Path => true,

MetaVarKind::Item
@@ -49,11 +49,9 @@ impl<'a> Parser<'a> {
fn nt_may_be_ident(nt: &Nonterminal) -> bool {
match nt {
NtStmt(_)
| NtPat(_)
| NtExpr(_)
| NtLiteral(_) // `true`, `false`
| NtMeta(_)
| NtPath(_) => true,
=> true,

NtItem(_) | NtBlock(_) => false,
}
@@ -99,7 +97,7 @@ impl<'a> Parser<'a> {
token::NtLifetime(..) => true,
token::Interpolated(nt) => match &**nt {
NtBlock(_) | NtStmt(_) | NtExpr(_) | NtLiteral(_) => true,
NtItem(_) | NtPat(_) | NtMeta(_) | NtPath(_) => false,
NtItem(_) => false,
},
token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(k))) => match k {
MetaVarKind::Block
@@ -109,7 +107,7 @@ impl<'a> Parser<'a> {
MetaVarKind::Item
| MetaVarKind::Pat(_)
| MetaVarKind::Ty { .. }
| MetaVarKind::Meta
| MetaVarKind::Meta { .. }
| MetaVarKind::Path
| MetaVarKind::Vis => false,
MetaVarKind::Lifetime | MetaVarKind::Ident | MetaVarKind::TT => {
@@ -170,15 +168,18 @@ impl<'a> Parser<'a> {
}
},
NonterminalKind::Pat(pat_kind) => {
NtPat(self.collect_tokens_no_attrs(|this| match pat_kind {
PatParam { .. } => this.parse_pat_no_top_alt(None, None),
PatWithOr => this.parse_pat_no_top_guard(
None,
RecoverComma::No,
RecoverColon::No,
CommaRecoveryMode::EitherTupleOrPipe,
),
})?)
return Ok(ParseNtResult::Pat(
self.collect_tokens_no_attrs(|this| match pat_kind {
PatParam { .. } => this.parse_pat_no_top_alt(None, None),
PatWithOr => this.parse_pat_no_top_guard(
None,
RecoverComma::No,
RecoverColon::No,
CommaRecoveryMode::EitherTupleOrPipe,
),
})?,
pat_kind,
));
}
NonterminalKind::Expr(_) => NtExpr(self.parse_expr_force_collect()?),
NonterminalKind::Literal => {
@@ -203,9 +204,13 @@ impl<'a> Parser<'a> {
};
}
NonterminalKind::Path => {
NtPath(P(self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?))
return Ok(ParseNtResult::Path(P(
self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?
)));
}
NonterminalKind::Meta => {
return Ok(ParseNtResult::Meta(P(self.parse_attr_item(ForceCollect::Yes)?)));
}
NonterminalKind::Meta => NtMeta(P(self.parse_attr_item(ForceCollect::Yes)?)),
NonterminalKind::Vis => {
return Ok(ParseNtResult::Vis(P(self.collect_tokens_no_attrs(|this| {
this.parse_visibility(FollowedByType::Yes)
@@ -2,7 +2,8 @@ use std::ops::Bound;

use rustc_ast::mut_visit::{self, MutVisitor};
use rustc_ast::ptr::P;
use rustc_ast::token::{self, BinOpToken, Delimiter, IdentIsRaw, Token};
use rustc_ast::token::NtPatKind::*;
use rustc_ast::token::{self, BinOpToken, Delimiter, IdentIsRaw, MetaVarKind, Token};
use rustc_ast::util::parser::ExprPrecedence;
use rustc_ast::visit::{self, Visitor};
use rustc_ast::{
@@ -30,7 +31,7 @@ use crate::errors::{
UnexpectedVertVertInPattern, WrapInParens,
};
use crate::parser::expr::{DestructuredFloat, could_be_unclosed_char_literal};
use crate::{exp, maybe_recover_from_interpolated_ty_qpath, maybe_whole};
use crate::{exp, maybe_recover_from_interpolated_ty_qpath};

#[derive(PartialEq, Copy, Clone)]
pub enum Expected {
@@ -689,6 +690,27 @@ impl<'a> Parser<'a> {
PatVisitor { parser: self, stmt, arm: None, field: None }.visit_stmt(stmt);
}

fn eat_metavar_pat(&mut self) -> Option<P<Pat>> {
// Must try both kinds of pattern nonterminals.
if let Some(pat) = self.eat_metavar_seq_with_matcher(
|mv_kind| matches!(mv_kind, MetaVarKind::Pat(PatParam { .. })),
|this| this.parse_pat_no_top_alt(None, None),
) {
Some(pat)
} else if let Some(pat) = self.eat_metavar_seq(MetaVarKind::Pat(PatWithOr), |this| {
this.parse_pat_no_top_guard(
None,
RecoverComma::No,
RecoverColon::No,
CommaRecoveryMode::EitherTupleOrPipe,
)
}) {
Some(pat)
} else {
None
}
}

/// Parses a pattern, with a setting whether modern range patterns (e.g., `a..=b`, `a..b` are
/// allowed).
fn parse_pat_with_range_pat(
@@ -698,7 +720,10 @@ impl<'a> Parser<'a> {
syntax_loc: Option<PatternLocation>,
) -> PResult<'a, P<Pat>> {
maybe_recover_from_interpolated_ty_qpath!(self, true);
maybe_whole!(self, NtPat, |pat| pat);

if let Some(pat) = self.eat_metavar_pat() {
return Ok(pat);
}

let mut lo = self.token.span;

@@ -1043,10 +1068,8 @@ impl<'a> Parser<'a> {
self.recover_additional_muts();

// Make sure we don't allow e.g. `let mut $p;` where `$p:pat`.
if let token::Interpolated(nt) = &self.token.kind {
if let token::NtPat(..) = &**nt {
self.expected_ident_found_err().emit();
}
if let Some(MetaVarKind::Pat(_)) = self.token.is_metavar_seq() {
self.expected_ident_found_err().emit();
}

// Parse the pattern we hope to be an identifier.
@@ -15,9 +15,9 @@ use tracing::debug;

use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{Parser, Restrictions, TokenType};
use crate::errors::{PathSingleColon, PathTripleColon};
use crate::errors::{self, PathSingleColon, PathTripleColon};
use crate::exp;
use crate::parser::{CommaRecoveryMode, RecoverColon, RecoverComma};
use crate::{errors, exp, maybe_whole};

/// Specifies how to parse a path.
#[derive(Copy, Clone, PartialEq)]
@@ -194,7 +194,11 @@ impl<'a> Parser<'a> {
}
};

maybe_whole!(self, NtPath, |path| reject_generics_if_mod_style(self, path.into_inner()));
if let Some(path) =
self.eat_metavar_seq(MetaVarKind::Path, |this| this.parse_path(PathStyle::Type))
{
return Ok(reject_generics_if_mod_style(self, path));
}

// If we have a `ty` metavar in the form of a path, reparse it directly as a path, instead
// of reparsing it as a `ty` and then extracting the path.
@@ -5,7 +5,7 @@
macro_rules! pass_nonterminal {
($n:expr) => {
#[repr(align($n))]
//~^ ERROR expected unsuffixed literal, found `n!()`
//~^ ERROR expected unsuffixed literal, found expression `n!()`
//~^^ ERROR incorrect `repr(align)` attribute format: `align` expects a literal integer as argument [E0693]
struct S;
};
@@ -1,4 +1,4 @@
error: expected unsuffixed literal, found `n!()`
error: expected unsuffixed literal, found expression `n!()`
--> $DIR/nonterminal-expansion.rs:7:22
|
LL | #[repr(align($n))]
@@ -28,8 +28,8 @@ struct S9;
macro_rules! generate_s10 {
($expr: expr) => {
#[cfg(feature = $expr)]
//~^ ERROR expected unsuffixed literal, found `concat!("nonexistent")`
//~| ERROR expected unsuffixed literal, found `concat!("nonexistent")`
//~^ ERROR expected unsuffixed literal, found expression `concat!("nonexistent")`
//~| ERROR expected unsuffixed literal, found expression `concat!("nonexistent")`
struct S10;
}
}
@@ -54,7 +54,7 @@ LL | #[cfg(a = b"hi")]
| |
| help: consider removing the prefix
|
error: expected unsuffixed literal, found `concat!("nonexistent")`
error: expected unsuffixed literal, found expression `concat!("nonexistent")`
--> $DIR/cfg-attr-syntax-validation.rs:30:25
|
LL | #[cfg(feature = $expr)]
@@ -65,7 +65,7 @@ LL | generate_s10!(concat!("nonexistent"));
|
= note: this error originates in the macro `generate_s10` (in Nightly builds, run with -Z macro-backtrace for more info)

error: expected unsuffixed literal, found `concat!("nonexistent")`
error: expected unsuffixed literal, found expression `concat!("nonexistent")`
--> $DIR/cfg-attr-syntax-validation.rs:30:25
|
LL | #[cfg(feature = $expr)]
@@ -8,7 +8,7 @@ mod a {
}

macro_rules! import {
($p: path) => (use ::$p {S, Z}); //~ERROR expected identifier, found `a::b::c`
($p: path) => (use ::$p {S, Z}); //~ERROR expected identifier, found metavariable
}

import! { a::b::c }
@@ -1,8 +1,8 @@
error: expected identifier, found `a::b::c`
error: expected identifier, found metavariable
--> $DIR/import-prefix-macro-2.rs:11:26
|
LL | ($p: path) => (use ::$p {S, Z});
| ^^ expected identifier
| ^^ expected identifier, found metavariable
...
LL | import! { a::b::c }
| ------------------- in this macro invocation
@@ -31,7 +31,7 @@ macro_rules! foo {
(tt $x:tt) => { bar!(tt $x); };
(expr $x:expr) => { bar!(expr $x); }; //~ ERROR: no rules expected expression `3`
(literal $x:literal) => { bar!(literal $x); }; //~ ERROR: no rules expected literal `4`
(path $x:path) => { bar!(path $x); }; //~ ERROR: no rules expected path `a::b::c`
(path $x:path) => { bar!(path $x); }; //~ ERROR: no rules expected `path` metavariable
(stmt $x:stmt) => { bar!(stmt $x); }; //~ ERROR: no rules expected statement `let abc = 0`
}

@@ -67,7 +67,7 @@ LL | (literal 4) => {};
= help: try using `:tt` instead in the macro definition
= note: this error originates in the macro `foo` (in Nightly builds, run with -Z macro-backtrace for more info)

error: no rules expected path `a::b::c`
error: no rules expected `path` metavariable
--> $DIR/nonterminal-matching.rs:34:35
|
LL | (path $x:path) => { bar!(path $x); };
@@ -46,7 +46,7 @@ macro_rules! test {
(let $p:pat = $e:expr) => {test!(($p,$e))};
// this should be expr
// vvv
(($p:pat, $e:pat)) => {let $p = $e;}; //~ ERROR expected expression, found pattern `1+1`
(($p:pat, $e:pat)) => {let $p = $e;}; //~ ERROR expected expression, found `pat` metavariable
}

fn foo() {
@@ -50,7 +50,7 @@ LL | my_recursive_macro!();
= note: expanding `my_recursive_macro! { }`
= note: to `my_recursive_macro! ();`

error: expected expression, found pattern `A { a : a, b : 0, c : _, .. }`
error: expected expression, found `pat` metavariable
--> $DIR/trace_faulty_macros.rs:16:9
|
LL | $a
@@ -69,22 +69,15 @@ LL | #[derive(Debug)]
LL | fn use_derive_macro_as_attr() {}
| -------------------------------- not a `struct`, `enum` or `union`
|
error: expected expression, found pattern `1+1`
error: expected expression, found `pat` metavariable
--> $DIR/trace_faulty_macros.rs:49:37
|
LL | (let $p:pat = $e:expr) => {test!(($p,$e))};
| -- this is interpreted as expression, but it is expected to be pattern
...
LL | (($p:pat, $e:pat)) => {let $p = $e;};
| ^^ expected expression
...
LL | test!(let x = 1+1);
| ------------------
| | |
| | this is expected to be expression
| in this macro invocation
| ------------------ in this macro invocation
|
= note: when forwarding a matched fragment to another macro-by-example, matchers in the second macro will see an opaque AST of the fragment type, not the underlying tokens
= note: this error originates in the macro `test` (in Nightly builds, run with -Z macro-backtrace for more info)

note: trace_macro
|
||||
macro_rules! mac {
|
||||
($attr_item: meta) => {
|
||||
#[cfg($attr_item)]
|
||||
//~^ ERROR expected unsuffixed literal, found `an(arbitrary token stream)`
|
||||
//~| ERROR expected unsuffixed literal, found `an(arbitrary token stream)`
|
||||
//~^ ERROR expected unsuffixed literal, found `meta` metavariable
|
||||
//~| ERROR expected unsuffixed literal, found `meta` metavariable
|
||||
struct S;
|
||||
}
|
||||
}
|
||||
|
@ -4,7 +4,7 @@ error: expected unsuffixed literal, found `-`
|
||||
LL | #[cfg(feature = -1)]
|
||||
| ^
|
||||
|
||||
error: expected unsuffixed literal, found `an(arbitrary token stream)`
|
||||
error: expected unsuffixed literal, found `meta` metavariable
|
||||
--> $DIR/attr-bad-meta-4.rs:3:15
|
||||
|
|
||||
LL | #[cfg($attr_item)]
|
||||
@ -15,7 +15,7 @@ LL | mac!(an(arbitrary token stream));
|
||||
|
|
||||
= note: this error originates in the macro `mac` (in Nightly builds, run with -Z macro-backtrace for more info)
|
||||
|
||||
error: expected unsuffixed literal, found `an(arbitrary token stream)`
|
||||
error: expected unsuffixed literal, found `meta` metavariable
|
||||
--> $DIR/attr-bad-meta-4.rs:3:15
|
||||
|
|
||||
LL | #[cfg($attr_item)]
|
||||
|
@ -19,7 +19,7 @@ fn main() {
|
||||
|
||||
macro_rules! make {
|
||||
($name:ident) => { #[doc(alias = $name)] pub struct S; }
|
||||
//~^ ERROR expected unsuffixed literal, found `nickname`
|
||||
//~^ ERROR expected unsuffixed literal, found identifier `nickname`
|
||||
}
|
||||
|
||||
make!(nickname); //~ NOTE in this expansion
|
||||
|
@ -20,7 +20,7 @@ help: surround the identifier with quotation marks to make it into a string lite
|
||||
LL | #[cfg(key="foo bar baz")]
|
||||
| + +
|
||||
|
||||
error: expected unsuffixed literal, found `nickname`
|
||||
error: expected unsuffixed literal, found identifier `nickname`
|
||||
--> $DIR/attr-unquoted-ident.rs:21:38
|
||||
|
|
||||
LL | ($name:ident) => { #[doc(alias = $name)] pub struct S; }
|
||||
|
@ -12,7 +12,7 @@ macro_rules! mac2 {
|
||||
($eval:pat) => {
|
||||
let mut $eval = ();
|
||||
//~^ ERROR `mut` must be followed by a named binding
|
||||
//~| ERROR expected identifier, found `does_not_exist!()`
|
||||
//~| ERROR expected identifier, found metavariable
|
||||
};
|
||||
}
|
||||
|
||||
|
@@ -15,11 +15,11 @@ LL - let mut $eval = ();
LL + let $eval = ();
|

error: expected identifier, found `does_not_exist!()`
error: expected identifier, found metavariable
--> $DIR/issue-65122-mac-invoc-in-mut-patterns.rs:13:17
|
LL | let mut $eval = ();
| ^^^^^ expected identifier
| ^^^^^ expected identifier, found metavariable
...
LL | mac2! { does_not_exist!() }
| --------------------------- in this macro invocation
@@ -45,7 +45,7 @@ pub fn main() {
// Make sure we don't accidentally allow `mut $p` where `$p:pat`.
macro_rules! foo {
($p:pat) => {
let mut $p = 0; //~ ERROR expected identifier, found `x`
let mut $p = 0; //~ ERROR expected identifier, found metavariable
}
}
foo!(x);
@@ -158,11 +158,11 @@ LL - let mut W(mut a, W(b, W(ref c, W(d, B { box f }))))
LL + let W(mut a, W(mut b, W(ref c, W(mut d, B { box mut f }))))
|

error: expected identifier, found `x`
error: expected identifier, found metavariable
--> $DIR/mut-patterns.rs:48:21
|
LL | let mut $p = 0;
| ^^ expected identifier
| ^^ expected identifier, found metavariable
...
LL | foo!(x);
| ------- in this macro invocation