Mirror of https://github.com/rust-lang/rust.git (synced 2024-11-27 01:04:03 +00:00)
parser: `token` -> `normalized_token`, `nonnormalized_token` -> `token`
This commit is contained in: commit b2605c118d (parent 59261f0a7c)
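What the rename means: after this commit the parser's `token` / `prev_token` fields hold the raw, non-normalized tokens, while `normalized_token` / `normalized_prev_token` hold the view in which interpolated `$i:ident` and `$l:lifetime` meta-variables are replaced by the identifier or lifetime they refer to (see the `set_token` hunk below). As orientation, a minimal, self-contained Rust sketch of that relationship follows; the types are simplified stand-ins for illustration only, not the rustc API:

#[derive(Clone, Debug, PartialEq)]
#[allow(dead_code)]
enum TokenKind {
    Ident(String),
    Lifetime(String),
    // Stand-ins for `token::Interpolated(NtIdent(..))` / `NtLifetime(..)`.
    InterpolatedIdent(String),
    InterpolatedLifetime(String),
}

#[derive(Clone, Debug, PartialEq)]
struct Token {
    kind: TokenKind,
}

struct Parser {
    /// The current non-normalized token (called `token` after this commit).
    token: Token,
    /// The current normalized token (called `normalized_token` after this commit).
    normalized_token: Token,
}

impl Parser {
    // Mirrors the shape of the new `set_token` in the diff below: store the
    // raw token, then derive the normalized view from it.
    fn set_token(&mut self, token: Token) {
        self.token = token;
        self.normalized_token = match &self.token.kind {
            TokenKind::InterpolatedIdent(name) => {
                Token { kind: TokenKind::Ident(name.clone()) }
            }
            TokenKind::InterpolatedLifetime(name) => {
                Token { kind: TokenKind::Lifetime(name.clone()) }
            }
            _ => self.token.clone(),
        };
    }
}

fn main() {
    let dummy = Token { kind: TokenKind::Ident(String::new()) };
    let mut p = Parser { token: dummy.clone(), normalized_token: dummy };
    // A `$i:ident` meta-variable arrives as an interpolated token...
    p.set_token(Token { kind: TokenKind::InterpolatedIdent("foo".into()) });
    // ...so identifier checks consult the normalized view.
    assert_eq!(p.token.kind, TokenKind::InterpolatedIdent("foo".into()));
    assert_eq!(p.normalized_token.kind, TokenKind::Ident("foo".into()));
    println!("normalization model ok");
}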
@@ -158,7 +158,7 @@ fn parse_args<'a>(
         } // accept trailing commas
         if p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq) {
             named = true;
-            let name = if let token::Ident(name, _) = p.token.kind {
+            let name = if let token::Ident(name, _) = p.normalized_token.kind {
                 p.bump();
                 name
             } else {
@@ -889,9 +889,8 @@ fn parse_nt_inner<'a>(p: &mut Parser<'a>, sp: Span, name: Symbol) -> PResult<'a,
         // this could be handled like a token, since it is one
         sym::ident => {
             if let Some((name, is_raw)) = get_macro_name(&p.token) {
-                let span = p.token.span;
                 p.bump();
-                token::NtIdent(Ident::new(name, span), is_raw)
+                token::NtIdent(Ident::new(name, p.normalized_prev_token.span), is_raw)
             } else {
                 let token_str = pprust::token_to_string(&p.token);
                 let msg = &format!("expected ident, found {}", &token_str);
@@ -192,12 +192,12 @@ impl<'a> Parser<'a> {
             TokenKind::CloseDelim(token::DelimToken::Brace),
             TokenKind::CloseDelim(token::DelimToken::Paren),
         ];
-        if let token::Ident(name, false) = self.token.kind {
-            if Ident::new(name, self.token.span).is_raw_guess()
+        if let token::Ident(name, false) = self.normalized_token.kind {
+            if Ident::new(name, self.normalized_token.span).is_raw_guess()
                 && self.look_ahead(1, |t| valid_follow.contains(&t.kind))
             {
                 err.span_suggestion(
-                    self.token.span,
+                    self.normalized_token.span,
                     "you can escape reserved keywords to use them as identifiers",
                     format!("r#{}", name),
                     Applicability::MaybeIncorrect,
@@ -97,15 +97,14 @@ impl<'a> Parser<'a> {
     fn parse_expr_catch_underscore(&mut self) -> PResult<'a, P<Expr>> {
         match self.parse_expr() {
             Ok(expr) => Ok(expr),
-            Err(mut err) => match self.token.kind {
+            Err(mut err) => match self.normalized_token.kind {
                 token::Ident(name, false)
                     if name == kw::Underscore && self.look_ahead(1, |t| t == &token::Comma) =>
                 {
                     // Special-case handling of `foo(_, _, _)`
                     err.emit();
-                    let sp = self.token.span;
                     self.bump();
-                    Ok(self.mk_expr(sp, ExprKind::Err, AttrVec::new()))
+                    Ok(self.mk_expr(self.prev_token.span, ExprKind::Err, AttrVec::new()))
                 }
                 _ => Err(err),
             },
@@ -166,7 +165,7 @@ impl<'a> Parser<'a> {
         while let Some(op) = self.check_assoc_op() {
             // Adjust the span for interpolated LHS to point to the `$lhs` token
             // and not to what it refers to.
-            let lhs_span = match self.unnormalized_prev_token.kind {
+            let lhs_span = match self.prev_token.kind {
                 TokenKind::Interpolated(..) => self.prev_span,
                 _ => lhs.span,
             };
@@ -337,7 +336,7 @@ impl<'a> Parser<'a> {
     /// Also performs recovery for `and` / `or` which are mistaken for `&&` and `||` respectively.
     fn check_assoc_op(&self) -> Option<Spanned<AssocOp>> {
         Some(Spanned {
-            node: match (AssocOp::from_token(&self.token), &self.token.kind) {
+            node: match (AssocOp::from_token(&self.token), &self.normalized_token.kind) {
                 (Some(op), _) => op,
                 (None, token::Ident(sym::and, false)) => {
                     self.error_bad_logical_op("and", "&&", "conjunction");
@@ -349,7 +348,7 @@ impl<'a> Parser<'a> {
                 }
                 _ => return None,
             },
-            span: self.token.span,
+            span: self.normalized_token.span,
         })
     }
 
@@ -441,7 +440,7 @@ impl<'a> Parser<'a> {
         let attrs = self.parse_or_use_outer_attributes(attrs)?;
         let lo = self.token.span;
         // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr()
-        let (hi, ex) = match self.token.kind {
+        let (hi, ex) = match self.normalized_token.kind {
             token::Not => self.parse_unary_expr(lo, UnOp::Not), // `!expr`
             token::Tilde => self.recover_tilde_expr(lo), // `~expr`
             token::BinOp(token::Minus) => self.parse_unary_expr(lo, UnOp::Neg), // `-expr`
@@ -527,7 +526,7 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, (Span, P<Expr>)> {
         expr.map(|e| {
             (
-                match self.unnormalized_prev_token.kind {
+                match self.prev_token.kind {
                     TokenKind::Interpolated(..) => self.prev_span,
                     _ => e.span,
                 },
@@ -708,7 +707,7 @@ impl<'a> Parser<'a> {
     }
 
     fn parse_dot_suffix_expr(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
-        match self.token.kind {
+        match self.normalized_token.kind {
             token::Ident(..) => self.parse_dot_suffix(base, lo),
             token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => {
                 Ok(self.parse_tuple_field_access_expr(lo, base, symbol, suffix))
@@ -777,8 +776,8 @@ impl<'a> Parser<'a> {
         field: Symbol,
         suffix: Option<Symbol>,
     ) -> P<Expr> {
-        let span = self.token.span;
         self.bump();
+        let span = self.prev_token.span;
         let field = ExprKind::Field(base, Ident::new(field, span));
         self.expect_no_suffix(span, "a tuple index", suffix);
         self.mk_expr(lo.to(span), field, AttrVec::new())
@@ -802,7 +801,7 @@ impl<'a> Parser<'a> {
 
     /// Assuming we have just parsed `.`, continue parsing into an expression.
     fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
-        if self.token.span.rust_2018() && self.eat_keyword(kw::Await) {
+        if self.normalized_token.span.rust_2018() && self.eat_keyword(kw::Await) {
             return self.mk_await_expr(self_arg, lo);
         }
 
@@ -916,7 +915,7 @@ impl<'a> Parser<'a> {
             // | ^ expected expression
             self.bump();
             Ok(self.mk_expr_err(self.token.span))
-        } else if self.token.span.rust_2018() {
+        } else if self.normalized_token.span.rust_2018() {
             // `Span::rust_2018()` is somewhat expensive; don't get it repeatedly.
             if self.check_keyword(kw::Async) {
                 if self.is_async_block() {
@@ -1346,7 +1345,7 @@ impl<'a> Parser<'a> {
             if self.eat_keyword(kw::Static) { Movability::Static } else { Movability::Movable };
 
         let asyncness =
-            if self.token.span.rust_2018() { self.parse_asyncness() } else { Async::No };
+            if self.normalized_token.span.rust_2018() { self.parse_asyncness() } else { Async::No };
         if asyncness.is_async() {
             // Feature-gate `async ||` closures.
             self.sess.gated_spans.gate(sym::async_closure, self.prev_span);
@@ -1560,9 +1559,8 @@ impl<'a> Parser<'a> {
 
     fn eat_label(&mut self) -> Option<Label> {
         self.token.lifetime().map(|ident| {
-            let span = self.token.span;
             self.bump();
-            Label { ident: Ident::new(ident.name, span) }
+            Label { ident }
         })
     }
 
@@ -1704,7 +1702,7 @@ impl<'a> Parser<'a> {
     fn is_try_block(&self) -> bool {
         self.token.is_keyword(kw::Try) &&
         self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) &&
-        self.token.span.rust_2018() &&
+        self.normalized_token.span.rust_2018() &&
         // Prevent `while try {} {}`, `if try {} {} else {}`, etc.
         !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
     }
@@ -1854,13 +1852,12 @@ impl<'a> Parser<'a> {
 
     /// Use in case of error after field-looking code: `S { foo: () with a }`.
     fn find_struct_error_after_field_looking_code(&self) -> Option<Field> {
-        if let token::Ident(name, _) = self.token.kind {
+        if let token::Ident(name, _) = self.normalized_token.kind {
             if !self.token.is_reserved_ident() && self.look_ahead(1, |t| *t == token::Colon) {
-                let span = self.token.span;
                 return Some(ast::Field {
-                    ident: Ident::new(name, span),
-                    span,
-                    expr: self.mk_expr_err(span),
+                    ident: Ident::new(name, self.normalized_token.span),
+                    span: self.token.span,
+                    expr: self.mk_expr_err(self.token.span),
                     is_shorthand: false,
                     attrs: AttrVec::new(),
                     id: DUMMY_NODE_ID,
@@ -747,11 +747,10 @@ impl<'a> Parser<'a> {
     }
 
     fn parse_ident_or_underscore(&mut self) -> PResult<'a, ast::Ident> {
-        match self.token.kind {
+        match self.normalized_token.kind {
             token::Ident(name @ kw::Underscore, false) => {
-                let span = self.token.span;
                 self.bump();
-                Ok(Ident::new(name, span))
+                Ok(Ident::new(name, self.normalized_prev_token.span))
             }
             _ => self.parse_ident(),
         }
@@ -1545,7 +1544,7 @@ impl<'a> Parser<'a> {
 
         let is_name_required = match self.token.kind {
             token::DotDotDot => false,
-            _ => req_name(&self.token),
+            _ => req_name(&self.normalized_token),
         };
         let (pat, ty) = if is_name_required || self.is_named_param() {
             debug!("parse_param_general parse_pat (is_name_required:{})", is_name_required);
@@ -1611,12 +1610,11 @@ impl<'a> Parser<'a> {
     fn parse_self_param(&mut self) -> PResult<'a, Option<Param>> {
         // Extract an identifier *after* having confirmed that the token is one.
         let expect_self_ident = |this: &mut Self| {
-            match this.token.kind {
+            match this.normalized_token.kind {
                 // Preserve hygienic context.
                 token::Ident(name, _) => {
-                    let span = this.token.span;
                     this.bump();
-                    Ident::new(name, span)
+                    Ident::new(name, this.normalized_prev_token.span)
                 }
                 _ => unreachable!(),
             }
@@ -1653,7 +1651,7 @@ impl<'a> Parser<'a> {
         // Only a limited set of initial token sequences is considered `self` parameters; anything
         // else is parsed as a normal function parameter list, so some lookahead is required.
         let eself_lo = self.token.span;
-        let (eself, eself_ident, eself_hi) = match self.token.kind {
+        let (eself, eself_ident, eself_hi) = match self.normalized_token.kind {
             token::BinOp(token::And) => {
                 let eself = if is_isolated_self(self, 1) {
                     // `&self`
@@ -86,23 +86,22 @@ macro_rules! maybe_recover_from_interpolated_ty_qpath {
 #[derive(Clone)]
 pub struct Parser<'a> {
     pub sess: &'a ParseSess,
+    /// The current non-normalized token.
+    pub token: Token,
     /// The current normalized token.
     /// "Normalized" means that some interpolated tokens
     /// (`$i: ident` and `$l: lifetime` meta-variables) are replaced
     /// with non-interpolated identifier and lifetime tokens they refer to.
-    /// Use span from this token if you need an isolated span.
-    pub token: Token,
-    /// The current non-normalized token if it's different from `token`.
-    /// Use span from this token if you need to concatenate it with some neighbouring spans.
-    unnormalized_token: Token,
+    /// Use this if you need to check for `token::Ident` or `token::Lifetime` specifically,
+    /// this also includes edition checks for edition-specific keyword identifiers.
+    pub normalized_token: Token,
+    /// The previous non-normalized token.
+    pub prev_token: Token,
     /// The previous normalized token.
-    /// Use span from this token if you need an isolated span.
-    prev_token: Token,
-    /// The previous non-normalized token if it's different from `prev_token`.
-    /// Use span from this token if you need to concatenate it with some neighbouring spans.
-    unnormalized_prev_token: Token,
-    /// Equivalent to `unnormalized_prev_token.span`.
-    /// FIXME: Remove in favor of `(unnormalized_)prev_token.span`.
+    /// Use this if you need to check for `token::Ident` or `token::Lifetime` specifically,
+    /// this also includes edition checks for edition-specific keyword identifiers.
+    pub normalized_prev_token: Token,
+    /// FIXME: Remove in favor of the equivalent `prev_token.span`.
     pub prev_span: Span,
     restrictions: Restrictions,
     /// Used to determine the path to externally loaded source files.
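An illustrative reading of the field comments above (my gloss, not part of the diff): identifier, lifetime, and edition-specific keyword checks consult `normalized_token`, while `token` keeps the source token whose span is the one to concatenate with neighbouring spans. Both styles occur verbatim in the hunks of this commit, e.g. `if let token::Ident(name, _) = self.normalized_token.kind { ... }` for an identity check, versus `self.mk_expr(lo.to(span), field, AttrVec::new())` where `span` comes from a raw (non-normalized) token.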
@@ -376,9 +375,9 @@ impl<'a> Parser<'a> {
         let mut parser = Parser {
             sess,
             token: Token::dummy(),
-            unnormalized_token: Token::dummy(),
+            normalized_token: Token::dummy(),
             prev_token: Token::dummy(),
-            unnormalized_prev_token: Token::dummy(),
+            normalized_prev_token: Token::dummy(),
             prev_span: DUMMY_SP,
             restrictions: Restrictions::empty(),
             recurse_into_file_modules,
@@ -483,7 +482,7 @@ impl<'a> Parser<'a> {
     }
 
     fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> {
-        match self.token.kind {
+        match self.normalized_token.kind {
             token::Ident(name, _) => {
                 if self.token.is_reserved_ident() {
                     let mut err = self.expected_ident_found();
@@ -493,9 +492,8 @@ impl<'a> Parser<'a> {
                         return Err(err);
                     }
                 }
-                let span = self.token.span;
                 self.bump();
-                Ok(Ident::new(name, span))
+                Ok(Ident::new(name, self.normalized_prev_token.span))
             }
             _ => Err(match self.prev_token.kind {
                 TokenKind::DocComment(..) => {
@@ -825,16 +823,16 @@ impl<'a> Parser<'a> {
     // tokens are replaced with usual identifier and lifetime tokens,
     // so the former are never encountered during normal parsing.
     crate fn set_token(&mut self, token: Token) {
-        self.unnormalized_token = token;
-        self.token = match &self.unnormalized_token.kind {
+        self.token = token;
+        self.normalized_token = match &self.token.kind {
             token::Interpolated(nt) => match **nt {
                 token::NtIdent(ident, is_raw) => {
                     Token::new(token::Ident(ident.name, is_raw), ident.span)
                 }
                 token::NtLifetime(ident) => Token::new(token::Lifetime(ident.name), ident.span),
-                _ => self.unnormalized_token.clone(),
+                _ => self.token.clone(),
             },
-            _ => self.unnormalized_token.clone(),
+            _ => self.token.clone(),
         }
     }
 
@@ -848,11 +846,11 @@ impl<'a> Parser<'a> {
 
         // Update the current and previous tokens.
         self.prev_token = self.token.take();
-        self.unnormalized_prev_token = self.unnormalized_token.take();
+        self.normalized_prev_token = self.normalized_token.take();
         self.set_token(next_token);
 
         // Update fields derived from the previous token.
-        self.prev_span = self.unnormalized_prev_token.span;
+        self.prev_span = self.prev_token.span;
 
         // Diagnostics.
         self.expected_tokens.clear();
@@ -860,7 +858,7 @@ impl<'a> Parser<'a> {
 
     /// Advance the parser by one token.
     pub fn bump(&mut self) {
-        let next_token = self.next_tok(self.unnormalized_token.span);
+        let next_token = self.next_tok(self.token.span);
         self.bump_with(next_token);
     }
 
@@ -891,7 +889,7 @@ impl<'a> Parser<'a> {
     /// Parses asyncness: `async` or nothing.
     fn parse_asyncness(&mut self) -> Async {
         if self.eat_keyword(kw::Async) {
-            let span = self.prev_span;
+            let span = self.normalized_prev_token.span;
             Async::Yes { span, closure_id: DUMMY_NODE_ID, return_impl_trait_id: DUMMY_NODE_ID }
         } else {
             Async::No
@@ -134,7 +134,7 @@ impl<'a> Parser<'a> {
             path
         });
 
-        let lo = self.unnormalized_token.span;
+        let lo = self.token.span;
         let mut segments = Vec::new();
         let mod_sep_ctxt = self.token.span.ctxt();
         if self.eat(&token::ModSep) {
@@ -238,11 +238,10 @@ impl<'a> Parser<'a> {
     }
 
     pub(super) fn parse_path_segment_ident(&mut self) -> PResult<'a, Ident> {
-        match self.token.kind {
+        match self.normalized_token.kind {
             token::Ident(name, _) if name.is_path_segment_keyword() => {
-                let span = self.token.span;
                 self.bump();
-                Ok(Ident::new(name, span))
+                Ok(Ident::new(name, self.normalized_prev_token.span))
             }
             _ => self.parse_ident(),
         }
@@ -5,7 +5,7 @@ use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
 use rustc_errors::{pluralize, struct_span_err, Applicability, PResult};
 use rustc_span::source_map::Span;
 use rustc_span::symbol::{kw, sym};
-use syntax::ast::{self, BareFnTy, FnRetTy, GenericParam, Ident, Lifetime, MutTy, Ty, TyKind};
+use syntax::ast::{self, BareFnTy, FnRetTy, GenericParam, Lifetime, MutTy, Ty, TyKind};
 use syntax::ast::{
     GenericBound, GenericBounds, PolyTraitRef, TraitBoundModifier, TraitObjectSyntax,
 };
@@ -323,7 +323,7 @@ impl<'a> Parser<'a> {
     /// Is a `dyn B0 + ... + Bn` type allowed here?
     fn is_explicit_dyn_type(&mut self) -> bool {
         self.check_keyword(kw::Dyn)
-            && (self.token.span.rust_2018()
+            && (self.normalized_token.span.rust_2018()
                 || self.look_ahead(1, |t| {
                     t.can_begin_bound() && !can_continue_type_after_non_fn_ident(t)
                 }))
@@ -604,9 +604,8 @@ impl<'a> Parser<'a> {
     /// Parses a single lifetime `'a` or panics.
     pub fn expect_lifetime(&mut self) -> Lifetime {
         if let Some(ident) = self.token.lifetime() {
-            let span = self.token.span;
             self.bump();
-            Lifetime { ident: Ident::new(ident.name, span), id: ast::DUMMY_NODE_ID }
+            Lifetime { ident, id: ast::DUMMY_NODE_ID }
         } else {
             self.span_bug(self.token.span, "not a lifetime")
         }
@@ -1,14 +1,16 @@
 error[E0507]: cannot move out of static item `D`
-  --> $DIR/move-error-snippets.rs:16:18
+  --> $DIR/move-error-snippets-ext.rs:5:17
    |
-LL | | #[macro_use]
-   | |__________________^ move occurs because `D` has type `A`, which does not implement the `Copy` trait
-...
-LL | aaa!(D);
-   | __________________^
-...
-LL | sss!();
-   | ------- in this macro invocation
+LL |         let a = $c;
+   |                 ^^
+   |                 |
+   |                 move occurs because `D` has type `A`, which does not implement the `Copy` trait
+   |                 help: consider borrowing here: `&$c`
+   |
+  ::: $DIR/move-error-snippets.rs:21:1
+   |
+LL | sss!();
+   | ------- in this macro invocation
    |
    = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)
 
@@ -1,7 +1,7 @@
 // Test that macro-expanded non-inline modules behave correctly
 
 macro_rules! mod_decl {
-    ($i:ident) => { mod $i; }
+    ($i:ident) => { mod $i; } //~ ERROR Cannot declare a non-inline module inside a block
 }
 
 mod macro_expanded_mod_helper {
@@ -10,5 +10,4 @@ mod macro_expanded_mod_helper {
 
 fn main() {
     mod_decl!(foo);
-    //~^ ERROR Cannot declare a non-inline module inside a block
 }
@@ -1,8 +1,13 @@
 error: Cannot declare a non-inline module inside a block unless it has a path attribute
-  --> $DIR/macro-expanded-mod.rs:12:15
+  --> $DIR/macro-expanded-mod.rs:4:25
    |
+LL |     ($i:ident) => { mod $i; }
+   |                         ^^
+...
 LL |     mod_decl!(foo);
-   |               ^^^
+   |     --------------- in this macro invocation
+   |
+   = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error: aborting due to previous error
 
@@ -1,10 +1,10 @@
 error[E0124]: field `a` is already declared
-  --> $DIR/fields-definition.rs:14:17
+  --> $DIR/fields-definition.rs:14:13
    |
 LL |         a: u8,
    |         ----- `a` first declared here
 LL |         $a: u8,
-   |             ^^ field already declared
+   |         ^^^^^^ field already declared
 ...
 LL |     legacy!(a);
    |     ----------- in this macro invocation
@@ -1,10 +1,9 @@
 macro_rules! get_opt {
     ($tgt:expr, $field:ident) => {
-        if $tgt.has_$field() {}
+        if $tgt.has_$field() {} //~ ERROR expected `{`, found `foo`
     }
 }
 
 fn main() {
     get_opt!(bar, foo);
-    //~^ ERROR expected `{`, found `foo`
 }
@@ -1,13 +1,17 @@
 error: expected `{`, found `foo`
-  --> $DIR/issue-39848.rs:8:19
+  --> $DIR/issue-39848.rs:3:21
    |
 LL |         if $tgt.has_$field() {}
-   |         -- -- help: try placing this code inside a block: `{ () }`
-   |         |
+   |         -- ^^^^^^--
+   |         |  |
+   |         |  expected `{`
+   |         |  help: try placing this code inside a block: `{ $field() }`
    |         this `if` expression has a condition, but no block
 ...
 LL |     get_opt!(bar, foo);
-   |                   ^^^ expected `{`
+   |     ------------------- in this macro invocation
+   |
+   = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)
 
 error: aborting due to previous error
 