Auto merge of #133793 - nnethercote:speed-up-expected_tokens, r=spastorino

Speed up `Parser::expected_tokens`

The constant pushing/clearing of `Parser::expected_tokens` during parsing is slow. This PR speeds it up greatly.

r? `@estebank`
This commit is contained in:
bors 2024-12-19 19:58:57 +00:00
commit 9e136a30a9
22 changed files with 1415 additions and 844 deletions

View File

@ -1,16 +1,16 @@
use ast::token::IdentIsRaw;
use lint::BuiltinLintDiag;
use rustc_ast::AsmMacro;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter};
use rustc_ast::tokenstream::TokenStream;
use rustc_ast::{AsmMacro, token};
use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
use rustc_errors::PResult;
use rustc_expand::base::*;
use rustc_index::bit_set::GrowableBitSet;
use rustc_parse::parser::Parser;
use rustc_parse::exp;
use rustc_parse::parser::{ExpKeywordPair, Parser};
use rustc_session::lint;
use rustc_span::{ErrorGuaranteed, Ident, InnerSpan, Span, Symbol, kw, sym};
use rustc_span::{ErrorGuaranteed, Ident, InnerSpan, Span, Symbol, kw};
use rustc_target::asm::InlineAsmArch;
use smallvec::smallvec;
use {rustc_ast as ast, rustc_parse_format as parse};
@ -38,16 +38,16 @@ pub struct AsmArgs {
/// - `Err(_)` if the current token matches the keyword, but was not expected
fn eat_operand_keyword<'a>(
p: &mut Parser<'a>,
symbol: Symbol,
exp: ExpKeywordPair,
asm_macro: AsmMacro,
) -> PResult<'a, bool> {
if matches!(asm_macro, AsmMacro::Asm) {
Ok(p.eat_keyword(symbol))
Ok(p.eat_keyword(exp))
} else {
let span = p.token.span;
if p.eat_keyword_noexpect(symbol) {
if p.eat_keyword_noexpect(exp.kw) {
// in gets printed as `r#in` otherwise
let symbol = if symbol == kw::In { "in" } else { symbol.as_str() };
let symbol = if exp.kw == kw::In { "in" } else { exp.kw.as_str() };
Err(p.dcx().create_err(errors::AsmUnsupportedOperand {
span,
symbol,
@ -95,13 +95,13 @@ pub fn parse_asm_args<'a>(
let mut allow_templates = true;
while p.token != token::Eof {
if !p.eat(&token::Comma) {
if !p.eat(exp!(Comma)) {
if allow_templates {
// After a template string, we always expect *only* a comma...
return Err(dcx.create_err(errors::AsmExpectedComma { span: p.token.span }));
} else {
// ...after that delegate to `expect` to also include the other expected tokens.
return Err(p.expect(&token::Comma).err().unwrap());
return Err(p.expect(exp!(Comma)).err().unwrap());
}
}
if p.token == token::Eof {
@ -109,14 +109,14 @@ pub fn parse_asm_args<'a>(
} // accept trailing commas
// Parse clobber_abi
if p.eat_keyword(sym::clobber_abi) {
if p.eat_keyword(exp!(ClobberAbi)) {
parse_clobber_abi(p, &mut args)?;
allow_templates = false;
continue;
}
// Parse options
if p.eat_keyword(sym::options) {
if p.eat_keyword(exp!(Options)) {
parse_options(p, &mut args, asm_macro)?;
allow_templates = false;
continue;
@ -128,7 +128,7 @@ pub fn parse_asm_args<'a>(
let name = if p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq) {
let (ident, _) = p.token.ident().unwrap();
p.bump();
p.expect(&token::Eq)?;
p.expect(exp!(Eq))?;
allow_templates = false;
Some(ident.name)
} else {
@ -136,57 +136,57 @@ pub fn parse_asm_args<'a>(
};
let mut explicit_reg = false;
let op = if eat_operand_keyword(p, kw::In, asm_macro)? {
let op = if eat_operand_keyword(p, exp!(In), asm_macro)? {
let reg = parse_reg(p, &mut explicit_reg)?;
if p.eat_keyword(kw::Underscore) {
if p.eat_keyword(exp!(Underscore)) {
let err = dcx.create_err(errors::AsmUnderscoreInput { span: p.token.span });
return Err(err);
}
let expr = p.parse_expr()?;
ast::InlineAsmOperand::In { reg, expr }
} else if eat_operand_keyword(p, sym::out, asm_macro)? {
} else if eat_operand_keyword(p, exp!(Out), asm_macro)? {
let reg = parse_reg(p, &mut explicit_reg)?;
let expr = if p.eat_keyword(kw::Underscore) { None } else { Some(p.parse_expr()?) };
let expr = if p.eat_keyword(exp!(Underscore)) { None } else { Some(p.parse_expr()?) };
ast::InlineAsmOperand::Out { reg, expr, late: false }
} else if eat_operand_keyword(p, sym::lateout, asm_macro)? {
} else if eat_operand_keyword(p, exp!(Lateout), asm_macro)? {
let reg = parse_reg(p, &mut explicit_reg)?;
let expr = if p.eat_keyword(kw::Underscore) { None } else { Some(p.parse_expr()?) };
let expr = if p.eat_keyword(exp!(Underscore)) { None } else { Some(p.parse_expr()?) };
ast::InlineAsmOperand::Out { reg, expr, late: true }
} else if eat_operand_keyword(p, sym::inout, asm_macro)? {
} else if eat_operand_keyword(p, exp!(Inout), asm_macro)? {
let reg = parse_reg(p, &mut explicit_reg)?;
if p.eat_keyword(kw::Underscore) {
if p.eat_keyword(exp!(Underscore)) {
let err = dcx.create_err(errors::AsmUnderscoreInput { span: p.token.span });
return Err(err);
}
let expr = p.parse_expr()?;
if p.eat(&token::FatArrow) {
if p.eat(exp!(FatArrow)) {
let out_expr =
if p.eat_keyword(kw::Underscore) { None } else { Some(p.parse_expr()?) };
if p.eat_keyword(exp!(Underscore)) { None } else { Some(p.parse_expr()?) };
ast::InlineAsmOperand::SplitInOut { reg, in_expr: expr, out_expr, late: false }
} else {
ast::InlineAsmOperand::InOut { reg, expr, late: false }
}
} else if eat_operand_keyword(p, sym::inlateout, asm_macro)? {
} else if eat_operand_keyword(p, exp!(Inlateout), asm_macro)? {
let reg = parse_reg(p, &mut explicit_reg)?;
if p.eat_keyword(kw::Underscore) {
if p.eat_keyword(exp!(Underscore)) {
let err = dcx.create_err(errors::AsmUnderscoreInput { span: p.token.span });
return Err(err);
}
let expr = p.parse_expr()?;
if p.eat(&token::FatArrow) {
if p.eat(exp!(FatArrow)) {
let out_expr =
if p.eat_keyword(kw::Underscore) { None } else { Some(p.parse_expr()?) };
if p.eat_keyword(exp!(Underscore)) { None } else { Some(p.parse_expr()?) };
ast::InlineAsmOperand::SplitInOut { reg, in_expr: expr, out_expr, late: true }
} else {
ast::InlineAsmOperand::InOut { reg, expr, late: true }
}
} else if eat_operand_keyword(p, sym::label, asm_macro)? {
} else if eat_operand_keyword(p, exp!(Label), asm_macro)? {
let block = p.parse_block()?;
ast::InlineAsmOperand::Label { block }
} else if p.eat_keyword(kw::Const) {
} else if p.eat_keyword(exp!(Const)) {
let anon_const = p.parse_expr_anon_const()?;
ast::InlineAsmOperand::Const { anon_const }
} else if p.eat_keyword(sym::sym) {
} else if p.eat_keyword(exp!(Sym)) {
let expr = p.parse_expr()?;
let ast::ExprKind::Path(qself, path) = &expr.kind else {
let err = dcx.create_err(errors::AsmSymNoPath { span: expr.span });
@ -389,31 +389,31 @@ fn parse_options<'a>(
) -> PResult<'a, ()> {
let span_start = p.prev_token.span;
p.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
p.expect(exp!(OpenParen))?;
while !p.eat(&token::CloseDelim(Delimiter::Parenthesis)) {
const OPTIONS: [(Symbol, ast::InlineAsmOptions); ast::InlineAsmOptions::COUNT] = [
(sym::pure, ast::InlineAsmOptions::PURE),
(sym::nomem, ast::InlineAsmOptions::NOMEM),
(sym::readonly, ast::InlineAsmOptions::READONLY),
(sym::preserves_flags, ast::InlineAsmOptions::PRESERVES_FLAGS),
(sym::noreturn, ast::InlineAsmOptions::NORETURN),
(sym::nostack, ast::InlineAsmOptions::NOSTACK),
(sym::may_unwind, ast::InlineAsmOptions::MAY_UNWIND),
(sym::att_syntax, ast::InlineAsmOptions::ATT_SYNTAX),
(kw::Raw, ast::InlineAsmOptions::RAW),
while !p.eat(exp!(CloseParen)) {
const OPTIONS: [(ExpKeywordPair, ast::InlineAsmOptions); ast::InlineAsmOptions::COUNT] = [
(exp!(Pure), ast::InlineAsmOptions::PURE),
(exp!(Nomem), ast::InlineAsmOptions::NOMEM),
(exp!(Readonly), ast::InlineAsmOptions::READONLY),
(exp!(PreservesFlags), ast::InlineAsmOptions::PRESERVES_FLAGS),
(exp!(Noreturn), ast::InlineAsmOptions::NORETURN),
(exp!(Nostack), ast::InlineAsmOptions::NOSTACK),
(exp!(MayUnwind), ast::InlineAsmOptions::MAY_UNWIND),
(exp!(AttSyntax), ast::InlineAsmOptions::ATT_SYNTAX),
(exp!(Raw), ast::InlineAsmOptions::RAW),
];
'blk: {
for (symbol, option) in OPTIONS {
for (exp, option) in OPTIONS {
let kw_matched = if asm_macro.is_supported_option(option) {
p.eat_keyword(symbol)
p.eat_keyword(exp)
} else {
p.eat_keyword_noexpect(symbol)
p.eat_keyword_noexpect(exp.kw)
};
if kw_matched {
try_set_option(p, args, asm_macro, symbol, option);
try_set_option(p, args, asm_macro, exp.kw, option);
break 'blk;
}
}
@ -422,10 +422,10 @@ fn parse_options<'a>(
}
// Allow trailing commas
if p.eat(&token::CloseDelim(Delimiter::Parenthesis)) {
if p.eat(exp!(CloseParen)) {
break;
}
p.expect(&token::Comma)?;
p.expect(exp!(Comma))?;
}
let new_span = span_start.to(p.prev_token.span);
@ -437,14 +437,14 @@ fn parse_options<'a>(
fn parse_clobber_abi<'a>(p: &mut Parser<'a>, args: &mut AsmArgs) -> PResult<'a, ()> {
let span_start = p.prev_token.span;
p.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
p.expect(exp!(OpenParen))?;
if p.eat(&token::CloseDelim(Delimiter::Parenthesis)) {
if p.eat(exp!(CloseParen)) {
return Err(p.dcx().create_err(errors::NonABI { span: p.token.span }));
}
let mut new_abis = Vec::new();
while !p.eat(&token::CloseDelim(Delimiter::Parenthesis)) {
while !p.eat(exp!(CloseParen)) {
match p.parse_str_lit() {
Ok(str_lit) => {
new_abis.push((str_lit.symbol_unescaped, str_lit.span));
@ -456,10 +456,10 @@ fn parse_clobber_abi<'a>(p: &mut Parser<'a>, args: &mut AsmArgs) -> PResult<'a,
};
// Allow trailing commas
if p.eat(&token::CloseDelim(Delimiter::Parenthesis)) {
if p.eat(exp!(CloseParen)) {
break;
}
p.expect(&token::Comma)?;
p.expect(exp!(Comma))?;
}
let full_span = span_start.to(p.prev_token.span);
@ -482,7 +482,7 @@ fn parse_reg<'a>(
p: &mut Parser<'a>,
explicit_reg: &mut bool,
) -> PResult<'a, ast::InlineAsmRegOrRegClass> {
p.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
p.expect(exp!(OpenParen))?;
let result = match p.token.uninterpolate().kind {
token::Ident(name, IdentIsRaw::No) => ast::InlineAsmRegOrRegClass::RegClass(name),
token::Literal(token::Lit { kind: token::LitKind::Str, symbol, suffix: _ }) => {
@ -496,7 +496,7 @@ fn parse_reg<'a>(
}
};
p.bump();
p.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
p.expect(exp!(CloseParen))?;
Ok(result)
}

View File

@ -7,6 +7,7 @@ use rustc_ast::{DelimArgs, Expr, ExprKind, MacCall, Path, PathSegment, UnOp, tok
use rustc_ast_pretty::pprust;
use rustc_errors::PResult;
use rustc_expand::base::{DummyResult, ExpandResult, ExtCtxt, MacEager, MacroExpanderResult};
use rustc_parse::exp;
use rustc_parse::parser::Parser;
use rustc_span::{DUMMY_SP, Ident, Span, Symbol, sym};
use thin_vec::thin_vec;
@ -143,7 +144,7 @@ fn parse_assert<'a>(cx: &ExtCtxt<'a>, sp: Span, stream: TokenStream) -> PResult<
cx.dcx().emit_err(errors::AssertMissingComma { span: parser.token.span, comma });
parse_custom_message(&mut parser)
} else if parser.eat(&token::Comma) {
} else if parser.eat(exp!(Comma)) {
parse_custom_message(&mut parser)
} else {
None

View File

@ -6,6 +6,7 @@ use rustc_ast::token;
use rustc_ast::tokenstream::TokenStream;
use rustc_errors::PResult;
use rustc_expand::base::{DummyResult, ExpandResult, ExtCtxt, MacEager, MacroExpanderResult};
use rustc_parse::exp;
use rustc_span::Span;
use {rustc_ast as ast, rustc_attr_parsing as attr};
@ -48,9 +49,9 @@ fn parse_cfg<'a>(
let cfg = p.parse_meta_item_inner()?;
let _ = p.eat(&token::Comma);
let _ = p.eat(exp!(Comma));
if !p.eat(&token::Eof) {
if !p.eat(exp!(Eof)) {
return Err(cx.dcx().create_err(errors::OneCfgPattern { span }));
}

View File

@ -12,6 +12,7 @@ use rustc_errors::{Applicability, Diag, MultiSpan, PResult, SingleLabelManySpans
use rustc_expand::base::*;
use rustc_lint_defs::builtin::NAMED_ARGUMENTS_USED_POSITIONALLY;
use rustc_lint_defs::{BufferedEarlyLint, BuiltinLintDiag, LintId};
use rustc_parse::exp;
use rustc_parse_format as parse;
use rustc_span::{BytePos, ErrorGuaranteed, Ident, InnerSpan, Span, Symbol};
@ -93,12 +94,12 @@ fn parse_args<'a>(ecx: &ExtCtxt<'a>, sp: Span, tts: TokenStream) -> PResult<'a,
let mut first = true;
while p.token != token::Eof {
if !p.eat(&token::Comma) {
if !p.eat(exp!(Comma)) {
if first {
p.clear_expected_tokens();
p.clear_expected_token_types();
}
match p.expect(&token::Comma) {
match p.expect(exp!(Comma)) {
Err(err) => {
match token::TokenKind::Comma.similar_tokens() {
Some(tks) if tks.contains(&p.token.kind) => {
@ -122,7 +123,7 @@ fn parse_args<'a>(ecx: &ExtCtxt<'a>, sp: Span, tts: TokenStream) -> PResult<'a,
match p.token.ident() {
Some((ident, _)) if p.look_ahead(1, |t| *t == token::Eq) => {
p.bump();
p.expect(&token::Eq)?;
p.expect(exp!(Eq))?;
let expr = p.parse_expr()?;
if let Some((_, prev)) = args.by_name(ident.name) {
ecx.dcx().emit_err(errors::FormatDuplicateArg {

View File

@ -3,7 +3,8 @@ use rustc_ast::tokenstream::TokenStream;
use rustc_ast::{Pat, Ty, ast};
use rustc_errors::PResult;
use rustc_expand::base::{self, DummyResult, ExpandResult, ExtCtxt, MacroExpanderResult};
use rustc_span::{Span, sym};
use rustc_parse::exp;
use rustc_span::Span;
pub(crate) fn expand<'cx>(
cx: &'cx mut ExtCtxt<'_>,
@ -24,7 +25,7 @@ fn parse_pat_ty<'a>(cx: &mut ExtCtxt<'a>, stream: TokenStream) -> PResult<'a, (P
let mut parser = cx.new_parser_from_tts(stream);
let ty = parser.parse_ty()?;
parser.expect_keyword(sym::is)?;
parser.expect_keyword(exp!(Is))?;
let pat = parser.parse_pat_no_top_alt(None, None)?;
Ok((ty, pat))

View File

@ -7,7 +7,7 @@ use rustc_expand::expand::AstFragment;
use rustc_feature::AttributeTemplate;
use rustc_lint_defs::BuiltinLintDiag;
use rustc_lint_defs::builtin::DUPLICATE_MACRO_ATTRIBUTES;
use rustc_parse::{parser, validate_attr};
use rustc_parse::{exp, parser, validate_attr};
use rustc_session::errors::report_lit_error;
use rustc_span::{BytePos, Span, Symbol};
@ -204,7 +204,7 @@ pub(crate) fn get_single_expr_from_tts(
Ok(ret) => ret,
Err(guar) => return ExpandResult::Ready(Err(guar)),
};
let _ = p.eat(&token::Comma);
let _ = p.eat(exp!(Comma));
if p.token != token::Eof {
cx.dcx().emit_err(errors::OnlyOneArgument { span, name });
@ -237,7 +237,7 @@ pub(crate) fn get_exprs_from_tts(
let expr = cx.expander().fully_expand_fragment(AstFragment::Expr(expr)).make_expr();
es.push(expr);
if p.eat(&token::Comma) {
if p.eat(exp!(Comma)) {
continue;
}
if p.token != token::Eof {

View File

@ -2,9 +2,9 @@ use std::iter::once;
use std::path::{self, Path, PathBuf};
use rustc_ast::ptr::P;
use rustc_ast::{AttrVec, Attribute, Inline, Item, ModSpans, token};
use rustc_ast::{AttrVec, Attribute, Inline, Item, ModSpans};
use rustc_errors::{Diag, ErrorGuaranteed};
use rustc_parse::{new_parser_from_file, unwrap_or_emit_fatal, validate_attr};
use rustc_parse::{exp, new_parser_from_file, unwrap_or_emit_fatal, validate_attr};
use rustc_session::Session;
use rustc_session::parse::ParseSess;
use rustc_span::{Ident, Span, sym};
@ -70,7 +70,7 @@ pub(crate) fn parse_external_mod(
let mut parser =
unwrap_or_emit_fatal(new_parser_from_file(&sess.psess, &mp.file_path, Some(span)));
let (inner_attrs, items, inner_span) =
parser.parse_mod(&token::Eof).map_err(|err| ModError::ParserError(err))?;
parser.parse_mod(exp!(Eof)).map_err(|err| ModError::ParserError(err))?;
attrs.extend(inner_attrs);
(items, inner_span, mp.file_path)
};

View File

@ -15,7 +15,7 @@ use rustc_data_structures::sync::Lrc;
use rustc_errors::{Diag, ErrorGuaranteed, MultiSpan, PResult};
use rustc_parse::lexer::nfc_normalize;
use rustc_parse::parser::Parser;
use rustc_parse::{new_parser_from_source_str, source_str_to_stream, unwrap_or_emit_fatal};
use rustc_parse::{exp, new_parser_from_source_str, source_str_to_stream, unwrap_or_emit_fatal};
use rustc_session::parse::ParseSess;
use rustc_span::def_id::CrateNum;
use rustc_span::{BytePos, FileName, Pos, SourceFile, Span, Symbol, sym};
@ -473,7 +473,7 @@ impl server::FreeFunctions for Rustc<'_, '_> {
unwrap_or_emit_fatal(new_parser_from_source_str(self.psess(), name, s.to_owned()));
let first_span = parser.token.span.data();
let minus_present = parser.eat(&token::BinOp(token::Minus));
let minus_present = parser.eat(exp!(Minus));
let lit_span = parser.token.span.data();
let token::Literal(mut lit) = parser.token.kind else {

View File

@ -1,8 +1,7 @@
use rustc_ast::token::{self, Delimiter};
use rustc_ast::{self as ast, Attribute, attr};
use rustc_ast::{self as ast, Attribute, attr, token};
use rustc_errors::codes::*;
use rustc_errors::{Diag, PResult};
use rustc_span::{BytePos, Span, kw};
use rustc_span::{BytePos, Span};
use thin_vec::ThinVec;
use tracing::debug;
@ -10,7 +9,7 @@ use super::{
AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, ParserRange, PathStyle, Trailing,
UsePreAttrPos,
};
use crate::{errors, fluent_generated as fluent, maybe_whole};
use crate::{errors, exp, fluent_generated as fluent, maybe_whole};
// Public for rustfmt usage
#[derive(Debug)]
@ -45,7 +44,7 @@ impl<'a> Parser<'a> {
let mut just_parsed_doc_comment = false;
let start_pos = self.num_bump_calls;
loop {
let attr = if self.check(&token::Pound) {
let attr = if self.check(exp!(Pound)) {
let prev_outer_attr_sp = outer_attrs.last().map(|attr: &Attribute| attr.span);
let inner_error_reason = if just_parsed_doc_comment {
@ -126,14 +125,14 @@ impl<'a> Parser<'a> {
let lo = self.token.span;
// Attributes can't have attributes of their own [Editor's note: not with that attitude]
self.collect_tokens_no_attrs(|this| {
assert!(this.eat(&token::Pound), "parse_attribute called in non-attribute position");
assert!(this.eat(exp!(Pound)), "parse_attribute called in non-attribute position");
let style =
if this.eat(&token::Not) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer };
if this.eat(exp!(Not)) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer };
this.expect(&token::OpenDelim(Delimiter::Bracket))?;
this.expect(exp!(OpenBracket))?;
let item = this.parse_attr_item(ForceCollect::No)?;
this.expect(&token::CloseDelim(Delimiter::Bracket))?;
this.expect(exp!(CloseBracket))?;
let attr_sp = lo.to(this.prev_token.span);
// Emit error if inner attribute is encountered and forbidden.
@ -274,10 +273,10 @@ impl<'a> Parser<'a> {
// Attr items don't have attributes.
self.collect_tokens(None, AttrWrapper::empty(), force_collect, |this, _empty_attrs| {
let is_unsafe = this.eat_keyword(kw::Unsafe);
let is_unsafe = this.eat_keyword(exp!(Unsafe));
let unsafety = if is_unsafe {
let unsafe_span = this.prev_token.span;
this.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
this.expect(exp!(OpenParen))?;
ast::Safety::Unsafe(unsafe_span)
} else {
ast::Safety::Default
@ -286,7 +285,7 @@ impl<'a> Parser<'a> {
let path = this.parse_path(PathStyle::Mod)?;
let args = this.parse_attr_args()?;
if is_unsafe {
this.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
this.expect(exp!(CloseParen))?;
}
Ok((
ast::AttrItem { unsafety, path, args, tokens: None },
@ -306,7 +305,7 @@ impl<'a> Parser<'a> {
loop {
let start_pos = self.num_bump_calls;
// Only try to parse if it is an inner attribute (has `!`).
let attr = if self.check(&token::Pound) && self.look_ahead(1, |t| t == &token::Not) {
let attr = if self.check(exp!(Pound)) && self.look_ahead(1, |t| t == &token::Not) {
Some(self.parse_attribute(InnerAttrPolicy::Permitted)?)
} else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind {
if attr_style == ast::AttrStyle::Inner {
@ -358,7 +357,7 @@ impl<'a> Parser<'a> {
&mut self,
) -> PResult<'a, (ast::MetaItemInner, Vec<(ast::AttrItem, Span)>)> {
let cfg_predicate = self.parse_meta_item_inner()?;
self.expect(&token::Comma)?;
self.expect(exp!(Comma))?;
// Presumably, the majority of the time there will only be one attr.
let mut expanded_attrs = Vec::with_capacity(1);
@ -366,7 +365,7 @@ impl<'a> Parser<'a> {
let lo = self.token.span;
let item = self.parse_attr_item(ForceCollect::Yes)?;
expanded_attrs.push((item, lo.to(self.prev_token.span)));
if !self.eat(&token::Comma) {
if !self.eat(exp!(Comma)) {
break;
}
}
@ -380,7 +379,7 @@ impl<'a> Parser<'a> {
let mut nmis = ThinVec::with_capacity(1);
while self.token != token::Eof {
nmis.push(self.parse_meta_item_inner()?);
if !self.eat(&token::Comma) {
if !self.eat(exp!(Comma)) {
break;
}
}
@ -413,13 +412,13 @@ impl<'a> Parser<'a> {
let lo = self.token.span;
let is_unsafe = if unsafe_allowed == AllowLeadingUnsafe::Yes {
self.eat_keyword(kw::Unsafe)
self.eat_keyword(exp!(Unsafe))
} else {
false
};
let unsafety = if is_unsafe {
let unsafe_span = self.prev_token.span;
self.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
self.expect(exp!(OpenParen))?;
ast::Safety::Unsafe(unsafe_span)
} else {
@ -429,7 +428,7 @@ impl<'a> Parser<'a> {
let path = self.parse_path(PathStyle::Mod)?;
let kind = self.parse_meta_item_kind()?;
if is_unsafe {
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
self.expect(exp!(CloseParen))?;
}
let span = lo.to(self.prev_token.span);
@ -437,9 +436,9 @@ impl<'a> Parser<'a> {
}
pub(crate) fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> {
Ok(if self.eat(&token::Eq) {
Ok(if self.eat(exp!(Eq)) {
ast::MetaItemKind::NameValue(self.parse_unsuffixed_meta_item_lit()?)
} else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
} else if self.check(exp!(OpenParen)) {
let (list, _) = self.parse_paren_comma_seq(|p| p.parse_meta_item_inner())?;
ast::MetaItemKind::List(list)
} else {

View File

@ -29,7 +29,8 @@ use tracing::{debug, trace};
use super::pat::Expected;
use super::{
BlockMode, CommaRecoveryMode, Parser, PathStyle, Restrictions, SemiColonMode, SeqSep, TokenType,
BlockMode, CommaRecoveryMode, ExpTokenPair, Parser, PathStyle, Restrictions, SemiColonMode,
SeqSep, TokenType,
};
use crate::errors::{
AddParen, AmbiguousPlus, AsyncMoveBlockIn2015, AttributeOnParamType, AwaitSuggestion,
@ -47,7 +48,7 @@ use crate::errors::{
UnexpectedConstParamDeclarationSugg, UnmatchedAngleBrackets, UseEqInstead, WrapType,
};
use crate::parser::attr::InnerAttrPolicy;
use crate::{fluent_generated as fluent, parser};
use crate::{exp, fluent_generated as fluent};
/// Creates a placeholder argument.
pub(super) fn dummy_arg(ident: Ident, guar: ErrorGuaranteed) -> Param {
@ -462,8 +463,8 @@ impl<'a> Parser<'a> {
pub(super) fn expected_one_of_not_found(
&mut self,
edible: &[TokenKind],
inedible: &[TokenKind],
edible: &[ExpTokenPair<'_>],
inedible: &[ExpTokenPair<'_>],
) -> PResult<'a, ErrorGuaranteed> {
debug!("expected_one_of_not_found(edible: {:?}, inedible: {:?})", edible, inedible);
fn tokens_to_string(tokens: &[TokenType]) -> String {
@ -483,49 +484,17 @@ impl<'a> Parser<'a> {
})
}
self.expected_tokens.extend(edible.iter().chain(inedible).cloned().map(TokenType::Token));
let mut expected = self
.expected_tokens
.iter()
.filter(|token| {
// Filter out suggestions that suggest the same token which was found and deemed incorrect.
fn is_ident_eq_keyword(found: &TokenKind, expected: &TokenType) -> bool {
if let TokenKind::Ident(current_sym, _) = found
&& let TokenType::Keyword(suggested_sym) = expected
{
return current_sym == suggested_sym;
}
false
}
if **token != parser::TokenType::Token(self.token.kind.clone()) {
let eq = is_ident_eq_keyword(&self.token.kind, &token);
// If the suggestion is a keyword and the found token is an ident,
// the content of which are equal to the suggestion's content,
// we can remove that suggestion (see the `return false` below).
// If this isn't the case however, and the suggestion is a token the
// content of which is the same as the found token's, we remove it as well.
if !eq {
if let TokenType::Token(kind) = token {
if self.token == *kind {
return false;
}
}
return true;
}
}
false
})
.cloned()
.collect::<Vec<_>>();
for exp in edible.iter().chain(inedible.iter()) {
self.expected_token_types.insert(exp.token_type);
}
let mut expected: Vec<_> = self.expected_token_types.iter().collect();
expected.sort_by_cached_key(|x| x.to_string());
expected.dedup();
let sm = self.psess.source_map();
// Special-case "expected `;`" errors.
if expected.contains(&TokenType::Token(token::Semi)) {
if expected.contains(&TokenType::Semi) {
// If the user is trying to write a ternary expression, recover it and
// return an Err to prevent a cascade of irrelevant diagnostics.
if self.prev_token == token::Question
@ -577,7 +546,7 @@ impl<'a> Parser<'a> {
|| (sm.is_multiline(
self.prev_token.span.shrink_to_hi().until(self.token.span.shrink_to_lo()),
) && t == &token::Pound)
}) && !expected.contains(&TokenType::Token(token::Comma))
}) && !expected.contains(&TokenType::Comma)
{
// Missing semicolon typo. This is triggered if the next token could either start a
// new statement or is a block close. For example:
@ -597,7 +566,7 @@ impl<'a> Parser<'a> {
if self.token == TokenKind::EqEq
&& self.prev_token.is_ident()
&& expected.iter().any(|tok| matches!(tok, TokenType::Token(TokenKind::Eq)))
&& expected.contains(&TokenType::Eq)
{
// Likely typo: `=` → `==` in let expr or enum item
return Err(self.dcx().create_err(UseEqInstead { span: self.token.span }));
@ -636,15 +605,8 @@ impl<'a> Parser<'a> {
// Look for usages of '=>' where '>=' was probably intended
if self.token == token::FatArrow
&& expected
.iter()
.any(|tok| matches!(tok, TokenType::Operator | TokenType::Token(TokenKind::Le)))
&& !expected.iter().any(|tok| {
matches!(
tok,
TokenType::Token(TokenKind::FatArrow) | TokenType::Token(TokenKind::Comma)
)
})
&& expected.iter().any(|tok| matches!(tok, TokenType::Operator | TokenType::Le))
&& !expected.iter().any(|tok| matches!(tok, TokenType::FatArrow | TokenType::Comma))
{
err.span_suggestion(
self.token.span,
@ -741,7 +703,7 @@ impl<'a> Parser<'a> {
};
if self.check_too_many_raw_str_terminators(&mut err) {
if expected.contains(&TokenType::Token(token::Semi)) && self.eat(&token::Semi) {
if expected.contains(&TokenType::Semi) && self.eat(exp!(Semi)) {
let guar = err.emit();
return Ok(guar);
} else {
@ -785,17 +747,15 @@ impl<'a> Parser<'a> {
let Some((curr_ident, _)) = self.token.ident() else {
return;
};
let expected_tokens: &[TokenType] =
let expected_token_types: &[TokenType] =
expected.len().checked_sub(10).map_or(&expected, |index| &expected[index..]);
let expected_keywords: Vec<Symbol> = expected_tokens
.iter()
.filter_map(|token| if let TokenType::Keyword(kw) = token { Some(*kw) } else { None })
.collect();
let expected_keywords: Vec<Symbol> =
expected_token_types.iter().filter_map(|token| token.is_keyword()).collect();
// When there are a few keywords in the last ten elements of `self.expected_tokens` and the current
// token is an identifier, it's probably a misspelled keyword.
// This handles code like `async Move {}`, misspelled `if` in match guard, misspelled `else` in `if`-`else`
// and misspelled `where` in a where clause.
// When there are a few keywords in the last ten elements of `self.expected_token_types`
// and the current token is an identifier, it's probably a misspelled keyword. This handles
// code like `async Move {}`, misspelled `if` in match guard, misspelled `else` in
// `if`-`else` and misspelled `where` in a where clause.
if !expected_keywords.is_empty()
&& !curr_ident.is_used_keyword()
&& let Some(misspelled_kw) = find_similar_kw(curr_ident, &expected_keywords)
@ -1052,7 +1012,7 @@ impl<'a> Parser<'a> {
(Err(snapshot_err), Err(err)) => {
// We don't know what went wrong, emit the normal error.
snapshot_err.cancel();
self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
self.consume_block(exp!(OpenBrace), exp!(CloseBrace), ConsumeClosingDelim::Yes);
Err(err)
}
(Ok(_), Ok(mut tail)) => {
@ -1089,7 +1049,7 @@ impl<'a> Parser<'a> {
Applicability::MaybeIncorrect,
);
let guar = err.emit();
self.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]);
self.eat_to_tokens(&[exp!(CloseBrace)]);
guar
}
token::OpenDelim(Delimiter::Parenthesis)
@ -1097,7 +1057,7 @@ impl<'a> Parser<'a> {
{
// We are within a function call or tuple, we can emit the error
// and recover.
self.eat_to_tokens(&[&token::CloseDelim(Delimiter::Parenthesis), &token::Comma]);
self.eat_to_tokens(&[exp!(CloseParen), exp!(Comma)]);
err.multipart_suggestion_verbose(
"you might have meant to open the body of the closure",
@ -1124,11 +1084,11 @@ impl<'a> Parser<'a> {
Ok(self.mk_expr_err(lo.to(self.token.span), guar))
}
/// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
/// Eats and discards tokens until one of `closes` is encountered. Respects token trees,
/// passes through any errors encountered. Used for error recovery.
pub(super) fn eat_to_tokens(&mut self, kets: &[&TokenKind]) {
if let Err(err) =
self.parse_seq_to_before_tokens(kets, &[], SeqSep::none(), |p| Ok(p.parse_token_tree()))
pub(super) fn eat_to_tokens(&mut self, closes: &[ExpTokenPair<'_>]) {
if let Err(err) = self
.parse_seq_to_before_tokens(closes, &[], SeqSep::none(), |p| Ok(p.parse_token_tree()))
{
err.cancel();
}
@ -1147,7 +1107,7 @@ impl<'a> Parser<'a> {
pub(super) fn check_trailing_angle_brackets(
&mut self,
segment: &PathSegment,
end: &[&TokenKind],
end: &[ExpTokenPair<'_>],
) -> Option<ErrorGuaranteed> {
if !self.may_recover() {
return None;
@ -1230,7 +1190,7 @@ impl<'a> Parser<'a> {
// second case.
if self.look_ahead(position, |t| {
trace!("check_trailing_angle_brackets: t={:?}", t);
end.contains(&&t.kind)
end.iter().any(|exp| exp.tok == &t.kind)
}) {
// Eat from where we started until the end token so that parsing can continue
// as if we didn't have those extra angle brackets.
@ -1298,11 +1258,11 @@ impl<'a> Parser<'a> {
) -> PResult<'a, ErrorGuaranteed> {
if let ExprKind::Binary(binop, _, _) = &expr.kind
&& let ast::BinOpKind::Lt = binop.node
&& self.eat(&token::Comma)
&& self.eat(exp!(Comma))
{
let x = self.parse_seq_to_before_end(
&token::Gt,
SeqSep::trailing_allowed(token::Comma),
exp!(Gt),
SeqSep::trailing_allowed(exp!(Comma)),
|p| match p.parse_generic_arg(None)? {
Some(arg) => Ok(arg),
// If we didn't eat a generic arg, then we should error.
@ -1311,7 +1271,7 @@ impl<'a> Parser<'a> {
);
match x {
Ok((_, _, Recovered::No)) => {
if self.eat(&token::Gt) {
if self.eat(exp!(Gt)) {
// We made sense of it. Improve the error message.
e.span_suggestion_verbose(
binop.span.shrink_to_lo(),
@ -1874,7 +1834,7 @@ impl<'a> Parser<'a> {
ty_span: Span,
ty: P<Ty>,
) -> PResult<'a, P<T>> {
self.expect(&token::PathSep)?;
self.expect(exp!(PathSep))?;
let mut path = ast::Path { segments: ThinVec::new(), span: DUMMY_SP, tokens: None };
self.parse_path_segments(&mut path.segments, T::PATH_STYLE, None)?;
@ -1956,10 +1916,10 @@ impl<'a> Parser<'a> {
}
pub(super) fn expect_semi(&mut self) -> PResult<'a, ()> {
if self.eat(&token::Semi) || self.recover_colon_as_semi() {
if self.eat(exp!(Semi)) || self.recover_colon_as_semi() {
return Ok(());
}
self.expect(&token::Semi).map(drop) // Error unconditionally
self.expect(exp!(Semi)).map(drop) // Error unconditionally
}
pub(super) fn recover_colon_as_semi(&mut self) -> bool {
@ -2004,15 +1964,15 @@ impl<'a> Parser<'a> {
}
fn recover_await_macro(&mut self) -> PResult<'a, (Span, P<Expr>, bool)> {
self.expect(&token::Not)?;
self.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
self.expect(exp!(Not))?;
self.expect(exp!(OpenParen))?;
let expr = self.parse_expr()?;
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
self.expect(exp!(CloseParen))?;
Ok((self.prev_token.span, expr, false))
}
fn recover_await_prefix(&mut self, await_sp: Span) -> PResult<'a, (Span, P<Expr>, bool)> {
let is_question = self.eat(&token::Question); // Handle `await? <expr>`.
let is_question = self.eat(exp!(Question)); // Handle `await? <expr>`.
let expr = if self.token == token::OpenDelim(Delimiter::Brace) {
// Handle `await { <expr> }`.
// This needs to be handled separately from the next arm to avoid
@ -2074,7 +2034,7 @@ impl<'a> Parser<'a> {
let try_span = lo.to(self.token.span); //we take the try!( span
self.bump(); //remove (
let is_empty = self.token == token::CloseDelim(Delimiter::Parenthesis); //check if the block is empty
self.consume_block(Delimiter::Parenthesis, ConsumeClosingDelim::No); //eat the block
self.consume_block(exp!(OpenParen), exp!(CloseParen), ConsumeClosingDelim::No); //eat the block
let hi = self.token.span;
self.bump(); //remove )
let mut err = self.dcx().struct_span_err(lo.to(hi), "use of deprecated `try` macro");
@ -2130,13 +2090,14 @@ impl<'a> Parser<'a> {
pub(super) fn recover_seq_parse_error(
&mut self,
delim: Delimiter,
open: ExpTokenPair<'_>,
close: ExpTokenPair<'_>,
lo: Span,
err: Diag<'a>,
) -> P<Expr> {
let guar = err.emit();
// Recover from parse error, callers expect the closing delim to be consumed.
self.consume_block(delim, ConsumeClosingDelim::Yes);
self.consume_block(open, close, ConsumeClosingDelim::Yes);
self.mk_expr(lo.to(self.prev_token.span), ExprKind::Err(guar))
}
@ -2225,7 +2186,7 @@ impl<'a> Parser<'a> {
}
pub(super) fn check_for_for_in_in_typo(&mut self, in_span: Span) {
if self.eat_keyword(kw::In) {
if self.eat_keyword(exp!(In)) {
// a common typo: `for _ in in bar {}`
self.dcx().emit_err(InInTypo {
span: self.prev_token.span,
@ -2366,7 +2327,7 @@ impl<'a> Parser<'a> {
pub(super) fn recover_arg_parse(&mut self) -> PResult<'a, (P<ast::Pat>, P<ast::Ty>)> {
let pat = self.parse_pat_no_top_alt(Some(Expected::ArgumentName), None)?;
self.expect(&token::Colon)?;
self.expect(exp!(Colon))?;
let ty = self.parse_ty()?;
self.dcx().emit_err(PatternMethodParamWithoutBody { span: pat.span });
@ -2384,12 +2345,17 @@ impl<'a> Parser<'a> {
Ok(param)
}
pub(super) fn consume_block(&mut self, delim: Delimiter, consume_close: ConsumeClosingDelim) {
pub(super) fn consume_block(
&mut self,
open: ExpTokenPair<'_>,
close: ExpTokenPair<'_>,
consume_close: ConsumeClosingDelim,
) {
let mut brace_depth = 0;
loop {
if self.eat(&token::OpenDelim(delim)) {
if self.eat(open) {
brace_depth += 1;
} else if self.check(&token::CloseDelim(delim)) {
} else if self.check(close) {
if brace_depth == 0 {
if let ConsumeClosingDelim::Yes = consume_close {
// Some of the callers of this method expect to be able to parse the
@ -2545,7 +2511,7 @@ impl<'a> Parser<'a> {
match self.recover_const_arg(arg.span(), err) {
Ok(arg) => {
args.push(AngleBracketedArg::Arg(arg));
if self.eat(&token::Comma) {
if self.eat(exp!(Comma)) {
return Ok(true); // Continue
}
}
@ -3016,7 +2982,7 @@ impl<'a> Parser<'a> {
/// Check for exclusive ranges written as `..<`
pub(crate) fn maybe_err_dotdotlt_syntax(&self, maybe_lt: Token, mut err: Diag<'a>) -> Diag<'a> {
if maybe_lt == token::Lt
&& (self.expected_tokens.contains(&TokenType::Token(token::Gt))
&& (self.expected_token_types.contains(TokenType::Gt)
|| matches!(self.token.kind, token::Literal(..)))
{
err.span_suggestion(
@ -3146,9 +3112,9 @@ impl<'a> Parser<'a> {
/// Parse and throw away a parenthesized comma separated
/// sequence of patterns until `)` is reached.
fn skip_pat_list(&mut self) -> PResult<'a, ()> {
while !self.check(&token::CloseDelim(Delimiter::Parenthesis)) {
while !self.check(exp!(CloseParen)) {
self.parse_pat_no_top_alt(None, None)?;
if !self.eat(&token::Comma) {
if !self.eat(exp!(Comma)) {
return Ok(());
}
}

View File

@ -35,10 +35,10 @@ use super::diagnostics::SnapshotParser;
use super::pat::{CommaRecoveryMode, Expected, RecoverColon, RecoverComma};
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{
AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions,
SemiColonMode, SeqSep, TokenType, Trailing, UsePreAttrPos,
AttrWrapper, BlockMode, ClosureSpans, ExpTokenPair, ForceCollect, Parser, PathStyle,
Restrictions, SemiColonMode, SeqSep, TokenType, Trailing, UsePreAttrPos,
};
use crate::{errors, maybe_recover_from_interpolated_ty_qpath};
use crate::{errors, exp, maybe_recover_from_interpolated_ty_qpath};
#[derive(Debug)]
pub(super) enum DestructuredFloat {
@ -153,7 +153,7 @@ impl<'a> Parser<'a> {
return Ok((lhs, parsed_something));
}
self.expected_tokens.push(TokenType::Operator);
self.expected_token_types.insert(TokenType::Operator);
while let Some(op) = self.check_assoc_op() {
let lhs_span = self.interpolated_or_expr_span(&lhs);
let cur_op_span = self.token.span;
@ -873,9 +873,9 @@ impl<'a> Parser<'a> {
/// Parse `mut?` or `raw [ const | mut ]`.
fn parse_borrow_modifiers(&mut self) -> (ast::BorrowKind, ast::Mutability) {
if self.check_keyword(kw::Raw) && self.look_ahead(1, Token::is_mutability) {
if self.check_keyword(exp!(Raw)) && self.look_ahead(1, Token::is_mutability) {
// `raw [ const | mut ]`.
let found_raw = self.eat_keyword(kw::Raw);
let found_raw = self.eat_keyword(exp!(Raw));
assert!(found_raw);
let mutability = self.parse_const_or_mut().unwrap();
(ast::BorrowKind::Raw, mutability)
@ -908,7 +908,7 @@ impl<'a> Parser<'a> {
// a `return` which could be suggested otherwise.
self.eat_noexpect(&token::Question)
} else {
self.eat(&token::Question)
self.eat(exp!(Question))
};
if has_question {
// `expr?`
@ -926,7 +926,7 @@ impl<'a> Parser<'a> {
self.dcx().emit_err(errors::ExprRArrowCall { span });
true
} else {
self.eat(&token::Dot)
self.eat(exp!(Dot))
};
if has_dot {
// expr.f
@ -1251,7 +1251,7 @@ impl<'a> Parser<'a> {
.map(|args| self.mk_expr(lo.to(self.prev_token.span), self.mk_call(fun, args)));
match self.maybe_recover_struct_lit_bad_delims(lo, open_paren, seq, snapshot) {
Ok(expr) => expr,
Err(err) => self.recover_seq_parse_error(Delimiter::Parenthesis, lo, err),
Err(err) => self.recover_seq_parse_error(exp!(OpenParen), exp!(CloseParen), lo, err),
}
}
@ -1268,10 +1268,8 @@ impl<'a> Parser<'a> {
match (self.may_recover(), seq, snapshot) {
(true, Err(err), Some((mut snapshot, ExprKind::Path(None, path)))) => {
snapshot.bump(); // `(`
match snapshot.parse_struct_fields(path.clone(), false, Delimiter::Parenthesis) {
Ok((fields, ..))
if snapshot.eat(&token::CloseDelim(Delimiter::Parenthesis)) =>
{
match snapshot.parse_struct_fields(path.clone(), false, exp!(CloseParen)) {
Ok((fields, ..)) if snapshot.eat(exp!(CloseParen)) => {
// We are certain we have `Enum::Foo(a: 3, b: 4)`, suggest
// `Enum::Foo { a: 3, b: 4 }` or `Enum::Foo(3, 4)`.
self.restore_snapshot(snapshot);
@ -1328,7 +1326,7 @@ impl<'a> Parser<'a> {
self.bump(); // `[`
let index = self.parse_expr()?;
self.suggest_missing_semicolon_before_array(prev_span, open_delim_span)?;
self.expect(&token::CloseDelim(Delimiter::Bracket))?;
self.expect(exp!(CloseBracket))?;
Ok(self.mk_expr(
lo.to(self.prev_token.span),
self.mk_index(base, index, open_delim_span.to(self.prev_token.span)),
@ -1337,12 +1335,12 @@ impl<'a> Parser<'a> {
/// Assuming we have just parsed `.`, continue parsing into an expression.
fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
if self.token.uninterpolated_span().at_least_rust_2018() && self.eat_keyword(kw::Await) {
if self.token.uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Await)) {
return Ok(self.mk_await_expr(self_arg, lo));
}
// Post-fix match
if self.eat_keyword(kw::Match) {
if self.eat_keyword(exp!(Match)) {
let match_span = self.prev_token.span;
self.psess.gated_spans.gate(sym::postfix_match, match_span);
return self.parse_match_block(lo, match_span, self_arg, MatchKind::Postfix);
@ -1350,10 +1348,10 @@ impl<'a> Parser<'a> {
let fn_span_lo = self.token.span;
let mut seg = self.parse_path_segment(PathStyle::Expr, None)?;
self.check_trailing_angle_brackets(&seg, &[&token::OpenDelim(Delimiter::Parenthesis)]);
self.check_trailing_angle_brackets(&seg, &[exp!(OpenParen)]);
self.check_turbofish_missing_angle_brackets(&mut seg);
if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
if self.check(exp!(OpenParen)) {
// Method call `expr.f()`
let args = self.parse_expr_paren_seq()?;
let fn_span = fn_span_lo.to(self.prev_token.span);
@ -1415,18 +1413,18 @@ impl<'a> Parser<'a> {
let restrictions = self.restrictions;
self.with_res(restrictions - Restrictions::ALLOW_LET, |this| {
// Note: when adding new syntax here, don't forget to adjust `TokenKind::can_begin_expr()`.
// Note: adding new syntax here? Don't forget to adjust `TokenKind::can_begin_expr()`.
let lo = this.token.span;
if let token::Literal(_) = this.token.kind {
// This match arm is a special-case of the `_` match arm below and
// could be removed without changing functionality, but it's faster
// to have it here, especially for programs with large constants.
this.parse_expr_lit()
} else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) {
} else if this.check(exp!(OpenParen)) {
this.parse_expr_tuple_parens(restrictions)
} else if this.check(&token::OpenDelim(Delimiter::Brace)) {
} else if this.check(exp!(OpenBrace)) {
this.parse_expr_block(None, lo, BlockCheckMode::Default)
} else if this.check(&token::BinOp(token::Or)) || this.check(&token::OrOr) {
} else if this.check(exp!(Or)) || this.check(exp!(OrOr)) {
this.parse_expr_closure().map_err(|mut err| {
// If the input is something like `if a { 1 } else { 2 } | if a { 3 } else { 4 }`
// then suggest parens around the lhs.
@ -1435,41 +1433,41 @@ impl<'a> Parser<'a> {
}
err
})
} else if this.check(&token::OpenDelim(Delimiter::Bracket)) {
this.parse_expr_array_or_repeat(Delimiter::Bracket)
} else if this.check(exp!(OpenBracket)) {
this.parse_expr_array_or_repeat(exp!(CloseBracket))
} else if this.is_builtin() {
this.parse_expr_builtin()
} else if this.check_path() {
this.parse_expr_path_start()
} else if this.check_keyword(kw::Move)
|| this.check_keyword(kw::Static)
} else if this.check_keyword(exp!(Move))
|| this.check_keyword(exp!(Static))
|| this.check_const_closure()
{
this.parse_expr_closure()
} else if this.eat_keyword(kw::If) {
} else if this.eat_keyword(exp!(If)) {
this.parse_expr_if()
} else if this.check_keyword(kw::For) {
} else if this.check_keyword(exp!(For)) {
if this.choose_generics_over_qpath(1) {
this.parse_expr_closure()
} else {
assert!(this.eat_keyword(kw::For));
assert!(this.eat_keyword(exp!(For)));
this.parse_expr_for(None, lo)
}
} else if this.eat_keyword(kw::While) {
} else if this.eat_keyword(exp!(While)) {
this.parse_expr_while(None, lo)
} else if let Some(label) = this.eat_label() {
this.parse_expr_labeled(label, true)
} else if this.eat_keyword(kw::Loop) {
} else if this.eat_keyword(exp!(Loop)) {
this.parse_expr_loop(None, lo).map_err(|mut err| {
err.span_label(lo, "while parsing this `loop` expression");
err
})
} else if this.eat_keyword(kw::Match) {
} else if this.eat_keyword(exp!(Match)) {
this.parse_expr_match().map_err(|mut err| {
err.span_label(lo, "while parsing this `match` expression");
err
})
} else if this.eat_keyword(kw::Unsafe) {
} else if this.eat_keyword(exp!(Unsafe)) {
this.parse_expr_block(None, lo, BlockCheckMode::Unsafe(ast::UserProvided)).map_err(
|mut err| {
err.span_label(lo, "while parsing this `unsafe` expression");
@ -1481,23 +1479,23 @@ impl<'a> Parser<'a> {
} else if this.may_recover() && this.is_do_catch_block() {
this.recover_do_catch()
} else if this.is_try_block() {
this.expect_keyword(kw::Try)?;
this.expect_keyword(exp!(Try))?;
this.parse_try_block(lo)
} else if this.eat_keyword(kw::Return) {
} else if this.eat_keyword(exp!(Return)) {
this.parse_expr_return()
} else if this.eat_keyword(kw::Continue) {
} else if this.eat_keyword(exp!(Continue)) {
this.parse_expr_continue(lo)
} else if this.eat_keyword(kw::Break) {
} else if this.eat_keyword(exp!(Break)) {
this.parse_expr_break()
} else if this.eat_keyword(kw::Yield) {
} else if this.eat_keyword(exp!(Yield)) {
this.parse_expr_yield()
} else if this.is_do_yeet() {
this.parse_expr_yeet()
} else if this.eat_keyword(kw::Become) {
} else if this.eat_keyword(exp!(Become)) {
this.parse_expr_become()
} else if this.check_keyword(kw::Let) {
} else if this.check_keyword(exp!(Let)) {
this.parse_expr_let(restrictions)
} else if this.eat_keyword(kw::Underscore) {
} else if this.eat_keyword(exp!(Underscore)) {
Ok(this.mk_expr(this.prev_token.span, ExprKind::Underscore))
} else if this.token.uninterpolated_span().at_least_rust_2018() {
// `Span::at_least_rust_2018()` is somewhat expensive; don't get it repeatedly.
@ -1505,11 +1503,11 @@ impl<'a> Parser<'a> {
// check for `gen {}` and `gen move {}`
// or `async gen {}` and `async gen move {}`
&& (this.is_gen_block(kw::Gen, 0)
|| (this.check_keyword(kw::Async) && this.is_gen_block(kw::Gen, 1)))
|| (this.check_keyword(exp!(Async)) && this.is_gen_block(kw::Gen, 1)))
{
// FIXME: (async) gen closures aren't yet parsed.
this.parse_gen_block()
} else if this.check_keyword(kw::Async) {
} else if this.check_keyword(exp!(Async)) {
// FIXME(gen_blocks): Parse `gen async` and suggest swap
if this.is_gen_block(kw::Async, 0) {
// Check for `async {` and `async move {`,
@ -1541,15 +1539,20 @@ impl<'a> Parser<'a> {
fn parse_expr_tuple_parens(&mut self, restrictions: Restrictions) -> PResult<'a, P<Expr>> {
let lo = self.token.span;
self.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
self.expect(exp!(OpenParen))?;
let (es, trailing_comma) = match self.parse_seq_to_end(
&token::CloseDelim(Delimiter::Parenthesis),
SeqSep::trailing_allowed(token::Comma),
exp!(CloseParen),
SeqSep::trailing_allowed(exp!(Comma)),
|p| p.parse_expr_catch_underscore(restrictions.intersection(Restrictions::ALLOW_LET)),
) {
Ok(x) => x,
Err(err) => {
return Ok(self.recover_seq_parse_error(Delimiter::Parenthesis, lo, err));
return Ok(self.recover_seq_parse_error(
exp!(OpenParen),
exp!(CloseParen),
lo,
err,
));
}
};
let kind = if es.len() == 1 && matches!(trailing_comma, Trailing::No) {
@ -1563,25 +1566,24 @@ impl<'a> Parser<'a> {
self.maybe_recover_from_bad_qpath(expr)
}
fn parse_expr_array_or_repeat(&mut self, close_delim: Delimiter) -> PResult<'a, P<Expr>> {
fn parse_expr_array_or_repeat(&mut self, close: ExpTokenPair<'_>) -> PResult<'a, P<Expr>> {
let lo = self.token.span;
self.bump(); // `[` or other open delim
let close = &token::CloseDelim(close_delim);
let kind = if self.eat(close) {
// Empty vector
ExprKind::Array(ThinVec::new())
} else {
// Non-empty vector
let first_expr = self.parse_expr()?;
if self.eat(&token::Semi) {
if self.eat(exp!(Semi)) {
// Repeating array syntax: `[ 0; 512 ]`
let count = self.parse_expr_anon_const()?;
self.expect(close)?;
ExprKind::Repeat(first_expr, count)
} else if self.eat(&token::Comma) {
} else if self.eat(exp!(Comma)) {
// Vector with two or more elements.
let sep = SeqSep::trailing_allowed(token::Comma);
let sep = SeqSep::trailing_allowed(exp!(Comma));
let (mut exprs, _) = self.parse_seq_to_end(close, sep, |p| p.parse_expr())?;
exprs.insert(0, first_expr);
ExprKind::Array(exprs)
@ -1615,7 +1617,7 @@ impl<'a> Parser<'a> {
};
// `!`, as an operator, is prefix, so we know this isn't that.
let (span, kind) = if self.eat(&token::Not) {
let (span, kind) = if self.eat(exp!(Not)) {
// MACRO INVOCATION expression
if qself.is_some() {
self.dcx().emit_err(errors::MacroInvocationWithQualifiedPath(path.span));
@ -1623,7 +1625,7 @@ impl<'a> Parser<'a> {
let lo = path.span;
let mac = P(MacCall { path, args: self.parse_delim_args()? });
(lo.to(self.prev_token.span), ExprKind::MacCall(mac))
} else if self.check(&token::OpenDelim(Delimiter::Brace))
} else if self.check(exp!(OpenBrace))
&& let Some(expr) = self.maybe_parse_struct_expr(&qself, &path)
{
if qself.is_some() {
@ -1646,13 +1648,13 @@ impl<'a> Parser<'a> {
) -> PResult<'a, P<Expr>> {
let lo = label_.ident.span;
let label = Some(label_);
let ate_colon = self.eat(&token::Colon);
let ate_colon = self.eat(exp!(Colon));
let tok_sp = self.token.span;
let expr = if self.eat_keyword(kw::While) {
let expr = if self.eat_keyword(exp!(While)) {
self.parse_expr_while(label, lo)
} else if self.eat_keyword(kw::For) {
} else if self.eat_keyword(exp!(For)) {
self.parse_expr_for(label, lo)
} else if self.eat_keyword(kw::Loop) {
} else if self.eat_keyword(exp!(Loop)) {
self.parse_expr_loop(label, lo)
} else if self.check_noexpect(&token::OpenDelim(Delimiter::Brace))
|| self.token.is_whole_block()
@ -1958,7 +1960,7 @@ impl<'a> Parser<'a> {
self.psess.gated_spans.gate(sym::builtin_syntax, ident.span);
self.bump();
self.expect(&TokenKind::OpenDelim(Delimiter::Parenthesis))?;
self.expect(exp!(OpenParen))?;
let ret = if let Some(res) = parse(self, lo, ident)? {
Ok(res)
} else {
@ -1968,7 +1970,7 @@ impl<'a> Parser<'a> {
});
return Err(err);
};
self.expect(&TokenKind::CloseDelim(Delimiter::Parenthesis))?;
self.expect(exp!(CloseParen))?;
ret
}
@ -1976,14 +1978,12 @@ impl<'a> Parser<'a> {
/// Built-in macro for `offset_of!` expressions.
pub(crate) fn parse_expr_offset_of(&mut self, lo: Span) -> PResult<'a, P<Expr>> {
let container = self.parse_ty()?;
self.expect(&TokenKind::Comma)?;
self.expect(exp!(Comma))?;
let fields = self.parse_floating_field_access()?;
let trailing_comma = self.eat_noexpect(&TokenKind::Comma);
if let Err(mut e) =
self.expect_one_of(&[], &[TokenKind::CloseDelim(Delimiter::Parenthesis)])
{
if let Err(mut e) = self.expect_one_of(&[], &[exp!(CloseParen)]) {
if trailing_comma {
e.note("unexpected third argument to offset_of");
} else {
@ -2006,7 +2006,7 @@ impl<'a> Parser<'a> {
/// Built-in macro for type ascription expressions.
pub(crate) fn parse_expr_type_ascribe(&mut self, lo: Span) -> PResult<'a, P<Expr>> {
let expr = self.parse_expr()?;
self.expect(&token::Comma)?;
self.expect(exp!(Comma))?;
let ty = self.parse_ty()?;
let span = lo.to(self.token.span);
Ok(self.mk_expr(span, ExprKind::Type(expr, ty)))
@ -2018,7 +2018,7 @@ impl<'a> Parser<'a> {
kind: UnsafeBinderCastKind,
) -> PResult<'a, P<Expr>> {
let expr = self.parse_expr()?;
let ty = if self.eat(&TokenKind::Comma) { Some(self.parse_ty()?) } else { None };
let ty = if self.eat(exp!(Comma)) { Some(self.parse_ty()?) } else { None };
let span = lo.to(self.token.span);
Ok(self.mk_expr(span, ExprKind::UnsafeBinderCast(kind, expr, ty)))
}
@ -2214,7 +2214,7 @@ impl<'a> Parser<'a> {
}
let lo = self.token.span;
let minus_present = self.eat(&token::BinOp(token::Minus));
let minus_present = self.eat(exp!(Minus));
let (token_lit, span) = self.parse_token_lit()?;
let expr = self.mk_expr(span, ExprKind::Lit(token_lit));
@ -2236,7 +2236,7 @@ impl<'a> Parser<'a> {
/// expression.
fn maybe_suggest_brackets_instead_of_braces(&mut self, lo: Span) -> Option<P<Expr>> {
let mut snapshot = self.create_snapshot_for_diagnostic();
match snapshot.parse_expr_array_or_repeat(Delimiter::Brace) {
match snapshot.parse_expr_array_or_repeat(exp!(CloseBrace)) {
Ok(arr) => {
let guar = self.dcx().emit_err(errors::ArrayBracketsInsteadOfSpaces {
span: arr.span,
@ -2272,8 +2272,8 @@ impl<'a> Parser<'a> {
let mut snapshot = self.create_snapshot_for_diagnostic();
snapshot.bump();
match snapshot.parse_seq_to_before_end(
&token::CloseDelim(Delimiter::Bracket),
SeqSep::trailing_allowed(token::Comma),
exp!(CloseBracket),
SeqSep::trailing_allowed(exp!(Comma)),
|p| p.parse_expr(),
) {
Ok(_)
@ -2337,7 +2337,7 @@ impl<'a> Parser<'a> {
let lo = self.token.span;
let before = self.prev_token.clone();
let binder = if self.check_keyword(kw::For) {
let binder = if self.check_keyword(exp!(For)) {
let lo = self.token.span;
let (lifetime_defs, _) = self.parse_late_bound_lifetime_defs()?;
let span = lo.to(self.prev_token.span);
@ -2352,7 +2352,7 @@ impl<'a> Parser<'a> {
let constness = self.parse_closure_constness();
let movability =
if self.eat_keyword(kw::Static) { Movability::Static } else { Movability::Movable };
if self.eat_keyword(exp!(Static)) { Movability::Static } else { Movability::Movable };
let coroutine_kind = if self.token.uninterpolated_span().at_least_rust_2018() {
self.parse_coroutine_kind(Case::Sensitive)
@ -2433,10 +2433,10 @@ impl<'a> Parser<'a> {
/// Parses an optional `move` prefix to a closure-like construct.
fn parse_capture_clause(&mut self) -> PResult<'a, CaptureBy> {
if self.eat_keyword(kw::Move) {
if self.eat_keyword(exp!(Move)) {
let move_kw_span = self.prev_token.span;
// Check for `move async` and recover
if self.check_keyword(kw::Async) {
if self.check_keyword(exp!(Async)) {
let move_async_span = self.token.span.with_lo(self.prev_token.span.data().lo);
Err(self
.dcx()
@ -2453,15 +2453,15 @@ impl<'a> Parser<'a> {
fn parse_fn_block_decl(&mut self) -> PResult<'a, (P<FnDecl>, Span)> {
let arg_start = self.token.span.lo();
let inputs = if self.eat(&token::OrOr) {
let inputs = if self.eat(exp!(OrOr)) {
ThinVec::new()
} else {
self.expect(&token::BinOp(token::Or))?;
self.expect(exp!(Or))?;
let args = self
.parse_seq_to_before_tokens(
&[&token::BinOp(token::Or)],
&[exp!(Or)],
&[&token::OrOr],
SeqSep::trailing_allowed(token::Comma),
SeqSep::trailing_allowed(exp!(Comma)),
|p| p.parse_fn_block_param(),
)?
.0;
@ -2481,7 +2481,7 @@ impl<'a> Parser<'a> {
let attrs = self.parse_outer_attributes()?;
self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
let pat = this.parse_pat_no_top_alt(Some(Expected::ParameterName), None)?;
let ty = if this.eat(&token::Colon) {
let ty = if this.eat(exp!(Colon)) {
this.parse_ty()?
} else {
this.mk_ty(pat.span, TyKind::Infer)
@ -2566,7 +2566,7 @@ impl<'a> Parser<'a> {
} else {
let attrs = self.parse_outer_attributes()?; // For recovery.
let maybe_fatarrow = self.token.clone();
let block = if self.check(&token::OpenDelim(Delimiter::Brace)) {
let block = if self.check(exp!(OpenBrace)) {
self.parse_block()?
} else if let Some(block) = recover_block_from_condition(self) {
block
@ -2609,7 +2609,7 @@ impl<'a> Parser<'a> {
self.error_on_if_block_attrs(lo, false, block.span, attrs);
block
};
let els = if self.eat_keyword(kw::Else) { Some(self.parse_expr_else()?) } else { None };
let els = if self.eat_keyword(exp!(Else)) { Some(self.parse_expr_else()?) } else { None };
Ok(self.mk_expr(lo.to(self.prev_token.span), ExprKind::If(cond, thn, els)))
}
@ -2662,7 +2662,7 @@ impl<'a> Parser<'a> {
});
self.bump();
} else {
self.expect(&token::Eq)?;
self.expect(exp!(Eq))?;
}
let attrs = self.parse_outer_attributes()?;
let (expr, _) =
@ -2675,9 +2675,9 @@ impl<'a> Parser<'a> {
fn parse_expr_else(&mut self) -> PResult<'a, P<Expr>> {
let else_span = self.prev_token.span; // `else`
let attrs = self.parse_outer_attributes()?; // For recovery.
let expr = if self.eat_keyword(kw::If) {
let expr = if self.eat_keyword(exp!(If)) {
ensure_sufficient_stack(|| self.parse_expr_if())?
} else if self.check(&TokenKind::OpenDelim(Delimiter::Brace)) {
} else if self.check(exp!(OpenBrace)) {
self.parse_simple_block()?
} else {
let snapshot = self.create_snapshot_for_diagnostic();
@ -2719,7 +2719,7 @@ impl<'a> Parser<'a> {
// while true {}
// }
// ^
if self.check(&TokenKind::OpenDelim(Delimiter::Brace))
if self.check(exp!(OpenBrace))
&& (classify::expr_requires_semi_to_be_stmt(&cond)
|| matches!(cond.kind, ExprKind::MacCall(..)))
=>
@ -2805,7 +2805,7 @@ impl<'a> Parser<'a> {
begin_paren,
) {
(Ok(pat), _) => pat, // Happy path.
(Err(err), Some((start_span, left))) if self.eat_keyword(kw::In) => {
(Err(err), Some((start_span, left))) if self.eat_keyword(exp!(In)) => {
// We know for sure we have seen `for ($SOMETHING in`. In the happy path this would
// happen right before the return of this method.
let attrs = self.parse_outer_attributes()?;
@ -2839,7 +2839,7 @@ impl<'a> Parser<'a> {
}
(Err(err), _) => return Err(err), // Some other error, bubble up.
};
if !self.eat_keyword(kw::In) {
if !self.eat_keyword(exp!(In)) {
self.error_missing_in_for_loop();
}
self.check_for_for_in_in_typo(self.prev_token.span);
@ -2851,7 +2851,7 @@ impl<'a> Parser<'a> {
/// Parses `for await? <src_pat> in <src_expr> <src_loop_block>` (`for` token already eaten).
fn parse_expr_for(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
let is_await =
self.token.uninterpolated_span().at_least_rust_2018() && self.eat_keyword(kw::Await);
self.token.uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Await));
if is_await {
self.psess.gated_spans.gate(sym::async_for_loop, self.prev_token.span);
@ -2981,7 +2981,7 @@ impl<'a> Parser<'a> {
scrutinee: P<Expr>,
match_kind: MatchKind,
) -> PResult<'a, P<Expr>> {
if let Err(mut e) = self.expect(&token::OpenDelim(Delimiter::Brace)) {
if let Err(mut e) = self.expect(exp!(OpenBrace)) {
if self.token == token::Semi {
e.span_suggestion_short(
match_span,
@ -3121,7 +3121,7 @@ impl<'a> Parser<'a> {
let span_before_body = this.prev_token.span;
let arm_body;
let is_fat_arrow = this.check(&token::FatArrow);
let is_fat_arrow = this.check(exp!(FatArrow));
let is_almost_fat_arrow = TokenKind::FatArrow
.similar_tokens()
.is_some_and(|similar_tokens| similar_tokens.contains(&this.token.kind));
@ -3134,17 +3134,15 @@ impl<'a> Parser<'a> {
let mut result = if armless {
// A pattern without a body, allowed for never patterns.
arm_body = None;
this.expect_one_of(&[token::Comma], &[token::CloseDelim(Delimiter::Brace)]).map(
|x| {
// Don't gate twice
if !pat.contains_never_pattern() {
this.psess.gated_spans.gate(sym::never_patterns, pat.span);
}
x
},
)
this.expect_one_of(&[exp!(Comma)], &[exp!(CloseBrace)]).map(|x| {
// Don't gate twice
if !pat.contains_never_pattern() {
this.psess.gated_spans.gate(sym::never_patterns, pat.span);
}
x
})
} else {
if let Err(mut err) = this.expect(&token::FatArrow) {
if let Err(mut err) = this.expect(exp!(FatArrow)) {
// We might have a `=>` -> `=` or `->` typo (issue #89396).
if is_almost_fat_arrow {
err.span_suggestion(
@ -3184,7 +3182,7 @@ impl<'a> Parser<'a> {
if !require_comma {
arm_body = Some(expr);
// Eat a comma if it exists, though.
let _ = this.eat(&token::Comma);
let _ = this.eat(exp!(Comma));
Ok(Recovered::No)
} else if let Some((span, guar)) =
this.parse_arm_body_missing_braces(&expr, arrow_span)
@ -3195,42 +3193,40 @@ impl<'a> Parser<'a> {
} else {
let expr_span = expr.span;
arm_body = Some(expr);
this.expect_one_of(&[token::Comma], &[token::CloseDelim(Delimiter::Brace)])
.map_err(|mut err| {
if this.token == token::FatArrow {
let sm = this.psess.source_map();
if let Ok(expr_lines) = sm.span_to_lines(expr_span)
&& let Ok(arm_start_lines) = sm.span_to_lines(arm_start_span)
&& arm_start_lines.lines[0].end_col
== expr_lines.lines[0].end_col
&& expr_lines.lines.len() == 2
{
// We check whether there's any trailing code in the parse span,
// if there isn't, we very likely have the following:
//
// X | &Y => "y"
// | -- - missing comma
// | |
// | arrow_span
// X | &X => "x"
// | - ^^ self.token.span
// | |
// | parsed until here as `"y" & X`
err.span_suggestion_short(
arm_start_span.shrink_to_hi(),
"missing a comma here to end this `match` arm",
",",
Applicability::MachineApplicable,
);
}
} else {
err.span_label(
arrow_span,
"while parsing the `match` arm starting here",
this.expect_one_of(&[exp!(Comma)], &[exp!(CloseBrace)]).map_err(|mut err| {
if this.token == token::FatArrow {
let sm = this.psess.source_map();
if let Ok(expr_lines) = sm.span_to_lines(expr_span)
&& let Ok(arm_start_lines) = sm.span_to_lines(arm_start_span)
&& arm_start_lines.lines[0].end_col == expr_lines.lines[0].end_col
&& expr_lines.lines.len() == 2
{
// We check whether there's any trailing code in the parse span,
// if there isn't, we very likely have the following:
//
// X | &Y => "y"
// | -- - missing comma
// | |
// | arrow_span
// X | &X => "x"
// | - ^^ self.token.span
// | |
// | parsed until here as `"y" & X`
err.span_suggestion_short(
arm_start_span.shrink_to_hi(),
"missing a comma here to end this `match` arm",
",",
Applicability::MachineApplicable,
);
}
err
})
} else {
err.span_label(
arrow_span,
"while parsing the `match` arm starting here",
);
}
err
})
}
};
@ -3267,7 +3263,7 @@ impl<'a> Parser<'a> {
)
.map_err(|err| err.cancel())
.is_ok();
if pattern_follows && snapshot.check(&TokenKind::FatArrow) {
if pattern_follows && snapshot.check(exp!(FatArrow)) {
err.cancel();
let guar = this.dcx().emit_err(errors::MissingCommaAfterMatchArm {
span: arm_span.shrink_to_hi(),
@ -3309,7 +3305,7 @@ impl<'a> Parser<'a> {
_ => (false, true),
}
}
if !self.eat_keyword(kw::If) {
if !self.eat_keyword(exp!(If)) {
// No match arm guard present.
return Ok(None);
}
@ -3384,7 +3380,7 @@ impl<'a> Parser<'a> {
// errors.
self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore);
let msg = "you might have meant to start a match arm after the match guard";
if self.eat(&token::CloseDelim(Delimiter::Brace)) {
if self.eat(exp!(CloseBrace)) {
let applicability = if self.token != token::FatArrow {
// We have high confidence that we indeed didn't have a struct
// literal in the match guard, but rather we had some operation
@ -3409,7 +3405,7 @@ impl<'a> Parser<'a> {
/// Parses a `try {...}` expression (`try` token already eaten).
fn parse_try_block(&mut self, span_lo: Span) -> PResult<'a, P<Expr>> {
let (attrs, body) = self.parse_inner_attrs_and_block()?;
if self.eat_keyword(kw::Catch) {
if self.eat_keyword(exp!(Catch)) {
Err(self.dcx().create_err(errors::CatchAfterTry { span: self.prev_token.span }))
} else {
let span = span_lo.to(body.span);
@ -3440,10 +3436,10 @@ impl<'a> Parser<'a> {
/// Parses an `async move? {...}` or `gen move? {...}` expression.
fn parse_gen_block(&mut self) -> PResult<'a, P<Expr>> {
let lo = self.token.span;
let kind = if self.eat_keyword(kw::Async) {
if self.eat_keyword(kw::Gen) { GenBlockKind::AsyncGen } else { GenBlockKind::Async }
let kind = if self.eat_keyword(exp!(Async)) {
if self.eat_keyword(exp!(Gen)) { GenBlockKind::AsyncGen } else { GenBlockKind::Async }
} else {
assert!(self.eat_keyword(kw::Gen));
assert!(self.eat_keyword(exp!(Gen)));
GenBlockKind::Gen
};
match kind {
@ -3504,7 +3500,7 @@ impl<'a> Parser<'a> {
) -> Option<PResult<'a, P<Expr>>> {
let struct_allowed = !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
if struct_allowed || self.is_certainly_not_a_block() {
if let Err(err) = self.expect(&token::OpenDelim(Delimiter::Brace)) {
if let Err(err) = self.expect(exp!(OpenBrace)) {
return Some(Err(err));
}
let expr = self.parse_expr_struct(qself.clone(), path.clone(), true);
@ -3527,7 +3523,7 @@ impl<'a> Parser<'a> {
&mut self,
pth: ast::Path,
recover: bool,
close_delim: Delimiter,
close: ExpTokenPair<'_>,
) -> PResult<
'a,
(
@ -3546,11 +3542,11 @@ impl<'a> Parser<'a> {
errors::HelpUseLatestEdition::new().add_to_diag(e);
};
while self.token != token::CloseDelim(close_delim) {
if self.eat(&token::DotDot) || self.recover_struct_field_dots(close_delim) {
while self.token != *close.tok {
if self.eat(exp!(DotDot)) || self.recover_struct_field_dots(close.tok) {
let exp_span = self.prev_token.span;
// We permit `.. }` on the left-hand side of a destructuring assignment.
if self.check(&token::CloseDelim(close_delim)) {
if self.check(close) {
base = ast::StructRest::Rest(self.prev_token.span);
break;
}
@ -3625,7 +3621,7 @@ impl<'a> Parser<'a> {
Applicability::MaybeIncorrect,
);
}
if in_if_guard && close_delim == Delimiter::Brace {
if in_if_guard && close.token_type == TokenType::CloseBrace {
return Err(e);
}
@ -3655,9 +3651,9 @@ impl<'a> Parser<'a> {
let is_shorthand = parsed_field.as_ref().is_ok_and(|f| f.is_shorthand);
// A shorthand field can be turned into a full field with `:`.
// We should point this out.
self.check_or_expected(!is_shorthand, TokenType::Token(token::Colon));
self.check_or_expected(!is_shorthand, TokenType::Colon);
match self.expect_one_of(&[token::Comma], &[token::CloseDelim(close_delim)]) {
match self.expect_one_of(&[exp!(Comma)], &[close]) {
Ok(_) => {
if let Ok(f) = parsed_field.or_else(|guar| field_ident(self, guar).ok_or(guar))
{
@ -3689,7 +3685,7 @@ impl<'a> Parser<'a> {
fields.push(f);
}
self.recover_stmt_(SemiColonMode::Comma, BlockMode::Ignore);
let _ = self.eat(&token::Comma);
let _ = self.eat(exp!(Comma));
}
}
}
@ -3705,9 +3701,9 @@ impl<'a> Parser<'a> {
) -> PResult<'a, P<Expr>> {
let lo = pth.span;
let (fields, base, recovered_async) =
self.parse_struct_fields(pth.clone(), recover, Delimiter::Brace)?;
self.parse_struct_fields(pth.clone(), recover, exp!(CloseBrace))?;
let span = lo.to(self.token.span);
self.expect(&token::CloseDelim(Delimiter::Brace))?;
self.expect(exp!(CloseBrace))?;
let expr = if let Some(guar) = recovered_async {
ExprKind::Err(guar)
} else {
@ -3727,10 +3723,8 @@ impl<'a> Parser<'a> {
self.recover_stmt();
}
fn recover_struct_field_dots(&mut self, close_delim: Delimiter) -> bool {
if !self.look_ahead(1, |t| *t == token::CloseDelim(close_delim))
&& self.eat(&token::DotDotDot)
{
fn recover_struct_field_dots(&mut self, close: &TokenKind) -> bool {
if !self.look_ahead(1, |t| t == close) && self.eat(exp!(DotDotDot)) {
// recover from typo of `...`, suggest `..`
let span = self.prev_token.span;
self.dcx().emit_err(errors::MissingDotDot { token_span: span, sugg_span: span });

View File

@ -13,6 +13,7 @@ use crate::errors::{
UnexpectedSelfInGenericParameters, WhereClauseBeforeTupleStructBody,
WhereClauseBeforeTupleStructBodySugg,
};
use crate::exp;
enum PredicateKindOrStructBody {
PredicateKind(ast::WherePredicateKind),
@ -52,7 +53,7 @@ impl<'a> Parser<'a> {
// Parse optional colon and param bounds.
let mut colon_span = None;
let bounds = if self.eat(&token::Colon) {
let bounds = if self.eat(exp!(Colon)) {
colon_span = Some(self.prev_token.span);
// recover from `impl Trait` in type param bound
if self.token.is_keyword(kw::Impl) {
@ -89,7 +90,7 @@ impl<'a> Parser<'a> {
Vec::new()
};
let default = if self.eat(&token::Eq) { Some(self.parse_ty()?) } else { None };
let default = if self.eat(exp!(Eq)) { Some(self.parse_ty()?) } else { None };
Ok(GenericParam {
ident,
id: ast::DUMMY_NODE_ID,
@ -107,13 +108,13 @@ impl<'a> Parser<'a> {
) -> PResult<'a, GenericParam> {
let const_span = self.token.span;
self.expect_keyword(kw::Const)?;
self.expect_keyword(exp!(Const))?;
let ident = self.parse_ident()?;
self.expect(&token::Colon)?;
self.expect(exp!(Colon))?;
let ty = self.parse_ty()?;
// Parse optional const generics default value.
let default = if self.eat(&token::Eq) { Some(self.parse_const_arg()?) } else { None };
let default = if self.eat(exp!(Eq)) { Some(self.parse_const_arg()?) } else { None };
Ok(GenericParam {
ident,
@ -132,11 +133,11 @@ impl<'a> Parser<'a> {
mistyped_const_ident: Ident,
) -> PResult<'a, GenericParam> {
let ident = self.parse_ident()?;
self.expect(&token::Colon)?;
self.expect(exp!(Colon))?;
let ty = self.parse_ty()?;
// Parse optional const generics default value.
let default = if self.eat(&token::Eq) { Some(self.parse_const_arg()?) } else { None };
let default = if self.eat(exp!(Eq)) { Some(self.parse_const_arg()?) } else { None };
self.dcx()
.struct_span_err(
@ -177,13 +178,13 @@ impl<'a> Parser<'a> {
.emit_err(UnexpectedSelfInGenericParameters { span: this.prev_token.span });
// Eat a trailing comma, if it exists.
let _ = this.eat(&token::Comma);
let _ = this.eat(exp!(Comma));
}
let param = if this.check_lifetime() {
let lifetime = this.expect_lifetime();
// Parse lifetime parameter.
let (colon_span, bounds) = if this.eat(&token::Colon) {
let (colon_span, bounds) = if this.eat(exp!(Colon)) {
(Some(this.prev_token.span), this.parse_lt_param_bounds())
} else {
(None, Vec::new())
@ -209,7 +210,7 @@ impl<'a> Parser<'a> {
is_placeholder: false,
colon_span,
})
} else if this.check_keyword(kw::Const) {
} else if this.check_keyword(exp!(Const)) {
// Parse const parameter.
Some(this.parse_const_param(attrs)?)
} else if this.check_ident() {
@ -246,7 +247,7 @@ impl<'a> Parser<'a> {
return Ok((None, Trailing::No, UsePreAttrPos::No));
};
if !this.eat(&token::Comma) {
if !this.eat(exp!(Comma)) {
done = true;
}
// We just ate the comma, so no need to capture the trailing token.
@ -324,7 +325,7 @@ impl<'a> Parser<'a> {
};
let mut tuple_struct_body = None;
if !self.eat_keyword(kw::Where) {
if !self.eat_keyword(exp!(Where)) {
return Ok((where_clause, None));
}
where_clause.has_where_token = true;
@ -344,7 +345,7 @@ impl<'a> Parser<'a> {
let kind = if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
let lifetime = self.expect_lifetime();
// Bounds starting with a colon are mandatory, but possibly empty.
self.expect(&token::Colon)?;
self.expect(exp!(Colon))?;
let bounds = self.parse_lt_param_bounds();
ast::WherePredicateKind::RegionPredicate(ast::WhereRegionPredicate {
lifetime,
@ -370,7 +371,7 @@ impl<'a> Parser<'a> {
});
let prev_token = self.prev_token.span;
let ate_comma = self.eat(&token::Comma);
let ate_comma = self.eat(exp!(Comma));
if self.eat_keyword_noexpect(kw::Where) {
self.dcx().emit_err(MultipleWhereClauses {
@ -464,7 +465,7 @@ impl<'a> Parser<'a> {
// Parse type with mandatory colon and (possibly empty) bounds,
// or with mandatory equality sign and the second type.
let ty = self.parse_ty_for_where_clause()?;
if self.eat(&token::Colon) {
if self.eat(exp!(Colon)) {
let bounds = self.parse_generic_bounds()?;
Ok(ast::WherePredicateKind::BoundPredicate(ast::WhereBoundPredicate {
bound_generic_params: lifetime_defs,
@ -473,7 +474,7 @@ impl<'a> Parser<'a> {
}))
// FIXME: Decide what should be used here, `=` or `==`.
// FIXME: We are just dropping the binders in lifetime_defs on the floor here.
} else if self.eat(&token::Eq) || self.eat(&token::EqEq) {
} else if self.eat(exp!(Eq)) || self.eat(exp!(EqEq)) {
let rhs_ty = self.parse_ty()?;
Ok(ast::WherePredicateKind::EqPredicate(ast::WhereEqPredicate { lhs_ty: ty, rhs_ty }))
} else {

View File

@ -20,29 +20,29 @@ use tracing::debug;
use super::diagnostics::{ConsumeClosingDelim, dummy_arg};
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{
AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Trailing, UsePreAttrPos,
AttrWrapper, ExpKeywordPair, ExpTokenPair, FollowedByType, ForceCollect, Parser, PathStyle,
Trailing, UsePreAttrPos,
};
use crate::errors::{self, MacroExpandsToAdtField};
use crate::{fluent_generated as fluent, maybe_whole};
use crate::{exp, fluent_generated as fluent, maybe_whole};
impl<'a> Parser<'a> {
/// Parses a source module as a crate. This is the main entry point for the parser.
pub fn parse_crate_mod(&mut self) -> PResult<'a, ast::Crate> {
let (attrs, items, spans) = self.parse_mod(&token::Eof)?;
let (attrs, items, spans) = self.parse_mod(exp!(Eof))?;
Ok(ast::Crate { attrs, items, spans, id: DUMMY_NODE_ID, is_placeholder: false })
}
/// Parses a `mod <foo> { ... }` or `mod <foo>;` item.
fn parse_item_mod(&mut self, attrs: &mut AttrVec) -> PResult<'a, ItemInfo> {
let safety = self.parse_safety(Case::Sensitive);
self.expect_keyword(kw::Mod)?;
self.expect_keyword(exp!(Mod))?;
let id = self.parse_ident()?;
let mod_kind = if self.eat(&token::Semi) {
let mod_kind = if self.eat(exp!(Semi)) {
ModKind::Unloaded
} else {
self.expect(&token::OpenDelim(Delimiter::Brace))?;
let (inner_attrs, items, inner_span) =
self.parse_mod(&token::CloseDelim(Delimiter::Brace))?;
self.expect(exp!(OpenBrace))?;
let (inner_attrs, items, inner_span) = self.parse_mod(exp!(CloseBrace))?;
attrs.extend(inner_attrs);
ModKind::Loaded(items, Inline::Yes, inner_span, Ok(()))
};
@ -55,7 +55,7 @@ impl<'a> Parser<'a> {
/// - `}` for mod items
pub fn parse_mod(
&mut self,
term: &TokenKind,
term: ExpTokenPair<'_>,
) -> PResult<'a, (AttrVec, ThinVec<P<Item>>, ModSpans)> {
let lo = self.token.span;
let attrs = self.parse_inner_attributes()?;
@ -209,15 +209,15 @@ impl<'a> Parser<'a> {
let check_pub = def == &Defaultness::Final;
let mut def_ = || mem::replace(def, Defaultness::Final);
let info = if self.eat_keyword_case(kw::Use, case) {
let info = if self.eat_keyword_case(exp!(Use), case) {
self.parse_use_item()?
} else if self.check_fn_front_matter(check_pub, case) {
// FUNCTION ITEM
let (ident, sig, generics, body) =
self.parse_fn(attrs, fn_parse_mode, lo, vis, case)?;
(ident, ItemKind::Fn(Box::new(Fn { defaultness: def_(), sig, generics, body })))
} else if self.eat_keyword(kw::Extern) {
if self.eat_keyword(kw::Crate) {
} else if self.eat_keyword(exp!(Extern)) {
if self.eat_keyword(exp!(Crate)) {
// EXTERN CRATE
self.parse_item_extern_crate()?
} else {
@ -227,7 +227,7 @@ impl<'a> Parser<'a> {
} else if self.is_unsafe_foreign_mod() {
// EXTERN BLOCK
let safety = self.parse_safety(Case::Sensitive);
self.expect_keyword(kw::Extern)?;
self.expect_keyword(exp!(Extern))?;
self.parse_item_foreign_mod(attrs, safety)?
} else if self.is_static_global() {
let safety = self.parse_safety(Case::Sensitive);
@ -255,28 +255,28 @@ impl<'a> Parser<'a> {
})),
)
}
} else if self.check_keyword(kw::Trait) || self.check_auto_or_unsafe_trait_item() {
} else if self.check_keyword(exp!(Trait)) || self.check_auto_or_unsafe_trait_item() {
// TRAIT ITEM
self.parse_item_trait(attrs, lo)?
} else if self.check_keyword(kw::Impl)
|| self.check_keyword(kw::Unsafe) && self.is_keyword_ahead(1, &[kw::Impl])
} else if self.check_keyword(exp!(Impl))
|| self.check_keyword(exp!(Unsafe)) && self.is_keyword_ahead(1, &[kw::Impl])
{
// IMPL ITEM
self.parse_item_impl(attrs, def_())?
} else if self.is_reuse_path_item() {
self.parse_item_delegation()?
} else if self.check_keyword(kw::Mod)
|| self.check_keyword(kw::Unsafe) && self.is_keyword_ahead(1, &[kw::Mod])
} else if self.check_keyword(exp!(Mod))
|| self.check_keyword(exp!(Unsafe)) && self.is_keyword_ahead(1, &[kw::Mod])
{
// MODULE ITEM
self.parse_item_mod(attrs)?
} else if self.eat_keyword(kw::Type) {
} else if self.eat_keyword(exp!(Type)) {
// TYPE ITEM
self.parse_type_alias(def_())?
} else if self.eat_keyword(kw::Enum) {
} else if self.eat_keyword(exp!(Enum)) {
// ENUM ITEM
self.parse_item_enum()?
} else if self.eat_keyword(kw::Struct) {
} else if self.eat_keyword(exp!(Struct)) {
// STRUCT ITEM
self.parse_item_struct()?
} else if self.is_kw_followed_by_ident(kw::Union) {
@ -286,7 +286,7 @@ impl<'a> Parser<'a> {
} else if self.is_builtin() {
// BUILTIN# ITEM
return self.parse_item_builtin();
} else if self.eat_keyword(kw::Macro) {
} else if self.eat_keyword(exp!(Macro)) {
// MACROS 2.0 ITEM
self.parse_item_decl_macro(lo)?
} else if let IsMacroRulesItem::Yes { has_bang } = self.is_macro_rules_item() {
@ -407,13 +407,13 @@ impl<'a> Parser<'a> {
};
let mut found_generics = false;
if self.check(&token::Lt) {
if self.check(exp!(Lt)) {
found_generics = true;
self.eat_to_tokens(&[&token::Gt]);
self.eat_to_tokens(&[exp!(Gt)]);
self.bump(); // `>`
}
let err = if self.check(&token::OpenDelim(Delimiter::Brace)) {
let err = if self.check(exp!(OpenBrace)) {
// possible struct or enum definition where `struct` or `enum` was forgotten
if self.look_ahead(1, |t| *t == token::CloseDelim(Delimiter::Brace)) {
// `S {}` could be unit enum or struct
@ -426,25 +426,23 @@ impl<'a> Parser<'a> {
} else {
Some(errors::MissingKeywordForItemDefinition::Enum { span, insert_span, ident })
}
} else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
} else if self.check(exp!(OpenParen)) {
// possible function or tuple struct definition where `fn` or `struct` was forgotten
self.bump(); // `(`
let is_method = self.recover_self_param();
self.consume_block(Delimiter::Parenthesis, ConsumeClosingDelim::Yes);
self.consume_block(exp!(OpenParen), exp!(CloseParen), ConsumeClosingDelim::Yes);
let err = if self.check(&token::RArrow)
|| self.check(&token::OpenDelim(Delimiter::Brace))
{
self.eat_to_tokens(&[&token::OpenDelim(Delimiter::Brace)]);
let err = if self.check(exp!(RArrow)) || self.check(exp!(OpenBrace)) {
self.eat_to_tokens(&[exp!(OpenBrace)]);
self.bump(); // `{`
self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
self.consume_block(exp!(OpenBrace), exp!(CloseBrace), ConsumeClosingDelim::Yes);
if is_method {
errors::MissingKeywordForItemDefinition::Method { span, insert_span, ident }
} else {
errors::MissingKeywordForItemDefinition::Function { span, insert_span, ident }
}
} else if is_pub && self.check(&token::Semi) {
} else if is_pub && self.check(exp!(Semi)) {
errors::MissingKeywordForItemDefinition::Struct { span, insert_span, ident }
} else {
errors::MissingKeywordForItemDefinition::Ambiguous {
@ -479,7 +477,7 @@ impl<'a> Parser<'a> {
/// Parses an item macro, e.g., `item!();`.
fn parse_item_macro(&mut self, vis: &Visibility) -> PResult<'a, MacCall> {
let path = self.parse_path(PathStyle::Mod)?; // `foo::bar`
self.expect(&token::Not)?; // `!`
self.expect(exp!(Not))?; // `!`
match self.parse_delim_args() {
// `( .. )` or `[ .. ]` (followed by `;`), or `{ .. }`.
Ok(args) => {
@ -539,7 +537,7 @@ impl<'a> Parser<'a> {
fn parse_polarity(&mut self) -> ast::ImplPolarity {
// Disambiguate `impl !Trait for Type { ... }` and `impl ! { ... }` for the never type.
if self.check(&token::Not) && self.look_ahead(1, |t| t.can_begin_type()) {
if self.check(exp!(Not)) && self.look_ahead(1, |t| t.can_begin_type()) {
self.bump(); // `!`
ast::ImplPolarity::Negative(self.prev_token.span)
} else {
@ -567,7 +565,7 @@ impl<'a> Parser<'a> {
defaultness: Defaultness,
) -> PResult<'a, ItemInfo> {
let safety = self.parse_safety(Case::Sensitive);
self.expect_keyword(kw::Impl)?;
self.expect_keyword(exp!(Impl))?;
// First, parse generic parameters if necessary.
let mut generics = if self.choose_generics_over_qpath(0) {
@ -617,7 +615,7 @@ impl<'a> Parser<'a> {
};
// If `for` is missing we try to recover.
let has_for = self.eat_keyword(kw::For);
let has_for = self.eat_keyword(exp!(For));
let missing_for_span = self.prev_token.span.between(self.token.span);
let ty_second = if self.token == token::DotDot {
@ -702,7 +700,7 @@ impl<'a> Parser<'a> {
fn parse_item_delegation(&mut self) -> PResult<'a, ItemInfo> {
let span = self.token.span;
self.expect_keyword(kw::Reuse)?;
self.expect_keyword(exp!(Reuse))?;
let (qself, path) = if self.eat_lt() {
let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
@ -712,23 +710,23 @@ impl<'a> Parser<'a> {
};
let rename = |this: &mut Self| {
Ok(if this.eat_keyword(kw::As) { Some(this.parse_ident()?) } else { None })
Ok(if this.eat_keyword(exp!(As)) { Some(this.parse_ident()?) } else { None })
};
let body = |this: &mut Self| {
Ok(if this.check(&token::OpenDelim(Delimiter::Brace)) {
Ok(if this.check(exp!(OpenBrace)) {
Some(this.parse_block()?)
} else {
this.expect(&token::Semi)?;
this.expect(exp!(Semi))?;
None
})
};
let (ident, item_kind) = if self.eat_path_sep() {
let suffixes = if self.eat(&token::BinOp(token::Star)) {
let suffixes = if self.eat(exp!(Star)) {
None
} else {
let parse_suffix = |p: &mut Self| Ok((p.parse_path_segment_ident()?, rename(p)?));
Some(self.parse_delim_comma_seq(Delimiter::Brace, parse_suffix)?.0)
Some(self.parse_delim_comma_seq(exp!(OpenBrace), exp!(CloseBrace), parse_suffix)?.0)
};
let deleg = DelegationMac { qself, prefix: path, suffixes, body: body(self)? };
(Ident::empty(), ItemKind::DelegationMac(Box::new(deleg)))
@ -766,11 +764,11 @@ impl<'a> Parser<'a> {
return Ok(ThinVec::new());
}
self.expect(&token::OpenDelim(Delimiter::Brace))?;
self.expect(exp!(OpenBrace))?;
attrs.extend(self.parse_inner_attributes()?);
let mut items = ThinVec::new();
while !self.eat(&token::CloseDelim(Delimiter::Brace)) {
while !self.eat(exp!(CloseBrace)) {
if self.recover_doc_comment_before_brace() {
continue;
}
@ -811,7 +809,7 @@ impl<'a> Parser<'a> {
let mut err =
self.dcx().struct_span_err(non_item_span, "non-item in item list");
self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
self.consume_block(exp!(OpenBrace), exp!(CloseBrace), ConsumeClosingDelim::Yes);
if is_let {
err.span_suggestion_verbose(
non_item_span,
@ -837,7 +835,7 @@ impl<'a> Parser<'a> {
}
Ok(Some(item)) => items.extend(item),
Err(err) => {
self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
self.consume_block(exp!(OpenBrace), exp!(CloseBrace), ConsumeClosingDelim::Yes);
err.with_span_label(
open_brace_span,
"while parsing this item list starting here",
@ -880,7 +878,7 @@ impl<'a> Parser<'a> {
// We are interested in `default` followed by another identifier.
// However, we must avoid keywords that occur as binary operators.
// Currently, the only applicable keyword is `as` (`default as Ty`).
if self.check_keyword(kw::Default)
if self.check_keyword(exp!(Default))
&& self.look_ahead(1, |t| t.is_non_raw_ident_where(|i| i.name != kw::As))
{
self.bump(); // `default`
@ -893,33 +891,33 @@ impl<'a> Parser<'a> {
/// Is this an `(unsafe auto? | auto) trait` item?
fn check_auto_or_unsafe_trait_item(&mut self) -> bool {
// auto trait
self.check_keyword(kw::Auto) && self.is_keyword_ahead(1, &[kw::Trait])
self.check_keyword(exp!(Auto)) && self.is_keyword_ahead(1, &[kw::Trait])
// unsafe auto trait
|| self.check_keyword(kw::Unsafe) && self.is_keyword_ahead(1, &[kw::Trait, kw::Auto])
|| self.check_keyword(exp!(Unsafe)) && self.is_keyword_ahead(1, &[kw::Trait, kw::Auto])
}
/// Parses `unsafe? auto? trait Foo { ... }` or `trait Foo = Bar;`.
fn parse_item_trait(&mut self, attrs: &mut AttrVec, lo: Span) -> PResult<'a, ItemInfo> {
let safety = self.parse_safety(Case::Sensitive);
// Parse optional `auto` prefix.
let is_auto = if self.eat_keyword(kw::Auto) {
let is_auto = if self.eat_keyword(exp!(Auto)) {
self.psess.gated_spans.gate(sym::auto_traits, self.prev_token.span);
IsAuto::Yes
} else {
IsAuto::No
};
self.expect_keyword(kw::Trait)?;
self.expect_keyword(exp!(Trait))?;
let ident = self.parse_ident()?;
let mut generics = self.parse_generics()?;
// Parse optional colon and supertrait bounds.
let had_colon = self.eat(&token::Colon);
let had_colon = self.eat(exp!(Colon));
let span_at_colon = self.prev_token.span;
let bounds = if had_colon { self.parse_generic_bounds()? } else { Vec::new() };
let span_before_eq = self.prev_token.span;
if self.eat(&token::Eq) {
if self.eat(exp!(Eq)) {
// It's a trait alias.
if had_colon {
let span = span_at_colon.to(span_before_eq);
@ -1007,11 +1005,10 @@ impl<'a> Parser<'a> {
let mut generics = self.parse_generics()?;
// Parse optional colon and param bounds.
let bounds =
if self.eat(&token::Colon) { self.parse_generic_bounds()? } else { Vec::new() };
let bounds = if self.eat(exp!(Colon)) { self.parse_generic_bounds()? } else { Vec::new() };
let before_where_clause = self.parse_where_clause()?;
let ty = if self.eat(&token::Eq) { Some(self.parse_ty()?) } else { None };
let ty = if self.eat(exp!(Eq)) { Some(self.parse_ty()?) } else { None };
let after_where_clause = self.parse_where_clause()?;
@ -1064,46 +1061,44 @@ impl<'a> Parser<'a> {
let mut prefix =
ast::Path { segments: ThinVec::new(), span: lo.shrink_to_lo(), tokens: None };
let kind = if self.check(&token::OpenDelim(Delimiter::Brace))
|| self.check(&token::BinOp(token::Star))
|| self.is_import_coupler()
{
// `use *;` or `use ::*;` or `use {...};` or `use ::{...};`
let mod_sep_ctxt = self.token.span.ctxt();
if self.eat_path_sep() {
prefix
.segments
.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
}
self.parse_use_tree_glob_or_nested()?
} else {
// `use path::*;` or `use path::{...};` or `use path;` or `use path as bar;`
prefix = self.parse_path(PathStyle::Mod)?;
if self.eat_path_sep() {
self.parse_use_tree_glob_or_nested()?
} else {
// Recover from using a colon as path separator.
while self.eat_noexpect(&token::Colon) {
self.dcx()
.emit_err(errors::SingleColonImportPath { span: self.prev_token.span });
// We parse the rest of the path and append it to the original prefix.
self.parse_path_segments(&mut prefix.segments, PathStyle::Mod, None)?;
prefix.span = lo.to(self.prev_token.span);
let kind =
if self.check(exp!(OpenBrace)) || self.check(exp!(Star)) || self.is_import_coupler() {
// `use *;` or `use ::*;` or `use {...};` or `use ::{...};`
let mod_sep_ctxt = self.token.span.ctxt();
if self.eat_path_sep() {
prefix
.segments
.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
}
UseTreeKind::Simple(self.parse_rename()?)
}
};
self.parse_use_tree_glob_or_nested()?
} else {
// `use path::*;` or `use path::{...};` or `use path;` or `use path as bar;`
prefix = self.parse_path(PathStyle::Mod)?;
if self.eat_path_sep() {
self.parse_use_tree_glob_or_nested()?
} else {
// Recover from using a colon as path separator.
while self.eat_noexpect(&token::Colon) {
self.dcx()
.emit_err(errors::SingleColonImportPath { span: self.prev_token.span });
// We parse the rest of the path and append it to the original prefix.
self.parse_path_segments(&mut prefix.segments, PathStyle::Mod, None)?;
prefix.span = lo.to(self.prev_token.span);
}
UseTreeKind::Simple(self.parse_rename()?)
}
};
Ok(UseTree { prefix, kind, span: lo.to(self.prev_token.span) })
}
/// Parses `*` or `{...}`.
fn parse_use_tree_glob_or_nested(&mut self) -> PResult<'a, UseTreeKind> {
Ok(if self.eat(&token::BinOp(token::Star)) {
Ok(if self.eat(exp!(Star)) {
UseTreeKind::Glob
} else {
let lo = self.token.span;
@ -1120,7 +1115,7 @@ impl<'a> Parser<'a> {
/// USE_TREE_LIST = ∅ | (USE_TREE `,`)* USE_TREE [`,`]
/// ```
fn parse_use_tree_list(&mut self) -> PResult<'a, ThinVec<(UseTree, ast::NodeId)>> {
self.parse_delim_comma_seq(Delimiter::Brace, |p| {
self.parse_delim_comma_seq(exp!(OpenBrace), exp!(CloseBrace), |p| {
p.recover_vcs_conflict_marker();
Ok((p.parse_use_tree()?, DUMMY_NODE_ID))
})
@ -1128,7 +1123,11 @@ impl<'a> Parser<'a> {
}
fn parse_rename(&mut self) -> PResult<'a, Option<Ident>> {
if self.eat_keyword(kw::As) { self.parse_ident_or_underscore().map(Some) } else { Ok(None) }
if self.eat_keyword(exp!(As)) {
self.parse_ident_or_underscore().map(Some)
} else {
Ok(None)
}
}
fn parse_ident_or_underscore(&mut self) -> PResult<'a, Ident> {
@ -1168,15 +1167,15 @@ impl<'a> Parser<'a> {
self.parse_ident()
}?;
let dash = token::BinOp(token::BinOpToken::Minus);
if self.token != dash {
let dash = exp!(Minus);
if self.token != *dash.tok {
return Ok(ident);
}
// Accept `extern crate name-like-this` for better diagnostics.
let mut dashes = vec![];
let mut idents = vec![];
while self.eat(&dash) {
while self.eat(dash) {
dashes.push(self.prev_token.span);
idents.push(self.parse_ident()?);
}
@ -1217,9 +1216,9 @@ impl<'a> Parser<'a> {
&& self.token.is_keyword(kw::Unsafe)
&& self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace))
{
self.expect(&token::OpenDelim(Delimiter::Brace)).unwrap_err().emit();
self.expect(exp!(OpenBrace)).unwrap_err().emit();
safety = Safety::Unsafe(self.token.span);
let _ = self.eat_keyword(kw::Unsafe);
let _ = self.eat_keyword(exp!(Unsafe));
}
let module = ast::ForeignMod {
extern_span,
@ -1285,7 +1284,7 @@ impl<'a> Parser<'a> {
}
fn is_static_global(&mut self) -> bool {
if self.check_keyword(kw::Static) {
if self.check_keyword(exp!(Static)) {
// Check if this could be a closure.
!self.look_ahead(1, |token| {
if token.is_keyword(kw::Move) {
@ -1294,20 +1293,19 @@ impl<'a> Parser<'a> {
matches!(token.kind, token::BinOp(token::Or) | token::OrOr)
})
} else {
let quals: &[Symbol] = &[kw::Unsafe, kw::Safe];
// `$qual static`
quals.iter().any(|&kw| self.check_keyword(kw))
(self.check_keyword(exp!(Unsafe)) || self.check_keyword(exp!(Safe)))
&& self.look_ahead(1, |t| t.is_keyword(kw::Static))
}
}
/// Recover on `const mut` with `const` already eaten.
fn recover_const_mut(&mut self, const_span: Span) {
if self.eat_keyword(kw::Mut) {
if self.eat_keyword(exp!(Mut)) {
let span = self.prev_token.span;
self.dcx()
.emit_err(errors::ConstGlobalCannotBeMutable { ident_span: span, const_span });
} else if self.eat_keyword(kw::Let) {
} else if self.eat_keyword(exp!(Let)) {
let span = self.prev_token.span;
self.dcx().emit_err(errors::ConstLetMutuallyExclusive { span: const_span.to(span) });
}
@ -1372,14 +1370,13 @@ impl<'a> Parser<'a> {
// Parse the type of a static item. That is, the `":" $ty` fragment.
// FIXME: This could maybe benefit from `.may_recover()`?
let ty = match (self.eat(&token::Colon), self.check(&token::Eq) | self.check(&token::Semi))
{
let ty = match (self.eat(exp!(Colon)), self.check(exp!(Eq)) | self.check(exp!(Semi))) {
(true, false) => self.parse_ty()?,
// If there wasn't a `:` or the colon was followed by a `=` or `;`, recover a missing type.
(colon, _) => self.recover_missing_global_item_type(colon, Some(mutability)),
};
let expr = if self.eat(&token::Eq) { Some(self.parse_expr()?) } else { None };
let expr = if self.eat(exp!(Eq)) { Some(self.parse_expr()?) } else { None };
self.expect_semi()?;
@ -1405,8 +1402,8 @@ impl<'a> Parser<'a> {
// Parse the type of a constant item. That is, the `":" $ty` fragment.
// FIXME: This could maybe benefit from `.may_recover()`?
let ty = match (
self.eat(&token::Colon),
self.check(&token::Eq) | self.check(&token::Semi) | self.check_keyword(kw::Where),
self.eat(exp!(Colon)),
self.check(exp!(Eq)) | self.check(exp!(Semi)) | self.check_keyword(exp!(Where)),
) {
(true, false) => self.parse_ty()?,
// If there wasn't a `:` or the colon was followed by a `=`, `;` or `where`, recover a missing type.
@ -1418,7 +1415,7 @@ impl<'a> Parser<'a> {
let before_where_clause =
if self.may_recover() { self.parse_where_clause()? } else { WhereClause::default() };
let expr = if self.eat(&token::Eq) { Some(self.parse_expr()?) } else { None };
let expr = if self.eat(exp!(Eq)) { Some(self.parse_expr()?) } else { None };
let after_where_clause = self.parse_where_clause()?;
@ -1531,31 +1528,33 @@ impl<'a> Parser<'a> {
self.bump();
(thin_vec![], Trailing::No)
} else {
self.parse_delim_comma_seq(Delimiter::Brace, |p| p.parse_enum_variant(id.span))
.map_err(|mut err| {
err.span_label(id.span, "while parsing this enum");
if self.token == token::Colon {
let snapshot = self.create_snapshot_for_diagnostic();
self.bump();
match self.parse_ty() {
Ok(_) => {
err.span_suggestion_verbose(
prev_span,
"perhaps you meant to use `struct` here",
"struct",
Applicability::MaybeIncorrect,
);
}
Err(e) => {
e.cancel();
}
self.parse_delim_comma_seq(exp!(OpenBrace), exp!(CloseBrace), |p| {
p.parse_enum_variant(id.span)
})
.map_err(|mut err| {
err.span_label(id.span, "while parsing this enum");
if self.token == token::Colon {
let snapshot = self.create_snapshot_for_diagnostic();
self.bump();
match self.parse_ty() {
Ok(_) => {
err.span_suggestion_verbose(
prev_span,
"perhaps you meant to use `struct` here",
"struct",
Applicability::MaybeIncorrect,
);
}
Err(e) => {
e.cancel();
}
self.restore_snapshot(snapshot);
}
self.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]);
self.bump(); // }
err
})?
self.restore_snapshot(snapshot);
}
self.eat_to_tokens(&[exp!(CloseBrace)]);
self.bump(); // }
err
})?
};
let enum_definition = EnumDef { variants: variants.into_iter().flatten().collect() };
@ -1588,7 +1587,7 @@ impl<'a> Parser<'a> {
return Ok((None, Trailing::from(this.token == token::Comma), UsePreAttrPos::No));
}
let struct_def = if this.check(&token::OpenDelim(Delimiter::Brace)) {
let struct_def = if this.check(exp!(OpenBrace)) {
// Parse a struct variant.
let (fields, recovered) =
match this.parse_record_struct_body("struct", ident.span, false) {
@ -1598,7 +1597,7 @@ impl<'a> Parser<'a> {
// We handle `enum` to `struct` suggestion in the caller.
return Err(err);
}
this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]);
this.eat_to_tokens(&[exp!(CloseBrace)]);
this.bump(); // }
err.span_label(span, "while parsing this enum");
err.help(help);
@ -1607,7 +1606,7 @@ impl<'a> Parser<'a> {
}
};
VariantData::Struct { fields, recovered }
} else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) {
} else if this.check(exp!(OpenParen)) {
let body = match this.parse_tuple_struct_body() {
Ok(body) => body,
Err(mut err) => {
@ -1615,7 +1614,7 @@ impl<'a> Parser<'a> {
// We handle `enum` to `struct` suggestion in the caller.
return Err(err);
}
this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Parenthesis)]);
this.eat_to_tokens(&[exp!(CloseParen)]);
this.bump(); // )
err.span_label(span, "while parsing this enum");
err.help(help);
@ -1629,7 +1628,7 @@ impl<'a> Parser<'a> {
};
let disr_expr =
if this.eat(&token::Eq) { Some(this.parse_expr_anon_const()?) } else { None };
if this.eat(exp!(Eq)) { Some(this.parse_expr_anon_const()?) } else { None };
let vr = ast::Variant {
ident,
@ -1680,7 +1679,7 @@ impl<'a> Parser<'a> {
let body = VariantData::Tuple(body, DUMMY_NODE_ID);
self.expect_semi()?;
body
} else if self.eat(&token::Semi) {
} else if self.eat(exp!(Semi)) {
// If we see a: `struct Foo<T> where T: Copy;` style decl.
VariantData::Unit(DUMMY_NODE_ID)
} else {
@ -1693,7 +1692,7 @@ impl<'a> Parser<'a> {
VariantData::Struct { fields, recovered }
}
// No `where` so: `struct Foo<T>;`
} else if self.eat(&token::Semi) {
} else if self.eat(exp!(Semi)) {
VariantData::Unit(DUMMY_NODE_ID)
// Record-style struct definition
} else if self.token == token::OpenDelim(Delimiter::Brace) {
@ -1762,14 +1761,18 @@ impl<'a> Parser<'a> {
) -> PResult<'a, (ThinVec<FieldDef>, Recovered)> {
let mut fields = ThinVec::new();
let mut recovered = Recovered::No;
if self.eat(&token::OpenDelim(Delimiter::Brace)) {
if self.eat(exp!(OpenBrace)) {
while self.token != token::CloseDelim(Delimiter::Brace) {
match self.parse_field_def(adt_ty) {
Ok(field) => {
fields.push(field);
}
Err(mut err) => {
self.consume_block(Delimiter::Brace, ConsumeClosingDelim::No);
self.consume_block(
exp!(OpenBrace),
exp!(CloseBrace),
ConsumeClosingDelim::No,
);
err.span_label(ident_span, format!("while parsing this {adt_ty}"));
let guar = err.emit();
recovered = Recovered::Yes(guar);
@ -1777,7 +1780,7 @@ impl<'a> Parser<'a> {
}
}
}
self.expect(&token::CloseDelim(Delimiter::Brace))?;
self.expect(exp!(CloseBrace))?;
} else {
let token_str = super::token_descr(&self.token);
let where_str = if parsed_where { "" } else { "`where`, or " };
@ -1792,7 +1795,7 @@ impl<'a> Parser<'a> {
fn parse_unsafe_field(&mut self) -> Safety {
// not using parse_safety as that also accepts `safe`.
if self.eat_keyword(kw::Unsafe) {
if self.eat_keyword(exp!(Unsafe)) {
let span = self.prev_token.span;
self.psess.gated_spans.gate(sym::unsafe_fields, span);
Safety::Unsafe(span)
@ -1900,7 +1903,7 @@ impl<'a> Parser<'a> {
if self.token == token::Comma {
seen_comma = true;
}
if self.eat(&token::Semi) {
if self.eat(exp!(Semi)) {
let sp = self.prev_token.span;
let mut err =
self.dcx().struct_span_err(sp, format!("{adt_ty} fields are separated by `,`"));
@ -1924,7 +1927,7 @@ impl<'a> Parser<'a> {
missing_comma: None,
};
self.bump(); // consume the doc comment
let comma_after_doc_seen = self.eat(&token::Comma);
let comma_after_doc_seen = self.eat(exp!(Comma));
// `seen_comma` is always false, because we are inside doc block
// condition is here to make code more readable
if !seen_comma && comma_after_doc_seen {
@ -1949,13 +1952,13 @@ impl<'a> Parser<'a> {
if let TyKind::Path(_, Path { segments, .. }) = &a_var.ty.kind {
if let Some(last_segment) = segments.last() {
let guar = self.check_trailing_angle_brackets(last_segment, &[
&token::Comma,
&token::CloseDelim(Delimiter::Brace),
exp!(Comma),
exp!(CloseBrace),
]);
if let Some(_guar) = guar {
// Handle a case like `Vec<u8>>,` where we can continue parsing fields
// after the comma
let _ = self.eat(&token::Comma);
let _ = self.eat(exp!(Comma));
// `check_trailing_angle_brackets` already emitted a nicer error, as
// proven by the presence of `_guar`. We can continue parsing.
@ -1988,7 +1991,7 @@ impl<'a> Parser<'a> {
}
fn expect_field_ty_separator(&mut self) -> PResult<'a, ()> {
if let Err(err) = self.expect(&token::Colon) {
if let Err(err) = self.expect(exp!(Colon)) {
let sm = self.psess.source_map();
let eq_typo = self.token == token::Eq && self.look_ahead(1, |t| t.is_path_start());
let semi_typo = self.token == token::Semi
@ -2096,7 +2099,7 @@ impl<'a> Parser<'a> {
self.expected_ident_found_err()
}
}
} else if self.eat_keyword(kw::Struct) {
} else if self.eat_keyword(exp!(Struct)) {
match self.parse_item_struct() {
Ok((ident, _)) => self
.dcx()
@ -2153,12 +2156,12 @@ impl<'a> Parser<'a> {
/// ```
fn parse_item_decl_macro(&mut self, lo: Span) -> PResult<'a, ItemInfo> {
let ident = self.parse_ident()?;
let body = if self.check(&token::OpenDelim(Delimiter::Brace)) {
let body = if self.check(exp!(OpenBrace)) {
self.parse_delim_args()? // `MacBody`
} else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
} else if self.check(exp!(OpenParen)) {
let params = self.parse_token_tree(); // `MacParams`
let pspan = params.span();
if !self.check(&token::OpenDelim(Delimiter::Brace)) {
if !self.check(exp!(OpenBrace)) {
self.unexpected()?;
}
let body = self.parse_token_tree(); // `MacBody`
@ -2178,7 +2181,7 @@ impl<'a> Parser<'a> {
/// Is this a possibly malformed start of a `macro_rules! foo` item definition?
fn is_macro_rules_item(&mut self) -> IsMacroRulesItem {
if self.check_keyword(kw::MacroRules) {
if self.check_keyword(exp!(MacroRules)) {
let macro_rules_span = self.token.span;
if self.look_ahead(1, |t| *t == token::Not) && self.look_ahead(2, |t| t.is_ident()) {
@ -2203,14 +2206,14 @@ impl<'a> Parser<'a> {
vis: &Visibility,
has_bang: bool,
) -> PResult<'a, ItemInfo> {
self.expect_keyword(kw::MacroRules)?; // `macro_rules`
self.expect_keyword(exp!(MacroRules))?; // `macro_rules`
if has_bang {
self.expect(&token::Not)?; // `!`
self.expect(exp!(Not))?; // `!`
}
let ident = self.parse_ident()?;
if self.eat(&token::Not) {
if self.eat(exp!(Not)) {
// Handle macro_rules! foo!
let span = self.prev_token.span;
self.dcx().emit_err(errors::MacroNameRemoveBang { span });
@ -2240,7 +2243,7 @@ impl<'a> Parser<'a> {
}
fn eat_semi_for_macro_if_needed(&mut self, args: &DelimArgs) {
if args.need_semicolon() && !self.eat(&token::Semi) {
if args.need_semicolon() && !self.eat(exp!(Semi)) {
self.report_invalid_macro_expansion_item(args);
}
}
@ -2416,11 +2419,8 @@ impl<'a> Parser<'a> {
req_body: bool,
fn_params_end: Option<Span>,
) -> PResult<'a, ErrorGuaranteed> {
let expected = if req_body {
&[token::OpenDelim(Delimiter::Brace)][..]
} else {
&[token::Semi, token::OpenDelim(Delimiter::Brace)]
};
let expected: &[_] =
if req_body { &[exp!(OpenBrace)] } else { &[exp!(Semi), exp!(OpenBrace)] };
match self.expected_one_of_not_found(&[], expected) {
Ok(error_guaranteed) => Ok(error_guaranteed),
Err(mut err) => {
@ -2505,14 +2505,14 @@ impl<'a> Parser<'a> {
self.token == TokenKind::Semi
} else {
// Only include `;` in list of expected tokens if body is not required
self.check(&TokenKind::Semi)
self.check(exp!(Semi))
};
let (inner_attrs, body) = if has_semi {
// Include the trailing semicolon in the span of the signature
self.expect_semi()?;
*sig_hi = self.prev_token.span;
(AttrVec::new(), None)
} else if self.check(&token::OpenDelim(Delimiter::Brace)) || self.token.is_whole_block() {
} else if self.check(exp!(OpenBrace)) || self.token.is_whole_block() {
self.parse_block_common(self.token.span, BlockCheckMode::Default, false)
.map(|(attrs, body)| (attrs, Some(body)))?
} else if self.token == token::Eq {
@ -2540,21 +2540,28 @@ impl<'a> Parser<'a> {
/// `check_pub` adds additional `pub` to the checks in case users place it
/// wrongly, can be used to ensure `pub` never comes after `default`.
pub(super) fn check_fn_front_matter(&mut self, check_pub: bool, case: Case) -> bool {
const ALL_QUALS: &[Symbol] =
&[kw::Pub, kw::Gen, kw::Const, kw::Async, kw::Unsafe, kw::Safe, kw::Extern];
const ALL_QUALS: &[ExpKeywordPair] = &[
exp!(Pub),
exp!(Gen),
exp!(Const),
exp!(Async),
exp!(Unsafe),
exp!(Safe),
exp!(Extern),
];
// We use an over-approximation here.
// `const const`, `fn const` won't parse, but we're not stepping over other syntax either.
// `pub` is added in case users got confused with the ordering like `async pub fn`,
// only if it wasn't preceded by `default` as `default pub` is invalid.
let quals: &[Symbol] = if check_pub {
let quals: &[_] = if check_pub {
ALL_QUALS
} else {
&[kw::Gen, kw::Const, kw::Async, kw::Unsafe, kw::Safe, kw::Extern]
&[exp!(Gen), exp!(Const), exp!(Async), exp!(Unsafe), exp!(Safe), exp!(Extern)]
};
self.check_keyword_case(kw::Fn, case) // Definitely an `fn`.
self.check_keyword_case(exp!(Fn), case) // Definitely an `fn`.
// `$qual fn` or `$qual $qual`:
|| quals.iter().any(|&kw| self.check_keyword_case(kw, case))
|| quals.iter().any(|&exp| self.check_keyword_case(exp, case))
&& self.look_ahead(1, |t| {
// `$qual fn`, e.g. `const fn` or `async fn`.
t.is_keyword_case(kw::Fn, case)
@ -2562,12 +2569,14 @@ impl<'a> Parser<'a> {
|| (
(
t.is_non_raw_ident_where(|i|
quals.contains(&i.name)
quals.iter().any(|exp| exp.kw == i.name)
// Rule out 2015 `const async: T = val`.
&& i.is_reserved()
)
|| case == Case::Insensitive
&& t.is_non_raw_ident_where(|i| quals.iter().any(|qual| qual.as_str() == i.name.as_str().to_lowercase()))
&& t.is_non_raw_ident_where(|i| quals.iter().any(|exp| {
exp.kw.as_str() == i.name.as_str().to_lowercase()
}))
)
// Rule out `unsafe extern {`.
&& !self.is_unsafe_foreign_mod()
@ -2575,12 +2584,13 @@ impl<'a> Parser<'a> {
&& !self.is_async_gen_block())
})
// `extern ABI fn`
|| self.check_keyword_case(kw::Extern, case)
|| self.check_keyword_case(exp!(Extern), case)
&& self.look_ahead(1, |t| t.can_begin_string_literal())
&& (self.look_ahead(2, |t| t.is_keyword_case(kw::Fn, case)) ||
// this branch is only for better diagnostics; `pub`, `unsafe`, etc. are not allowed here
// This branch is only for better diagnostics; `pub`, `unsafe`, etc. are not
// allowed here.
(self.may_recover()
&& self.look_ahead(2, |t| ALL_QUALS.iter().any(|&kw| t.is_keyword(kw)))
&& self.look_ahead(2, |t| ALL_QUALS.iter().any(|exp| t.is_keyword(exp.kw)))
&& self.look_ahead(3, |t| t.is_keyword_case(kw::Fn, case))))
}
@ -2628,9 +2638,9 @@ impl<'a> Parser<'a> {
Some(CoroutineKind::Async { .. }) | None => {}
}
if !self.eat_keyword_case(kw::Fn, case) {
if !self.eat_keyword_case(exp!(Fn), case) {
// It is possible for `expect_one_of` to recover given the contents of
// `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't
// `self.expected_token_types`, therefore, do not use `self.unexpected()` which doesn't
// account for this.
match self.expect_one_of(&[], &[]) {
Ok(Recovered::Yes(_)) => {}
@ -2648,7 +2658,7 @@ impl<'a> Parser<'a> {
let mut recover_safety = safety;
// This will allow the machine fix to directly place the keyword in the correct place or to indicate
// that the keyword is already present and the second instance should be removed.
let wrong_kw = if self.check_keyword(kw::Const) {
let wrong_kw = if self.check_keyword(exp!(Const)) {
match constness {
Const::Yes(sp) => Some(WrongKw::Duplicated(sp)),
Const::No => {
@ -2656,7 +2666,7 @@ impl<'a> Parser<'a> {
Some(WrongKw::Misplaced(async_start_sp))
}
}
} else if self.check_keyword(kw::Async) {
} else if self.check_keyword(exp!(Async)) {
match coroutine_kind {
Some(CoroutineKind::Async { span, .. }) => {
Some(WrongKw::Duplicated(span))
@ -2682,7 +2692,7 @@ impl<'a> Parser<'a> {
Some(WrongKw::Misplaced(unsafe_start_sp))
}
}
} else if self.check_keyword(kw::Unsafe) {
} else if self.check_keyword(exp!(Unsafe)) {
match safety {
Safety::Unsafe(sp) => Some(WrongKw::Duplicated(sp)),
Safety::Safe(sp) => {
@ -2694,7 +2704,7 @@ impl<'a> Parser<'a> {
Some(WrongKw::Misplaced(ext_start_sp))
}
}
} else if self.check_keyword(kw::Safe) {
} else if self.check_keyword(exp!(Safe)) {
match safety {
Safety::Safe(sp) => Some(WrongKw::Duplicated(sp)),
Safety::Unsafe(sp) => {
@ -2740,7 +2750,7 @@ impl<'a> Parser<'a> {
}
}
// Recover incorrect visibility order such as `async pub`
else if self.check_keyword(kw::Pub) {
else if self.check_keyword(exp!(Pub)) {
let sp = sp_start.to(self.prev_token.span);
if let Ok(snippet) = self.span_to_snippet(sp) {
let current_vis = match self.parse_visibility(FollowedByType::No) {
@ -2843,7 +2853,7 @@ impl<'a> Parser<'a> {
};
p.restore_snapshot(snapshot);
// Skip every token until next possible arg or end.
p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(Delimiter::Parenthesis)]);
p.eat_to_tokens(&[exp!(Comma), exp!(CloseParen)]);
// Create a placeholder argument for proper arg count (issue #34264).
Ok(dummy_arg(Ident::new(kw::Empty, lo.to(p.prev_token.span)), guar))
});
@ -2954,7 +2964,7 @@ impl<'a> Parser<'a> {
let parse_self_possibly_typed = |this: &mut Self, m| {
let eself_ident = expect_self_ident(this);
let eself_hi = this.prev_token.span;
let eself = if this.eat(&token::Colon) {
let eself = if this.eat(exp!(Colon)) {
SelfKind::Explicit(this.parse_ty()?, m)
} else {
SelfKind::Value(m)

View File

@ -8,6 +8,7 @@ mod nonterminal;
mod pat;
mod path;
mod stmt;
pub mod token_type;
mod ty;
use std::assert_matches::debug_assert_matches;
@ -39,11 +40,14 @@ use rustc_index::interval::IntervalSet;
use rustc_session::parse::ParseSess;
use rustc_span::{DUMMY_SP, Ident, Span, Symbol, kw, sym};
use thin_vec::ThinVec;
use token_type::TokenTypeSet;
pub use token_type::{ExpKeywordPair, ExpTokenPair, TokenType};
use tracing::debug;
use crate::errors::{
self, IncorrectVisibilityRestriction, MismatchedClosingDelimiter, NonStringAbiLiteral,
};
use crate::exp;
use crate::lexer::UnmatchedDelim;
#[cfg(test)]
@ -141,7 +145,7 @@ pub struct Parser<'a> {
pub prev_token: Token,
pub capture_cfg: bool,
restrictions: Restrictions,
expected_tokens: Vec<TokenType>,
expected_token_types: TokenTypeSet,
token_cursor: TokenCursor,
// The number of calls to `bump`, i.e. the position in the token stream.
num_bump_calls: u32,
@ -184,9 +188,9 @@ pub struct Parser<'a> {
recovery: Recovery,
}
// This type is used a lot, e.g. it's cloned when matching many declarative macro rules with nonterminals. Make sure
// it doesn't unintentionally get bigger.
#[cfg(target_pointer_width = "64")]
// This type is used a lot, e.g. it's cloned when matching many declarative macro rules with
// nonterminals. Make sure it doesn't unintentionally get bigger.
#[cfg(all(target_pointer_width = "64", not(target_arch = "s390x")))]
rustc_data_structures::static_assert_size!(Parser<'_>, 288);
/// Stores span information about a closure.
@ -367,48 +371,21 @@ impl TokenCursor {
}
}
#[derive(Debug, Clone, PartialEq)]
enum TokenType {
Token(TokenKind),
Keyword(Symbol),
Operator,
Lifetime,
Ident,
Path,
Type,
Const,
}
impl TokenType {
fn to_string(&self) -> String {
match self {
TokenType::Token(t) => format!("`{}`", pprust::token_kind_to_string(t)),
TokenType::Keyword(kw) => format!("`{kw}`"),
TokenType::Operator => "an operator".to_string(),
TokenType::Lifetime => "lifetime".to_string(),
TokenType::Ident => "identifier".to_string(),
TokenType::Path => "path".to_string(),
TokenType::Type => "type".to_string(),
TokenType::Const => "a const expression".to_string(),
}
}
}
/// A sequence separator.
#[derive(Debug)]
struct SeqSep {
struct SeqSep<'a> {
/// The separator token.
sep: Option<TokenKind>,
sep: Option<ExpTokenPair<'a>>,
/// `true` if a trailing separator is allowed.
trailing_sep_allowed: bool,
}
impl SeqSep {
fn trailing_allowed(t: TokenKind) -> SeqSep {
SeqSep { sep: Some(t), trailing_sep_allowed: true }
impl<'a> SeqSep<'a> {
fn trailing_allowed(sep: ExpTokenPair<'a>) -> SeqSep<'a> {
SeqSep { sep: Some(sep), trailing_sep_allowed: true }
}
fn none() -> SeqSep {
fn none() -> SeqSep<'a> {
SeqSep { sep: None, trailing_sep_allowed: false }
}
}
@ -490,7 +467,7 @@ impl<'a> Parser<'a> {
prev_token: Token::dummy(),
capture_cfg: false,
restrictions: Restrictions::empty(),
expected_tokens: Vec::new(),
expected_token_types: TokenTypeSet::new(),
token_cursor: TokenCursor { curr: TokenTreeCursor::new(stream), stack: Vec::new() },
num_bump_calls: 0,
break_last_token: 0,
@ -553,16 +530,16 @@ impl<'a> Parser<'a> {
}
/// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, Recovered> {
if self.expected_tokens.is_empty() {
if self.token == *t {
pub fn expect(&mut self, exp: ExpTokenPair<'_>) -> PResult<'a, Recovered> {
if self.expected_token_types.is_empty() {
if self.token == *exp.tok {
self.bump();
Ok(Recovered::No)
} else {
self.unexpected_try_recover(t)
self.unexpected_try_recover(exp.tok)
}
} else {
self.expect_one_of(slice::from_ref(t), &[])
self.expect_one_of(slice::from_ref(&exp), &[])
}
}
@ -571,13 +548,13 @@ impl<'a> Parser<'a> {
/// anything. Signal a fatal error if next token is unexpected.
fn expect_one_of(
&mut self,
edible: &[TokenKind],
inedible: &[TokenKind],
edible: &[ExpTokenPair<'_>],
inedible: &[ExpTokenPair<'_>],
) -> PResult<'a, Recovered> {
if edible.contains(&self.token.kind) {
if edible.iter().any(|exp| exp.tok == &self.token.kind) {
self.bump();
Ok(Recovered::No)
} else if inedible.contains(&self.token.kind) {
} else if inedible.iter().any(|exp| exp.tok == &self.token.kind) {
// leave it in the input
Ok(Recovered::No)
} else if self.token != token::Eof
@ -619,13 +596,13 @@ impl<'a> Parser<'a> {
/// Checks if the next token is `tok`, and returns `true` if so.
///
/// This method will automatically add `tok` to `expected_tokens` if `tok` is not
/// This method will automatically add `tok` to `expected_token_types` if `tok` is not
/// encountered.
#[inline]
fn check(&mut self, tok: &TokenKind) -> bool {
let is_present = self.token == *tok;
fn check(&mut self, exp: ExpTokenPair<'_>) -> bool {
let is_present = self.token == *exp.tok;
if !is_present {
self.expected_tokens.push(TokenType::Token(tok.clone()));
self.expected_token_types.insert(exp.token_type);
}
is_present
}
@ -653,8 +630,8 @@ impl<'a> Parser<'a> {
/// Consumes a token 'tok' if it exists. Returns whether the given token was present.
#[inline]
#[must_use]
pub fn eat(&mut self, tok: &TokenKind) -> bool {
let is_present = self.check(tok);
pub fn eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
let is_present = self.check(exp);
if is_present {
self.bump()
}
@ -665,22 +642,23 @@ impl<'a> Parser<'a> {
/// An expectation is also added for diagnostics purposes.
#[inline]
#[must_use]
fn check_keyword(&mut self, kw: Symbol) -> bool {
self.expected_tokens.push(TokenType::Keyword(kw));
self.token.is_keyword(kw)
fn check_keyword(&mut self, exp: ExpKeywordPair) -> bool {
let is_keyword = self.token.is_keyword(exp.kw);
if !is_keyword {
self.expected_token_types.insert(exp.token_type);
}
is_keyword
}
#[inline]
#[must_use]
fn check_keyword_case(&mut self, kw: Symbol, case: Case) -> bool {
if self.check_keyword(kw) {
return true;
}
fn check_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
if self.check_keyword(exp) {
true
// Do an ASCII case-insensitive match, because all keywords are ASCII.
if case == Case::Insensitive
} else if case == Case::Insensitive
&& let Some((ident, IdentIsRaw::No)) = self.token.ident()
&& ident.as_str().eq_ignore_ascii_case(kw.as_str())
&& ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
{
true
} else {
@ -693,13 +671,12 @@ impl<'a> Parser<'a> {
// Public for rustc_builtin_macros and rustfmt usage.
#[inline]
#[must_use]
pub fn eat_keyword(&mut self, kw: Symbol) -> bool {
if self.check_keyword(kw) {
pub fn eat_keyword(&mut self, exp: ExpKeywordPair) -> bool {
let is_keyword = self.check_keyword(exp);
if is_keyword {
self.bump();
true
} else {
false
}
is_keyword
}
/// Eats a keyword, optionally ignoring the case.
@ -707,21 +684,19 @@ impl<'a> Parser<'a> {
/// This is useful for recovery.
#[inline]
#[must_use]
fn eat_keyword_case(&mut self, kw: Symbol, case: Case) -> bool {
if self.eat_keyword(kw) {
return true;
}
if case == Case::Insensitive
fn eat_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
if self.eat_keyword(exp) {
true
} else if case == Case::Insensitive
&& let Some((ident, IdentIsRaw::No)) = self.token.ident()
&& ident.as_str().to_lowercase() == kw.as_str().to_lowercase()
&& ident.as_str().to_lowercase() == exp.kw.as_str().to_lowercase()
{
self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw: kw.as_str() });
self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw: exp.kw.as_str() });
self.bump();
return true;
true
} else {
false
}
false
}
/// If the next token is the given keyword, eats it and returns `true`.
@ -730,19 +705,18 @@ impl<'a> Parser<'a> {
#[inline]
#[must_use]
pub fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
if self.token.is_keyword(kw) {
let is_keyword = self.token.is_keyword(kw);
if is_keyword {
self.bump();
true
} else {
false
}
is_keyword
}
/// If the given word is not a keyword, signals an error.
/// If the next token is not the given word, signals an error.
/// Otherwise, eats it.
pub fn expect_keyword(&mut self, kw: Symbol) -> PResult<'a, ()> {
if !self.eat_keyword(kw) { self.unexpected() } else { Ok(()) }
pub fn expect_keyword(&mut self, exp: ExpKeywordPair) -> PResult<'a, ()> {
if !self.eat_keyword(exp) { self.unexpected() } else { Ok(()) }
}
/// Is the given keyword `kw` followed by a non-reserved identifier?
@ -751,13 +725,11 @@ impl<'a> Parser<'a> {
}
#[inline]
fn check_or_expected(&mut self, ok: bool, typ: TokenType) -> bool {
if ok {
true
} else {
self.expected_tokens.push(typ);
false
fn check_or_expected(&mut self, ok: bool, token_type: TokenType) -> bool {
if !ok {
self.expected_token_types.insert(token_type);
}
ok
}
fn check_ident(&mut self) -> bool {
@ -800,22 +772,19 @@ impl<'a> Parser<'a> {
/// Otherwise returns `false`.
#[inline]
fn check_plus(&mut self) -> bool {
self.check_or_expected(
self.token.is_like_plus(),
TokenType::Token(token::BinOp(token::Plus)),
)
self.check_or_expected(self.token.is_like_plus(), TokenType::Plus)
}
/// Eats the expected token if it's present possibly breaking
/// compound tokens like multi-character operators in process.
/// Returns `true` if the token was eaten.
fn break_and_eat(&mut self, expected: TokenKind) -> bool {
if self.token == expected {
fn break_and_eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
if self.token == *exp.tok {
self.bump();
return true;
}
match self.token.kind.break_two_token_op(1) {
Some((first, second)) if first == expected => {
Some((first, second)) if first == *exp.tok => {
let first_span = self.psess.source_map().start_point(self.token.span);
let second_span = self.token.span.with_lo(first_span.hi());
self.token = Token::new(first, first_span);
@ -832,7 +801,7 @@ impl<'a> Parser<'a> {
true
}
_ => {
self.expected_tokens.push(TokenType::Token(expected));
self.expected_token_types.insert(exp.token_type);
false
}
}
@ -840,24 +809,24 @@ impl<'a> Parser<'a> {
/// Eats `+` possibly breaking tokens like `+=` in process.
fn eat_plus(&mut self) -> bool {
self.break_and_eat(token::BinOp(token::Plus))
self.break_and_eat(exp!(Plus))
}
/// Eats `&` possibly breaking tokens like `&&` in process.
/// Signals an error if `&` is not eaten.
fn expect_and(&mut self) -> PResult<'a, ()> {
if self.break_and_eat(token::BinOp(token::And)) { Ok(()) } else { self.unexpected() }
if self.break_and_eat(exp!(And)) { Ok(()) } else { self.unexpected() }
}
/// Eats `|` possibly breaking tokens like `||` in process.
/// Signals an error if `|` was not eaten.
fn expect_or(&mut self) -> PResult<'a, ()> {
if self.break_and_eat(token::BinOp(token::Or)) { Ok(()) } else { self.unexpected() }
if self.break_and_eat(exp!(Or)) { Ok(()) } else { self.unexpected() }
}
/// Eats `<` possibly breaking tokens like `<<` in process.
fn eat_lt(&mut self) -> bool {
let ate = self.break_and_eat(token::Lt);
let ate = self.break_and_eat(exp!(Lt));
if ate {
// See doc comment for `unmatched_angle_bracket_count`.
self.unmatched_angle_bracket_count += 1;
@ -875,7 +844,7 @@ impl<'a> Parser<'a> {
/// Eats `>` possibly breaking tokens like `>>` in process.
/// Signals an error if `>` was not eaten.
fn expect_gt(&mut self) -> PResult<'a, ()> {
if self.break_and_eat(token::Gt) {
if self.break_and_eat(exp!(Gt)) {
// See doc comment for `unmatched_angle_bracket_count`.
if self.unmatched_angle_bracket_count > 0 {
self.unmatched_angle_bracket_count -= 1;
@ -887,14 +856,14 @@ impl<'a> Parser<'a> {
}
}
/// Checks if the next token is contained within `kets`, and returns `true` if so.
/// Checks if the next token is contained within `closes`, and returns `true` if so.
fn expect_any_with_type(
&mut self,
kets_expected: &[&TokenKind],
kets_not_expected: &[&TokenKind],
closes_expected: &[ExpTokenPair<'_>],
closes_not_expected: &[&TokenKind],
) -> bool {
kets_expected.iter().any(|k| self.check(k))
|| kets_not_expected.iter().any(|k| self.check_noexpect(k))
closes_expected.iter().any(|&close| self.check(close))
|| closes_not_expected.iter().any(|k| self.check_noexpect(k))
}
/// Parses a sequence until the specified delimiters. The function
@ -902,9 +871,9 @@ impl<'a> Parser<'a> {
/// closing bracket.
fn parse_seq_to_before_tokens<T>(
&mut self,
kets_expected: &[&TokenKind],
kets_not_expected: &[&TokenKind],
sep: SeqSep,
closes_expected: &[ExpTokenPair<'_>],
closes_not_expected: &[&TokenKind],
sep: SeqSep<'_>,
mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
let mut first = true;
@ -912,17 +881,17 @@ impl<'a> Parser<'a> {
let mut trailing = Trailing::No;
let mut v = ThinVec::new();
while !self.expect_any_with_type(kets_expected, kets_not_expected) {
while !self.expect_any_with_type(closes_expected, closes_not_expected) {
if let token::CloseDelim(..) | token::Eof = self.token.kind {
break;
}
if let Some(t) = &sep.sep {
if let Some(exp) = sep.sep {
if first {
// no separator for the first element
first = false;
} else {
// check for separator
match self.expect(t) {
match self.expect(exp) {
Ok(Recovered::No) => {
self.current_closure.take();
}
@ -933,7 +902,7 @@ impl<'a> Parser<'a> {
}
Err(mut expect_err) => {
let sp = self.prev_token.span.shrink_to_hi();
let token_str = pprust::token_kind_to_string(t);
let token_str = pprust::token_kind_to_string(exp.tok);
match self.current_closure.take() {
Some(closure_spans) if self.token == TokenKind::Semi => {
@ -953,7 +922,7 @@ impl<'a> Parser<'a> {
_ => {
// Attempt to keep parsing if it was a similar separator.
if let Some(tokens) = t.similar_tokens() {
if let Some(tokens) = exp.tok.similar_tokens() {
if tokens.contains(&self.token.kind) {
self.bump();
}
@ -1003,15 +972,17 @@ impl<'a> Parser<'a> {
// Parsing failed, therefore it must be something more serious
// than just a missing separator.
for xx in &e.children {
// propagate the help message from sub error 'e' to main error 'expect_err;
// Propagate the help message from sub error `e` to main
// error `expect_err`.
expect_err.children.push(xx.clone());
}
e.cancel();
if self.token == token::Colon {
// we will try to recover in `maybe_recover_struct_lit_bad_delims`
// We will try to recover in
// `maybe_recover_struct_lit_bad_delims`.
return Err(expect_err);
} else if let [token::CloseDelim(Delimiter::Parenthesis)] =
kets_expected
} else if let [exp] = closes_expected
&& exp.token_type == TokenType::CloseParen
{
return Err(expect_err);
} else {
@ -1025,7 +996,7 @@ impl<'a> Parser<'a> {
}
}
if sep.trailing_sep_allowed
&& self.expect_any_with_type(kets_expected, kets_not_expected)
&& self.expect_any_with_type(closes_expected, closes_not_expected)
{
trailing = Trailing::Yes;
break;
@ -1045,7 +1016,7 @@ impl<'a> Parser<'a> {
) -> PResult<'a, ()> {
let initial_semicolon = self.token.span;
while self.eat(&TokenKind::Semi) {
while self.eat(exp!(Semi)) {
let _ = self.parse_stmt_without_recovery(false, ForceCollect::No).unwrap_or_else(|e| {
e.cancel();
None
@ -1101,11 +1072,11 @@ impl<'a> Parser<'a> {
/// closing bracket.
fn parse_seq_to_before_end<T>(
&mut self,
ket: &TokenKind,
sep: SeqSep,
close: ExpTokenPair<'_>,
sep: SeqSep<'_>,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
self.parse_seq_to_before_tokens(&[ket], &[], sep, f)
self.parse_seq_to_before_tokens(&[close], &[], sep, f)
}
/// Parses a sequence, including only the closing delimiter. The function
@ -1113,15 +1084,15 @@ impl<'a> Parser<'a> {
/// closing bracket.
fn parse_seq_to_end<T>(
&mut self,
ket: &TokenKind,
sep: SeqSep,
close: ExpTokenPair<'_>,
sep: SeqSep<'_>,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (ThinVec<T>, Trailing)> {
let (val, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
if matches!(recovered, Recovered::No) && !self.eat(ket) {
let (val, trailing, recovered) = self.parse_seq_to_before_end(close, sep, f)?;
if matches!(recovered, Recovered::No) && !self.eat(close) {
self.dcx().span_delayed_bug(
self.token.span,
"recovered but `parse_seq_to_before_end` did not give us the ket token",
"recovered but `parse_seq_to_before_end` did not give us the close token",
);
}
Ok((val, trailing))
@ -1132,13 +1103,13 @@ impl<'a> Parser<'a> {
/// closing bracket.
fn parse_unspanned_seq<T>(
&mut self,
bra: &TokenKind,
ket: &TokenKind,
sep: SeqSep,
open: ExpTokenPair<'_>,
close: ExpTokenPair<'_>,
sep: SeqSep<'_>,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (ThinVec<T>, Trailing)> {
self.expect(bra)?;
self.parse_seq_to_end(ket, sep, f)
self.expect(open)?;
self.parse_seq_to_end(close, sep, f)
}
/// Parses a comma-separated sequence, including both delimiters.
@ -1146,15 +1117,11 @@ impl<'a> Parser<'a> {
/// closing bracket.
fn parse_delim_comma_seq<T>(
&mut self,
delim: Delimiter,
open: ExpTokenPair<'_>,
close: ExpTokenPair<'_>,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (ThinVec<T>, Trailing)> {
self.parse_unspanned_seq(
&token::OpenDelim(delim),
&token::CloseDelim(delim),
SeqSep::trailing_allowed(token::Comma),
f,
)
self.parse_unspanned_seq(open, close, SeqSep::trailing_allowed(exp!(Comma)), f)
}
/// Parses a comma-separated sequence delimited by parentheses (e.g. `(x, y)`).
@ -1164,7 +1131,7 @@ impl<'a> Parser<'a> {
&mut self,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (ThinVec<T>, Trailing)> {
self.parse_delim_comma_seq(Delimiter::Parenthesis, f)
self.parse_delim_comma_seq(exp!(OpenParen), exp!(CloseParen), f)
}
/// Advance the parser by one token using provided token as the next one.
@ -1180,7 +1147,7 @@ impl<'a> Parser<'a> {
self.token_spacing = next_spacing;
// Diagnostics.
self.expected_tokens.clear();
self.expected_token_types.clear();
}
/// Advance the parser by one token.
@ -1270,11 +1237,11 @@ impl<'a> Parser<'a> {
/// Parses asyncness: `async` or nothing.
fn parse_coroutine_kind(&mut self, case: Case) -> Option<CoroutineKind> {
let span = self.token.uninterpolated_span();
if self.eat_keyword_case(kw::Async, case) {
if self.eat_keyword_case(exp!(Async), case) {
// FIXME(gen_blocks): Do we want to unconditionally parse `gen` and then
// error if edition <= 2024, like we do with async and edition <= 2018?
if self.token.uninterpolated_span().at_least_rust_2024()
&& self.eat_keyword_case(kw::Gen, case)
&& self.eat_keyword_case(exp!(Gen), case)
{
let gen_span = self.prev_token.uninterpolated_span();
Some(CoroutineKind::AsyncGen {
@ -1290,7 +1257,7 @@ impl<'a> Parser<'a> {
})
}
} else if self.token.uninterpolated_span().at_least_rust_2024()
&& self.eat_keyword_case(kw::Gen, case)
&& self.eat_keyword_case(exp!(Gen), case)
{
Some(CoroutineKind::Gen {
span,
@ -1304,9 +1271,9 @@ impl<'a> Parser<'a> {
/// Parses fn unsafety: `unsafe`, `safe` or nothing.
fn parse_safety(&mut self, case: Case) -> Safety {
if self.eat_keyword_case(kw::Unsafe, case) {
if self.eat_keyword_case(exp!(Unsafe), case) {
Safety::Unsafe(self.prev_token.uninterpolated_span())
} else if self.eat_keyword_case(kw::Safe, case) {
} else if self.eat_keyword_case(exp!(Safe), case) {
Safety::Safe(self.prev_token.uninterpolated_span())
} else {
Safety::Default
@ -1332,7 +1299,7 @@ impl<'a> Parser<'a> {
if (self.check_const_closure() == is_closure)
&& !self
.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block())
&& self.eat_keyword_case(kw::Const, case)
&& self.eat_keyword_case(exp!(Const), case)
{
Const::Yes(self.prev_token.uninterpolated_span())
} else {
@ -1345,7 +1312,7 @@ impl<'a> Parser<'a> {
if pat {
self.psess.gated_spans.gate(sym::inline_const_pat, span);
}
self.expect_keyword(kw::Const)?;
self.expect_keyword(exp!(Const))?;
let (attrs, blk) = self.parse_inner_attrs_and_block()?;
let anon_const = AnonConst {
id: DUMMY_NODE_ID,
@ -1357,19 +1324,19 @@ impl<'a> Parser<'a> {
/// Parses mutability (`mut` or nothing).
fn parse_mutability(&mut self) -> Mutability {
if self.eat_keyword(kw::Mut) { Mutability::Mut } else { Mutability::Not }
if self.eat_keyword(exp!(Mut)) { Mutability::Mut } else { Mutability::Not }
}
/// Parses reference binding mode (`ref`, `ref mut`, or nothing).
fn parse_byref(&mut self) -> ByRef {
if self.eat_keyword(kw::Ref) { ByRef::Yes(self.parse_mutability()) } else { ByRef::No }
if self.eat_keyword(exp!(Ref)) { ByRef::Yes(self.parse_mutability()) } else { ByRef::No }
}
/// Possibly parses mutability (`const` or `mut`).
fn parse_const_or_mut(&mut self) -> Option<Mutability> {
if self.eat_keyword(kw::Mut) {
if self.eat_keyword(exp!(Mut)) {
Some(Mutability::Mut)
} else if self.eat_keyword(kw::Const) {
} else if self.eat_keyword(exp!(Const)) {
Some(Mutability::Not)
} else {
None
@ -1400,7 +1367,7 @@ impl<'a> Parser<'a> {
fn parse_attr_args(&mut self) -> PResult<'a, AttrArgs> {
Ok(if let Some(args) = self.parse_delim_args_inner() {
AttrArgs::Delimited(args)
} else if self.eat(&token::Eq) {
} else if self.eat(exp!(Eq)) {
let eq_span = self.prev_token.span;
AttrArgs::Eq { eq_span, expr: self.parse_expr_force_collect()? }
} else {
@ -1409,9 +1376,9 @@ impl<'a> Parser<'a> {
}
fn parse_delim_args_inner(&mut self) -> Option<DelimArgs> {
let delimited = self.check(&token::OpenDelim(Delimiter::Parenthesis))
|| self.check(&token::OpenDelim(Delimiter::Bracket))
|| self.check(&token::OpenDelim(Delimiter::Brace));
let delimited = self.check(exp!(OpenParen))
|| self.check(exp!(OpenBracket))
|| self.check(exp!(OpenBrace));
delimited.then(|| {
let TokenTree::Delimited(dspan, _, delim, tokens) = self.parse_token_tree() else {
@ -1490,7 +1457,7 @@ impl<'a> Parser<'a> {
pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> {
maybe_whole!(self, NtVis, |vis| vis.into_inner());
if !self.eat_keyword(kw::Pub) {
if !self.eat_keyword(exp!(Pub)) {
// We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
// keyword to grab a span from for inherited visibility; an empty span at the
// beginning of the current token would seem to be the "Schelling span".
@ -1502,7 +1469,7 @@ impl<'a> Parser<'a> {
}
let lo = self.prev_token.span;
if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
if self.check(exp!(OpenParen)) {
// We don't `self.bump()` the `(` yet because this might be a struct definition where
// `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
// Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
@ -1512,7 +1479,7 @@ impl<'a> Parser<'a> {
self.bump(); // `(`
self.bump(); // `in`
let path = self.parse_path(PathStyle::Mod)?; // `path`
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
self.expect(exp!(CloseParen))?; // `)`
let vis = VisibilityKind::Restricted {
path: P(path),
id: ast::DUMMY_NODE_ID,
@ -1529,7 +1496,7 @@ impl<'a> Parser<'a> {
// Parse `pub(crate)`, `pub(self)`, or `pub(super)`.
self.bump(); // `(`
let path = self.parse_path(PathStyle::Mod)?; // `crate`/`super`/`self`
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
self.expect(exp!(CloseParen))?; // `)`
let vis = VisibilityKind::Restricted {
path: P(path),
id: ast::DUMMY_NODE_ID,
@ -1555,7 +1522,7 @@ impl<'a> Parser<'a> {
fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> {
self.bump(); // `(`
let path = self.parse_path(PathStyle::Mod)?;
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
self.expect(exp!(CloseParen))?; // `)`
let path_str = pprust::path_to_string(&path);
self.dcx()
@ -1566,7 +1533,7 @@ impl<'a> Parser<'a> {
/// Parses `extern string_literal?`.
fn parse_extern(&mut self, case: Case) -> Extern {
if self.eat_keyword_case(kw::Extern, case) {
if self.eat_keyword_case(exp!(Extern), case) {
let mut extern_span = self.prev_token.span;
let abi = self.parse_abi();
if let Some(abi) = abi {
@ -1606,7 +1573,7 @@ impl<'a> Parser<'a> {
/// Checks for `::` or, potentially, `:::` and then look ahead after it.
fn check_path_sep_and_look_ahead(&mut self, looker: impl Fn(&Token) -> bool) -> bool {
if self.check(&token::PathSep) {
if self.check(exp!(PathSep)) {
if self.may_recover() && self.look_ahead(1, |t| t.kind == token::Colon) {
debug_assert!(!self.look_ahead(1, &looker), "Looker must not match on colon");
self.look_ahead(2, looker)
@ -1670,8 +1637,8 @@ impl<'a> Parser<'a> {
DebugParser { parser: self, lookahead }
}
pub fn clear_expected_tokens(&mut self) {
self.expected_tokens.clear();
pub fn clear_expected_token_types(&mut self) {
self.expected_token_types.clear();
}
pub fn approx_token_stream_pos(&self) -> u32 {

View File

@ -30,7 +30,7 @@ use crate::errors::{
UnexpectedVertVertInPattern, WrapInParens,
};
use crate::parser::expr::{DestructuredFloat, could_be_unclosed_char_literal};
use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
use crate::{exp, maybe_recover_from_interpolated_ty_qpath, maybe_whole};
#[derive(PartialEq, Copy, Clone)]
pub enum Expected {
@ -110,7 +110,7 @@ impl<'a> Parser<'a> {
) -> PResult<'a, P<Pat>> {
let pat = self.parse_pat_no_top_guard(expected, rc, ra, rt)?;
if self.eat_keyword(kw::If) {
if self.eat_keyword(exp!(If)) {
let cond = self.parse_expr()?;
// Feature-gate guard patterns
self.psess.gated_spans.gate(sym::guard_patterns, cond.span);
@ -193,7 +193,7 @@ impl<'a> Parser<'a> {
// If the next token is not a `|`,
// this is not an or-pattern and we should exit here.
if !self.check(&token::BinOp(token::Or)) && self.token != token::OrOr {
if !self.check(exp!(Or)) && self.token != token::OrOr {
// If we parsed a leading `|` which should be gated,
// then we should really gate the leading `|`.
// This complicated procedure is done purely for diagnostics UX.
@ -263,7 +263,7 @@ impl<'a> Parser<'a> {
CommaRecoveryMode::LikelyTuple,
Some(syntax_loc),
)?;
let colon = self.eat(&token::Colon);
let colon = self.eat(exp!(Colon));
if let PatKind::Or(pats) = &pat.kind {
let span = pat.span;
@ -327,7 +327,7 @@ impl<'a> Parser<'a> {
self.dcx().emit_err(UnexpectedVertVertInPattern { span: self.token.span, start: lo });
self.bump();
EatOrResult::AteOr
} else if self.eat(&token::BinOp(token::Or)) {
} else if self.eat(exp!(Or)) {
EatOrResult::AteOr
} else {
EatOrResult::None
@ -714,40 +714,41 @@ impl<'a> Parser<'a> {
lo = self.token.span;
}
let pat = if self.check(&token::BinOp(token::And)) || self.token == token::AndAnd {
let pat = if self.check(exp!(And)) || self.token == token::AndAnd {
self.parse_pat_deref(expected)?
} else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
} else if self.check(exp!(OpenParen)) {
self.parse_pat_tuple_or_parens()?
} else if self.check(&token::OpenDelim(Delimiter::Bracket)) {
} else if self.check(exp!(OpenBracket)) {
// Parse `[pat, pat,...]` as a slice pattern.
let (pats, _) = self.parse_delim_comma_seq(Delimiter::Bracket, |p| {
p.parse_pat_allow_top_guard(
None,
RecoverComma::No,
RecoverColon::No,
CommaRecoveryMode::EitherTupleOrPipe,
)
})?;
let (pats, _) =
self.parse_delim_comma_seq(exp!(OpenBracket), exp!(CloseBracket), |p| {
p.parse_pat_allow_top_guard(
None,
RecoverComma::No,
RecoverColon::No,
CommaRecoveryMode::EitherTupleOrPipe,
)
})?;
PatKind::Slice(pats)
} else if self.check(&token::DotDot) && !self.is_pat_range_end_start(1) {
} else if self.check(exp!(DotDot)) && !self.is_pat_range_end_start(1) {
// A rest pattern `..`.
self.bump(); // `..`
PatKind::Rest
} else if self.check(&token::DotDotDot) && !self.is_pat_range_end_start(1) {
} else if self.check(exp!(DotDotDot)) && !self.is_pat_range_end_start(1) {
self.recover_dotdotdot_rest_pat(lo)
} else if let Some(form) = self.parse_range_end() {
self.parse_pat_range_to(form)? // `..=X`, `...X`, or `..X`.
} else if self.eat(&token::Not) {
} else if self.eat(exp!(Not)) {
// Parse `!`
self.psess.gated_spans.gate(sym::never_patterns, self.prev_token.span);
PatKind::Never
} else if self.eat_keyword(kw::Underscore) {
} else if self.eat_keyword(exp!(Underscore)) {
// Parse `_`
PatKind::Wild
} else if self.eat_keyword(kw::Mut) {
} else if self.eat_keyword(exp!(Mut)) {
self.parse_pat_ident_mut()?
} else if self.eat_keyword(kw::Ref) {
if self.check_keyword(kw::Box) {
} else if self.eat_keyword(exp!(Ref)) {
if self.check_keyword(exp!(Box)) {
// Suggest `box ref`.
let span = self.prev_token.span.to(self.token.span);
self.bump();
@ -756,7 +757,7 @@ impl<'a> Parser<'a> {
// Parse ref ident @ pat / ref mut ident @ pat
let mutbl = self.parse_mutability();
self.parse_pat_ident(BindingMode(ByRef::Yes(mutbl), Mutability::Not), syntax_loc)?
} else if self.eat_keyword(kw::Box) {
} else if self.eat_keyword(exp!(Box)) {
self.parse_pat_box()?
} else if self.check_inline_const(0) {
// Parse `const pat`
@ -793,14 +794,14 @@ impl<'a> Parser<'a> {
};
let span = lo.to(self.prev_token.span);
if qself.is_none() && self.check(&token::Not) {
if qself.is_none() && self.check(exp!(Not)) {
self.parse_pat_mac_invoc(path)?
} else if let Some(form) = self.parse_range_end() {
let begin = self.mk_expr(span, ExprKind::Path(qself, path));
self.parse_pat_range_begin_with(begin, form)?
} else if self.check(&token::OpenDelim(Delimiter::Brace)) {
} else if self.check(exp!(OpenBrace)) {
self.parse_pat_struct(qself, path)?
} else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
} else if self.check(exp!(OpenParen)) {
self.parse_pat_tuple_struct(qself, path)?
} else {
match self.maybe_recover_trailing_expr(span, false) {
@ -1106,7 +1107,7 @@ impl<'a> Parser<'a> {
/// Eat any extraneous `mut`s and error + recover if we ate any.
fn recover_additional_muts(&mut self) {
let lo = self.token.span;
while self.eat_keyword(kw::Mut) {}
while self.eat_keyword(exp!(Mut)) {}
if lo == self.token.span {
return;
}
@ -1147,11 +1148,11 @@ impl<'a> Parser<'a> {
/// Parses the range pattern end form `".." | "..." | "..=" ;`.
fn parse_range_end(&mut self) -> Option<Spanned<RangeEnd>> {
let re = if self.eat(&token::DotDotDot) {
let re = if self.eat(exp!(DotDotDot)) {
RangeEnd::Included(RangeSyntax::DotDotDot)
} else if self.eat(&token::DotDotEq) {
} else if self.eat(exp!(DotDotEq)) {
RangeEnd::Included(RangeSyntax::DotDotEq)
} else if self.eat(&token::DotDot) {
} else if self.eat(exp!(DotDot)) {
RangeEnd::Excluded
} else {
return None;
@ -1271,7 +1272,7 @@ impl<'a> Parser<'a> {
// recover trailing `)`
if let Some(open_paren) = open_paren {
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
self.expect(exp!(CloseParen))?;
self.dcx().emit_err(UnexpectedParenInRangePat {
span: vec![open_paren, self.prev_token.span],
@ -1331,7 +1332,7 @@ impl<'a> Parser<'a> {
}));
}
let sub = if self.eat(&token::At) {
let sub = if self.eat(exp!(At)) {
Some(self.parse_pat_no_top_alt(Some(Expected::BindingPattern), None)?)
} else {
None
@ -1447,7 +1448,7 @@ impl<'a> Parser<'a> {
// We cannot use `parse_pat_ident()` since it will complain `box`
// is not an identifier.
let sub = if self.eat(&token::At) {
let sub = if self.eat(exp!(At)) {
Some(self.parse_pat_no_top_alt(Some(Expected::BindingPattern), None)?)
} else {
None
@ -1504,9 +1505,9 @@ impl<'a> Parser<'a> {
}
ate_comma = false;
if self.check(&token::DotDot)
if self.check(exp!(DotDot))
|| self.check_noexpect(&token::DotDotDot)
|| self.check_keyword(kw::Underscore)
|| self.check_keyword(exp!(Underscore))
{
etc = PatFieldsRest::Rest;
let mut etc_sp = self.token.span;
@ -1594,7 +1595,7 @@ impl<'a> Parser<'a> {
return Err(err);
}
}?;
ate_comma = this.eat(&token::Comma);
ate_comma = this.eat(exp!(Comma));
last_non_comma_dotdot_span = Some(this.prev_token.span);
@ -1706,7 +1707,7 @@ impl<'a> Parser<'a> {
(pat, fieldname, false)
} else {
// Parsing a pattern of the form `(box) (ref) (mut) fieldname`.
let is_box = self.eat_keyword(kw::Box);
let is_box = self.eat_keyword(exp!(Box));
let boxed_span = self.token.span;
let mutability = self.parse_mutability();
let by_ref = self.parse_byref();

View File

@ -17,7 +17,7 @@ use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{Parser, Restrictions, TokenType};
use crate::errors::{PathSingleColon, PathTripleColon};
use crate::parser::{CommaRecoveryMode, RecoverColon, RecoverComma};
use crate::{errors, maybe_whole};
use crate::{errors, exp, maybe_whole};
/// Specifies how to parse a path.
#[derive(Copy, Clone, PartialEq)]
@ -80,7 +80,7 @@ impl<'a> Parser<'a> {
// above). `path_span` has the span of that path, or an empty
// span in the case of something like `<T>::Bar`.
let (mut path, path_span);
if self.eat_keyword(kw::As) {
if self.eat_keyword(exp!(As)) {
let path_lo = self.token.span;
path = self.parse_path(PathStyle::Type)?;
path_span = path_lo.to(self.prev_token.span);
@ -90,7 +90,7 @@ impl<'a> Parser<'a> {
}
// See doc comment for `unmatched_angle_bracket_count`.
self.expect(&token::Gt)?;
self.expect(exp!(Gt))?;
if self.unmatched_angle_bracket_count > 0 {
self.unmatched_angle_bracket_count -= 1;
debug!("parse_qpath: (decrement) count={:?}", self.unmatched_angle_bracket_count);
@ -98,7 +98,7 @@ impl<'a> Parser<'a> {
let is_import_coupler = self.is_import_coupler();
if !is_import_coupler && !self.recover_colon_before_qpath_proj() {
self.expect(&token::PathSep)?;
self.expect(exp!(PathSep))?;
}
let qself = P(QSelf { ty, path_span, position: path.segments.len() });
@ -242,7 +242,7 @@ impl<'a> Parser<'a> {
// `PathStyle::Expr` is only provided at the root invocation and never in
// `parse_path_segment` to recurse and therefore can be checked to maintain
// this invariant.
self.check_trailing_angle_brackets(&segment, &[&token::PathSep]);
self.check_trailing_angle_brackets(&segment, &[exp!(PathSep)]);
}
segments.push(segment);
@ -275,7 +275,7 @@ impl<'a> Parser<'a> {
/// Eat `::` or, potentially, `:::`.
#[must_use]
pub(super) fn eat_path_sep(&mut self) -> bool {
let result = self.eat(&token::PathSep);
let result = self.eat(exp!(PathSep));
if result && self.may_recover() {
if self.eat_noexpect(&token::Colon) {
self.dcx().emit_err(PathTripleColon { span: self.prev_token.span });
@ -300,10 +300,8 @@ impl<'a> Parser<'a> {
)
};
let check_args_start = |this: &mut Self| {
this.expected_tokens.extend_from_slice(&[
TokenType::Token(token::Lt),
TokenType::Token(token::OpenDelim(Delimiter::Parenthesis)),
]);
this.expected_token_types.insert(TokenType::Lt);
this.expected_token_types.insert(TokenType::OpenParen);
is_args_start(&this.token)
};
@ -367,7 +365,7 @@ impl<'a> Parser<'a> {
{
self.bump(); // (
self.bump(); // ..
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
self.expect(exp!(CloseParen))?;
let span = lo.to(self.prev_token.span);
self.psess.gated_spans.gate(sym::return_type_notation, span);
@ -661,12 +659,12 @@ impl<'a> Parser<'a> {
let mut args = ThinVec::new();
while let Some(arg) = self.parse_angle_arg(ty_generics)? {
args.push(arg);
if !self.eat(&token::Comma) {
if !self.eat(exp!(Comma)) {
if self.check_noexpect(&TokenKind::Semi)
&& self.look_ahead(1, |t| t.is_ident() || t.is_lifetime())
{
// Add `>` to the list of expected tokens.
self.check(&token::Gt);
self.check(exp!(Gt));
// Handle `,` to `;` substitution
let mut err = self.unexpected().unwrap_err();
self.bump();
@ -705,7 +703,7 @@ impl<'a> Parser<'a> {
// is present and then use that info to push the other token onto the tokens list
let separated =
self.check_noexpect(&token::Colon) || self.check_noexpect(&token::Eq);
if separated && (self.check(&token::Colon) | self.check(&token::Eq)) {
if separated && (self.check(exp!(Colon)) | self.check(exp!(Eq))) {
let arg_span = arg.span();
let (binder, ident, gen_args) = match self.get_ident_from_generic_arg(&arg) {
Ok(ident_gen_args) => ident_gen_args,
@ -720,9 +718,9 @@ impl<'a> Parser<'a> {
"`for<...>` is not allowed on associated type bounds",
));
}
let kind = if self.eat(&token::Colon) {
let kind = if self.eat(exp!(Colon)) {
AssocItemConstraintKind::Bound { bounds: self.parse_generic_bounds()? }
} else if self.eat(&token::Eq) {
} else if self.eat(exp!(Eq)) {
self.parse_assoc_equality_term(
ident,
gen_args.as_ref(),
@ -743,8 +741,8 @@ impl<'a> Parser<'a> {
if self.prev_token.is_ident()
&& (self.token.is_ident() || self.look_ahead(1, |token| token.is_ident()))
{
self.check(&token::Colon);
self.check(&token::Eq);
self.check(exp!(Colon));
self.check(exp!(Eq));
}
Ok(Some(AngleBracketedArg::Arg(arg)))
}

View File

@ -24,7 +24,7 @@ use super::{
Trailing, UsePreAttrPos,
};
use crate::errors::MalformedLoopLabel;
use crate::{errors, maybe_whole};
use crate::{errors, exp, maybe_whole};
impl<'a> Parser<'a> {
/// Parses a statement. This stops just before trailing semicolons on everything but items.
@ -71,7 +71,7 @@ impl<'a> Parser<'a> {
let stmt = if self.token.is_keyword(kw::Let) {
self.collect_tokens(None, attrs, force_collect, |this, attrs| {
this.expect_keyword(kw::Let)?;
this.expect_keyword(exp!(Let))?;
let local = this.parse_local(attrs)?;
let trailing = Trailing::from(capture_semi && this.token == token::Semi);
Ok((
@ -140,7 +140,7 @@ impl<'a> Parser<'a> {
force_collect,
)? {
self.mk_stmt(lo.to(item.span), StmtKind::Item(P(item)))
} else if self.eat(&token::Semi) {
} else if self.eat(exp!(Semi)) {
// Do not attempt to parse an expression if we're done here.
self.error_outer_attrs(attrs);
self.mk_stmt(lo, StmtKind::Empty)
@ -156,7 +156,7 @@ impl<'a> Parser<'a> {
Ok((expr, Trailing::No, UsePreAttrPos::Yes))
},
)?;
if matches!(e.kind, ExprKind::Assign(..)) && self.eat_keyword(kw::Else) {
if matches!(e.kind, ExprKind::Assign(..)) && self.eat_keyword(exp!(Else)) {
let bl = self.parse_block()?;
// Destructuring assignment ... else.
// This is not allowed, but point it out in a nice way.
@ -176,7 +176,7 @@ impl<'a> Parser<'a> {
let stmt = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
let path = this.parse_path(PathStyle::Expr)?;
if this.eat(&token::Not) {
if this.eat(exp!(Not)) {
let stmt_mac = this.parse_stmt_mac(lo, attrs, path)?;
return Ok((
stmt_mac,
@ -185,7 +185,7 @@ impl<'a> Parser<'a> {
));
}
let expr = if this.eat(&token::OpenDelim(Delimiter::Brace)) {
let expr = if this.eat(exp!(OpenBrace)) {
this.parse_expr_struct(None, path, true)?
} else {
let hi = this.prev_token.span;
@ -370,7 +370,7 @@ impl<'a> Parser<'a> {
let kind = match init {
None => LocalKind::Decl,
Some(init) => {
if self.eat_keyword(kw::Else) {
if self.eat_keyword(exp!(Else)) {
if self.token.is_keyword(kw::If) {
// `let...else if`. Emit the same error that `parse_block()` would,
// but explicitly point out that this pattern is not allowed.
@ -449,7 +449,7 @@ impl<'a> Parser<'a> {
self.bump();
true
}
_ => self.eat(&token::Eq),
_ => self.eat(exp!(Eq)),
};
Ok(if eq_consumed || eq_optional { Some(self.parse_expr()?) } else { None })
@ -509,7 +509,7 @@ impl<'a> Parser<'a> {
Ok(Some(Stmt { kind: StmtKind::Empty, .. })) => {}
Ok(Some(stmt)) => {
let stmt_own_line = self.psess.source_map().is_line_before_span_empty(sp);
let stmt_span = if stmt_own_line && self.eat(&token::Semi) {
let stmt_span = if stmt_own_line && self.eat(exp!(Semi)) {
// Expand the span to include the semicolon.
stmt.span.with_hi(self.prev_token.span.hi())
} else {
@ -651,7 +651,7 @@ impl<'a> Parser<'a> {
let maybe_ident = self.prev_token.clone();
self.maybe_recover_unexpected_block_label();
if !self.eat(&token::OpenDelim(Delimiter::Brace)) {
if !self.eat(exp!(OpenBrace)) {
return self.error_block_no_opening_brace();
}
@ -678,7 +678,7 @@ impl<'a> Parser<'a> {
) -> PResult<'a, P<Block>> {
let mut stmts = ThinVec::new();
let mut snapshot = None;
while !self.eat(&token::CloseDelim(Delimiter::Brace)) {
while !self.eat(exp!(CloseBrace)) {
if self.token == token::Eof {
break;
}
@ -781,8 +781,7 @@ impl<'a> Parser<'a> {
{
// Just check for errors and recover; do not eat semicolon yet.
let expect_result =
self.expect_one_of(&[], &[token::Semi, token::CloseDelim(Delimiter::Brace)]);
let expect_result = self.expect_one_of(&[], &[exp!(Semi), exp!(CloseBrace)]);
// Try to both emit a better diagnostic, and avoid further errors by replacing
// the `expr` with `ExprKind::Err`.
@ -930,7 +929,7 @@ impl<'a> Parser<'a> {
}
}
if add_semi_to_stmt || (eat_semi && self.eat(&token::Semi)) {
if add_semi_to_stmt || (eat_semi && self.eat(exp!(Semi))) {
stmt = stmt.add_trailing_semicolon();
}

View File

@ -0,0 +1,631 @@
use rustc_ast::token::TokenKind;
use rustc_span::symbol::{Symbol, kw, sym};
/// Used in "expected"/"expected one of" error messages. Tokens are added here
/// as necessary. Tokens with values (e.g. literals, identifiers) are
/// represented by a single variant (e.g. `Literal`, `Ident`).
///
/// It's an awkward representation, but it's important for performance. It's a
/// C-style parameterless enum so that `TokenTypeSet` can be a bitset. This is
/// important because `Parser::expected_token_types` is very hot. `TokenType`
/// used to have variants with parameters (e.g. all the keywords were in a
/// single `Keyword` variant with a `Symbol` parameter) and
/// `Parser::expected_token_types` was a `Vec<TokenType>` which was much slower
/// to manipulate.
///
/// We really want to keep the number of variants to 128 or fewer, so that
/// `TokenTypeSet` can be implemented with a `u128`.
///
/// Each variant's discriminant (`as u32`) is used as a bit index by
/// `TokenTypeSet::insert`/`contains`, and `TokenType::from_u32` maps such a
/// discriminant back to its variant.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum TokenType {
    // Expression-operator symbols
    Eq,
    Lt,
    Le,
    EqEq,
    Gt,
    AndAnd,
    OrOr,
    Not,
    Tilde,
    // BinOps
    Plus,
    Minus,
    Star,
    And,
    Or,
    // Structural symbols
    At,
    Dot,
    DotDot,
    DotDotDot,
    DotDotEq,
    Comma,
    Semi,
    Colon,
    PathSep,
    RArrow,
    FatArrow,
    Pound,
    Question,
    OpenParen,
    CloseParen,
    OpenBrace,
    CloseBrace,
    OpenBracket,
    CloseBracket,
    Eof,
    // Token types with some details elided.
    /// Any operator.
    Operator,
    /// Any identifier token.
    Ident,
    /// Any lifetime token.
    Lifetime,
    /// Any token that can start a path.
    Path,
    /// Any token that can start a type.
    Type,
    /// Any token that can start a const expression.
    Const,
    // Keywords
    // tidy-alphabetical-start
    KwAs,
    KwAsync,
    KwAuto,
    KwAwait,
    KwBecome,
    KwBox,
    KwBreak,
    KwCatch,
    KwConst,
    KwContinue,
    KwCrate,
    KwDefault,
    KwDyn,
    KwElse,
    KwEnum,
    KwExtern,
    KwFn,
    KwFor,
    KwGen,
    KwIf,
    KwImpl,
    KwIn,
    KwLet,
    KwLoop,
    KwMacro,
    KwMacroRules,
    KwMatch,
    KwMod,
    KwMove,
    KwMut,
    KwPub,
    KwRaw,
    KwRef,
    KwReturn,
    KwReuse,
    KwSafe,
    KwSelfUpper,
    KwStatic,
    KwStruct,
    KwTrait,
    KwTry,
    KwType,
    KwUnderscore,
    KwUnsafe,
    KwUse,
    KwWhere,
    KwWhile,
    KwYield,
    // tidy-alphabetical-end
    // Keyword-like symbols (contextual keywords; several are `asm!` operand
    // keywords such as `inout`/`lateout`/`clobber_abi`).
    // tidy-alphabetical-start
    SymAttSyntax,
    SymClobberAbi,
    SymInlateout,
    SymInout,
    SymIs,
    SymLabel,
    SymLateout,
    SymMayUnwind,
    SymNomem,
    SymNoreturn,
    SymNostack,
    SymOptions,
    SymOut,
    SymPreservesFlags,
    SymPure,
    SymReadonly,
    SymSym,
    // tidy-alphabetical-end
}
// Macro to avoid repetitive boilerplate code.
// Expands to a `match` that maps each discriminant value back to its variant;
// used only by `TokenType::from_u32`.
macro_rules! from_u32_match {
    ($val:ident; $($tok:ident,)+) => {
        // A more obvious formulation would be `0 => TokenType::Eq`. But
        // this formulation with the guard lets us avoid specifying a
        // specific integer for each variant.
        match $val {
            $(
                t if t == TokenType::$tok as u32 => TokenType::$tok,
            )+
            _ => panic!("unhandled value: {}", $val),
        }
    };
}
impl TokenType {
    /// Converts a raw discriminant (as produced by `as u32`, e.g. from a
    /// `TokenTypeSet` bit index) back into a `TokenType`.
    ///
    /// Panics (inside `from_u32_match!`) if `val` is not a valid discriminant.
    fn from_u32(val: u32) -> TokenType {
        let token_type = from_u32_match! { val;
            Eq,
            Lt,
            Le,
            EqEq,
            Gt,
            AndAnd,
            OrOr,
            Not,
            Tilde,
            Plus,
            Minus,
            Star,
            And,
            Or,
            At,
            Dot,
            DotDot,
            DotDotDot,
            DotDotEq,
            Comma,
            Semi,
            Colon,
            PathSep,
            RArrow,
            FatArrow,
            Pound,
            Question,
            OpenParen,
            CloseParen,
            OpenBrace,
            CloseBrace,
            OpenBracket,
            CloseBracket,
            Eof,
            Operator,
            Ident,
            Lifetime,
            Path,
            Type,
            Const,
            KwAs,
            KwAsync,
            KwAuto,
            KwAwait,
            KwBecome,
            KwBox,
            KwBreak,
            KwCatch,
            KwConst,
            KwContinue,
            KwCrate,
            KwDefault,
            KwDyn,
            KwElse,
            KwEnum,
            KwExtern,
            KwFn,
            KwFor,
            KwGen,
            KwIf,
            KwImpl,
            KwIn,
            KwLet,
            KwLoop,
            KwMacro,
            KwMacroRules,
            KwMatch,
            KwMod,
            KwMove,
            KwMut,
            KwPub,
            KwRaw,
            KwRef,
            KwReturn,
            KwReuse,
            KwSafe,
            KwSelfUpper,
            KwStatic,
            KwStruct,
            KwTrait,
            KwTry,
            KwType,
            KwUnderscore,
            KwUnsafe,
            KwUse,
            KwWhere,
            KwWhile,
            KwYield,
            SymAttSyntax,
            SymClobberAbi,
            SymInlateout,
            SymInout,
            SymIs,
            SymLabel,
            SymLateout,
            SymMayUnwind,
            SymNomem,
            SymNoreturn,
            SymNostack,
            SymOptions,
            SymOut,
            SymPreservesFlags,
            SymPure,
            SymReadonly,
            SymSym,
        };
        token_type
    }
    /// Returns the keyword (`Kw*`) or keyword-like (`Sym*`) symbol this
    /// variant stands for, or `None` for plain-token variants.
    pub(super) fn is_keyword(&self) -> Option<Symbol> {
        match self {
            TokenType::KwAs => Some(kw::As),
            TokenType::KwAsync => Some(kw::Async),
            TokenType::KwAuto => Some(kw::Auto),
            TokenType::KwAwait => Some(kw::Await),
            TokenType::KwBecome => Some(kw::Become),
            TokenType::KwBox => Some(kw::Box),
            TokenType::KwBreak => Some(kw::Break),
            TokenType::KwCatch => Some(kw::Catch),
            TokenType::KwConst => Some(kw::Const),
            TokenType::KwContinue => Some(kw::Continue),
            TokenType::KwCrate => Some(kw::Crate),
            TokenType::KwDefault => Some(kw::Default),
            TokenType::KwDyn => Some(kw::Dyn),
            TokenType::KwElse => Some(kw::Else),
            TokenType::KwEnum => Some(kw::Enum),
            TokenType::KwExtern => Some(kw::Extern),
            TokenType::KwFn => Some(kw::Fn),
            TokenType::KwFor => Some(kw::For),
            TokenType::KwGen => Some(kw::Gen),
            TokenType::KwIf => Some(kw::If),
            TokenType::KwImpl => Some(kw::Impl),
            TokenType::KwIn => Some(kw::In),
            TokenType::KwLet => Some(kw::Let),
            TokenType::KwLoop => Some(kw::Loop),
            TokenType::KwMacroRules => Some(kw::MacroRules),
            TokenType::KwMacro => Some(kw::Macro),
            TokenType::KwMatch => Some(kw::Match),
            TokenType::KwMod => Some(kw::Mod),
            TokenType::KwMove => Some(kw::Move),
            TokenType::KwMut => Some(kw::Mut),
            TokenType::KwPub => Some(kw::Pub),
            TokenType::KwRaw => Some(kw::Raw),
            TokenType::KwRef => Some(kw::Ref),
            TokenType::KwReturn => Some(kw::Return),
            TokenType::KwReuse => Some(kw::Reuse),
            TokenType::KwSafe => Some(kw::Safe),
            TokenType::KwSelfUpper => Some(kw::SelfUpper),
            TokenType::KwStatic => Some(kw::Static),
            TokenType::KwStruct => Some(kw::Struct),
            TokenType::KwTrait => Some(kw::Trait),
            TokenType::KwTry => Some(kw::Try),
            TokenType::KwType => Some(kw::Type),
            TokenType::KwUnderscore => Some(kw::Underscore),
            TokenType::KwUnsafe => Some(kw::Unsafe),
            TokenType::KwUse => Some(kw::Use),
            TokenType::KwWhere => Some(kw::Where),
            TokenType::KwWhile => Some(kw::While),
            TokenType::KwYield => Some(kw::Yield),
            TokenType::SymAttSyntax => Some(sym::att_syntax),
            TokenType::SymClobberAbi => Some(sym::clobber_abi),
            TokenType::SymInlateout => Some(sym::inlateout),
            TokenType::SymInout => Some(sym::inout),
            TokenType::SymIs => Some(sym::is),
            TokenType::SymLabel => Some(sym::label),
            TokenType::SymLateout => Some(sym::lateout),
            TokenType::SymMayUnwind => Some(sym::may_unwind),
            TokenType::SymNomem => Some(sym::nomem),
            TokenType::SymNoreturn => Some(sym::noreturn),
            TokenType::SymNostack => Some(sym::nostack),
            TokenType::SymOptions => Some(sym::options),
            TokenType::SymOut => Some(sym::out),
            TokenType::SymPreservesFlags => Some(sym::preserves_flags),
            TokenType::SymPure => Some(sym::pure),
            TokenType::SymReadonly => Some(sym::readonly),
            TokenType::SymSym => Some(sym::sym),
            _ => None,
        }
    }
    // The output should be the same as that produced by
    // `rustc_ast_pretty::pprust::token_to_string`.
    /// Renders this token type for use in "expected ..." diagnostics,
    /// e.g. "`=`" or "identifier".
    pub(super) fn to_string(&self) -> String {
        match self {
            TokenType::Eq => "`=`",
            TokenType::Lt => "`<`",
            TokenType::Le => "`<=`",
            TokenType::EqEq => "`==`",
            TokenType::Gt => "`>`",
            TokenType::AndAnd => "`&&`",
            TokenType::OrOr => "`||`",
            TokenType::Not => "`!`",
            TokenType::Tilde => "`~`",
            TokenType::Plus => "`+`",
            TokenType::Minus => "`-`",
            TokenType::Star => "`*`",
            TokenType::And => "`&`",
            TokenType::Or => "`|`",
            TokenType::At => "`@`",
            TokenType::Dot => "`.`",
            TokenType::DotDot => "`..`",
            TokenType::DotDotDot => "`...`",
            TokenType::DotDotEq => "`..=`",
            TokenType::Comma => "`,`",
            TokenType::Semi => "`;`",
            TokenType::Colon => "`:`",
            TokenType::PathSep => "`::`",
            TokenType::RArrow => "`->`",
            TokenType::FatArrow => "`=>`",
            TokenType::Pound => "`#`",
            TokenType::Question => "`?`",
            TokenType::OpenParen => "`(`",
            TokenType::CloseParen => "`)`",
            TokenType::OpenBrace => "`{`",
            TokenType::CloseBrace => "`}`",
            TokenType::OpenBracket => "`[`",
            TokenType::CloseBracket => "`]`",
            TokenType::Eof => "<eof>",
            TokenType::Operator => "an operator",
            TokenType::Ident => "identifier",
            TokenType::Lifetime => "lifetime",
            TokenType::Path => "path",
            TokenType::Type => "type",
            TokenType::Const => "a const expression",
            // Every variant not listed above is a `Kw*`/`Sym*` variant, for
            // which `is_keyword` returns `Some`, so the `unwrap` cannot fail.
            _ => return format!("`{}`", self.is_keyword().unwrap()),
        }
        .to_string()
    }
}
/// Used by various `Parser` methods such as `check` and `eat`. The first field
/// is always used by those methods. The second field is only used when the
/// first field doesn't match.
#[derive(Clone, Copy, Debug)]
pub struct ExpTokenPair<'a> {
    /// The concrete token to compare against the parser's current token.
    pub tok: &'a TokenKind,
    /// The matching `TokenType`, recorded for "expected one of" diagnostics
    /// when `tok` doesn't match.
    pub token_type: TokenType,
}
/// Used by various `Parser` methods such as `check_keyword` and `eat_keyword`.
/// The first field is always used by those methods. The second field is only
/// used when the first field doesn't match.
//
// `Debug` is derived for consistency with `ExpTokenPair`.
#[derive(Clone, Copy, Debug)]
pub struct ExpKeywordPair {
    /// The keyword (or keyword-like symbol) to compare against the parser's
    /// current token.
    pub kw: Symbol,
    /// The matching `TokenType`, recorded for "expected one of" diagnostics
    /// when `kw` doesn't match.
    pub token_type: TokenType,
}
// Gets a statically-known `ExpTokenPair` pair (for non-keywords) or
// `ExpKeywordPair` (for keywords), as used with various `check`/`expect`
// methods in `Parser`.
//
// The name is short because it's used a lot.
#[macro_export]
// We don't use the normal `#[rustfmt::skip]` here because that triggers a
// bogus "macro-expanded `macro_export` macros from the current crate cannot be
// referred to by absolute paths" error, ugh. See #52234.
#[cfg_attr(rustfmt, rustfmt::skip)]
macro_rules! exp {
    // `ExpTokenPair` helper rules.
    (@tok, $tok:ident) => {
        $crate::parser::token_type::ExpTokenPair {
            tok: &rustc_ast::token::$tok,
            token_type: $crate::parser::token_type::TokenType::$tok
        }
    };
    (@binop, $op:ident) => {
        $crate::parser::token_type::ExpTokenPair {
            tok: &rustc_ast::token::BinOp(rustc_ast::token::BinOpToken::$op),
            token_type: $crate::parser::token_type::TokenType::$op,
        }
    };
    (@open, $delim:ident, $token_type:ident) => {
        $crate::parser::token_type::ExpTokenPair {
            tok: &rustc_ast::token::OpenDelim(rustc_ast::token::Delimiter::$delim),
            token_type: $crate::parser::token_type::TokenType::$token_type,
        }
    };
    (@close, $delim:ident, $token_type:ident) => {
        $crate::parser::token_type::ExpTokenPair {
            tok: &rustc_ast::token::CloseDelim(rustc_ast::token::Delimiter::$delim),
            token_type: $crate::parser::token_type::TokenType::$token_type,
        }
    };
    // `ExpKeywordPair` helper rules.
    (@kw, $kw:ident, $token_type:ident) => {
        $crate::parser::token_type::ExpKeywordPair {
            kw: rustc_span::symbol::kw::$kw,
            token_type: $crate::parser::token_type::TokenType::$token_type,
        }
    };
    (@sym, $kw:ident, $token_type:ident) => {
        $crate::parser::token_type::ExpKeywordPair {
            kw: rustc_span::symbol::sym::$kw,
            token_type: $crate::parser::token_type::TokenType::$token_type,
        }
    };
    // Public rules: plain tokens, where the `TokenKind` and `TokenType`
    // variants share a name.
    (Eq) => { exp!(@tok, Eq) };
    (Lt) => { exp!(@tok, Lt) };
    (Le) => { exp!(@tok, Le) };
    (EqEq) => { exp!(@tok, EqEq) };
    (Gt) => { exp!(@tok, Gt) };
    (AndAnd) => { exp!(@tok, AndAnd) };
    (OrOr) => { exp!(@tok, OrOr) };
    (Not) => { exp!(@tok, Not) };
    (Tilde) => { exp!(@tok, Tilde) };
    (At) => { exp!(@tok, At) };
    (Dot) => { exp!(@tok, Dot) };
    (DotDot) => { exp!(@tok, DotDot) };
    (DotDotDot) => { exp!(@tok, DotDotDot) };
    (DotDotEq) => { exp!(@tok, DotDotEq) };
    (Comma) => { exp!(@tok, Comma) };
    (Semi) => { exp!(@tok, Semi) };
    (Colon) => { exp!(@tok, Colon) };
    (PathSep) => { exp!(@tok, PathSep) };
    (RArrow) => { exp!(@tok, RArrow) };
    (FatArrow) => { exp!(@tok, FatArrow) };
    (Pound) => { exp!(@tok, Pound) };
    (Question) => { exp!(@tok, Question) };
    (Eof) => { exp!(@tok, Eof) };
    // Public rules: binary-operator tokens.
    (Plus) => { exp!(@binop, Plus) };
    (Minus) => { exp!(@binop, Minus) };
    (Star) => { exp!(@binop, Star) };
    (And) => { exp!(@binop, And) };
    (Or) => { exp!(@binop, Or) };
    // Public rules: delimiter tokens.
    (OpenParen) => { exp!(@open, Parenthesis, OpenParen) };
    (OpenBrace) => { exp!(@open, Brace, OpenBrace) };
    (OpenBracket) => { exp!(@open, Bracket, OpenBracket) };
    (CloseParen) => { exp!(@close, Parenthesis, CloseParen) };
    (CloseBrace) => { exp!(@close, Brace, CloseBrace) };
    (CloseBracket) => { exp!(@close, Bracket, CloseBracket) };
    // Public rules: keywords.
    (As) => { exp!(@kw, As, KwAs) };
    (Async) => { exp!(@kw, Async, KwAsync) };
    (Auto) => { exp!(@kw, Auto, KwAuto) };
    (Await) => { exp!(@kw, Await, KwAwait) };
    (Become) => { exp!(@kw, Become, KwBecome) };
    (Box) => { exp!(@kw, Box, KwBox) };
    (Break) => { exp!(@kw, Break, KwBreak) };
    (Catch) => { exp!(@kw, Catch, KwCatch) };
    (Const) => { exp!(@kw, Const, KwConst) };
    (Continue) => { exp!(@kw, Continue, KwContinue) };
    (Crate) => { exp!(@kw, Crate, KwCrate) };
    (Default) => { exp!(@kw, Default, KwDefault) };
    (Dyn) => { exp!(@kw, Dyn, KwDyn) };
    (Else) => { exp!(@kw, Else, KwElse) };
    (Enum) => { exp!(@kw, Enum, KwEnum) };
    (Extern) => { exp!(@kw, Extern, KwExtern) };
    (Fn) => { exp!(@kw, Fn, KwFn) };
    (For) => { exp!(@kw, For, KwFor) };
    (Gen) => { exp!(@kw, Gen, KwGen) };
    (If) => { exp!(@kw, If, KwIf) };
    (Impl) => { exp!(@kw, Impl, KwImpl) };
    (In) => { exp!(@kw, In, KwIn) };
    (Let) => { exp!(@kw, Let, KwLet) };
    (Loop) => { exp!(@kw, Loop, KwLoop) };
    (Macro) => { exp!(@kw, Macro, KwMacro) };
    (MacroRules) => { exp!(@kw, MacroRules, KwMacroRules) };
    (Match) => { exp!(@kw, Match, KwMatch) };
    (Mod) => { exp!(@kw, Mod, KwMod) };
    (Move) => { exp!(@kw, Move, KwMove) };
    (Mut) => { exp!(@kw, Mut, KwMut) };
    (Pub) => { exp!(@kw, Pub, KwPub) };
    (Raw) => { exp!(@kw, Raw, KwRaw) };
    (Ref) => { exp!(@kw, Ref, KwRef) };
    (Return) => { exp!(@kw, Return, KwReturn) };
    (Reuse) => { exp!(@kw, Reuse, KwReuse) };
    (Safe) => { exp!(@kw, Safe, KwSafe) };
    (SelfUpper) => { exp!(@kw, SelfUpper, KwSelfUpper) };
    (Static) => { exp!(@kw, Static, KwStatic) };
    (Struct) => { exp!(@kw, Struct, KwStruct) };
    (Trait) => { exp!(@kw, Trait, KwTrait) };
    (Try) => { exp!(@kw, Try, KwTry) };
    (Type) => { exp!(@kw, Type, KwType) };
    (Underscore) => { exp!(@kw, Underscore, KwUnderscore) };
    (Unsafe) => { exp!(@kw, Unsafe, KwUnsafe) };
    (Use) => { exp!(@kw, Use, KwUse) };
    (Where) => { exp!(@kw, Where, KwWhere) };
    (While) => { exp!(@kw, While, KwWhile) };
    (Yield) => { exp!(@kw, Yield, KwYield) };
    // Public rules: keyword-like symbols.
    (AttSyntax) => { exp!(@sym, att_syntax, SymAttSyntax) };
    (ClobberAbi) => { exp!(@sym, clobber_abi, SymClobberAbi) };
    (Inlateout) => { exp!(@sym, inlateout, SymInlateout) };
    (Inout) => { exp!(@sym, inout, SymInout) };
    (Is) => { exp!(@sym, is, SymIs) };
    (Label) => { exp!(@sym, label, SymLabel) };
    (Lateout) => { exp!(@sym, lateout, SymLateout) };
    (MayUnwind) => { exp!(@sym, may_unwind, SymMayUnwind) };
    (Nomem) => { exp!(@sym, nomem, SymNomem) };
    (Noreturn) => { exp!(@sym, noreturn, SymNoreturn) };
    (Nostack) => { exp!(@sym, nostack, SymNostack) };
    (Options) => { exp!(@sym, options, SymOptions) };
    (Out) => { exp!(@sym, out, SymOut) };
    (PreservesFlags) => { exp!(@sym, preserves_flags, SymPreservesFlags) };
    (Pure) => { exp!(@sym, pure, SymPure) };
    (Readonly) => { exp!(@sym, readonly, SymReadonly) };
    (Sym) => { exp!(@sym, sym, SymSym) };
}
/// A bitset type designed specifically for `Parser::expected_token_types`,
/// which is very hot. `u128` is the smallest integer that will fit every
/// `TokenType` value.
#[derive(Clone, Copy)]
pub(super) struct TokenTypeSet(u128);

impl TokenTypeSet {
    /// Creates an empty set.
    pub(super) fn new() -> TokenTypeSet {
        TokenTypeSet(0)
    }

    /// Returns true when no token types are present.
    pub(super) fn is_empty(&self) -> bool {
        self.0 == 0
    }

    /// Adds `token_type` to the set; a no-op if it is already present.
    pub(super) fn insert(&mut self, token_type: TokenType) {
        self.0 |= 1u128 << token_type as u32;
    }

    /// Removes every element, leaving the set empty.
    pub(super) fn clear(&mut self) {
        self.0 = 0;
    }

    /// Tests whether `token_type` is in the set.
    pub(super) fn contains(&self, token_type: TokenType) -> bool {
        (self.0 >> token_type as u32) & 1 != 0
    }

    /// Returns an iterator over the contained token types, in ascending
    /// discriminant order.
    pub(super) fn iter(&self) -> TokenTypeSetIter {
        TokenTypeSetIter(*self)
    }
}
// The `TokenTypeSet` is a copy of the set being iterated. It initially holds
// the entire set. Each bit is cleared as it is returned. We have finished once
// it is all zeroes.
pub(super) struct TokenTypeSetIter(TokenTypeSet);

impl Iterator for TokenTypeSetIter {
    type Item = TokenType;

    /// Yields the `TokenType` for the lowest remaining set bit, clearing that
    /// bit, until the backing `u128` is zero.
    fn next(&mut self) -> Option<TokenType> {
        // `u128::BITS` (== 128) replaces the previous runtime
        // `size_of_val(..) * 8` computation plus `assert_eq!`: the width of
        // the backing integer is a compile-time constant, and
        // `trailing_zeros` returns it exactly when no bits remain set.
        let z = self.0.0.trailing_zeros();
        if z == u128::BITS {
            None
        } else {
            self.0.0 &= !(1 << z); // clear the trailing 1 bit
            Some(TokenType::from_u32(z))
        }
    }
}

View File

@ -18,7 +18,7 @@ use crate::errors::{
HelpUseLatestEdition, InvalidDynKeyword, LifetimeAfterMut, NeedPlusAfterTraitObjectLifetime,
NestedCVariadicType, ReturnTypesUseThinArrow,
};
use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
use crate::{exp, maybe_recover_from_interpolated_ty_qpath, maybe_whole};
/// Signals whether parsing a type should allow `+`.
///
@ -203,7 +203,7 @@ impl<'a> Parser<'a> {
recover_return_sign: RecoverReturnSign,
) -> PResult<'a, FnRetTy> {
let lo = self.prev_token.span;
Ok(if self.eat(&token::RArrow) {
Ok(if self.eat(exp!(RArrow)) {
// FIXME(Centril): Can we unconditionally `allow_plus`?
let ty = self.parse_ty_common(
allow_plus,
@ -251,28 +251,28 @@ impl<'a> Parser<'a> {
let lo = self.token.span;
let mut impl_dyn_multi = false;
let kind = if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
let kind = if self.check(exp!(OpenParen)) {
self.parse_ty_tuple_or_parens(lo, allow_plus)?
} else if self.eat(&token::Not) {
} else if self.eat(exp!(Not)) {
// Never type `!`
TyKind::Never
} else if self.eat(&token::BinOp(token::Star)) {
} else if self.eat(exp!(Star)) {
self.parse_ty_ptr()?
} else if self.eat(&token::OpenDelim(Delimiter::Bracket)) {
} else if self.eat(exp!(OpenBracket)) {
self.parse_array_or_slice_ty()?
} else if self.check(&token::BinOp(token::And)) || self.check(&token::AndAnd) {
} else if self.check(exp!(And)) || self.check(exp!(AndAnd)) {
// Reference
self.expect_and()?;
self.parse_borrowed_pointee()?
} else if self.eat_keyword_noexpect(kw::Typeof) {
self.parse_typeof_ty()?
} else if self.eat_keyword(kw::Underscore) {
} else if self.eat_keyword(exp!(Underscore)) {
// A type to be inferred `_`
TyKind::Infer
} else if self.check_fn_front_matter(false, Case::Sensitive) {
// Function pointer type
self.parse_ty_bare_fn(lo, ThinVec::new(), None, recover_return_sign)?
} else if self.check_keyword(kw::For) {
} else if self.check_keyword(exp!(For)) {
// Function pointer type or bound list (trait object type) starting with a poly-trait.
// `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T`
// `for<'lt> Trait1<'lt> + Trait2 + 'a`
@ -324,7 +324,7 @@ impl<'a> Parser<'a> {
self.parse_remaining_bounds_path(lifetime_defs, path, lo, parse_plus)?
}
}
} else if self.eat_keyword(kw::Impl) {
} else if self.eat_keyword(exp!(Impl)) {
self.parse_impl_ty(&mut impl_dyn_multi)?
} else if self.is_explicit_dyn_type() {
self.parse_dyn_ty(&mut impl_dyn_multi)?
@ -336,7 +336,7 @@ impl<'a> Parser<'a> {
self.parse_path_start_ty(lo, allow_plus, ty_generics)?
} else if self.can_begin_bound() {
self.parse_bare_trait_object(lo, allow_plus)?
} else if self.eat(&token::DotDotDot) {
} else if self.eat(exp!(DotDotDot)) {
match allow_c_variadic {
AllowCVariadic::Yes => TyKind::CVarArgs,
AllowCVariadic::No => {
@ -347,7 +347,7 @@ impl<'a> Parser<'a> {
TyKind::Err(guar)
}
}
} else if self.check_keyword(kw::Unsafe)
} else if self.check_keyword(exp!(Unsafe))
&& self.look_ahead(1, |tok| matches!(tok.kind, token::Lt))
{
self.parse_unsafe_binder_ty()?
@ -374,7 +374,7 @@ impl<'a> Parser<'a> {
fn parse_unsafe_binder_ty(&mut self) -> PResult<'a, TyKind> {
let lo = self.token.span;
assert!(self.eat_keyword(kw::Unsafe));
assert!(self.eat_keyword(exp!(Unsafe)));
self.expect_lt()?;
let generic_params = self.parse_generic_params()?;
self.expect_gt()?;
@ -487,16 +487,16 @@ impl<'a> Parser<'a> {
Err(err) => return Err(err),
};
let ty = if self.eat(&token::Semi) {
let ty = if self.eat(exp!(Semi)) {
let mut length = self.parse_expr_anon_const()?;
if let Err(e) = self.expect(&token::CloseDelim(Delimiter::Bracket)) {
if let Err(e) = self.expect(exp!(CloseBracket)) {
// Try to recover from `X<Y, ...>` when `X::<Y, ...>` works
self.check_mistyped_turbofish_with_multiple_type_params(e, &mut length.value)?;
self.expect(&token::CloseDelim(Delimiter::Bracket))?;
self.expect(exp!(CloseBracket))?;
}
TyKind::Array(elt_ty, length)
} else {
self.expect(&token::CloseDelim(Delimiter::Bracket))?;
self.expect(exp!(CloseBracket))?;
TyKind::Slice(elt_ty)
};
@ -579,9 +579,9 @@ impl<'a> Parser<'a> {
// Parses the `typeof(EXPR)`.
// To avoid ambiguity, the type is surrounded by parentheses.
fn parse_typeof_ty(&mut self) -> PResult<'a, TyKind> {
self.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
self.expect(exp!(OpenParen))?;
let expr = self.parse_expr_anon_const()?;
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
self.expect(exp!(CloseParen))?;
Ok(TyKind::Typeof(expr))
}
@ -697,15 +697,15 @@ impl<'a> Parser<'a> {
let lo = self.token.span;
self.expect_lt()?;
let (args, _, _) = self.parse_seq_to_before_tokens(
&[&TokenKind::Gt],
&[exp!(Gt)],
&[
&TokenKind::Ge,
&TokenKind::BinOp(BinOpToken::Shr),
&TokenKind::BinOpEq(BinOpToken::Shr),
],
SeqSep::trailing_allowed(token::Comma),
SeqSep::trailing_allowed(exp!(Comma)),
|self_| {
if self_.check_keyword(kw::SelfUpper) {
if self_.check_keyword(exp!(SelfUpper)) {
self_.bump();
Ok(PreciseCapturingArg::Arg(
ast::Path::from_ident(self_.prev_token.ident().unwrap().0),
@ -729,7 +729,7 @@ impl<'a> Parser<'a> {
/// Is a `dyn B0 + ... + Bn` type allowed here?
fn is_explicit_dyn_type(&mut self) -> bool {
self.check_keyword(kw::Dyn)
self.check_keyword(exp!(Dyn))
&& (self.token.uninterpolated_span().at_least_rust_2018()
|| self.look_ahead(1, |t| {
(can_begin_dyn_bound_in_edition_2015(t) || *t == TokenKind::BinOp(token::Star))
@ -745,7 +745,7 @@ impl<'a> Parser<'a> {
self.bump(); // `dyn`
// parse dyn* types
let syntax = if self.eat(&TokenKind::BinOp(token::Star)) {
let syntax = if self.eat(exp!(Star)) {
self.psess.gated_spans.gate(sym::dyn_star, lo.to(self.prev_token.span));
TraitObjectSyntax::DynStar
} else {
@ -772,7 +772,7 @@ impl<'a> Parser<'a> {
) -> PResult<'a, TyKind> {
// Simple path
let path = self.parse_path_inner(PathStyle::Type, ty_generics)?;
if self.eat(&token::Not) {
if self.eat(exp!(Not)) {
// Macro invocation in type position
Ok(TyKind::MacCall(P(MacCall { path, args: self.parse_delim_args()? })))
} else if allow_plus == AllowPlus::Yes && self.check_plus() {
@ -825,14 +825,14 @@ impl<'a> Parser<'a> {
fn can_begin_bound(&mut self) -> bool {
self.check_path()
|| self.check_lifetime()
|| self.check(&token::Not)
|| self.check(&token::Question)
|| self.check(&token::Tilde)
|| self.check_keyword(kw::For)
|| self.check(&token::OpenDelim(Delimiter::Parenthesis))
|| self.check_keyword(kw::Const)
|| self.check_keyword(kw::Async)
|| self.check_keyword(kw::Use)
|| self.check(exp!(Not))
|| self.check(exp!(Question))
|| self.check(exp!(Tilde))
|| self.check_keyword(exp!(For))
|| self.check(exp!(OpenParen))
|| self.check_keyword(exp!(Const))
|| self.check_keyword(exp!(Async))
|| self.check_keyword(exp!(Use))
}
/// Parses a bound according to the grammar:
@ -842,11 +842,11 @@ impl<'a> Parser<'a> {
fn parse_generic_bound(&mut self) -> PResult<'a, GenericBound> {
let lo = self.token.span;
let leading_token = self.prev_token.clone();
let has_parens = self.eat(&token::OpenDelim(Delimiter::Parenthesis));
let has_parens = self.eat(exp!(OpenParen));
let bound = if self.token.is_lifetime() {
self.parse_generic_lt_bound(lo, has_parens)?
} else if self.eat_keyword(kw::Use) {
} else if self.eat_keyword(exp!(Use)) {
// parse precise captures, if any. This is `use<'lt, 'lt, P, P>`; a list of
// lifetimes and ident params (including SelfUpper). These are validated later
// for order, duplication, and whether they actually reference params.
@ -919,7 +919,7 @@ impl<'a> Parser<'a> {
/// Recover on `('lifetime)` with `(` already eaten.
fn recover_paren_lifetime(&mut self, lo: Span) -> PResult<'a, ()> {
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
self.expect(exp!(CloseParen))?;
let span = lo.to(self.prev_token.span);
let sugg = errors::RemoveParens { lo, hi: self.prev_token.span };
@ -940,13 +940,13 @@ impl<'a> Parser<'a> {
/// See `parse_generic_ty_bound` for the complete grammar of trait bound modifiers.
fn parse_trait_bound_modifiers(&mut self) -> PResult<'a, TraitBoundModifiers> {
let modifier_lo = self.token.span;
let constness = if self.eat(&token::Tilde) {
let constness = if self.eat(exp!(Tilde)) {
let tilde = self.prev_token.span;
self.expect_keyword(kw::Const)?;
self.expect_keyword(exp!(Const))?;
let span = tilde.to(self.prev_token.span);
self.psess.gated_spans.gate(sym::const_trait_impl, span);
BoundConstness::Maybe(span)
} else if self.eat_keyword(kw::Const) {
} else if self.eat_keyword(exp!(Const)) {
self.psess.gated_spans.gate(sym::const_trait_impl, self.prev_token.span);
BoundConstness::Always(self.prev_token.span)
} else {
@ -954,7 +954,7 @@ impl<'a> Parser<'a> {
};
let asyncness = if self.token.uninterpolated_span().at_least_rust_2018()
&& self.eat_keyword(kw::Async)
&& self.eat_keyword(exp!(Async))
{
self.psess.gated_spans.gate(sym::async_trait_bounds, self.prev_token.span);
BoundAsyncness::Async(self.prev_token.span)
@ -974,9 +974,9 @@ impl<'a> Parser<'a> {
};
let modifier_hi = self.prev_token.span;
let polarity = if self.eat(&token::Question) {
let polarity = if self.eat(exp!(Question)) {
BoundPolarity::Maybe(self.prev_token.span)
} else if self.eat(&token::Not) {
} else if self.eat(exp!(Not)) {
self.psess.gated_spans.gate(sym::negative_bounds, self.prev_token.span);
BoundPolarity::Negative(self.prev_token.span)
} else {
@ -1122,7 +1122,7 @@ impl<'a> Parser<'a> {
if self.token.is_like_plus() && leading_token.is_keyword(kw::Dyn) {
let bounds = vec![];
self.parse_remaining_bounds(bounds, true)?;
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
self.expect(exp!(CloseParen))?;
self.dcx().emit_err(errors::IncorrectParensTraitBounds {
span: vec![lo, self.prev_token.span],
sugg: errors::IncorrectParensTraitBoundsSugg {
@ -1131,7 +1131,7 @@ impl<'a> Parser<'a> {
},
});
} else {
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
self.expect(exp!(CloseParen))?;
}
}
@ -1176,7 +1176,7 @@ impl<'a> Parser<'a> {
pub(super) fn parse_late_bound_lifetime_defs(
&mut self,
) -> PResult<'a, (ThinVec<GenericParam>, Option<Span>)> {
if self.eat_keyword(kw::For) {
if self.eat_keyword(exp!(For)) {
let lo = self.token.span;
self.expect_lt()?;
let params = self.parse_generic_params()?;
@ -1280,7 +1280,7 @@ impl<'a> Parser<'a> {
}
pub(super) fn check_lifetime(&mut self) -> bool {
self.expected_tokens.push(TokenType::Lifetime);
self.expected_token_types.insert(TokenType::Lifetime);
self.token.is_lifetime()
}

View File

@ -2,6 +2,7 @@ use std::panic::{AssertUnwindSafe, catch_unwind};
use rustc_ast::ast;
use rustc_ast::token::{Delimiter, TokenKind};
use rustc_parse::exp;
use rustc_parse::parser::ForceCollect;
use rustc_span::symbol::kw;
@ -31,7 +32,7 @@ fn parse_cfg_if_inner<'a>(
while parser.token.kind != TokenKind::Eof {
if process_if_cfg {
if !parser.eat_keyword(kw::If) {
if !parser.eat_keyword(exp!(If)) {
return Err("Expected `if`");
}
@ -55,7 +56,7 @@ fn parse_cfg_if_inner<'a>(
})?;
}
if !parser.eat(&TokenKind::OpenDelim(Delimiter::Brace)) {
if !parser.eat(exp!(OpenBrace)) {
return Err("Expected an opening brace");
}
@ -78,15 +79,15 @@ fn parse_cfg_if_inner<'a>(
}
}
if !parser.eat(&TokenKind::CloseDelim(Delimiter::Brace)) {
if !parser.eat(exp!(CloseBrace)) {
return Err("Expected a closing brace");
}
if parser.eat(&TokenKind::Eof) {
if parser.eat(exp!(Eof)) {
break;
}
if !parser.eat_keyword(kw::Else) {
if !parser.eat_keyword(exp!(Else)) {
return Err("Expected `else`");
}

View File

@ -1,8 +1,9 @@
use rustc_ast::ast;
use rustc_ast::ptr::P;
use rustc_ast::token::TokenKind;
use rustc_ast::token;
use rustc_ast::tokenstream::TokenStream;
use rustc_span::symbol::{self, kw};
use rustc_parse::exp;
use rustc_span::symbol;
use crate::rewrite::RewriteContext;
@ -31,19 +32,19 @@ pub(crate) fn parse_lazy_static(
}
}
}
while parser.token.kind != TokenKind::Eof {
while parser.token.kind != token::Eof {
// Parse a `lazy_static!` item.
// FIXME: These `eat_*` calls should be converted to `parse_or` to avoid
// silently formatting malformed lazy-statics.
let vis = parse_or!(parse_visibility, rustc_parse::parser::FollowedByType::No);
let _ = parser.eat_keyword(kw::Static);
let _ = parser.eat_keyword(kw::Ref);
let _ = parser.eat_keyword(exp!(Static));
let _ = parser.eat_keyword(exp!(Ref));
let id = parse_or!(parse_ident);
let _ = parser.eat(&TokenKind::Colon);
let _ = parser.eat(exp!(Colon));
let ty = parse_or!(parse_ty);
let _ = parser.eat(&TokenKind::Eq);
let _ = parser.eat(exp!(Eq));
let expr = parse_or!(parse_expr);
let _ = parser.eat(&TokenKind::Semi);
let _ = parser.eat(exp!(Semi));
result.push((vis, id, ty, expr));
}

View File

@ -1,11 +1,10 @@
use std::panic::{AssertUnwindSafe, catch_unwind};
use std::path::{Path, PathBuf};
use rustc_ast::token::TokenKind;
use rustc_ast::{ast, attr, ptr};
use rustc_errors::Diag;
use rustc_parse::parser::Parser as RawParser;
use rustc_parse::{new_parser_from_file, new_parser_from_source_str, unwrap_or_emit_fatal};
use rustc_parse::{exp, new_parser_from_file, new_parser_from_source_str, unwrap_or_emit_fatal};
use rustc_span::{Span, sym};
use thin_vec::ThinVec;
@ -107,7 +106,7 @@ impl<'a> Parser<'a> {
let result = catch_unwind(AssertUnwindSafe(|| {
let mut parser =
unwrap_or_emit_fatal(new_parser_from_file(psess.inner(), path, Some(span)));
match parser.parse_mod(&TokenKind::Eof) {
match parser.parse_mod(exp!(Eof)) {
Ok((a, i, spans)) => Some((a, i, spans.inner_span)),
Err(e) => {
e.emit();