Rollup merge of #96433 - petrochenkov:delim, r=nnethercote
rustc_ast: Harmonize delimiter naming with `proc_macro::Delimiter`.

The compiler cannot reuse `proc_macro::Delimiter` directly because it needs extra impls, but it can at least use the same naming. After this PR the only difference between the two enums is that `proc_macro::Delimiter::None` is turned into `token::Delimiter::Invisible`. It was my mistake that the invisible delimiter is called `None` on stable: during stabilization I audited the naming and wrote the docs, but missed that the `None` name gives a wrong and confusing impression of what this thing is.

cc https://github.com/rust-lang/rust/pull/96421
r? `@nnethercote`
Commit 0cbf3b2b30
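For orientation before the file-by-file diff, the sketch below shows the naming correspondence this PR establishes. It is a standalone illustration with local stand-in enums, not the real `rustc_ast::token::Delimiter` or `proc_macro::Delimiter` items; the mapping mirrors the `FromInternal`/`ToInternal` conversions updated in the proc-macro server hunks further down.

// Illustration only -- not part of the diff below.
// Stand-in for the renamed compiler-side enum (`rustc_ast::token::Delimiter`).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum Delimiter {
    Parenthesis, // `( ... )`
    Brace,       // `{ ... }`
    Bracket,     // `[ ... ]`
    Invisible,   // e.g. around tokens that came from a macro variable `$var`
}

// Stand-in for the stable `proc_macro::Delimiter`, where the invisible case is called `None`.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum ProcMacroDelimiter {
    Parenthesis,
    Brace,
    Bracket,
    None,
}

// Every variant maps by name, except the compiler's `Invisible`, which is `None` on stable.
fn to_proc_macro(delim: Delimiter) -> ProcMacroDelimiter {
    match delim {
        Delimiter::Parenthesis => ProcMacroDelimiter::Parenthesis,
        Delimiter::Brace => ProcMacroDelimiter::Brace,
        Delimiter::Bracket => ProcMacroDelimiter::Bracket,
        Delimiter::Invisible => ProcMacroDelimiter::None,
    }
}

fn main() {
    // Print the full mapping and spot-check the one renamed case.
    for d in [Delimiter::Parenthesis, Delimiter::Brace, Delimiter::Bracket, Delimiter::Invisible] {
        println!("{:?} -> {:?}", d, to_proc_macro(d));
    }
    assert_eq!(to_proc_macro(Delimiter::Invisible), ProcMacroDelimiter::None);
}
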
@@ -23,7 +23,7 @@ pub use GenericArgs::*;
pub use UnsafeSource::*;

use crate::ptr::P;
-use crate::token::{self, CommentKind, DelimToken, Token};
+use crate::token::{self, CommentKind, Delimiter, Token};
use crate::tokenstream::{DelimSpan, LazyTokenStream, TokenStream, TokenTree};

use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
@@ -1542,7 +1542,7 @@ pub enum MacArgs {
}

impl MacArgs {
-pub fn delim(&self) -> Option<DelimToken> {
+pub fn delim(&self) -> Option<Delimiter> {
match self {
MacArgs::Delimited(_, delim, _) => Some(delim.to_token()),
MacArgs::Empty | MacArgs::Eq(..) => None,
@@ -1582,20 +1582,20 @@ pub enum MacDelimiter {
}

impl MacDelimiter {
-pub fn to_token(self) -> DelimToken {
+pub fn to_token(self) -> Delimiter {
match self {
-MacDelimiter::Parenthesis => DelimToken::Paren,
-MacDelimiter::Bracket => DelimToken::Bracket,
-MacDelimiter::Brace => DelimToken::Brace,
+MacDelimiter::Parenthesis => Delimiter::Parenthesis,
+MacDelimiter::Bracket => Delimiter::Bracket,
+MacDelimiter::Brace => Delimiter::Brace,
}
}

-pub fn from_token(delim: DelimToken) -> Option<MacDelimiter> {
+pub fn from_token(delim: Delimiter) -> Option<MacDelimiter> {
match delim {
-token::Paren => Some(MacDelimiter::Parenthesis),
-token::Bracket => Some(MacDelimiter::Bracket),
-token::Brace => Some(MacDelimiter::Brace),
-token::NoDelim => None,
+Delimiter::Parenthesis => Some(MacDelimiter::Parenthesis),
+Delimiter::Bracket => Some(MacDelimiter::Bracket),
+Delimiter::Brace => Some(MacDelimiter::Brace),
+Delimiter::Invisible => None,
}
}
}

@@ -5,7 +5,7 @@ use crate::ast::{AttrId, AttrItem, AttrKind, AttrStyle, Attribute};
use crate::ast::{Lit, LitKind};
use crate::ast::{MacArgs, MacDelimiter, MetaItem, MetaItemKind, NestedMetaItem};
use crate::ast::{Path, PathSegment};
-use crate::token::{self, CommentKind, Token};
+use crate::token::{self, CommentKind, Delimiter, Token};
use crate::tokenstream::{AttrAnnotatedTokenStream, AttrAnnotatedTokenTree};
use crate::tokenstream::{DelimSpan, Spacing, TokenTree, TreeAndSpacing};
use crate::tokenstream::{LazyTokenStream, TokenStream};
@@ -513,7 +513,7 @@ impl MetaItemKind {
vec![
TokenTree::Delimited(
DelimSpan::from_single(span),
-token::Paren,
+Delimiter::Parenthesis,
TokenStream::new(tokens),
)
.into(),
@@ -540,7 +540,7 @@ impl MetaItemKind {
tokens: &mut impl Iterator<Item = TokenTree>,
) -> Option<MetaItemKind> {
match tokens.next() {
-Some(TokenTree::Delimited(_, token::NoDelim, inner_tokens)) => {
+Some(TokenTree::Delimited(_, Delimiter::Invisible, inner_tokens)) => {
MetaItemKind::name_value_from_tokens(&mut inner_tokens.trees())
}
Some(TokenTree::Token(token)) => {
@@ -565,7 +565,7 @@ impl MetaItemKind {
tokens: &mut iter::Peekable<impl Iterator<Item = TokenTree>>,
) -> Option<MetaItemKind> {
match tokens.peek() {
-Some(TokenTree::Delimited(_, token::Paren, inner_tokens)) => {
+Some(TokenTree::Delimited(_, Delimiter::Parenthesis, inner_tokens)) => {
let inner_tokens = inner_tokens.clone();
tokens.next();
MetaItemKind::list_from_tokens(inner_tokens)
@@ -606,7 +606,7 @@ impl NestedMetaItem {
tokens.next();
return Some(NestedMetaItem::Literal(lit));
}
-Some(TokenTree::Delimited(_, token::NoDelim, inner_tokens)) => {
+Some(TokenTree::Delimited(_, Delimiter::Invisible, inner_tokens)) => {
let inner_tokens = inner_tokens.clone();
tokens.next();
return NestedMetaItem::from_tokens(&mut inner_tokens.into_trees().peekable());

@@ -1,5 +1,4 @@
pub use BinOpToken::*;
-pub use DelimToken::*;
pub use LitKind::*;
pub use Nonterminal::*;
pub use TokenKind::*;
@@ -37,18 +36,26 @@ pub enum BinOpToken {
Shr,
}

-/// A delimiter token.
-#[derive(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Debug, Copy)]
-#[derive(HashStable_Generic)]
-pub enum DelimToken {
-/// A round parenthesis (i.e., `(` or `)`).
-Paren,
-/// A square bracket (i.e., `[` or `]`).
-Bracket,
-/// A curly brace (i.e., `{` or `}`).
+/// Describes how a sequence of token trees is delimited.
+/// Cannot use `proc_macro::Delimiter` directly because this
+/// structure should implement some additional traits.
+/// The `None` variant is also renamed to `Invisible` to be
+/// less confusing and better convey the semantics.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+#[derive(Encodable, Decodable, Hash, HashStable_Generic)]
+pub enum Delimiter {
+/// `( ... )`
+Parenthesis,
+/// `{ ... }`
Brace,
-/// An empty delimiter.
-NoDelim,
+/// `[ ... ]`
+Bracket,
+/// `Ø ... Ø`
+/// An invisible delimiter, that may, for example, appear around tokens coming from a
+/// "macro variable" `$var`. It is important to preserve operator priorities in cases like
+/// `$var * 3` where `$var` is `1 + 2`.
+/// Invisible delimiters might not survive roundtrip of a token stream through a string.
+Invisible,
}

#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
@@ -212,9 +219,9 @@ pub enum TokenKind {
/// Used by proc macros for representing lifetimes, not generated by lexer right now.
SingleQuote,
/// An opening delimiter (e.g., `{`).
-OpenDelim(DelimToken),
+OpenDelim(Delimiter),
/// A closing delimiter (e.g., `}`).
-CloseDelim(DelimToken),
+CloseDelim(Delimiter),

/* Literals */
Literal(Lit),
@@ -387,8 +394,8 @@ impl Token {
match self.uninterpolate().kind {
Ident(name, is_raw) =>
ident_can_begin_type(name, self.span, is_raw), // type name or keyword
-OpenDelim(Paren) | // tuple
-OpenDelim(Bracket) | // array
+OpenDelim(Delimiter::Parenthesis) | // tuple
+OpenDelim(Delimiter::Bracket) | // array
Not | // never
BinOp(Star) | // raw pointer
BinOp(And) | // reference
@@ -405,7 +412,7 @@ impl Token {
/// Returns `true` if the token can appear at the start of a const param.
pub fn can_begin_const_arg(&self) -> bool {
match self.kind {
-OpenDelim(Brace) => true,
+OpenDelim(Delimiter::Brace) => true,
Interpolated(ref nt) => matches!(**nt, NtExpr(..) | NtBlock(..) | NtLiteral(..)),
_ => self.can_begin_literal_maybe_minus(),
}
@@ -417,7 +424,7 @@ impl Token {
|| self.is_lifetime()
|| self.is_keyword(kw::For)
|| self == &Question
-|| self == &OpenDelim(Paren)
+|| self == &OpenDelim(Delimiter::Parenthesis)
}

/// Returns `true` if the token is any literal.

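The new doc comment on `Delimiter::Invisible` above notes that invisible delimiters exist to preserve operator priorities in cases like `$var * 3` where `$var` is `1 + 2`. The snippet below is ordinary user-level `macro_rules!` code (not compiler internals) that demonstrates the behavior those invisible groups guarantee:

// Illustration only -- plain user code, not part of the diff.
// The `expr` fragment is substituted as a single unit (conceptually wrapped in an
// invisible delimiter), so `1 + 2` keeps its grouping inside `$var * 3`.
macro_rules! times_three {
    ($var:expr) => {
        $var * 3
    };
}

fn main() {
    // (1 + 2) * 3 == 9, not 1 + 2 * 3 == 7.
    assert_eq!(times_three!(1 + 2), 9);
    println!("times_three!(1 + 2) = {}", times_three!(1 + 2));
}
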
@@ -13,7 +13,7 @@
//! and a borrowed `TokenStream` is sufficient to build an owned `TokenStream` without taking
//! ownership of the original.

-use crate::token::{self, DelimToken, Token, TokenKind};
+use crate::token::{self, Delimiter, Token, TokenKind};
use crate::AttrVec;

use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
@@ -42,7 +42,7 @@ pub enum TokenTree {
/// A single token.
Token(Token),
/// A delimited sequence of token trees.
-Delimited(DelimSpan, DelimToken, TokenStream),
+Delimited(DelimSpan, Delimiter, TokenStream),
}

#[derive(Copy, Clone)]
@@ -57,7 +57,7 @@ fn _dummy()
where
Token: Send + Sync,
DelimSpan: Send + Sync,
-DelimToken: Send + Sync,
+Delimiter: Send + Sync,
TokenStream: Send + Sync,
{
}
@@ -175,7 +175,7 @@ pub struct AttrAnnotatedTokenStream(pub Lrc<Vec<(AttrAnnotatedTokenTree, Spacing
#[derive(Clone, Debug, Encodable, Decodable)]
pub enum AttrAnnotatedTokenTree {
Token(Token),
-Delimited(DelimSpan, DelimToken, AttrAnnotatedTokenStream),
+Delimited(DelimSpan, Delimiter, AttrAnnotatedTokenStream),
/// Stores the attributes for an attribute target,
/// along with the tokens for that attribute target.
/// See `AttributesData` for more information

@@ -38,7 +38,7 @@
#![recursion_limit = "256"]
#![allow(rustc::potential_query_instability)]

-use rustc_ast::token::{self, Token};
+use rustc_ast::token::{Delimiter, Token};
use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, TokenStream, TokenTree};
use rustc_ast::visit;
use rustc_ast::{self as ast, *};
@@ -886,7 +886,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
match tokens.into_trees().next() {
Some(TokenTree::Token(token)) => token,
Some(TokenTree::Delimited(_, delim, tokens)) => {
-if delim != token::NoDelim {
+if delim != Delimiter::Invisible {
sess.diagnostic().delay_span_bug(
span,
"unexpected delimiter in key-value attribute's value",

@@ -6,7 +6,7 @@ use crate::pp::Breaks::{Consistent, Inconsistent};
use crate::pp::{self, Breaks};

use rustc_ast::ptr::P;
-use rustc_ast::token::{self, BinOpToken, CommentKind, DelimToken, Nonterminal, Token, TokenKind};
+use rustc_ast::token::{self, BinOpToken, CommentKind, Delimiter, Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_ast::util::classify;
use rustc_ast::util::comments::{gather_comments, Comment, CommentStyle};
@@ -155,10 +155,10 @@ fn tt_prepend_space(tt: &TokenTree, prev: &TokenTree) -> bool {
}
match tt {
TokenTree::Token(token) => !matches!(token.kind, token::Comma | token::Not | token::Dot),
-TokenTree::Delimited(_, DelimToken::Paren, _) => {
+TokenTree::Delimited(_, Delimiter::Parenthesis, _) => {
!matches!(prev, TokenTree::Token(Token { kind: token::Ident(..), .. }))
}
-TokenTree::Delimited(_, DelimToken::Bracket, _) => {
+TokenTree::Delimited(_, Delimiter::Bracket, _) => {
!matches!(prev, TokenTree::Token(Token { kind: token::Pound, .. }))
}
TokenTree::Delimited(..) => true,
@@ -556,12 +556,12 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
header: Option<MacHeader<'_>>,
has_bang: bool,
ident: Option<Ident>,
-delim: Option<DelimToken>,
+delim: Option<Delimiter>,
tts: &TokenStream,
convert_dollar_crate: bool,
span: Span,
) {
-if delim == Some(DelimToken::Brace) {
+if delim == Some(Delimiter::Brace) {
self.cbox(INDENT_UNIT);
}
match header {
@@ -577,7 +577,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
self.print_ident(ident);
}
match delim {
-Some(DelimToken::Brace) => {
+Some(Delimiter::Brace) => {
if header.is_some() || has_bang || ident.is_some() {
self.nbsp();
}
@@ -758,13 +758,15 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
token::RArrow => "->".into(),
token::LArrow => "<-".into(),
token::FatArrow => "=>".into(),
-token::OpenDelim(token::Paren) => "(".into(),
-token::CloseDelim(token::Paren) => ")".into(),
-token::OpenDelim(token::Bracket) => "[".into(),
-token::CloseDelim(token::Bracket) => "]".into(),
-token::OpenDelim(token::Brace) => "{".into(),
-token::CloseDelim(token::Brace) => "}".into(),
-token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) => "".into(),
+token::OpenDelim(Delimiter::Parenthesis) => "(".into(),
+token::CloseDelim(Delimiter::Parenthesis) => ")".into(),
+token::OpenDelim(Delimiter::Bracket) => "[".into(),
+token::CloseDelim(Delimiter::Bracket) => "]".into(),
+token::OpenDelim(Delimiter::Brace) => "{".into(),
+token::CloseDelim(Delimiter::Brace) => "}".into(),
+token::OpenDelim(Delimiter::Invisible) | token::CloseDelim(Delimiter::Invisible) => {
+"".into()
+}
token::Pound => "#".into(),
token::Dollar => "$".into(),
token::Question => "?".into(),

@@ -1,6 +1,6 @@
use rustc_ast as ast;
use rustc_ast::ptr::P;
-use rustc_ast::token;
+use rustc_ast::token::{self, Delimiter};
use rustc_ast::tokenstream::TokenStream;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_errors::{Applicability, PResult};
@@ -395,9 +395,9 @@ fn parse_options<'a>(
) -> PResult<'a, ()> {
let span_start = p.prev_token.span;

-p.expect(&token::OpenDelim(token::DelimToken::Paren))?;
+p.expect(&token::OpenDelim(Delimiter::Parenthesis))?;

-while !p.eat(&token::CloseDelim(token::DelimToken::Paren)) {
+while !p.eat(&token::CloseDelim(Delimiter::Parenthesis)) {
if !is_global_asm && p.eat_keyword(sym::pure) {
try_set_option(p, args, sym::pure, ast::InlineAsmOptions::PURE);
} else if !is_global_asm && p.eat_keyword(sym::nomem) {
@@ -421,7 +421,7 @@ fn parse_options<'a>(
}

// Allow trailing commas
-if p.eat(&token::CloseDelim(token::DelimToken::Paren)) {
+if p.eat(&token::CloseDelim(Delimiter::Parenthesis)) {
break;
}
p.expect(&token::Comma)?;
@@ -436,9 +436,9 @@ fn parse_options<'a>(
fn parse_clobber_abi<'a>(p: &mut Parser<'a>, args: &mut AsmArgs) -> PResult<'a, ()> {
let span_start = p.prev_token.span;

-p.expect(&token::OpenDelim(token::DelimToken::Paren))?;
+p.expect(&token::OpenDelim(Delimiter::Parenthesis))?;

-if p.eat(&token::CloseDelim(token::DelimToken::Paren)) {
+if p.eat(&token::CloseDelim(Delimiter::Parenthesis)) {
let err = p.sess.span_diagnostic.struct_span_err(
p.token.span,
"at least one abi must be provided as an argument to `clobber_abi`",
@@ -454,7 +454,7 @@ fn parse_clobber_abi<'a>(p: &mut Parser<'a>, args: &mut AsmArgs) -> PResult<'a,
}
Err(opt_lit) => {
// If the non-string literal is a closing paren then it's the end of the list and is fine
-if p.eat(&token::CloseDelim(token::DelimToken::Paren)) {
+if p.eat(&token::CloseDelim(Delimiter::Parenthesis)) {
break;
}
let span = opt_lit.map_or(p.token.span, |lit| lit.span);
@@ -466,7 +466,7 @@ fn parse_clobber_abi<'a>(p: &mut Parser<'a>, args: &mut AsmArgs) -> PResult<'a,
};

// Allow trailing commas
-if p.eat(&token::CloseDelim(token::DelimToken::Paren)) {
+if p.eat(&token::CloseDelim(Delimiter::Parenthesis)) {
break;
}
p.expect(&token::Comma)?;
@@ -501,7 +501,7 @@ fn parse_reg<'a>(
p: &mut Parser<'a>,
explicit_reg: &mut bool,
) -> PResult<'a, ast::InlineAsmRegOrRegClass> {
-p.expect(&token::OpenDelim(token::DelimToken::Paren))?;
+p.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
let result = match p.token.uninterpolate().kind {
token::Ident(name, false) => ast::InlineAsmRegOrRegClass::RegClass(name),
token::Literal(token::Lit { kind: token::LitKind::Str, symbol, suffix: _ }) => {
@@ -515,7 +515,7 @@ fn parse_reg<'a>(
}
};
p.bump();
-p.expect(&token::CloseDelim(token::DelimToken::Paren))?;
+p.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
Ok(result)
}

@@ -1,7 +1,7 @@
//! Conditional compilation stripping.

use rustc_ast::ptr::P;
-use rustc_ast::token::{DelimToken, Token, TokenKind};
+use rustc_ast::token::{Delimiter, Token, TokenKind};
use rustc_ast::tokenstream::{AttrAnnotatedTokenStream, AttrAnnotatedTokenTree};
use rustc_ast::tokenstream::{DelimSpan, Spacing};
use rustc_ast::tokenstream::{LazyTokenStream, TokenTree};
@@ -418,7 +418,7 @@ impl<'a> StripUnconfigured<'a> {
// in `#[attr]`, so just use the span of the `#` token.
let bracket_group = AttrAnnotatedTokenTree::Delimited(
DelimSpan::from_single(pound_span),
-DelimToken::Bracket,
+Delimiter::Bracket,
item.tokens
.as_ref()
.unwrap_or_else(|| panic!("Missing tokens for {:?}", item))

@@ -8,7 +8,7 @@ use crate::placeholders::{placeholder, PlaceholderExpander};
use rustc_ast as ast;
use rustc_ast::mut_visit::*;
use rustc_ast::ptr::P;
-use rustc_ast::token;
+use rustc_ast::token::{self, Delimiter};
use rustc_ast::tokenstream::TokenStream;
use rustc_ast::visit::{self, AssocCtxt, Visitor};
use rustc_ast::{AssocItemKind, AstLike, AstLikeWrapper, AttrStyle, ExprKind, ForeignItemKind};
@@ -884,7 +884,7 @@ pub fn parse_ast_fragment<'a>(
AstFragmentKind::Stmts => {
let mut stmts = SmallVec::new();
// Won't make progress on a `}`.
-while this.token != token::Eof && this.token != token::CloseDelim(token::Brace) {
+while this.token != token::Eof && this.token != token::CloseDelim(Delimiter::Brace) {
if let Some(stmt) = this.parse_full_stmt(AttemptLocalParseRecovery::Yes)? {
stmts.push(stmt);
}

@@ -11,16 +11,16 @@ crate mod quoted;
crate mod transcribe;

use metavar_expr::MetaVarExpr;
-use rustc_ast::token::{self, NonterminalKind, Token, TokenKind};
+use rustc_ast::token::{Delimiter, NonterminalKind, Token, TokenKind};
use rustc_ast::tokenstream::DelimSpan;
use rustc_span::symbol::Ident;
use rustc_span::Span;

-/// Contains the sub-token-trees of a "delimited" token tree such as `(a b c)`. The delimiters
-/// might be `NoDelim`, but they are not represented explicitly.
+/// Contains the sub-token-trees of a "delimited" token tree such as `(a b c)`.
+/// The delimiters are not represented explicitly in the `tts` vector.
#[derive(PartialEq, Encodable, Decodable, Debug)]
struct Delimited {
-delim: token::DelimToken,
+delim: Delimiter,
/// FIXME: #67062 has details about why this is sub-optimal.
tts: Vec<TokenTree>,
}

@@ -106,7 +106,7 @@
//! bound.
use crate::mbe::{KleeneToken, TokenTree};

-use rustc_ast::token::{DelimToken, Token, TokenKind};
+use rustc_ast::token::{Delimiter, Token, TokenKind};
use rustc_ast::{NodeId, DUMMY_NODE_ID};
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::MultiSpan;
@@ -439,7 +439,7 @@ fn check_nested_occurrences(
}
(NestedMacroState::MacroRulesNotName, &TokenTree::Delimited(_, ref del))
| (NestedMacroState::MacroName, &TokenTree::Delimited(_, ref del))
-if del.delim == DelimToken::Brace =>
+if del.delim == Delimiter::Brace =>
{
let macro_rules = state == NestedMacroState::MacroRulesNotName;
state = NestedMacroState::Empty;
@@ -469,7 +469,7 @@ fn check_nested_occurrences(
check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
}
(NestedMacroState::MacroName, &TokenTree::Delimited(_, ref del))
-if del.delim == DelimToken::Paren =>
+if del.delim == Delimiter::Parenthesis =>
{
state = NestedMacroState::MacroNameParen;
nested_binders = Binders::default();
@@ -484,7 +484,7 @@ fn check_nested_occurrences(
);
}
(NestedMacroState::MacroNameParen, &TokenTree::Delimited(_, ref del))
-if del.delim == DelimToken::Brace =>
+if del.delim == Delimiter::Brace =>
{
state = NestedMacroState::Empty;
check_occurrences(

@@ -8,7 +8,7 @@ use crate::mbe::macro_parser::{MatchedSeq, MatchedTokenTree, MatcherLoc};
use crate::mbe::transcribe::transcribe;

use rustc_ast as ast;
-use rustc_ast::token::{self, NonterminalKind, Token, TokenKind, TokenKind::*};
+use rustc_ast::token::{self, Delimiter, NonterminalKind, Token, TokenKind, TokenKind::*};
use rustc_ast::tokenstream::{DelimSpan, TokenStream};
use rustc_ast::{NodeId, DUMMY_NODE_ID};
use rustc_ast_pretty::pprust;
@@ -1250,8 +1250,8 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
];
match tok {
TokenTree::Token(token) => match token.kind {
-OpenDelim(token::DelimToken::Brace)
-| OpenDelim(token::DelimToken::Bracket)
+OpenDelim(Delimiter::Brace)
+| OpenDelim(Delimiter::Bracket)
| Comma
| FatArrow
| Colon

@@ -1,4 +1,4 @@
-use rustc_ast::token;
+use rustc_ast::token::{self, Delimiter};
use rustc_ast::tokenstream::{Cursor, TokenStream, TokenTree};
use rustc_ast::{LitIntType, LitKind};
use rustc_ast_pretty::pprust;
@@ -35,7 +35,7 @@ impl MetaVarExpr {
) -> PResult<'sess, MetaVarExpr> {
let mut tts = input.trees();
let ident = parse_ident(&mut tts, sess, outer_span)?;
-let Some(TokenTree::Delimited(_, token::Paren, args)) = tts.next() else {
+let Some(TokenTree::Delimited(_, Delimiter::Parenthesis, args)) = tts.next() else {
let msg = "meta-variable expression parameter must be wrapped in parentheses";
return Err(sess.span_diagnostic.struct_span_err(ident.span, msg));
};

@@ -1,7 +1,7 @@
use crate::mbe::macro_parser::count_metavar_decls;
use crate::mbe::{Delimited, KleeneOp, KleeneToken, MetaVarExpr, SequenceRepetition, TokenTree};

-use rustc_ast::token::{self, Token};
+use rustc_ast::token::{self, Delimiter, Token};
use rustc_ast::{tokenstream, NodeId};
use rustc_ast_pretty::pprust;
use rustc_feature::Features;
@@ -147,11 +147,11 @@ fn parse_tree(
match tree {
// `tree` is a `$` token. Look at the next token in `trees`
tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }) => {
-// FIXME: Handle `None`-delimited groups in a more systematic way
+// FIXME: Handle `Invisible`-delimited groups in a more systematic way
// during parsing.
let mut next = outer_trees.next();
let mut trees: Box<dyn Iterator<Item = tokenstream::TokenTree>>;
-if let Some(tokenstream::TokenTree::Delimited(_, token::NoDelim, tts)) = next {
+if let Some(tokenstream::TokenTree::Delimited(_, Delimiter::Invisible, tts)) = next {
trees = Box::new(tts.into_trees());
next = trees.next();
} else {
@@ -162,7 +162,7 @@ fn parse_tree(
// `tree` is followed by a delimited set of token trees.
Some(tokenstream::TokenTree::Delimited(delim_span, delim, tts)) => {
if parsing_patterns {
-if delim != token::Paren {
+if delim != Delimiter::Parenthesis {
span_dollar_dollar_or_metavar_in_the_lhs_err(
sess,
&Token { kind: token::OpenDelim(delim), span: delim_span.entire() },
@@ -170,7 +170,7 @@ fn parse_tree(
}
} else {
match delim {
-token::Brace => {
+Delimiter::Brace => {
// The delimiter is `{`. This indicates the beginning
// of a meta-variable expression (e.g. `${count(ident)}`).
// Try to parse the meta-variable expression.
@@ -191,7 +191,7 @@ fn parse_tree(
}
}
}
-token::Paren => {}
+Delimiter::Parenthesis => {}
_ => {
let tok = pprust::token_kind_to_string(&token::OpenDelim(delim));
let msg = format!("expected `(` or `{{`, found `{}`", tok);

@@ -2,7 +2,7 @@ use crate::base::ExtCtxt;
use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, MatchedTokenTree, NamedMatch};
use crate::mbe::{self, MetaVarExpr};
use rustc_ast::mut_visit::{self, MutVisitor};
-use rustc_ast::token::{self, Token, TokenKind};
+use rustc_ast::token::{self, Delimiter, Token, TokenKind};
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndSpacing};
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::{pluralize, PResult};
@@ -27,23 +27,14 @@ impl MutVisitor for Marker {

/// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
enum Frame<'a> {
-Delimited {
-tts: &'a [mbe::TokenTree],
-idx: usize,
-delim_token: token::DelimToken,
-span: DelimSpan,
-},
-Sequence {
-tts: &'a [mbe::TokenTree],
-idx: usize,
-sep: Option<Token>,
-},
+Delimited { tts: &'a [mbe::TokenTree], idx: usize, delim: Delimiter, span: DelimSpan },
+Sequence { tts: &'a [mbe::TokenTree], idx: usize, sep: Option<Token> },
}

impl<'a> Frame<'a> {
/// Construct a new frame around the delimited set of tokens.
fn new(src: &'a mbe::Delimited, span: DelimSpan) -> Frame<'a> {
-Frame::Delimited { tts: &src.tts, idx: 0, delim_token: src.delim, span }
+Frame::Delimited { tts: &src.tts, idx: 0, delim: src.delim, span }
}
}

@@ -150,14 +141,14 @@ pub(super) fn transcribe<'a>(
// We are done processing a Delimited. If this is the top-level delimited, we are
// done. Otherwise, we unwind the result_stack to append what we have produced to
// any previous results.
-Frame::Delimited { delim_token, span, .. } => {
+Frame::Delimited { delim, span, .. } => {
if result_stack.is_empty() {
// No results left to compute! We are back at the top-level.
return Ok(TokenStream::new(result));
}

// Step back into the parent Delimited.
-let tree = TokenTree::Delimited(span, delim_token, TokenStream::new(result));
+let tree = TokenTree::Delimited(span, delim, TokenStream::new(result));
result = result_stack.pop().unwrap();
result.push(tree.into());
}
@@ -240,7 +231,7 @@ pub(super) fn transcribe<'a>(
}
MatchedNonterminal(ref nt) => {
// Other variables are emitted into the output stream as groups with
-// `Delimiter::None` to maintain parsing priorities.
+// `Delimiter::Invisible` to maintain parsing priorities.
// `Interpolated` is currently used for such groups in rustc parser.
marker.visit_span(&mut sp);
let token = TokenTree::token(token::Interpolated(nt.clone()), sp);
@@ -278,7 +269,7 @@ pub(super) fn transcribe<'a>(
mut_visit::visit_delim_span(&mut span, &mut marker);
stack.push(Frame::Delimited {
tts: &delimited.tts,
-delim_token: delimited.delim,
+delim: delimited.delim,
idx: 0,
span,
});

@@ -1,7 +1,7 @@
use crate::tests::{matches_codepattern, string_to_stream, with_error_checking_parse};

use rustc_ast::ptr::P;
-use rustc_ast::token::{self, Token};
+use rustc_ast::token::{self, Delimiter, Token};
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
use rustc_ast::visit;
use rustc_ast::{self as ast, PatKind};
@@ -77,13 +77,14 @@ fn string_to_tts_macro() {
TokenTree::Delimited(_, first_delim, first_tts),
TokenTree::Token(Token { kind: token::FatArrow, .. }),
TokenTree::Delimited(_, second_delim, second_tts),
-] if macro_delim == &token::Paren => {
+] if macro_delim == &Delimiter::Parenthesis => {
let tts = &first_tts.trees().collect::<Vec<_>>();
match &tts[..] {
[
TokenTree::Token(Token { kind: token::Dollar, .. }),
TokenTree::Token(Token { kind: token::Ident(name, false), .. }),
-] if first_delim == &token::Paren && name.as_str() == "a" => {}
+] if first_delim == &Delimiter::Parenthesis && name.as_str() == "a" => {
+}
_ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
}
let tts = &second_tts.trees().collect::<Vec<_>>();
@@ -91,7 +92,8 @@ fn string_to_tts_macro() {
[
TokenTree::Token(Token { kind: token::Dollar, .. }),
TokenTree::Token(Token { kind: token::Ident(name, false), .. }),
-] if second_delim == &token::Paren && name.as_str() == "a" => {}
+] if second_delim == &Delimiter::Parenthesis
+&& name.as_str() == "a" => {}
_ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
}
}
@@ -113,7 +115,7 @@ fn string_to_tts_1() {
TokenTree::token(token::Ident(Symbol::intern("a"), false), sp(3, 4)).into(),
TokenTree::Delimited(
DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
-token::DelimToken::Paren,
+Delimiter::Parenthesis,
TokenStream::new(vec![
TokenTree::token(token::Ident(Symbol::intern("b"), false), sp(6, 7)).into(),
TokenTree::token(token::Colon, sp(8, 9)).into(),
@@ -124,7 +126,7 @@ fn string_to_tts_1() {
.into(),
TokenTree::Delimited(
DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
-token::DelimToken::Brace,
+Delimiter::Brace,
TokenStream::new(vec![
TokenTree::token(token::Ident(Symbol::intern("b"), false), sp(17, 18)).into(),
TokenTree::token(token::Semi, sp(18, 19)).into(),

@@ -28,24 +28,24 @@ trait ToInternal<T> {
fn to_internal(self) -> T;
}

-impl FromInternal<token::DelimToken> for Delimiter {
-fn from_internal(delim: token::DelimToken) -> Delimiter {
+impl FromInternal<token::Delimiter> for Delimiter {
+fn from_internal(delim: token::Delimiter) -> Delimiter {
match delim {
-token::Paren => Delimiter::Parenthesis,
-token::Brace => Delimiter::Brace,
-token::Bracket => Delimiter::Bracket,
-token::NoDelim => Delimiter::None,
+token::Delimiter::Parenthesis => Delimiter::Parenthesis,
+token::Delimiter::Brace => Delimiter::Brace,
+token::Delimiter::Bracket => Delimiter::Bracket,
+token::Delimiter::Invisible => Delimiter::None,
}
}
}

-impl ToInternal<token::DelimToken> for Delimiter {
-fn to_internal(self) -> token::DelimToken {
+impl ToInternal<token::Delimiter> for Delimiter {
+fn to_internal(self) -> token::Delimiter {
match self {
-Delimiter::Parenthesis => token::Paren,
-Delimiter::Brace => token::Brace,
-Delimiter::Bracket => token::Bracket,
-Delimiter::None => token::NoDelim,
+Delimiter::Parenthesis => token::Delimiter::Parenthesis,
+Delimiter::Brace => token::Delimiter::Brace,
+Delimiter::Bracket => token::Delimiter::Bracket,
+Delimiter::None => token::Delimiter::Invisible,
}
}
}
@@ -61,7 +61,7 @@ impl FromInternal<(TreeAndSpacing, &'_ mut Vec<Self>, &mut Rustc<'_, '_>)>
let joint = spacing == Joint;
let Token { kind, span } = match tree {
tokenstream::TokenTree::Delimited(span, delim, tts) => {
-let delimiter = Delimiter::from_internal(delim);
+let delimiter = pm::Delimiter::from_internal(delim);
return TokenTree::Group(Group { delimiter, stream: tts, span, flatten: false });
}
tokenstream::TokenTree::Token(token) => token,
@@ -164,7 +164,7 @@ impl FromInternal<(TreeAndSpacing, &'_ mut Vec<Self>, &mut Rustc<'_, '_>)>
.map(|kind| tokenstream::TokenTree::token(kind, span))
.collect();
stack.push(TokenTree::Group(Group {
-delimiter: Delimiter::Bracket,
+delimiter: pm::Delimiter::Bracket,
stream,
span: DelimSpan::from_single(span),
flatten: false,
@@ -181,7 +181,7 @@ impl FromInternal<(TreeAndSpacing, &'_ mut Vec<Self>, &mut Rustc<'_, '_>)>
Interpolated(nt) => {
let stream = nt_to_tokenstream(&nt, rustc.sess(), CanSynthesizeMissingTokens::No);
TokenTree::Group(Group {
-delimiter: Delimiter::None,
+delimiter: pm::Delimiter::None,
stream,
span: DelimSpan::from_single(span),
flatten: crate::base::pretty_printing_compatibility_hack(&nt, rustc.sess()),

@@ -1,6 +1,6 @@
use crate::lexer::unicode_chars::UNICODE_ARRAY;
use rustc_ast::ast::{self, AttrStyle};
-use rustc_ast::token::{self, CommentKind, Token, TokenKind};
+use rustc_ast::token::{self, CommentKind, Delimiter, Token, TokenKind};
use rustc_ast::tokenstream::{Spacing, TokenStream};
use rustc_ast::util::unicode::contains_text_flow_control_chars;
use rustc_errors::{error_code, Applicability, DiagnosticBuilder, ErrorGuaranteed, PResult};
@@ -24,8 +24,8 @@ use unescape_error_reporting::{emit_unescape_error, escaped_char};

#[derive(Clone, Debug)]
pub struct UnmatchedBrace {
-pub expected_delim: token::DelimToken,
-pub found_delim: Option<token::DelimToken>,
+pub expected_delim: Delimiter,
+pub found_delim: Option<Delimiter>,
pub found_span: Span,
pub unclosed_span: Option<Span>,
pub candidate_span: Option<Span>,
@@ -284,12 +284,12 @@ impl<'a> StringReader<'a> {
rustc_lexer::TokenKind::Semi => token::Semi,
rustc_lexer::TokenKind::Comma => token::Comma,
rustc_lexer::TokenKind::Dot => token::Dot,
-rustc_lexer::TokenKind::OpenParen => token::OpenDelim(token::Paren),
-rustc_lexer::TokenKind::CloseParen => token::CloseDelim(token::Paren),
-rustc_lexer::TokenKind::OpenBrace => token::OpenDelim(token::Brace),
-rustc_lexer::TokenKind::CloseBrace => token::CloseDelim(token::Brace),
-rustc_lexer::TokenKind::OpenBracket => token::OpenDelim(token::Bracket),
-rustc_lexer::TokenKind::CloseBracket => token::CloseDelim(token::Bracket),
+rustc_lexer::TokenKind::OpenParen => token::OpenDelim(Delimiter::Parenthesis),
+rustc_lexer::TokenKind::CloseParen => token::CloseDelim(Delimiter::Parenthesis),
+rustc_lexer::TokenKind::OpenBrace => token::OpenDelim(Delimiter::Brace),
+rustc_lexer::TokenKind::CloseBrace => token::CloseDelim(Delimiter::Brace),
+rustc_lexer::TokenKind::OpenBracket => token::OpenDelim(Delimiter::Bracket),
+rustc_lexer::TokenKind::CloseBracket => token::CloseDelim(Delimiter::Bracket),
rustc_lexer::TokenKind::At => token::At,
rustc_lexer::TokenKind::Pound => token::Pound,
rustc_lexer::TokenKind::Tilde => token::Tilde,

@@ -1,6 +1,6 @@
use super::{StringReader, UnmatchedBrace};

-use rustc_ast::token::{self, DelimToken, Token};
+use rustc_ast::token::{self, Delimiter, Token};
use rustc_ast::tokenstream::{
DelimSpan,
Spacing::{self, *},
@@ -32,15 +32,15 @@ struct TokenTreesReader<'a> {
string_reader: StringReader<'a>,
token: Token,
/// Stack of open delimiters and their spans. Used for error message.
-open_braces: Vec<(token::DelimToken, Span)>,
+open_braces: Vec<(Delimiter, Span)>,
unmatched_braces: Vec<UnmatchedBrace>,
/// The type and spans for all braces
///
/// Used only for error recovery when arriving to EOF with mismatched braces.
-matching_delim_spans: Vec<(token::DelimToken, Span, Span)>,
+matching_delim_spans: Vec<(Delimiter, Span, Span)>,
last_unclosed_found_span: Option<Span>,
/// Collect empty block spans that might have been auto-inserted by editors.
-last_delim_empty_block_spans: FxHashMap<token::DelimToken, Span>,
+last_delim_empty_block_spans: FxHashMap<Delimiter, Span>,
/// Collect the spans of braces (Open, Close). Used only
/// for detecting if blocks are empty and only braces.
matching_block_spans: Vec<(Span, Span)>,
@@ -88,7 +88,7 @@ impl<'a> TokenTreesReader<'a> {
for &(_, sp) in &self.open_braces {
err.span_label(sp, "unclosed delimiter");
self.unmatched_braces.push(UnmatchedBrace {
-expected_delim: token::DelimToken::Brace,
+expected_delim: Delimiter::Brace,
found_delim: None,
found_span: self.token.span,
unclosed_span: Some(sp),
@@ -150,7 +150,7 @@ impl<'a> TokenTreesReader<'a> {
}

//only add braces
-if let (DelimToken::Brace, DelimToken::Brace) = (open_brace, delim) {
+if let (Delimiter::Brace, Delimiter::Brace) = (open_brace, delim) {
self.matching_block_spans.push((open_brace_span, close_brace_span));
}

@@ -2,7 +2,7 @@
// https://www.unicode.org/Public/security/10.0.0/confusables.txt

use super::StringReader;
-use crate::token;
+use crate::token::{self, Delimiter};
use rustc_errors::{Applicability, Diagnostic};
use rustc_span::{symbol::kw, BytePos, Pos, Span};

@@ -312,12 +312,12 @@ const ASCII_ARRAY: &[(char, &str, Option<token::TokenKind>)] = &[
('!', "Exclamation Mark", Some(token::Not)),
('?', "Question Mark", Some(token::Question)),
('.', "Period", Some(token::Dot)),
-('(', "Left Parenthesis", Some(token::OpenDelim(token::Paren))),
-(')', "Right Parenthesis", Some(token::CloseDelim(token::Paren))),
-('[', "Left Square Bracket", Some(token::OpenDelim(token::Bracket))),
-(']', "Right Square Bracket", Some(token::CloseDelim(token::Bracket))),
-('{', "Left Curly Brace", Some(token::OpenDelim(token::Brace))),
-('}', "Right Curly Brace", Some(token::CloseDelim(token::Brace))),
+('(', "Left Parenthesis", Some(token::OpenDelim(Delimiter::Parenthesis))),
+(')', "Right Parenthesis", Some(token::CloseDelim(Delimiter::Parenthesis))),
+('[', "Left Square Bracket", Some(token::OpenDelim(Delimiter::Bracket))),
+(']', "Right Square Bracket", Some(token::CloseDelim(Delimiter::Bracket))),
+('{', "Left Curly Brace", Some(token::OpenDelim(Delimiter::Brace))),
+('}', "Right Curly Brace", Some(token::CloseDelim(Delimiter::Brace))),
('*', "Asterisk", Some(token::BinOp(token::Star))),
('/', "Slash", Some(token::BinOp(token::Slash))),
('\\', "Backslash", None),

@@ -1,7 +1,7 @@
use super::{AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, PathStyle};
use rustc_ast as ast;
use rustc_ast::attr;
-use rustc_ast::token::{self, Nonterminal};
+use rustc_ast::token::{self, Delimiter, Nonterminal};
use rustc_ast_pretty::pprust;
use rustc_errors::{error_code, Diagnostic, PResult};
use rustc_span::{sym, BytePos, Span};
@@ -130,9 +130,9 @@ impl<'a> Parser<'a> {
ast::AttrStyle::Outer
};

-this.expect(&token::OpenDelim(token::Bracket))?;
+this.expect(&token::OpenDelim(Delimiter::Bracket))?;
let item = this.parse_attr_item(false)?;
-this.expect(&token::CloseDelim(token::Bracket))?;
+this.expect(&token::CloseDelim(Delimiter::Bracket))?;
let attr_sp = lo.to(this.prev_token.span);

// Emit error if inner attribute is encountered and forbidden.
@@ -403,7 +403,7 @@ impl<'a> Parser<'a> {
crate fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> {
Ok(if self.eat(&token::Eq) {
ast::MetaItemKind::NameValue(self.parse_unsuffixed_lit()?)
-} else if self.check(&token::OpenDelim(token::Paren)) {
+} else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
// Matches `meta_seq = ( COMMASEP(meta_item_inner) )`.
let (list, _) = self.parse_paren_comma_seq(|p| p.parse_meta_item_inner())?;
ast::MetaItemKind::List(list)

@@ -1,5 +1,5 @@
use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken};
-use rustc_ast::token::{self, DelimToken, Token, TokenKind};
+use rustc_ast::token::{self, Delimiter, Token, TokenKind};
use rustc_ast::tokenstream::{AttrAnnotatedTokenStream, AttributesData, CreateTokenStream};
use rustc_ast::tokenstream::{AttrAnnotatedTokenTree, DelimSpan, LazyTokenStream, Spacing};
use rustc_ast::{self as ast};
@@ -388,11 +388,11 @@ impl<'a> Parser<'a> {
/// Converts a flattened iterator of tokens (including open and close delimiter tokens)
/// into a `TokenStream`, creating a `TokenTree::Delimited` for each matching pair
/// of open and close delims.
-// FIXME(#67062): Currently, we don't parse `None`-delimited groups correctly,
-// which can cause us to end up with mismatched `None` delimiters in our
+// FIXME(#67062): Currently, we don't parse `Invisible`-delimited groups correctly,
+// which can cause us to end up with mismatched `Invisible` delimiters in our
// captured tokens. This function contains several hacks to work around this -
-// essentially, we throw away mismatched `None` delimiters when we encounter them.
-// Once we properly parse `None` delimiters, they can be captured just like any
+// essentially, we throw away mismatched `Invisible` delimiters when we encounter them.
+// Once we properly parse `Invisible` delimiters, they can be captured just like any
// other tokens, and these hacks can be removed.
fn make_token_stream(
mut iter: impl Iterator<Item = (FlatToken, Spacing)>,
@@ -401,7 +401,7 @@ fn make_token_stream(
#[derive(Debug)]
struct FrameData {
// This is `None` for the first frame, `Some` for all others.
-open_delim_sp: Option<(DelimToken, Span)>,
+open_delim_sp: Option<(Delimiter, Span)>,
inner: Vec<(AttrAnnotatedTokenTree, Spacing)>,
}
let mut stack = vec![FrameData { open_delim_sp: None, inner: vec![] }];
@@ -412,13 +412,13 @@ fn make_token_stream(
stack.push(FrameData { open_delim_sp: Some((delim, span)), inner: vec![] });
}
FlatToken::Token(Token { kind: TokenKind::CloseDelim(delim), span }) => {
-// HACK: If we encounter a mismatched `None` delimiter at the top
+// HACK: If we encounter a mismatched `Invisible` delimiter at the top
// level, just ignore it.
-if matches!(delim, DelimToken::NoDelim)
+if matches!(delim, Delimiter::Invisible)
&& (stack.len() == 1
|| !matches!(
stack.last_mut().unwrap().open_delim_sp.unwrap().0,
-DelimToken::NoDelim
+Delimiter::Invisible
))
{
token_and_spacing = iter.next();
@@ -428,11 +428,11 @@ fn make_token_stream(
.pop()
.unwrap_or_else(|| panic!("Token stack was empty for token: {:?}", token));

-// HACK: If our current frame has a mismatched opening `None` delimiter,
+// HACK: If our current frame has a mismatched opening `Invisible` delimiter,
// merge our current frame with the one above it. That is, transform
// `[ { < first second } third ]` into `[ { first second } third ]`
-if !matches!(delim, DelimToken::NoDelim)
-&& matches!(frame_data.open_delim_sp.unwrap().0, DelimToken::NoDelim)
+if !matches!(delim, Delimiter::Invisible)
+&& matches!(frame_data.open_delim_sp.unwrap().0, Delimiter::Invisible)
{
stack.last_mut().unwrap().inner.extend(frame_data.inner);
// Process our closing delimiter again, this time at the previous
@@ -472,10 +472,10 @@ fn make_token_stream(
}
token_and_spacing = iter.next();
}
-// HACK: If we don't have a closing `None` delimiter for our last
+// HACK: If we don't have a closing `Invisible` delimiter for our last
// frame, merge the frame with the top-level frame. That is,
// turn `< first second` into `first second`
-if stack.len() == 2 && stack[1].open_delim_sp.unwrap().0 == DelimToken::NoDelim {
+if stack.len() == 2 && stack[1].open_delim_sp.unwrap().0 == Delimiter::Invisible {
let temp_buf = stack.pop().unwrap();
stack.last_mut().unwrap().inner.extend(temp_buf.inner);
}

@@ -8,7 +8,7 @@ use super::{
};
use crate::lexer::UnmatchedBrace;
use rustc_ast as ast;
use rustc_ast::ptr::P;
-use rustc_ast::token::{self, Lit, LitKind, TokenKind};
+use rustc_ast::token::{self, Delimiter, Lit, LitKind, TokenKind};
use rustc_ast::util::parser::AssocOp;
use rustc_ast::{
AngleBracketedArg, AngleBracketedArgs, AnonConst, AttrVec, BinOpKind, BindingMode, Block,
@@ -337,10 +337,10 @@ impl<'a> Parser<'a> {
TokenKind::Comma,
TokenKind::Semi,
TokenKind::ModSep,
-TokenKind::OpenDelim(token::DelimToken::Brace),
-TokenKind::OpenDelim(token::DelimToken::Paren),
-TokenKind::CloseDelim(token::DelimToken::Brace),
-TokenKind::CloseDelim(token::DelimToken::Paren),
+TokenKind::OpenDelim(Delimiter::Brace),
+TokenKind::OpenDelim(Delimiter::Parenthesis),
+TokenKind::CloseDelim(Delimiter::Brace),
+TokenKind::CloseDelim(Delimiter::Parenthesis),
];
match self.token.ident() {
Some((ident, false))
@@ -413,7 +413,7 @@ impl<'a> Parser<'a> {
} else if !sm.is_multiline(self.prev_token.span.until(self.token.span)) {
// The current token is in the same line as the prior token, not recoverable.
} else if [token::Comma, token::Colon].contains(&self.token.kind)
-&& self.prev_token.kind == token::CloseDelim(token::Paren)
+&& self.prev_token.kind == token::CloseDelim(Delimiter::Parenthesis)
{
// Likely typo: The current token is on a new line and is expected to be
// `.`, `;`, `?`, or an operator after a close delimiter token.
@@ -424,7 +424,7 @@ impl<'a> Parser<'a> {
// ^
// https://github.com/rust-lang/rust/issues/72253
} else if self.look_ahead(1, |t| {
-t == &token::CloseDelim(token::Brace)
+t == &token::CloseDelim(Delimiter::Brace)
|| t.can_begin_expr() && t.kind != token::Colon
}) && [token::Comma, token::Colon].contains(&self.token.kind)
{
@@ -441,7 +441,7 @@ impl<'a> Parser<'a> {
.emit();
return Ok(true);
} else if self.look_ahead(0, |t| {
-t == &token::CloseDelim(token::Brace)
+t == &token::CloseDelim(Delimiter::Brace)
|| (t.can_begin_expr() && t != &token::Semi && t != &token::Pound)
// Avoid triggering with too many trailing `#` in raw string.
|| (sm.is_multiline(
@@ -655,7 +655,7 @@ impl<'a> Parser<'a> {
(Err(snapshot_err), Err(err)) => {
// We don't know what went wrong, emit the normal error.
snapshot_err.cancel();
-self.consume_block(token::Brace, ConsumeClosingDelim::Yes);
+self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
Err(err)
}
(Ok(_), Ok(mut tail)) => {
@@ -866,7 +866,7 @@ impl<'a> Parser<'a> {
trailing_span = trailing_span.to(self.token.span);
self.bump();
}
-if self.token.kind == token::OpenDelim(token::Paren) {
+if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) {
// Recover from bad turbofish: `foo.collect::Vec<_>()`.
let args = AngleBracketedArgs { args, span }.into();
segment.args = args;
@@ -1098,7 +1098,7 @@ impl<'a> Parser<'a> {
[(token::Lt, 1), (token::Gt, -1), (token::BinOp(token::Shr), -2)];
self.consume_tts(1, &modifiers);

-if !&[token::OpenDelim(token::Paren), token::ModSep]
+if !&[token::OpenDelim(Delimiter::Parenthesis), token::ModSep]
.contains(&self.token.kind)
{
// We don't have `foo< bar >(` or `foo< bar >::`, so we rewind the
@@ -1132,7 +1132,7 @@ impl<'a> Parser<'a> {
Err(err)
}
}
-} else if token::OpenDelim(token::Paren) == self.token.kind {
+} else if token::OpenDelim(Delimiter::Parenthesis) == self.token.kind {
// We have high certainty that this was a bad turbofish at this point.
// `foo< bar >(`
suggest(&mut err);
@@ -1186,8 +1186,10 @@ impl<'a> Parser<'a> {
self.bump(); // `(`

// Consume the fn call arguments.
-let modifiers =
-[(token::OpenDelim(token::Paren), 1), (token::CloseDelim(token::Paren), -1)];
+let modifiers = [
+(token::OpenDelim(Delimiter::Parenthesis), 1),
+(token::CloseDelim(Delimiter::Parenthesis), -1),
+];
self.consume_tts(1, &modifiers);

if self.token.kind == token::Eof {
@@ -1579,15 +1581,15 @@ impl<'a> Parser<'a> {

fn recover_await_macro(&mut self) -> PResult<'a, (Span, P<Expr>, bool)> {
self.expect(&token::Not)?;
-self.expect(&token::OpenDelim(token::Paren))?;
+self.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
let expr = self.parse_expr()?;
-self.expect(&token::CloseDelim(token::Paren))?;
+self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
Ok((self.prev_token.span, expr, false))
}

fn recover_await_prefix(&mut self, await_sp: Span) -> PResult<'a, (Span, P<Expr>, bool)> {
let is_question = self.eat(&token::Question); // Handle `await? <expr>`.
-let expr = if self.token == token::OpenDelim(token::Brace) {
+let expr = if self.token == token::OpenDelim(Delimiter::Brace) {
// Handle `await { <expr> }`.
// This needs to be handled separately from the next arm to avoid
// interpreting `await { <expr> }?` as `<expr>?.await`.
@@ -1619,8 +1621,8 @@ impl<'a> Parser<'a> {

/// If encountering `future.await()`, consumes and emits an error.
pub(super) fn recover_from_await_method_call(&mut self) {
-if self.token == token::OpenDelim(token::Paren)
-&& self.look_ahead(1, |t| t == &token::CloseDelim(token::Paren))
+if self.token == token::OpenDelim(Delimiter::Parenthesis)
+&& self.look_ahead(1, |t| t == &token::CloseDelim(Delimiter::Parenthesis))
{
// future.await()
let lo = self.token.span;
@@ -1641,7 +1643,7 @@ impl<'a> Parser<'a> {
pub(super) fn try_macro_suggestion(&mut self) -> PResult<'a, P<Expr>> {
let is_try = self.token.is_keyword(kw::Try);
let is_questionmark = self.look_ahead(1, |t| t == &token::Not); //check for !
-let is_open = self.look_ahead(2, |t| t == &token::OpenDelim(token::Paren)); //check for (
+let is_open = self.look_ahead(2, |t| t == &token::OpenDelim(Delimiter::Parenthesis)); //check for (

if is_try && is_questionmark && is_open {
let lo = self.token.span;
@@ -1649,8 +1651,8 @@ impl<'a> Parser<'a> {
self.bump(); //remove !
let try_span = lo.to(self.token.span); //we take the try!( span
self.bump(); //remove (
-let is_empty = self.token == token::CloseDelim(token::Paren); //check if the block is empty
-self.consume_block(token::Paren, ConsumeClosingDelim::No); //eat the block
+let is_empty = self.token == token::CloseDelim(Delimiter::Parenthesis); //check if the block is empty
+self.consume_block(Delimiter::Parenthesis, ConsumeClosingDelim::No); //eat the block
let hi = self.token.span;
self.bump(); //remove )
let mut err = self.struct_span_err(lo.to(hi), "use of deprecated `try` macro");
@@ -1681,7 +1683,7 @@ impl<'a> Parser<'a> {
begin_paren: Option<Span>,
) -> P<Pat> {
match (&self.token.kind, begin_paren) {
-(token::CloseDelim(token::Paren), Some(begin_par_sp)) => {
+(token::CloseDelim(Delimiter::Parenthesis), Some(begin_par_sp)) => {
self.bump();

self.struct_span_err(
@@ -1714,8 +1716,8 @@ impl<'a> Parser<'a> {
|| self.token.is_ident() &&
matches!(node, ast::ExprKind::Path(..) | ast::ExprKind::Field(..)) &&
!self.token.is_reserved_ident() && // v `foo:bar(baz)`
-self.look_ahead(1, |t| t == &token::OpenDelim(token::Paren))
-|| self.look_ahead(1, |t| t == &token::OpenDelim(token::Brace)) // `foo:bar {`
+self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Parenthesis))
+|| self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Brace)) // `foo:bar {`
|| self.look_ahead(1, |t| t == &token::Colon) && // `foo:bar::<baz`
self.look_ahead(2, |t| t == &token::Lt) &&
self.look_ahead(3, |t| t.is_ident())
@@ -1728,7 +1730,7 @@ impl<'a> Parser<'a> {

pub(super) fn recover_seq_parse_error(
&mut self,
-delim: token::DelimToken,
+delim: Delimiter,
lo: Span,
result: PResult<'a, P<Expr>>,
) -> P<Expr> {
@@ -1845,7 +1847,7 @@ impl<'a> Parser<'a> {
loop {
debug!("recover_stmt_ loop {:?}", self.token);
match self.token.kind {
-token::OpenDelim(token::DelimToken::Brace) => {
+token::OpenDelim(Delimiter::Brace) => {
brace_depth += 1;
self.bump();
if break_on_block == BlockMode::Break && brace_depth == 1 && bracket_depth == 0
@@ -1853,11 +1855,11 @@ impl<'a> Parser<'a> {
in_block = true;
}
}
-token::OpenDelim(token::DelimToken::Bracket) => {
+token::OpenDelim(Delimiter::Bracket) => {
bracket_depth += 1;
self.bump();
}
-token::CloseDelim(token::DelimToken::Brace) => {
+token::CloseDelim(Delimiter::Brace) => {
if brace_depth == 0 {
debug!("recover_stmt_ return - close delim {:?}", self.token);
break;
@@ -1869,7 +1871,7 @@ impl<'a> Parser<'a> {
break;
}
}
-token::CloseDelim(token::DelimToken::Bracket) => {
+token::CloseDelim(Delimiter::Bracket) => {
bracket_depth -= 1;
if bracket_depth < 0 {
bracket_depth = 0;
@@ -1927,11 +1929,11 @@ impl<'a> Parser<'a> {
.emit();
self.bump();
} else if self.token == token::Pound
-&& self.look_ahead(1, |t| *t == token::OpenDelim(token::Bracket))
+&& self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Bracket))
{
let lo = self.token.span;
// Skip every token until next possible arg.
-while self.token != token::CloseDelim(token::Bracket) {
+while self.token != token::CloseDelim(Delimiter::Bracket) {
self.bump();
}
let sp = lo.to(self.token.span);
@@ -1952,7 +1954,9 @@ impl<'a> Parser<'a> {
// If we find a pattern followed by an identifier, it could be an (incorrect)
// C-style parameter declaration.
if self.check_ident()
-&& self.look_ahead(1, |t| *t == token::Comma || *t == token::CloseDelim(token::Paren))
+&& self.look_ahead(1, |t| {
+*t == token::Comma || *t == token::CloseDelim(Delimiter::Parenthesis)
+})
{
// `fn foo(String s) {}`
let ident = self.parse_ident().unwrap();
@@ -1968,7 +1972,7 @@ impl<'a> Parser<'a> {
} else if require_name
&& (self.token == token::Comma
|| self.token == token::Lt
-|| self.token == token::CloseDelim(token::Paren))
+|| self.token == token::CloseDelim(Delimiter::Parenthesis))
{
let rfc_note = "anonymous parameters are removed in the 2018 edition (see RFC 1685)";

@@ -2086,11 +2090,7 @@ impl<'a> Parser<'a> {
Ok(param)
}

-pub(super) fn consume_block(
-&mut self,
-delim: token::DelimToken,
-consume_close: ConsumeClosingDelim,
-) {
+pub(super) fn consume_block(&mut self, delim: Delimiter, consume_close: ConsumeClosingDelim) {
let mut brace_depth = 0;
loop {
if self.eat(&token::OpenDelim(delim)) {
@@ -2109,7 +2109,8 @@ impl<'a> Parser<'a> {
brace_depth -= 1;
continue;
}
-} else if self.token == token::Eof || self.eat(&token::CloseDelim(token::NoDelim)) {
+} else if self.token == token::Eof || self.eat(&token::CloseDelim(Delimiter::Invisible))
+{
return;
} else {
self.bump();
@@ -2555,7 +2556,7 @@ impl<'a> Parser<'a> {

crate fn maybe_recover_unexpected_block_label(&mut self) -> bool {
let Some(label) = self.eat_label().filter(|_| {
-self.eat(&token::Colon) && self.token.kind == token::OpenDelim(token::Brace)
+self.eat(&token::Colon) && self.token.kind == token::OpenDelim(Delimiter::Brace)
}) else {
return false;
};
@@ -2652,7 +2653,7 @@ impl<'a> Parser<'a> {
/// Parse and throw away a parenthesized comma separated
/// sequence of patterns until `)` is reached.
fn skip_pat_list(&mut self) -> PResult<'a, ()> {
-while !self.check(&token::CloseDelim(token::Paren)) {
+while !self.check(&token::CloseDelim(Delimiter::Parenthesis)) {
self.parse_pat_no_top_alt(None)?;
if !self.eat(&token::Comma) {
return Ok(());

@ -7,9 +7,8 @@ use super::{
};
use crate::maybe_recover_from_interpolated_ty_qpath;

use ast::token::DelimToken;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Token, TokenKind};
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
use rustc_ast::tokenstream::Spacing;
use rustc_ast::util::classify;
use rustc_ast::util::literal::LitError;
@ -495,7 +494,7 @@ impl<'a> Parser<'a> {
fn is_at_start_of_range_notation_rhs(&self) -> bool {
if self.token.can_begin_expr() {
// Parse `for i in 1.. { }` as infinite loop, not as `for i in (1..{})`.
if self.token == token::OpenDelim(token::Brace) {
if self.token == token::OpenDelim(Delimiter::Brace) {
return !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
}
true
@ -992,8 +991,8 @@ impl<'a> Parser<'a> {
return Ok(e);
}
e = match self.token.kind {
token::OpenDelim(token::Paren) => self.parse_fn_call_expr(lo, e),
token::OpenDelim(token::Bracket) => self.parse_index_expr(lo, e)?,
token::OpenDelim(Delimiter::Parenthesis) => self.parse_fn_call_expr(lo, e),
token::OpenDelim(Delimiter::Bracket) => self.parse_index_expr(lo, e)?,
_ => return Ok(e),
}
}
@ -1156,7 +1155,7 @@ impl<'a> Parser<'a> {

/// Parse a function call expression, `expr(...)`.
fn parse_fn_call_expr(&mut self, lo: Span, fun: P<Expr>) -> P<Expr> {
let snapshot = if self.token.kind == token::OpenDelim(token::Paren)
let snapshot = if self.token.kind == token::OpenDelim(Delimiter::Parenthesis)
&& self.look_ahead_type_ascription_as_field()
{
Some((self.create_snapshot_for_diagnostic(), fun.kind.clone()))
@ -1173,7 +1172,7 @@ impl<'a> Parser<'a> {
{
return expr;
}
self.recover_seq_parse_error(token::Paren, lo, seq)
self.recover_seq_parse_error(Delimiter::Parenthesis, lo, seq)
}

/// If we encounter a parser state that looks like the user has written a `struct` literal with
@ -1190,8 +1189,10 @@ impl<'a> Parser<'a> {
(Err(err), Some((mut snapshot, ExprKind::Path(None, path)))) => {
let name = pprust::path_to_string(&path);
snapshot.bump(); // `(`
match snapshot.parse_struct_fields(path, false, token::Paren) {
Ok((fields, ..)) if snapshot.eat(&token::CloseDelim(token::Paren)) => {
match snapshot.parse_struct_fields(path, false, Delimiter::Parenthesis) {
Ok((fields, ..))
if snapshot.eat(&token::CloseDelim(Delimiter::Parenthesis)) =>
{
// We are certain we have `Enum::Foo(a: 3, b: 4)`, suggest
// `Enum::Foo { a: 3, b: 4 }` or `Enum::Foo(3, 4)`.
self.restore_snapshot(snapshot);
@ -1241,7 +1242,7 @@ impl<'a> Parser<'a> {
fn parse_index_expr(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
self.bump(); // `[`
let index = self.parse_expr()?;
self.expect(&token::CloseDelim(token::Bracket))?;
self.expect(&token::CloseDelim(Delimiter::Bracket))?;
Ok(self.mk_expr(lo.to(self.prev_token.span), self.mk_index(base, index), AttrVec::new()))
}

@ -1253,10 +1254,10 @@ impl<'a> Parser<'a> {

let fn_span_lo = self.token.span;
let mut segment = self.parse_path_segment(PathStyle::Expr, None)?;
self.check_trailing_angle_brackets(&segment, &[&token::OpenDelim(token::Paren)]);
self.check_trailing_angle_brackets(&segment, &[&token::OpenDelim(Delimiter::Parenthesis)]);
self.check_turbofish_missing_angle_brackets(&mut segment);

if self.check(&token::OpenDelim(token::Paren)) {
if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
// Method call `expr.f()`
let mut args = self.parse_paren_expr_seq()?;
args.insert(0, self_arg);
@ -1302,9 +1303,9 @@ impl<'a> Parser<'a> {
// could be removed without changing functionality, but it's faster
// to have it here, especially for programs with large constants.
self.parse_lit_expr(attrs)
} else if self.check(&token::OpenDelim(token::Paren)) {
} else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
self.parse_tuple_parens_expr(attrs)
} else if self.check(&token::OpenDelim(token::Brace)) {
} else if self.check(&token::OpenDelim(Delimiter::Brace)) {
self.parse_block_expr(None, lo, BlockCheckMode::Default, attrs)
} else if self.check(&token::BinOp(token::Or)) || self.check(&token::OrOr) {
self.parse_closure_expr(attrs).map_err(|mut err| {
@ -1315,8 +1316,8 @@ impl<'a> Parser<'a> {
}
err
})
} else if self.check(&token::OpenDelim(token::Bracket)) {
self.parse_array_or_repeat_expr(attrs, token::Bracket)
} else if self.check(&token::OpenDelim(Delimiter::Bracket)) {
self.parse_array_or_repeat_expr(attrs, Delimiter::Bracket)
} else if self.check_path() {
self.parse_path_start_expr(attrs)
} else if self.check_keyword(kw::Move) || self.check_keyword(kw::Static) {
@ -1422,14 +1423,16 @@ impl<'a> Parser<'a> {

fn parse_tuple_parens_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
let lo = self.token.span;
self.expect(&token::OpenDelim(token::Paren))?;
self.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
let (es, trailing_comma) = match self.parse_seq_to_end(
&token::CloseDelim(token::Paren),
&token::CloseDelim(Delimiter::Parenthesis),
SeqSep::trailing_allowed(token::Comma),
|p| p.parse_expr_catch_underscore(),
) {
Ok(x) => x,
Err(err) => return Ok(self.recover_seq_parse_error(token::Paren, lo, Err(err))),
Err(err) => {
return Ok(self.recover_seq_parse_error(Delimiter::Parenthesis, lo, Err(err)));
}
};
let kind = if es.len() == 1 && !trailing_comma {
// `(e)` is parenthesized `e`.
@ -1445,7 +1448,7 @@ impl<'a> Parser<'a> {
fn parse_array_or_repeat_expr(
&mut self,
attrs: AttrVec,
close_delim: token::DelimToken,
close_delim: Delimiter,
) -> PResult<'a, P<Expr>> {
let lo = self.token.span;
self.bump(); // `[` or other open delim
@ -1500,7 +1503,7 @@ impl<'a> Parser<'a> {
prior_type_ascription: self.last_type_ascription,
};
(self.prev_token.span, ExprKind::MacCall(mac))
} else if self.check(&token::OpenDelim(token::Brace)) {
} else if self.check(&token::OpenDelim(Delimiter::Brace)) {
if let Some(expr) = self.maybe_parse_struct_expr(qself.as_ref(), &path, &attrs) {
if qself.is_some() {
self.sess.gated_spans.gate(sym::more_qualified_paths, path.span);
@ -1533,7 +1536,7 @@ impl<'a> Parser<'a> {
self.parse_for_expr(label, lo, attrs)
} else if self.eat_keyword(kw::Loop) {
self.parse_loop_expr(label, lo, attrs)
} else if self.check(&token::OpenDelim(token::Brace)) || self.token.is_whole_block() {
} else if self.check(&token::OpenDelim(Delimiter::Brace)) || self.token.is_whole_block() {
self.parse_block_expr(label, lo, BlockCheckMode::Default, attrs)
} else if !ate_colon && (self.check(&TokenKind::Comma) || self.check(&TokenKind::Gt)) {
// We're probably inside of a `Path<'a>` that needs a turbofish
@ -1631,7 +1634,7 @@ impl<'a> Parser<'a> {
)
.emit();
Some(lexpr)
} else if self.token != token::OpenDelim(token::Brace)
} else if self.token != token::OpenDelim(Delimiter::Brace)
|| !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
{
let expr = self.parse_expr_opt()?;
@ -1940,7 +1943,7 @@ impl<'a> Parser<'a> {
attrs: AttrVec,
) -> Option<P<Expr>> {
let mut snapshot = self.create_snapshot_for_diagnostic();
match snapshot.parse_array_or_repeat_expr(attrs, token::Brace) {
match snapshot.parse_array_or_repeat_expr(attrs, Delimiter::Brace) {
Ok(arr) => {
let hi = snapshot.prev_token.span;
self.struct_span_err(arr.span, "this is a block expression, not an array")
@ -2044,7 +2047,7 @@ impl<'a> Parser<'a> {
}

if self.token.kind == TokenKind::Semi
&& matches!(self.token_cursor.frame.delim_sp, Some((DelimToken::Paren, _)))
&& matches!(self.token_cursor.frame.delim_sp, Some((Delimiter::Parenthesis, _)))
{
// It is likely that the closure body is a block but where the
// braces have been removed. We will recover and eat the next
@ -2158,7 +2161,7 @@ impl<'a> Parser<'a> {
}
} else {
let attrs = self.parse_outer_attributes()?.take_for_recovery(); // For recovery.
let not_block = self.token != token::OpenDelim(token::Brace);
let not_block = self.token != token::OpenDelim(Delimiter::Brace);
let block = self.parse_block().map_err(|err| {
if not_block {
self.error_missing_if_then_block(lo, Some(err), missing_then_block_binop_span())
@ -2283,7 +2286,7 @@ impl<'a> Parser<'a> {
// This is used below for recovery in case of `for ( $stuff ) $block`
// in which case we will suggest `for $stuff $block`.
let begin_paren = match self.token.kind {
token::OpenDelim(token::Paren) => Some(self.token.span),
token::OpenDelim(Delimiter::Parenthesis) => Some(self.token.span),
_ => None,
};

@ -2372,7 +2375,7 @@ impl<'a> Parser<'a> {
let match_span = self.prev_token.span;
let lo = self.prev_token.span;
let scrutinee = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) {
if let Err(mut e) = self.expect(&token::OpenDelim(Delimiter::Brace)) {
if self.token == token::Semi {
e.span_suggestion_short(
match_span,
@ -2391,7 +2394,7 @@ impl<'a> Parser<'a> {
attrs.extend(self.parse_inner_attributes()?);

let mut arms: Vec<Arm> = Vec::new();
while self.token != token::CloseDelim(token::Brace) {
while self.token != token::CloseDelim(Delimiter::Brace) {
match self.parse_arm() {
Ok(arm) => arms.push(arm),
Err(mut e) => {
@ -2399,7 +2402,7 @@ impl<'a> Parser<'a> {
e.emit();
self.recover_stmt();
let span = lo.to(self.token.span);
if self.token == token::CloseDelim(token::Brace) {
if self.token == token::CloseDelim(Delimiter::Brace) {
self.bump();
}
return Ok(self.mk_expr(span, ExprKind::Match(scrutinee, arms), attrs));
@ -2463,7 +2466,7 @@ impl<'a> Parser<'a> {
// We might have either a `,` -> `;` typo, or a block without braces. We need
// a more subtle parsing strategy.
loop {
if self.token.kind == token::CloseDelim(token::Brace) {
if self.token.kind == token::CloseDelim(Delimiter::Brace) {
// We have reached the closing brace of the `match` expression.
return Some(err(self, stmts));
}
@ -2571,7 +2574,7 @@ impl<'a> Parser<'a> {
})?;

let require_comma = classify::expr_requires_semi_to_be_stmt(&expr)
&& this.token != token::CloseDelim(token::Brace);
&& this.token != token::CloseDelim(Delimiter::Brace);

let hi = this.prev_token.span;

@ -2592,8 +2595,8 @@ impl<'a> Parser<'a> {
TrailingToken::None,
));
}
this.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)]).map_err(
|mut err| {
this.expect_one_of(&[token::Comma], &[token::CloseDelim(Delimiter::Brace)])
.map_err(|mut err| {
match (sm.span_to_lines(expr.span), sm.span_to_lines(arm_start_span)) {
(Ok(ref expr_lines), Ok(ref arm_start_lines))
if arm_start_lines.lines[0].end_col
@ -2627,8 +2630,7 @@ impl<'a> Parser<'a> {
}
}
err
},
)?;
})?;
} else {
this.eat(&token::Comma);
}
@ -2670,13 +2672,13 @@ impl<'a> Parser<'a> {
fn is_do_catch_block(&self) -> bool {
self.token.is_keyword(kw::Do)
&& self.is_keyword_ahead(1, &[kw::Catch])
&& self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))
&& self.look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace))
&& !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
}

fn is_try_block(&self) -> bool {
self.token.is_keyword(kw::Try)
&& self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace))
&& self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace))
&& self.token.uninterpolated_span().rust_2018()
}

@ -2696,10 +2698,10 @@ impl<'a> Parser<'a> {
&& ((
// `async move {`
self.is_keyword_ahead(1, &[kw::Move])
&& self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))
&& self.look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace))
) || (
// `async {`
self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace))
self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace))
))
}

@ -2726,7 +2728,7 @@ impl<'a> Parser<'a> {
) -> Option<PResult<'a, P<Expr>>> {
let struct_allowed = !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
if struct_allowed || self.is_certainly_not_a_block() {
if let Err(err) = self.expect(&token::OpenDelim(token::Brace)) {
if let Err(err) = self.expect(&token::OpenDelim(Delimiter::Brace)) {
return Some(Err(err));
}
let expr = self.parse_struct_expr(qself.cloned(), path.clone(), attrs.clone(), true);
@ -2753,7 +2755,7 @@ impl<'a> Parser<'a> {
&mut self,
pth: ast::Path,
recover: bool,
close_delim: token::DelimToken,
close_delim: Delimiter,
) -> PResult<'a, (Vec<ExprField>, ast::StructRest, bool)> {
let mut fields = Vec::new();
let mut base = ast::StructRest::None;
@ -2853,9 +2855,9 @@ impl<'a> Parser<'a> {
) -> PResult<'a, P<Expr>> {
let lo = pth.span;
let (fields, base, recover_async) =
self.parse_struct_fields(pth.clone(), recover, token::Brace)?;
self.parse_struct_fields(pth.clone(), recover, Delimiter::Brace)?;
let span = lo.to(self.token.span);
self.expect(&token::CloseDelim(token::Brace))?;
self.expect(&token::CloseDelim(Delimiter::Brace))?;
let expr = if recover_async {
ExprKind::Err
} else {

@ -4,7 +4,7 @@ use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Traili

use rustc_ast::ast::*;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, TokenKind};
use rustc_ast::token::{self, Delimiter, TokenKind};
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
use rustc_ast::{self as ast, AttrVec, Attribute, DUMMY_NODE_ID};
use rustc_ast::{Async, Const, Defaultness, IsAuto, Mutability, Unsafe, UseTree, UseTreeKind};
@ -39,9 +39,9 @@ impl<'a> Parser<'a> {
let mod_kind = if self.eat(&token::Semi) {
ModKind::Unloaded
} else {
self.expect(&token::OpenDelim(token::Brace))?;
self.expect(&token::OpenDelim(Delimiter::Brace))?;
let (mut inner_attrs, items, inner_span) =
self.parse_mod(&token::CloseDelim(token::Brace))?;
self.parse_mod(&token::CloseDelim(Delimiter::Brace))?;
attrs.append(&mut inner_attrs);
ModKind::Loaded(items, Inline::Yes, inner_span)
};
@ -324,7 +324,7 @@ impl<'a> Parser<'a> {
let sp = self.prev_token.span.between(self.token.span);
let full_sp = self.prev_token.span.to(self.token.span);
let ident_sp = self.token.span;
if self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) {
if self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace)) {
// possible public struct definition where `struct` was forgotten
let ident = self.parse_ident().unwrap();
let msg = format!("add `struct` here to parse `{ident}` as a public struct");
@ -336,16 +336,16 @@ impl<'a> Parser<'a> {
Applicability::MaybeIncorrect, // speculative
);
Err(err)
} else if self.look_ahead(1, |t| *t == token::OpenDelim(token::Paren)) {
} else if self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Parenthesis)) {
let ident = self.parse_ident().unwrap();
self.bump(); // `(`
let kw_name = self.recover_first_param();
self.consume_block(token::Paren, ConsumeClosingDelim::Yes);
self.consume_block(Delimiter::Parenthesis, ConsumeClosingDelim::Yes);
let (kw, kw_name, ambiguous) = if self.check(&token::RArrow) {
self.eat_to_tokens(&[&token::OpenDelim(token::Brace)]);
self.eat_to_tokens(&[&token::OpenDelim(Delimiter::Brace)]);
self.bump(); // `{`
("fn", kw_name, false)
} else if self.check(&token::OpenDelim(token::Brace)) {
} else if self.check(&token::OpenDelim(Delimiter::Brace)) {
self.bump(); // `{`
("fn", kw_name, false)
} else if self.check(&token::Colon) {
@ -358,7 +358,7 @@ impl<'a> Parser<'a> {
let msg = format!("missing `{kw}` for {kw_name} definition");
let mut err = self.struct_span_err(sp, &msg);
if !ambiguous {
self.consume_block(token::Brace, ConsumeClosingDelim::Yes);
self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
let suggestion =
format!("add `{kw}` here to parse `{ident}` as a public {kw_name}");
err.span_suggestion_short(
@ -386,9 +386,9 @@ impl<'a> Parser<'a> {
let ident = self.parse_ident().unwrap();
self.eat_to_tokens(&[&token::Gt]);
self.bump(); // `>`
let (kw, kw_name, ambiguous) = if self.eat(&token::OpenDelim(token::Paren)) {
let (kw, kw_name, ambiguous) = if self.eat(&token::OpenDelim(Delimiter::Parenthesis)) {
("fn", self.recover_first_param(), false)
} else if self.check(&token::OpenDelim(token::Brace)) {
} else if self.check(&token::OpenDelim(Delimiter::Brace)) {
("struct", "struct", false)
} else {
("fn` or `struct", "function or struct", true)
@ -630,11 +630,11 @@ impl<'a> Parser<'a> {
mut parse_item: impl FnMut(&mut Parser<'a>) -> PResult<'a, Option<Option<T>>>,
) -> PResult<'a, Vec<T>> {
let open_brace_span = self.token.span;
self.expect(&token::OpenDelim(token::Brace))?;
self.expect(&token::OpenDelim(Delimiter::Brace))?;
attrs.append(&mut self.parse_inner_attributes()?);

let mut items = Vec::new();
while !self.eat(&token::CloseDelim(token::Brace)) {
while !self.eat(&token::CloseDelim(Delimiter::Brace)) {
if self.recover_doc_comment_before_brace() {
continue;
}
@ -642,7 +642,7 @@ impl<'a> Parser<'a> {
Ok(None) => {
// We have to bail or we'll potentially never make progress.
let non_item_span = self.token.span;
self.consume_block(token::Brace, ConsumeClosingDelim::Yes);
self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
self.struct_span_err(non_item_span, "non-item in item list")
.span_label(open_brace_span, "item list starts here")
.span_label(non_item_span, "non-item starts here")
@ -652,7 +652,7 @@ impl<'a> Parser<'a> {
}
Ok(Some(item)) => items.extend(item),
Err(mut err) => {
self.consume_block(token::Brace, ConsumeClosingDelim::Yes);
self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
err.span_label(open_brace_span, "while parsing this item list starting here")
.span_label(self.prev_token.span, "the item list ends here")
.emit();
@ -666,7 +666,7 @@ impl<'a> Parser<'a> {
/// Recover on a doc comment before `}`.
fn recover_doc_comment_before_brace(&mut self) -> bool {
if let token::DocComment(..) = self.token.kind {
if self.look_ahead(1, |tok| tok == &token::CloseDelim(token::Brace)) {
if self.look_ahead(1, |tok| tok == &token::CloseDelim(Delimiter::Brace)) {
struct_span_err!(
self.diagnostic(),
self.token.span,
@ -866,7 +866,7 @@ impl<'a> Parser<'a> {
let lo = self.token.span;

let mut prefix = ast::Path { segments: Vec::new(), span: lo.shrink_to_lo(), tokens: None };
let kind = if self.check(&token::OpenDelim(token::Brace))
let kind = if self.check(&token::OpenDelim(Delimiter::Brace))
|| self.check(&token::BinOp(token::Star))
|| self.is_import_coupler()
{
@ -908,7 +908,7 @@ impl<'a> Parser<'a> {
/// USE_TREE_LIST = Ø | (USE_TREE `,`)* USE_TREE [`,`]
/// ```
fn parse_use_tree_list(&mut self) -> PResult<'a, Vec<(UseTree, ast::NodeId)>> {
self.parse_delim_comma_seq(token::Brace, |p| Ok((p.parse_use_tree()?, DUMMY_NODE_ID)))
self.parse_delim_comma_seq(Delimiter::Brace, |p| Ok((p.parse_use_tree()?, DUMMY_NODE_ID)))
.map(|(r, _)| r)
}

@ -1077,7 +1077,7 @@ impl<'a> Parser<'a> {
&& self.is_keyword_ahead(1, &[kw::Extern])
&& self.look_ahead(
2 + self.look_ahead(2, |t| t.can_begin_literal_maybe_minus() as usize),
|t| t.kind == token::OpenDelim(token::Brace),
|t| t.kind == token::OpenDelim(Delimiter::Brace),
)
}

@ -1204,8 +1204,9 @@ impl<'a> Parser<'a> {
let mut generics = self.parse_generics()?;
generics.where_clause = self.parse_where_clause()?;

let (variants, _) =
self.parse_delim_comma_seq(token::Brace, |p| p.parse_enum_variant()).map_err(|e| {
let (variants, _) = self
.parse_delim_comma_seq(Delimiter::Brace, |p| p.parse_enum_variant())
.map_err(|e| {
self.recover_stmt();
e
})?;
@ -1228,11 +1229,11 @@ impl<'a> Parser<'a> {
}
let ident = this.parse_field_ident("enum", vlo)?;

let struct_def = if this.check(&token::OpenDelim(token::Brace)) {
let struct_def = if this.check(&token::OpenDelim(Delimiter::Brace)) {
// Parse a struct variant.
let (fields, recovered) = this.parse_record_struct_body("struct", false)?;
VariantData::Struct(fields, recovered)
} else if this.check(&token::OpenDelim(token::Paren)) {
} else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) {
VariantData::Tuple(this.parse_tuple_struct_body()?, DUMMY_NODE_ID)
} else {
VariantData::Unit(DUMMY_NODE_ID)
@ -1292,12 +1293,12 @@ impl<'a> Parser<'a> {
} else if self.eat(&token::Semi) {
VariantData::Unit(DUMMY_NODE_ID)
// Record-style struct definition
} else if self.token == token::OpenDelim(token::Brace) {
} else if self.token == token::OpenDelim(Delimiter::Brace) {
let (fields, recovered) =
self.parse_record_struct_body("struct", generics.where_clause.has_where_token)?;
VariantData::Struct(fields, recovered)
// Tuple-style struct definition with optional where-clause.
} else if self.token == token::OpenDelim(token::Paren) {
} else if self.token == token::OpenDelim(Delimiter::Parenthesis) {
let body = VariantData::Tuple(self.parse_tuple_struct_body()?, DUMMY_NODE_ID);
generics.where_clause = self.parse_where_clause()?;
self.expect_semi()?;
@ -1326,7 +1327,7 @@ impl<'a> Parser<'a> {
let (fields, recovered) =
self.parse_record_struct_body("union", generics.where_clause.has_where_token)?;
VariantData::Struct(fields, recovered)
} else if self.token == token::OpenDelim(token::Brace) {
} else if self.token == token::OpenDelim(Delimiter::Brace) {
let (fields, recovered) =
self.parse_record_struct_body("union", generics.where_clause.has_where_token)?;
VariantData::Struct(fields, recovered)
@ -1348,10 +1349,10 @@ impl<'a> Parser<'a> {
) -> PResult<'a, (Vec<FieldDef>, /* recovered */ bool)> {
let mut fields = Vec::new();
let mut recovered = false;
if self.eat(&token::OpenDelim(token::Brace)) {
while self.token != token::CloseDelim(token::Brace) {
if self.eat(&token::OpenDelim(Delimiter::Brace)) {
while self.token != token::CloseDelim(Delimiter::Brace) {
let field = self.parse_field_def(adt_ty).map_err(|e| {
self.consume_block(token::Brace, ConsumeClosingDelim::No);
self.consume_block(Delimiter::Brace, ConsumeClosingDelim::No);
recovered = true;
e
});
@ -1363,7 +1364,7 @@ impl<'a> Parser<'a> {
}
}
}
self.eat(&token::CloseDelim(token::Brace));
self.eat(&token::CloseDelim(Delimiter::Brace));
} else {
let token_str = super::token_descr(&self.token);
let msg = &format!(
@ -1439,7 +1440,7 @@ impl<'a> Parser<'a> {
token::Comma => {
self.bump();
}
token::CloseDelim(token::Brace) => {}
token::CloseDelim(Delimiter::Brace) => {}
token::DocComment(..) => {
let previous_span = self.prev_token.span;
let mut err = self.span_err(self.token.span, Error::UselessDocComment);
@ -1450,7 +1451,7 @@ impl<'a> Parser<'a> {
if !seen_comma && comma_after_doc_seen {
seen_comma = true;
}
if comma_after_doc_seen || self.token == token::CloseDelim(token::Brace) {
if comma_after_doc_seen || self.token == token::CloseDelim(Delimiter::Brace) {
err.emit();
} else {
if !seen_comma {
@ -1478,7 +1479,7 @@ impl<'a> Parser<'a> {
if let Some(last_segment) = segments.last() {
recovered = self.check_trailing_angle_brackets(
last_segment,
&[&token::Comma, &token::CloseDelim(token::Brace)],
&[&token::Comma, &token::CloseDelim(Delimiter::Brace)],
);
if recovered {
// Handle a case like `Vec<u8>>,` where we can continue parsing fields
@ -1636,12 +1637,12 @@ impl<'a> Parser<'a> {
/// ```
fn parse_item_decl_macro(&mut self, lo: Span) -> PResult<'a, ItemInfo> {
let ident = self.parse_ident()?;
let body = if self.check(&token::OpenDelim(token::Brace)) {
let body = if self.check(&token::OpenDelim(Delimiter::Brace)) {
self.parse_mac_args()? // `MacBody`
} else if self.check(&token::OpenDelim(token::Paren)) {
} else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
let params = self.parse_token_tree(); // `MacParams`
let pspan = params.span();
if !self.check(&token::OpenDelim(token::Brace)) {
if !self.check(&token::OpenDelim(Delimiter::Brace)) {
return self.unexpected();
}
let body = self.parse_token_tree(); // `MacBody`
@ -1924,7 +1925,7 @@ impl<'a> Parser<'a> {
self.expect_semi()?;
*sig_hi = self.prev_token.span;
(Vec::new(), None)
} else if self.check(&token::OpenDelim(token::Brace)) || self.token.is_whole_block() {
} else if self.check(&token::OpenDelim(Delimiter::Brace)) || self.token.is_whole_block() {
self.parse_inner_attrs_and_block().map(|(attrs, body)| (attrs, Some(body)))?
} else if self.token.kind == token::Eq {
// Recover `fn foo() = $expr;`.
@ -1943,12 +1944,12 @@ impl<'a> Parser<'a> {
(Vec::new(), Some(self.mk_block_err(span)))
} else {
let expected = if req_body {
&[token::OpenDelim(token::Brace)][..]
&[token::OpenDelim(Delimiter::Brace)][..]
} else {
&[token::Semi, token::OpenDelim(token::Brace)]
&[token::Semi, token::OpenDelim(Delimiter::Brace)]
};
if let Err(mut err) = self.expected_one_of_not_found(&[], &expected) {
if self.token.kind == token::CloseDelim(token::Brace) {
if self.token.kind == token::CloseDelim(Delimiter::Brace) {
// The enclosing `mod`, `trait` or `impl` is being closed, so keep the `fn` in
// the AST for typechecking.
err.span_label(ident.span, "while parsing this `fn`");
@ -2164,7 +2165,7 @@ impl<'a> Parser<'a> {
e.emit();
let lo = p.prev_token.span;
// Skip every token until next possible arg or end.
p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(token::Paren)]);
p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(Delimiter::Parenthesis)]);
// Create a placeholder argument for proper arg count (issue #34264).
Ok(dummy_arg(Ident::new(kw::Empty, lo.to(p.prev_token.span))))
});
@ -2220,7 +2221,7 @@ impl<'a> Parser<'a> {
let mut ty = this.parse_ty_for_param();
if ty.is_ok()
&& this.token != token::Comma
&& this.token != token::CloseDelim(token::Paren)
&& this.token != token::CloseDelim(Delimiter::Parenthesis)
{
// This wasn't actually a type, but a pattern looking like a type,
// so we are going to rollback and re-parse for recovery.

@ -19,7 +19,7 @@ pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
pub use path::PathStyle;

use rustc_ast::ptr::P;
use rustc_ast::token::{self, DelimToken, Nonterminal, Token, TokenKind};
use rustc_ast::token::{self, Delimiter, Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::AttributesData;
use rustc_ast::tokenstream::{self, DelimSpan, Spacing};
use rustc_ast::tokenstream::{TokenStream, TokenTree};
@ -244,12 +244,12 @@ struct TokenCursor {

#[derive(Clone)]
struct TokenCursorFrame {
delim_sp: Option<(DelimToken, DelimSpan)>,
delim_sp: Option<(Delimiter, DelimSpan)>,
tree_cursor: tokenstream::Cursor,
}

impl TokenCursorFrame {
fn new(delim_sp: Option<(DelimToken, DelimSpan)>, tts: TokenStream) -> Self {
fn new(delim_sp: Option<(Delimiter, DelimSpan)>, tts: TokenStream) -> Self {
TokenCursorFrame { delim_sp, tree_cursor: tts.into_trees() }
}
}
@ -263,8 +263,8 @@ impl TokenCursor {
#[inline(always)]
fn inlined_next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) {
loop {
// FIXME: we currently don't return `NoDelim` open/close delims. To fix #67062 we will
// need to, whereupon the `delim != DelimToken::NoDelim` conditions below can be
// FIXME: we currently don't return `Delimiter` open/close delims. To fix #67062 we will
// need to, whereupon the `delim != Delimiter::Invisible` conditions below can be
// removed.
if let Some((tree, spacing)) = self.frame.tree_cursor.next_with_spacing_ref() {
match tree {
@ -278,14 +278,14 @@ impl TokenCursor {
// Set `open_delim` to true here because we deal with it immediately.
let frame = TokenCursorFrame::new(Some((delim, sp)), tts.clone());
self.stack.push(mem::replace(&mut self.frame, frame));
if delim != DelimToken::NoDelim {
if delim != Delimiter::Invisible {
return (Token::new(token::OpenDelim(delim), sp.open), Spacing::Alone);
}
// No open delimeter to return; continue on to the next iteration.
}
};
} else if let Some(frame) = self.stack.pop() {
if let Some((delim, span)) = self.frame.delim_sp && delim != DelimToken::NoDelim {
if let Some((delim, span)) = self.frame.delim_sp && delim != Delimiter::Invisible {
self.frame = frame;
return (Token::new(token::CloseDelim(delim), span.close), Spacing::Alone);
}
@ -314,7 +314,7 @@ impl TokenCursor {
let delim_span = DelimSpan::from_single(span);
let body = TokenTree::Delimited(
delim_span,
token::Bracket,
Delimiter::Bracket,
[
TokenTree::token(token::Ident(sym::doc, false), span),
TokenTree::token(token::Eq, span),
@ -626,7 +626,7 @@ impl<'a> Parser<'a> {
self.is_keyword_ahead(dist, &[kw::Const])
&& self.look_ahead(dist + 1, |t| match t.kind {
token::Interpolated(ref nt) => matches!(**nt, token::NtBlock(..)),
token::OpenDelim(DelimToken::Brace) => true,
token::OpenDelim(Delimiter::Brace) => true,
_ => false,
})
}
@ -954,7 +954,7 @@ impl<'a> Parser<'a> {

fn parse_delim_comma_seq<T>(
&mut self,
delim: DelimToken,
delim: Delimiter,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (Vec<T>, bool)> {
self.parse_unspanned_seq(
@ -969,7 +969,7 @@ impl<'a> Parser<'a> {
&mut self,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (Vec<T>, bool)> {
self.parse_delim_comma_seq(token::Paren, f)
self.parse_delim_comma_seq(Delimiter::Parenthesis, f)
}

/// Advance the parser by one token using provided token as the next one.
@ -1005,7 +1005,7 @@ impl<'a> Parser<'a> {
}
debug_assert!(!matches!(
next.0.kind,
token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim)
token::OpenDelim(Delimiter::Invisible) | token::CloseDelim(Delimiter::Invisible)
));
self.inlined_bump_with(next)
}
@ -1018,10 +1018,10 @@ impl<'a> Parser<'a> {
}

let frame = &self.token_cursor.frame;
if let Some((delim, span)) = frame.delim_sp && delim != DelimToken::NoDelim {
if let Some((delim, span)) = frame.delim_sp && delim != Delimiter::Invisible {
let all_normal = (0..dist).all(|i| {
let token = frame.tree_cursor.look_ahead(i);
!matches!(token, Some(TokenTree::Delimited(_, DelimToken::NoDelim, _)))
!matches!(token, Some(TokenTree::Delimited(_, Delimiter::Invisible, _)))
});
if all_normal {
return match frame.tree_cursor.look_ahead(dist - 1) {
@ -1043,7 +1043,7 @@ impl<'a> Parser<'a> {
token = cursor.next(/* desugar_doc_comments */ false).0;
if matches!(
token.kind,
token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim)
token::OpenDelim(Delimiter::Invisible) | token::CloseDelim(Delimiter::Invisible)
) {
continue;
}
@ -1079,7 +1079,7 @@ impl<'a> Parser<'a> {
/// Parses constness: `const` or nothing.
fn parse_constness(&mut self) -> Const {
// Avoid const blocks to be parsed as const items
if self.look_ahead(1, |t| t != &token::OpenDelim(DelimToken::Brace))
if self.look_ahead(1, |t| t != &token::OpenDelim(Delimiter::Brace))
&& self.eat_keyword(kw::Const)
{
Const::Yes(self.prev_token.uninterpolated_span())
@ -1142,9 +1142,9 @@ impl<'a> Parser<'a> {

fn parse_mac_args_common(&mut self, delimited_only: bool) -> PResult<'a, MacArgs> {
Ok(
if self.check(&token::OpenDelim(DelimToken::Paren))
|| self.check(&token::OpenDelim(DelimToken::Bracket))
|| self.check(&token::OpenDelim(DelimToken::Brace))
if self.check(&token::OpenDelim(Delimiter::Parenthesis))
|| self.check(&token::OpenDelim(Delimiter::Bracket))
|| self.check(&token::OpenDelim(Delimiter::Brace))
{
match self.parse_token_tree() {
TokenTree::Delimited(dspan, delim, tokens) =>
@ -1288,7 +1288,7 @@ impl<'a> Parser<'a> {
}
let lo = self.prev_token.span;

if self.check(&token::OpenDelim(token::Paren)) {
if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
// We don't `self.bump()` the `(` yet because this might be a struct definition where
// `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
// Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
@ -1299,7 +1299,7 @@ impl<'a> Parser<'a> {
// Parse `pub(crate)`.
self.bump(); // `(`
self.bump(); // `crate`
self.expect(&token::CloseDelim(token::Paren))?; // `)`
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
let vis = VisibilityKind::Crate(CrateSugar::PubCrate);
return Ok(Visibility {
span: lo.to(self.prev_token.span),
@ -1311,20 +1311,20 @@ impl<'a> Parser<'a> {
self.bump(); // `(`
self.bump(); // `in`
let path = self.parse_path(PathStyle::Mod)?; // `path`
self.expect(&token::CloseDelim(token::Paren))?; // `)`
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
let vis = VisibilityKind::Restricted { path: P(path), id: ast::DUMMY_NODE_ID };
return Ok(Visibility {
span: lo.to(self.prev_token.span),
kind: vis,
tokens: None,
});
} else if self.look_ahead(2, |t| t == &token::CloseDelim(token::Paren))
} else if self.look_ahead(2, |t| t == &token::CloseDelim(Delimiter::Parenthesis))
&& self.is_keyword_ahead(1, &[kw::Super, kw::SelfLower])
{
// Parse `pub(self)` or `pub(super)`.
self.bump(); // `(`
let path = self.parse_path(PathStyle::Mod)?; // `super`/`self`
self.expect(&token::CloseDelim(token::Paren))?; // `)`
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
let vis = VisibilityKind::Restricted { path: P(path), id: ast::DUMMY_NODE_ID };
return Ok(Visibility {
span: lo.to(self.prev_token.span),
@ -1346,7 +1346,7 @@ impl<'a> Parser<'a> {
fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> {
self.bump(); // `(`
let path = self.parse_path(PathStyle::Mod)?;
self.expect(&token::CloseDelim(token::Paren))?; // `)`
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`

let msg = "incorrect visibility restriction";
let suggestion = r##"some possible visibility restrictions are:
@ -1413,7 +1413,7 @@ impl<'a> Parser<'a> {
fn is_import_coupler(&mut self) -> bool {
self.check(&token::ModSep)
&& self.look_ahead(1, |t| {
*t == token::OpenDelim(token::Brace) || *t == token::BinOp(token::Star)
*t == token::OpenDelim(Delimiter::Brace) || *t == token::BinOp(token::Star)
})
}

@ -1,5 +1,5 @@
use rustc_ast::ptr::P;
use rustc_ast::token::{self, NonterminalKind, Token};
use rustc_ast::token::{self, Delimiter, NonterminalKind, Token};
use rustc_ast::AstLike;
use rustc_ast_pretty::pprust;
use rustc_errors::PResult;
@ -43,7 +43,7 @@ impl<'a> Parser<'a> {
_ => token.can_begin_type(),
},
NonterminalKind::Block => match token.kind {
token::OpenDelim(token::Brace) => true,
token::OpenDelim(Delimiter::Brace) => true,
token::Interpolated(ref nt) => !matches!(
**nt,
token::NtItem(_)
@ -67,8 +67,8 @@ impl<'a> Parser<'a> {
NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr { .. } => {
match token.kind {
token::Ident(..) | // box, ref, mut, and other identifiers (can stricten)
token::OpenDelim(token::Paren) | // tuple pattern
token::OpenDelim(token::Bracket) | // slice pattern
token::OpenDelim(Delimiter::Parenthesis) | // tuple pattern
token::OpenDelim(Delimiter::Bracket) | // slice pattern
token::BinOp(token::And) | // reference
token::BinOp(token::Minus) | // negative literal
token::AndAnd | // double reference

@ -2,7 +2,7 @@ use super::{ForceCollect, Parser, PathStyle, TrailingToken};
use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
use rustc_ast::mut_visit::{noop_visit_pat, MutVisitor};
use rustc_ast::ptr::P;
use rustc_ast::token;
use rustc_ast::token::{self, Delimiter};
use rustc_ast::{
self as ast, AttrVec, Attribute, BindingMode, Expr, ExprKind, MacCall, Mutability, Pat,
PatField, PatKind, Path, QSelf, RangeEnd, RangeSyntax,
@ -260,9 +260,9 @@ impl<'a> Parser<'a> {
| token::Semi // e.g. `let a |;`.
| token::Colon // e.g. `let a | :`.
| token::Comma // e.g. `let (a |,)`.
| token::CloseDelim(token::Bracket) // e.g. `let [a | ]`.
| token::CloseDelim(token::Paren) // e.g. `let (a | )`.
| token::CloseDelim(token::Brace) // e.g. `let A { f: a | }`.
| token::CloseDelim(Delimiter::Bracket) // e.g. `let [a | ]`.
| token::CloseDelim(Delimiter::Parenthesis) // e.g. `let (a | )`.
| token::CloseDelim(Delimiter::Brace) // e.g. `let A { f: a | }`.
)
});
match (is_end_ahead, &self.token.kind) {
@ -323,11 +323,11 @@ impl<'a> Parser<'a> {

let pat = if self.check(&token::BinOp(token::And)) || self.token.kind == token::AndAnd {
self.parse_pat_deref(expected)?
} else if self.check(&token::OpenDelim(token::Paren)) {
} else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
self.parse_pat_tuple_or_parens()?
} else if self.check(&token::OpenDelim(token::Bracket)) {
} else if self.check(&token::OpenDelim(Delimiter::Bracket)) {
// Parse `[pat, pat,...]` as a slice pattern.
let (pats, _) = self.parse_delim_comma_seq(token::Bracket, |p| {
let (pats, _) = self.parse_delim_comma_seq(Delimiter::Bracket, |p| {
p.parse_pat_allow_top_alt(
None,
RecoverComma::No,
@ -389,9 +389,9 @@ impl<'a> Parser<'a> {
} else if let Some(form) = self.parse_range_end() {
let begin = self.mk_expr(span, ExprKind::Path(qself, path), AttrVec::new());
self.parse_pat_range_begin_with(begin, form)?
} else if self.check(&token::OpenDelim(token::Brace)) {
} else if self.check(&token::OpenDelim(Delimiter::Brace)) {
self.parse_pat_struct(qself, path)?
} else if self.check(&token::OpenDelim(token::Paren)) {
} else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
self.parse_pat_tuple_struct(qself, path)?
} else {
PatKind::Path(qself, path)
@ -845,8 +845,8 @@ impl<'a> Parser<'a> {
// Avoid `in`. Due to recovery in the list parser this messes with `for ( $pat in $expr )`.
&& !self.token.is_keyword(kw::In)
// Try to do something more complex?
&& self.look_ahead(1, |t| !matches!(t.kind, token::OpenDelim(token::Paren) // A tuple struct pattern.
| token::OpenDelim(token::Brace) // A struct pattern.
&& self.look_ahead(1, |t| !matches!(t.kind, token::OpenDelim(Delimiter::Parenthesis) // A tuple struct pattern.
| token::OpenDelim(Delimiter::Brace) // A struct pattern.
| token::DotDotDot | token::DotDotEq | token::DotDot // A range pattern.
| token::ModSep // A tuple / struct variant pattern.
| token::Not)) // A macro expanding to a pattern.
@ -868,7 +868,7 @@ impl<'a> Parser<'a> {
// This shortly leads to a parse error. Note that if there is no explicit
// binding mode then we do not end up here, because the lookahead
// will direct us over to `parse_enum_variant()`.
if self.token == token::OpenDelim(token::Paren) {
if self.token == token::OpenDelim(Delimiter::Parenthesis) {
return Err(self
.struct_span_err(self.prev_token.span, "expected identifier, found enum pattern"));
}
@ -917,7 +917,7 @@ impl<'a> Parser<'a> {
let mut delayed_err: Option<DiagnosticBuilder<'a, ErrorGuaranteed>> = None;
let mut etc_span = None;

while self.token != token::CloseDelim(token::Brace) {
while self.token != token::CloseDelim(Delimiter::Brace) {
let attrs = match self.parse_outer_attributes() {
Ok(attrs) => attrs,
Err(err) => {
@ -946,7 +946,7 @@ impl<'a> Parser<'a> {
self.recover_one_fewer_dotdot();
self.bump(); // `..` || `...`

if self.token == token::CloseDelim(token::Brace) {
if self.token == token::CloseDelim(Delimiter::Brace) {
etc_span = Some(etc_sp);
break;
}
@ -970,7 +970,7 @@ impl<'a> Parser<'a> {
}

etc_span = Some(etc_sp.until(self.token.span));
if self.token == token::CloseDelim(token::Brace) {
if self.token == token::CloseDelim(Delimiter::Brace) {
// If the struct looks otherwise well formed, recover and continue.
if let Some(sp) = comma_sp {
err.span_suggestion_short(

@ -2,7 +2,7 @@ use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{Parser, Restrictions, TokenType};
use crate::maybe_whole;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Token};
use rustc_ast::token::{self, Delimiter, Token};
use rustc_ast::{
self as ast, AngleBracketedArg, AngleBracketedArgs, AnonConst, AssocConstraint,
AssocConstraintKind, BlockCheckMode, GenericArg, GenericArgs, Generics, ParenthesizedArgs,
@ -236,14 +236,14 @@ impl<'a> Parser<'a> {
token.kind,
token::Lt
| token::BinOp(token::Shl)
| token::OpenDelim(token::Paren)
| token::OpenDelim(Delimiter::Parenthesis)
| token::LArrow
)
};
let check_args_start = |this: &mut Self| {
this.expected_tokens.extend_from_slice(&[
TokenType::Token(token::Lt),
TokenType::Token(token::OpenDelim(token::Paren)),
TokenType::Token(token::OpenDelim(Delimiter::Parenthesis)),
]);
is_args_start(&this.token)
};
@ -639,7 +639,7 @@ impl<'a> Parser<'a> {
/// the caller.
pub(super) fn parse_const_arg(&mut self) -> PResult<'a, AnonConst> {
// Parse const argument.
let value = if let token::OpenDelim(token::Brace) = self.token.kind {
let value = if let token::OpenDelim(Delimiter::Brace) = self.token.kind {
self.parse_block_expr(
None,
self.token.span,
@ -667,7 +667,8 @@ impl<'a> Parser<'a> {
GenericArg::Const(self.parse_const_arg()?)
} else if self.check_type() {
// Parse type argument.
let is_const_fn = self.look_ahead(1, |t| t.kind == token::OpenDelim(token::Paren));
let is_const_fn =
self.look_ahead(1, |t| t.kind == token::OpenDelim(Delimiter::Parenthesis));
let mut snapshot = self.create_snapshot_for_diagnostic();
match self.parse_ty() {
Ok(ty) => GenericArg::Type(ty),

@ -11,7 +11,7 @@ use crate::maybe_whole;

use rustc_ast as ast;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, TokenKind};
use rustc_ast::token::{self, Delimiter, TokenKind};
use rustc_ast::util::classify;
use rustc_ast::{
AstLike, AttrStyle, AttrVec, Attribute, LocalKind, MacCall, MacCallStmt, MacStmtStyle,
@ -92,7 +92,7 @@ impl<'a> Parser<'a> {
// Do not attempt to parse an expression if we're done here.
self.error_outer_attrs(&attrs.take_for_recovery());
self.mk_stmt(lo, StmtKind::Empty)
} else if self.token != token::CloseDelim(token::Brace) {
} else if self.token != token::CloseDelim(Delimiter::Brace) {
// Remainder are line-expr stmts.
let e = if force_collect == ForceCollect::Yes {
self.collect_tokens_no_attrs(|this| {
@ -131,7 +131,7 @@ impl<'a> Parser<'a> {
}
}

let expr = if this.eat(&token::OpenDelim(token::Brace)) {
let expr = if this.eat(&token::OpenDelim(Delimiter::Brace)) {
this.parse_struct_expr(None, path, AttrVec::new(), true)?
} else {
let hi = this.prev_token.span;
@ -165,7 +165,7 @@ impl<'a> Parser<'a> {
let hi = self.prev_token.span;

let style = match delim {
Some(token::Brace) => MacStmtStyle::Braces,
Some(Delimiter::Brace) => MacStmtStyle::Braces,
Some(_) => MacStmtStyle::NoBraces,
None => unreachable!(),
};
@ -434,7 +434,7 @@ impl<'a> Parser<'a> {
// If the next token is an open brace (e.g., `if a b {`), the place-
// inside-a-block suggestion would be more likely wrong than right.
Ok(Some(_))
if self.look_ahead(1, |t| t == &token::OpenDelim(token::Brace))
if self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Brace))
|| do_not_suggest_help => {}
// Do not suggest `if foo println!("") {;}` (as would be seen in test for #46836).
Ok(Some(Stmt { kind: StmtKind::Empty, .. })) => {}
@ -488,7 +488,7 @@ impl<'a> Parser<'a> {
maybe_whole!(self, NtBlock, |x| (Vec::new(), x));

self.maybe_recover_unexpected_block_label();
if !self.eat(&token::OpenDelim(token::Brace)) {
if !self.eat(&token::OpenDelim(Delimiter::Brace)) {
return self.error_block_no_opening_brace();
}

@ -509,7 +509,7 @@ impl<'a> Parser<'a> {
recover: AttemptLocalParseRecovery,
) -> PResult<'a, P<Block>> {
let mut stmts = vec![];
while !self.eat(&token::CloseDelim(token::Brace)) {
while !self.eat(&token::CloseDelim(Delimiter::Brace)) {
if self.token == token::Eof {
break;
}
@ -553,7 +553,7 @@ impl<'a> Parser<'a> {
{
// Just check for errors and recover; do not eat semicolon yet.
if let Err(mut e) =
self.expect_one_of(&[], &[token::Semi, token::CloseDelim(token::Brace)])
self.expect_one_of(&[], &[token::Semi, token::CloseDelim(Delimiter::Brace)])
{
if let TokenKind::DocComment(..) = self.token.kind {
if let Ok(snippet) = self.span_to_snippet(self.token.span) {

@ -3,7 +3,7 @@ use super::{Parser, PathStyle, TokenType};
use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};

use rustc_ast::ptr::P;
use rustc_ast::token::{self, Token, TokenKind};
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
use rustc_ast::{
self as ast, BareFnTy, FnRetTy, GenericBound, GenericBounds, GenericParam, Generics, Lifetime,
MacCall, MutTy, Mutability, PolyTraitRef, TraitBoundModifier, TraitObjectSyntax, Ty, TyKind,
@ -249,14 +249,14 @@ impl<'a> Parser<'a> {

let lo = self.token.span;
let mut impl_dyn_multi = false;
let kind = if self.check(&token::OpenDelim(token::Paren)) {
let kind = if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
self.parse_ty_tuple_or_parens(lo, allow_plus)?
} else if self.eat(&token::Not) {
// Never type `!`
TyKind::Never
} else if self.eat(&token::BinOp(token::Star)) {
self.parse_ty_ptr()?
} else if self.eat(&token::OpenDelim(token::Bracket)) {
} else if self.eat(&token::OpenDelim(Delimiter::Bracket)) {
self.parse_array_or_slice_ty()?
} else if self.check(&token::BinOp(token::And)) || self.check(&token::AndAnd) {
// Reference
@ -409,7 +409,7 @@ impl<'a> Parser<'a> {
let elt_ty = match self.parse_ty() {
Ok(ty) => ty,
Err(mut err)
if self.look_ahead(1, |t| t.kind == token::CloseDelim(token::Bracket))
if self.look_ahead(1, |t| t.kind == token::CloseDelim(Delimiter::Bracket))
| self.look_ahead(1, |t| t.kind == token::Semi) =>
{
// Recover from `[LIT; EXPR]` and `[LIT]`
@ -422,14 +422,14 @@ impl<'a> Parser<'a> {

let ty = if self.eat(&token::Semi) {
let mut length = self.parse_anon_const_expr()?;
if let Err(e) = self.expect(&token::CloseDelim(token::Bracket)) {
if let Err(e) = self.expect(&token::CloseDelim(Delimiter::Bracket)) {
// Try to recover from `X<Y, ...>` when `X::<Y, ...>` works
self.check_mistyped_turbofish_with_multiple_type_params(e, &mut length.value)?;
self.expect(&token::CloseDelim(token::Bracket))?;
self.expect(&token::CloseDelim(Delimiter::Bracket))?;
}
TyKind::Array(elt_ty, length)
} else {
self.expect(&token::CloseDelim(token::Bracket))?;
self.expect(&token::CloseDelim(Delimiter::Bracket))?;
TyKind::Slice(elt_ty)
};

@ -492,9 +492,9 @@ impl<'a> Parser<'a> {
// Parses the `typeof(EXPR)`.
// To avoid ambiguity, the type is surrounded by parentheses.
fn parse_typeof_ty(&mut self) -> PResult<'a, TyKind> {
self.expect(&token::OpenDelim(token::Paren))?;
self.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
let expr = self.parse_anon_const_expr()?;
self.expect(&token::CloseDelim(token::Paren))?;
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
Ok(TyKind::Typeof(expr))
}

@ -672,7 +672,7 @@ impl<'a> Parser<'a> {
|| self.check(&token::Question)
|| self.check(&token::Tilde)
|| self.check_keyword(kw::For)
|| self.check(&token::OpenDelim(token::Paren))
|| self.check(&token::OpenDelim(Delimiter::Parenthesis))
}

fn error_negative_bounds(
@ -713,7 +713,7 @@ impl<'a> Parser<'a> {
fn parse_generic_bound(&mut self) -> PResult<'a, Result<GenericBound, Span>> {
let anchor_lo = self.prev_token.span;
let lo = self.token.span;
let has_parens = self.eat(&token::OpenDelim(token::Paren));
let has_parens = self.eat(&token::OpenDelim(Delimiter::Parenthesis));
let inner_lo = self.token.span;
let is_negative = self.eat(&token::Not);

@ -766,7 +766,7 @@ impl<'a> Parser<'a> {
/// Recover on `('lifetime)` with `(` already eaten.
fn recover_paren_lifetime(&mut self, lo: Span, inner_lo: Span) -> PResult<'a, ()> {
let inner_span = inner_lo.to(self.prev_token.span);
self.expect(&token::CloseDelim(token::Paren))?;
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
let mut err = self.struct_span_err(
lo.to(self.prev_token.span),
"parenthesized lifetime bounds are not supported",
@ -829,7 +829,7 @@ impl<'a> Parser<'a> {
// suggestion is given.
let bounds = vec![];
self.parse_remaining_bounds(bounds, true)?;
self.expect(&token::CloseDelim(token::Paren))?;
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
let sp = vec![lo, self.prev_token.span];
let sugg: Vec<_> = sp.iter().map(|sp| (*sp, String::new())).collect();
self.struct_span_err(sp, "incorrect braces around trait bounds")
@ -840,7 +840,7 @@ impl<'a> Parser<'a> {
)
.emit();
} else {
self.expect(&token::CloseDelim(token::Paren))?;
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
}
}

@ -1,6 +1,6 @@
use crate::parse::ParseSess;
use crate::session::Session;
use rustc_ast::token::{self, DelimToken, Nonterminal, Token};
use rustc_ast::token::{self, Delimiter, Nonterminal, Token};
use rustc_ast::tokenstream::CanSynthesizeMissingTokens;
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
use rustc_data_structures::profiling::VerboseTimingGuard;
@ -137,7 +137,7 @@ impl<'a> FlattenNonterminals<'a> {
let tts = (self.nt_to_tokenstream)(&nt, self.parse_sess, self.synthesize_tokens);
TokenTree::Delimited(
DelimSpan::from_single(token.span),
DelimToken::NoDelim,
Delimiter::Invisible,
self.process_token_stream(tts),
)
.into()

@ -704,10 +704,10 @@ pub enum Delimiter {
|
||||
#[stable(feature = "proc_macro_lib2", since = "1.29.0")]
|
||||
Bracket,
|
||||
/// `Ø ... Ø`
|
||||
/// An implicit delimiter, that may, for example, appear around tokens coming from a
|
||||
/// An invisible delimiter, that may, for example, appear around tokens coming from a
|
||||
/// "macro variable" `$var`. It is important to preserve operator priorities in cases like
|
||||
/// `$var * 3` where `$var` is `1 + 2`.
|
||||
/// Implicit delimiters might not survive roundtrip of a token stream through a string.
|
||||
/// Invisible delimiters might not survive roundtrip of a token stream through a string.
|
||||
#[stable(feature = "proc_macro_lib2", since = "1.29.0")]
|
||||
None,
|
||||
}
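
The doc comment in the hunk above describes the behaviour that the rename to `Invisible` is meant to clarify. As an illustrative aside that is not part of this diff, here is a minimal sketch of the precedence-preserving behaviour it refers to; the macro name `mul3` is invented for the example:

```rust
// Hypothetical example (not from this commit). `$var` is captured as an
// `expr` fragment, so the substituted tokens are wrapped in an invisible
// delimiter and the expansion behaves like `(1 + 2) * 3`, not `1 + 2 * 3`.
macro_rules! mul3 {
    ($var:expr) => {
        $var * 3
    };
}

fn main() {
    assert_eq!(mul3!(1 + 2), 9); // would be 7 if the grouping were lost
}
```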

@ -1,4 +1,4 @@
use rustc_ast::token::{self, BinOpToken, DelimToken};
use rustc_ast::token::{self, BinOpToken, Delimiter};
use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_ast_pretty::pprust::state::State as Printer;
use rustc_ast_pretty::pprust::PrintState;
@ -104,11 +104,11 @@ fn print_tt(printer: &mut Printer<'_>, tt: &TokenTree) {
let open_delim = printer.token_kind_to_string(&token::OpenDelim(*delim));
printer.word(open_delim);
if !tts.is_empty() {
if *delim == DelimToken::Brace {
if *delim == Delimiter::Brace {
printer.space();
}
print_tts(printer, tts);
if *delim == DelimToken::Brace {
if *delim == Delimiter::Brace {
printer.space();
}
}
@ -162,9 +162,9 @@ fn print_tts(printer: &mut Printer<'_>, tts: &TokenStream) {
(_, _) => (true, Other),
},
TokenTree::Delimited(_, delim, _) => match (state, delim) {
(Dollar, DelimToken::Paren) => (false, DollarParen),
(Pound | PoundBang, DelimToken::Bracket) => (false, Other),
(Ident, DelimToken::Paren | DelimToken::Bracket) => (false, Other),
(Dollar, Delimiter::Parenthesis) => (false, DollarParen),
(Pound | PoundBang, Delimiter::Bracket) => (false, Other),
(Ident, Delimiter::Parenthesis | Delimiter::Bracket) => (false, Other),
(_, _) => (true, Other),
},
};

@ -3,7 +3,7 @@
// compile-flags: -Z span-debug
// edition:2018
//
// Tests the pretty-printing behavior of inserting `NoDelim` groups
// Tests the pretty-printing behavior of inserting `Invisible`-delimited groups

#![no_std] // Don't load unnecessary hygiene information from std
extern crate std;

@ -2,7 +2,7 @@ use std::borrow::Cow;
use std::cmp::min;

use itertools::Itertools;
use rustc_ast::token::{DelimToken, LitKind};
use rustc_ast::token::{Delimiter, LitKind};
use rustc_ast::{ast, ptr};
use rustc_span::{BytePos, Span};

@ -412,7 +412,7 @@ pub(crate) fn rewrite_array<'a, T: 'a + IntoOverflowableItem<'a>>(
context: &'a RewriteContext<'_>,
shape: Shape,
force_separator_tactic: Option<SeparatorTactic>,
delim_token: Option<DelimToken>,
delim_token: Option<Delimiter>,
) -> Option<String> {
overflow::rewrite_with_square_brackets(
context,

@ -12,7 +12,7 @@
use std::collections::HashMap;
use std::panic::{catch_unwind, AssertUnwindSafe};

use rustc_ast::token::{BinOpToken, DelimToken, Token, TokenKind};
use rustc_ast::token::{BinOpToken, Delimiter, Token, TokenKind};
use rustc_ast::tokenstream::{Cursor, Spacing, TokenStream, TokenTree};
use rustc_ast::{ast, ptr};
use rustc_ast_pretty::pprust;
@ -203,7 +203,7 @@ fn rewrite_macro_inner(
let is_forced_bracket = FORCED_BRACKET_MACROS.contains(&&macro_name[..]);

let style = if is_forced_bracket && !is_nested_macro {
DelimToken::Bracket
Delimiter::Bracket
} else {
original_style
};
@ -212,15 +212,15 @@ fn rewrite_macro_inner(
let has_comment = contains_comment(context.snippet(mac.span()));
if ts.is_empty() && !has_comment {
return match style {
DelimToken::Paren if position == MacroPosition::Item => {
Delimiter::Parenthesis if position == MacroPosition::Item => {
Some(format!("{}();", macro_name))
}
DelimToken::Bracket if position == MacroPosition::Item => {
Delimiter::Bracket if position == MacroPosition::Item => {
Some(format!("{}[];", macro_name))
}
DelimToken::Paren => Some(format!("{}()", macro_name)),
DelimToken::Bracket => Some(format!("{}[]", macro_name)),
DelimToken::Brace => Some(format!("{} {{}}", macro_name)),
Delimiter::Parenthesis => Some(format!("{}()", macro_name)),
Delimiter::Bracket => Some(format!("{}[]", macro_name)),
Delimiter::Brace => Some(format!("{} {{}}", macro_name)),
_ => unreachable!(),
};
}
@ -260,7 +260,7 @@ fn rewrite_macro_inner(
}

match style {
DelimToken::Paren => {
Delimiter::Parenthesis => {
// Handle special case: `vec!(expr; expr)`
if vec_with_semi {
handle_vec_semi(context, shape, arg_vec, macro_name, style)
@ -286,7 +286,7 @@ fn rewrite_macro_inner(
})
}
}
DelimToken::Bracket => {
Delimiter::Bracket => {
// Handle special case: `vec![expr; expr]`
if vec_with_semi {
handle_vec_semi(context, shape, arg_vec, macro_name, style)
@ -323,7 +323,7 @@ fn rewrite_macro_inner(
Some(format!("{}{}", rewrite, comma))
}
}
DelimToken::Brace => {
Delimiter::Brace => {
// For macro invocations with braces, always put a space between
// the `macro_name!` and `{ /* macro_body */ }` but skip modifying
// anything in between the braces (for now).
@ -342,11 +342,11 @@ fn handle_vec_semi(
shape: Shape,
arg_vec: Vec<MacroArg>,
macro_name: String,
delim_token: DelimToken,
delim_token: Delimiter,
) -> Option<String> {
let (left, right) = match delim_token {
DelimToken::Paren => ("(", ")"),
DelimToken::Bracket => ("[", "]"),
Delimiter::Parenthesis => ("(", ")"),
Delimiter::Bracket => ("[", "]"),
_ => unreachable!(),
};

@ -528,7 +528,7 @@ enum MacroArgKind {
/// e.g., `$($foo: expr),*`
Repeat(
/// `()`, `[]` or `{}`.
DelimToken,
Delimiter,
/// Inner arguments inside delimiters.
Vec<ParsedMacroArg>,
/// Something after the closing delimiter and the repeat token, if available.
@ -537,7 +537,7 @@ enum MacroArgKind {
Token,
),
/// e.g., `[derive(Debug)]`
Delimited(DelimToken, Vec<ParsedMacroArg>),
Delimited(Delimiter, Vec<ParsedMacroArg>),
/// A possible separator. e.g., `,` or `;`.
Separator(String, String),
/// Other random stuff that does not fit to other kinds.
@ -547,22 +547,22 @@ enum MacroArgKind {

fn delim_token_to_str(
context: &RewriteContext<'_>,
delim_token: DelimToken,
delim_token: Delimiter,
shape: Shape,
use_multiple_lines: bool,
inner_is_empty: bool,
) -> (String, String) {
let (lhs, rhs) = match delim_token {
DelimToken::Paren => ("(", ")"),
DelimToken::Bracket => ("[", "]"),
DelimToken::Brace => {
Delimiter::Parenthesis => ("(", ")"),
Delimiter::Bracket => ("[", "]"),
Delimiter::Brace => {
if inner_is_empty || use_multiple_lines {
("{", "}")
} else {
("{ ", " }")
}
}
DelimToken::NoDelim => unreachable!(),
Delimiter::Invisible => unreachable!(),
};
if use_multiple_lines {
let indent_str = shape.indent.to_string_with_newline(context.config);
@ -583,8 +583,8 @@ impl MacroArgKind {
fn starts_with_brace(&self) -> bool {
matches!(
*self,
MacroArgKind::Repeat(DelimToken::Brace, _, _, _)
| MacroArgKind::Delimited(DelimToken::Brace, _)
MacroArgKind::Repeat(Delimiter::Brace, _, _, _)
| MacroArgKind::Delimited(Delimiter::Brace, _)
)
}

@ -753,7 +753,7 @@ impl MacroArgParser {
}
}

fn add_delimited(&mut self, inner: Vec<ParsedMacroArg>, delim: DelimToken) {
fn add_delimited(&mut self, inner: Vec<ParsedMacroArg>, delim: Delimiter) {
self.result.push(ParsedMacroArg {
kind: MacroArgKind::Delimited(delim, inner),
});
@ -763,7 +763,7 @@ impl MacroArgParser {
fn add_repeat(
&mut self,
inner: Vec<ParsedMacroArg>,
delim: DelimToken,
delim: Delimiter,
iter: &mut Cursor,
) -> Option<()> {
let mut buffer = String::new();
@ -1083,18 +1083,18 @@ pub(crate) fn convert_try_mac(
}
}

pub(crate) fn macro_style(mac: &ast::MacCall, context: &RewriteContext<'_>) -> DelimToken {
pub(crate) fn macro_style(mac: &ast::MacCall, context: &RewriteContext<'_>) -> Delimiter {
let snippet = context.snippet(mac.span());
let paren_pos = snippet.find_uncommented("(").unwrap_or(usize::max_value());
let bracket_pos = snippet.find_uncommented("[").unwrap_or(usize::max_value());
let brace_pos = snippet.find_uncommented("{").unwrap_or(usize::max_value());

if paren_pos < bracket_pos && paren_pos < brace_pos {
DelimToken::Paren
Delimiter::Parenthesis
} else if bracket_pos < brace_pos {
DelimToken::Bracket
Delimiter::Bracket
} else {
DelimToken::Brace
Delimiter::Brace
}
}
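
As another aside that is not part of the commit, the delimiter-detection heuristic in `macro_style` above (pick whichever of `(`, `[`, `{` appears first in the macro call snippet) can be sketched as a standalone function. The name `guess_macro_delimiter` and the test strings are invented for illustration, and the sketch ignores the comment-skipping done by `find_uncommented`:

```rust
// Simplified sketch of the heuristic: return the opening delimiter that
// occurs earliest in the snippet, defaulting to a brace.
fn guess_macro_delimiter(snippet: &str) -> char {
    // Position of each opening delimiter, or usize::MAX if it never occurs.
    let pos = |c: char| snippet.find(c).unwrap_or(usize::MAX);
    let (paren, bracket, brace) = (pos('('), pos('['), pos('{'));
    if paren < bracket && paren < brace {
        '('
    } else if bracket < brace {
        '['
    } else {
        '{'
    }
}

fn main() {
    assert_eq!(guess_macro_delimiter("vec![1, 2, 3]"), '[');
    assert_eq!(guess_macro_delimiter("println!(\"hi\")"), '(');
    assert_eq!(guess_macro_delimiter("thread_local! { static X: u8 = 0; }"), '{');
}
```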

@ -1174,7 +1174,7 @@ struct Macro {
// rather than clone them, if we can make the borrowing work out.
struct MacroBranch {
span: Span,
args_paren_kind: DelimToken,
args_paren_kind: Delimiter,
args: TokenStream,
body: Span,
whole_body: Span,
@ -1188,7 +1188,7 @@ impl MacroBranch {
multi_branch_style: bool,
) -> Option<String> {
// Only attempt to format function-like macros.
if self.args_paren_kind != DelimToken::Paren {
if self.args_paren_kind != Delimiter::Parenthesis {
// FIXME(#1539): implement for non-sugared macros.
return None;
}
@ -1350,18 +1350,18 @@ fn rewrite_macro_with_items(
items: &[MacroArg],
macro_name: &str,
shape: Shape,
style: DelimToken,
style: Delimiter,
position: MacroPosition,
span: Span,
) -> Option<String> {
let (opener, closer) = match style {
DelimToken::Paren => ("(", ")"),
DelimToken::Bracket => ("[", "]"),
DelimToken::Brace => (" {", "}"),
Delimiter::Parenthesis => ("(", ")"),
Delimiter::Bracket => ("[", "]"),
Delimiter::Brace => (" {", "}"),
_ => return None,
};
let trailing_semicolon = match style {
DelimToken::Paren | DelimToken::Bracket if position == MacroPosition::Item => ";",
Delimiter::Parenthesis | Delimiter::Bracket if position == MacroPosition::Item => ";",
_ => "",
};

@ -3,7 +3,7 @@
use std::cmp::min;

use itertools::Itertools;
use rustc_ast::token::DelimToken;
use rustc_ast::token::Delimiter;
use rustc_ast::{ast, ptr};
use rustc_span::Span;

@ -297,11 +297,11 @@ pub(crate) fn rewrite_with_square_brackets<'a, T: 'a + IntoOverflowableItem<'a>>
shape: Shape,
span: Span,
force_separator_tactic: Option<SeparatorTactic>,
delim_token: Option<DelimToken>,
delim_token: Option<Delimiter>,
) -> Option<String> {
let (lhs, rhs) = match delim_token {
Some(DelimToken::Paren) => ("(", ")"),
Some(DelimToken::Brace) => ("{", "}"),
Some(Delimiter::Parenthesis) => ("(", ")"),
Some(Delimiter::Brace) => ("{", "}"),
_ => ("[", "]"),
};
Context::new(

@ -1,7 +1,7 @@
use std::panic::{catch_unwind, AssertUnwindSafe};

use rustc_ast::ast;
use rustc_ast::token::{DelimToken, TokenKind};
use rustc_ast::token::{Delimiter, TokenKind};
use rustc_parse::parser::ForceCollect;
use rustc_span::symbol::kw;

@ -47,11 +47,11 @@ fn parse_cfg_if_inner<'a>(
.map_err(|_| "Failed to parse attributes")?;
}

if !parser.eat(&TokenKind::OpenDelim(DelimToken::Brace)) {
if !parser.eat(&TokenKind::OpenDelim(Delimiter::Brace)) {
return Err("Expected an opening brace");
}

while parser.token != TokenKind::CloseDelim(DelimToken::Brace)
while parser.token != TokenKind::CloseDelim(Delimiter::Brace)
&& parser.token.kind != TokenKind::Eof
{
let item = match parser.parse_item(ForceCollect::No) {
@ -70,7 +70,7 @@ fn parse_cfg_if_inner<'a>(
}
}

if !parser.eat(&TokenKind::CloseDelim(DelimToken::Brace)) {
if !parser.eat(&TokenKind::CloseDelim(Delimiter::Brace)) {
return Err("Expected a closing brace");
}

@ -1,4 +1,4 @@
use rustc_ast::token::{DelimToken, TokenKind};
use rustc_ast::token::{Delimiter, TokenKind};
use rustc_ast::tokenstream::TokenStream;
use rustc_ast::{ast, ptr};
use rustc_parse::parser::{ForceCollect, Parser};
@ -81,7 +81,7 @@ fn check_keyword<'a, 'b: 'a>(parser: &'a mut Parser<'b>) -> Option<MacroArg> {
&& parser.look_ahead(1, |t| {
t.kind == TokenKind::Eof
|| t.kind == TokenKind::Comma
|| t.kind == TokenKind::CloseDelim(DelimToken::NoDelim)
|| t.kind == TokenKind::CloseDelim(Delimiter::Invisible)
})
{
parser.bump();
@ -97,7 +97,7 @@ fn check_keyword<'a, 'b: 'a>(parser: &'a mut Parser<'b>) -> Option<MacroArg> {
pub(crate) fn parse_macro_args(
context: &RewriteContext<'_>,
tokens: TokenStream,
style: DelimToken,
style: Delimiter,
forced_bracket: bool,
) -> Option<ParsedMacroArgs> {
let mut parser = build_parser(context, tokens);
@ -105,7 +105,7 @@ pub(crate) fn parse_macro_args(
let mut vec_with_semi = false;
let mut trailing_comma = false;

if DelimToken::Brace != style {
if Delimiter::Brace != style {
loop {
if let Some(arg) = check_keyword(&mut parser) {
args.push(arg);

@ -1,7 +1,7 @@
use std::cell::{Cell, RefCell};
use std::rc::Rc;

use rustc_ast::{ast, token::DelimToken, visit, AstLike};
use rustc_ast::{ast, token::Delimiter, visit, AstLike};
use rustc_data_structures::sync::Lrc;
use rustc_span::{symbol, BytePos, Pos, Span};

@ -689,7 +689,7 @@ impl<'b, 'a: 'b> FmtVisitor<'a> {
// with whitespace between the delimiters and trailing semi (i.e. `foo!(abc) ;`)
// are formatted correctly.
let (span, rewrite) = match macro_style(mac, &self.get_context()) {
DelimToken::Bracket | DelimToken::Paren if MacroPosition::Item == pos => {
Delimiter::Bracket | Delimiter::Parenthesis if MacroPosition::Item == pos => {
let search_span = mk_sp(mac.span().hi(), self.snippet_provider.end_pos());
let hi = self.snippet_provider.span_before(search_span, ";");
let target_span = mk_sp(mac.span().lo(), hi + BytePos(1));