Rollup merge of #121060 - clubby789:bool-newtypes, r=cjgillot

Add newtypes for bool fields/params/return types

Fixed all the cases of this found with some simple searches for `*/ bool` and `bool /*`; there are probably many more.
commit 7c88ea2842
Matthias Krüger, 2024-02-25 17:05:20 +01:00 (committed by GitHub)
35 changed files with 332 additions and 207 deletions
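For context, the change boils down to replacing bare `bool` parameters/fields/returns with two-variant enums plus `From` conversions, so call sites name their intent instead of passing `true`/`false`. Here is a minimal standalone sketch of the pattern; `IdentIsRaw` mirrors the new compiler type but is redefined locally so the snippet compiles on its own, and `ident_token` is an invented helper used purely for illustration, not a rustc function:

```rust
// Sketch of the bool-newtype pattern applied in this PR. `IdentIsRaw` mirrors
// the new compiler type; `ident_token` is a made-up helper for illustration.
#[derive(Copy, Clone, Debug, PartialEq)]
enum IdentIsRaw {
    No,
    Yes,
}

impl From<bool> for IdentIsRaw {
    fn from(b: bool) -> Self {
        if b { Self::Yes } else { Self::No }
    }
}

impl From<IdentIsRaw> for bool {
    fn from(is_raw: IdentIsRaw) -> bool {
        matches!(is_raw, IdentIsRaw::Yes)
    }
}

// Before the PR this signature would be `fn ident_token(name: &str, is_raw: bool)`,
// and a call would read `ident_token("match", true)` with no hint of what `true` means.
fn ident_token(name: &str, is_raw: IdentIsRaw) -> String {
    match is_raw {
        IdentIsRaw::Yes => format!("r#{name}"),
        IdentIsRaw::No => name.to_string(),
    }
}

fn main() {
    assert_eq!(ident_token("match", IdentIsRaw::Yes), "r#match");
    // Code that still computes a plain bool can convert at the boundary.
    assert_eq!(ident_token("foo", false.into()), "foo");
}
```

The `From` impls keep the migration incremental: code that still produces a plain `bool` (for example `ident.is_raw_guess()`) can call `.into()` at the boundary, as several hunks below do.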

View File

@ -107,7 +107,7 @@ impl Lit {
/// Keep this in sync with `Token::can_begin_literal_or_bool` excluding unary negation.
pub fn from_token(token: &Token) -> Option<Lit> {
match token.uninterpolate().kind {
Ident(name, false) if name.is_bool_lit() => Some(Lit::new(Bool, name, None)),
Ident(name, IdentIsRaw::No) if name.is_bool_lit() => Some(Lit::new(Bool, name, None)),
Literal(token_lit) => Some(token_lit),
Interpolated(ref nt)
if let NtExpr(expr) | NtLiteral(expr) = &nt.0
@ -183,7 +183,7 @@ impl LitKind {
}
}
pub fn ident_can_begin_expr(name: Symbol, span: Span, is_raw: bool) -> bool {
pub fn ident_can_begin_expr(name: Symbol, span: Span, is_raw: IdentIsRaw) -> bool {
let ident_token = Token::new(Ident(name, is_raw), span);
!ident_token.is_reserved_ident()
@ -214,7 +214,7 @@ pub fn ident_can_begin_expr(name: Symbol, span: Span, is_raw: bool) -> bool {
.contains(&name)
}
fn ident_can_begin_type(name: Symbol, span: Span, is_raw: bool) -> bool {
fn ident_can_begin_type(name: Symbol, span: Span, is_raw: IdentIsRaw) -> bool {
let ident_token = Token::new(Ident(name, is_raw), span);
!ident_token.is_reserved_ident()
@ -223,6 +223,24 @@ fn ident_can_begin_type(name: Symbol, span: Span, is_raw: bool) -> bool {
.contains(&name)
}
#[derive(PartialEq, Encodable, Decodable, Debug, Copy, Clone, HashStable_Generic)]
pub enum IdentIsRaw {
No,
Yes,
}
impl From<bool> for IdentIsRaw {
fn from(b: bool) -> Self {
if b { Self::Yes } else { Self::No }
}
}
impl From<IdentIsRaw> for bool {
fn from(is_raw: IdentIsRaw) -> bool {
matches!(is_raw, IdentIsRaw::Yes)
}
}
// SAFETY: due to the `Clone` impl below, all fields of all variants other than
// `Interpolated` must impl `Copy`.
#[derive(PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
@ -298,7 +316,7 @@ pub enum TokenKind {
/// Do not forget about `NtIdent` when you want to match on identifiers.
/// It's recommended to use `Token::(ident,uninterpolate,uninterpolated_span)` to
/// treat regular and interpolated identifiers in the same way.
Ident(Symbol, /* is_raw */ bool),
Ident(Symbol, IdentIsRaw),
/// Lifetime identifier token.
/// Do not forget about `NtLifetime` when you want to match on lifetime identifiers.
/// It's recommended to use `Token::(lifetime,uninterpolate,uninterpolated_span)` to
@ -411,7 +429,7 @@ impl Token {
/// Recovers a `Token` from an `Ident`. This creates a raw identifier if necessary.
pub fn from_ast_ident(ident: Ident) -> Self {
Token::new(Ident(ident.name, ident.is_raw_guess()), ident.span)
Token::new(Ident(ident.name, ident.is_raw_guess().into()), ident.span)
}
/// For interpolated tokens, returns a span of the fragment to which the interpolated
@ -567,7 +585,7 @@ impl Token {
pub fn can_begin_literal_maybe_minus(&self) -> bool {
match self.uninterpolate().kind {
Literal(..) | BinOp(Minus) => true,
Ident(name, false) if name.is_bool_lit() => true,
Ident(name, IdentIsRaw::No) if name.is_bool_lit() => true,
Interpolated(ref nt) => match &nt.0 {
NtLiteral(_) => true,
NtExpr(e) => match &e.kind {
@ -602,7 +620,7 @@ impl Token {
/// Returns an identifier if this token is an identifier.
#[inline]
pub fn ident(&self) -> Option<(Ident, /* is_raw */ bool)> {
pub fn ident(&self) -> Option<(Ident, IdentIsRaw)> {
// We avoid using `Token::uninterpolate` here because it's slow.
match &self.kind {
&Ident(name, is_raw) => Some((Ident::new(name, self.span), is_raw)),
@ -755,7 +773,7 @@ impl Token {
/// Returns `true` if the token is a non-raw identifier for which `pred` holds.
pub fn is_non_raw_ident_where(&self, pred: impl FnOnce(Ident) -> bool) -> bool {
match self.ident() {
Some((id, false)) => pred(id),
Some((id, IdentIsRaw::No)) => pred(id),
_ => false,
}
}
@ -806,7 +824,7 @@ impl Token {
_ => return None,
},
SingleQuote => match joint.kind {
Ident(name, false) => Lifetime(Symbol::intern(&format!("'{name}"))),
Ident(name, IdentIsRaw::No) => Lifetime(Symbol::intern(&format!("'{name}"))),
_ => return None,
},
@ -836,7 +854,7 @@ pub enum Nonterminal {
NtPat(P<ast::Pat>),
NtExpr(P<ast::Expr>),
NtTy(P<ast::Ty>),
NtIdent(Ident, /* is_raw */ bool),
NtIdent(Ident, IdentIsRaw),
NtLifetime(Ident),
NtLiteral(P<ast::Expr>),
/// Stuff inside brackets for attributes

View File

@ -656,7 +656,7 @@ impl TokenStream {
DelimSpacing::new(Spacing::JointHidden, Spacing::Alone),
Delimiter::Bracket,
[
TokenTree::token_alone(token::Ident(sym::doc, false), span),
TokenTree::token_alone(token::Ident(sym::doc, token::IdentIsRaw::No), span),
TokenTree::token_alone(token::Eq, span),
TokenTree::token_alone(
TokenKind::lit(token::StrRaw(num_of_hashes), data, None),

View File

@ -185,7 +185,7 @@ fn space_between(tt1: &TokenTree, tt2: &TokenTree) -> bool {
// IDENT + `!`: `println!()`, but `if !x { ... }` needs a space after the `if`
(Tok(Token { kind: Ident(sym, is_raw), span }, _), Tok(Token { kind: Not, .. }, _))
if !Ident::new(*sym, *span).is_reserved() || *is_raw =>
if !Ident::new(*sym, *span).is_reserved() || matches!(is_raw, IdentIsRaw::Yes) =>
{
false
}
@ -197,7 +197,7 @@ fn space_between(tt1: &TokenTree, tt2: &TokenTree) -> bool {
|| *sym == kw::Fn
|| *sym == kw::SelfUpper
|| *sym == kw::Pub
|| *is_raw =>
|| matches!(is_raw, IdentIsRaw::Yes) =>
{
false
}
@ -731,7 +731,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
token::NtBlock(e) => self.block_to_string(e),
token::NtStmt(e) => self.stmt_to_string(e),
token::NtPat(e) => self.pat_to_string(e),
token::NtIdent(e, is_raw) => IdentPrinter::for_ast_ident(*e, *is_raw).to_string(),
&token::NtIdent(e, is_raw) => IdentPrinter::for_ast_ident(e, is_raw.into()).to_string(),
token::NtLifetime(e) => e.to_string(),
token::NtLiteral(e) => self.expr_to_string(e),
token::NtVis(e) => self.vis_to_string(e),
@ -795,7 +795,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
/* Name components */
token::Ident(s, is_raw) => {
IdentPrinter::new(s, is_raw, convert_dollar_crate).to_string().into()
IdentPrinter::new(s, is_raw.into(), convert_dollar_crate).to_string().into()
}
token::Lifetime(s) => s.to_string().into(),

View File

@ -1,3 +1,4 @@
use ast::token::IdentIsRaw;
use rustc_ast as ast;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter};
@ -416,7 +417,7 @@ fn parse_reg<'a>(
) -> PResult<'a, ast::InlineAsmRegOrRegClass> {
p.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
let result = match p.token.uninterpolate().kind {
token::Ident(name, false) => ast::InlineAsmRegOrRegClass::RegClass(name),
token::Ident(name, IdentIsRaw::No) => ast::InlineAsmRegOrRegClass::RegClass(name),
token::Literal(token::Lit { kind: token::LitKind::Str, symbol, suffix: _ }) => {
*explicit_reg = true;
ast::InlineAsmRegOrRegClass::Reg(symbol)

View File

@ -1,7 +1,6 @@
use rustc_ast::{
ptr::P,
token,
token::Delimiter,
token::{self, Delimiter, IdentIsRaw},
tokenstream::{DelimSpan, TokenStream, TokenTree},
BinOpKind, BorrowKind, DelimArgs, Expr, ExprKind, ItemKind, MacCall, MethodCall, Mutability,
Path, PathSegment, Stmt, StructRest, UnOp, UseTree, UseTreeKind, DUMMY_NODE_ID,
@ -170,7 +169,10 @@ impl<'cx, 'a> Context<'cx, 'a> {
];
let captures = self.capture_decls.iter().flat_map(|cap| {
[
TokenTree::token_joint_hidden(token::Ident(cap.ident.name, false), cap.ident.span),
TokenTree::token_joint_hidden(
token::Ident(cap.ident.name, IdentIsRaw::No),
cap.ident.span,
),
TokenTree::token_alone(token::Comma, self.span),
]
});

View File

@ -198,7 +198,7 @@ where
match fields {
Unnamed(fields, is_tuple) => {
let path_expr = cx.expr_path(outer_pat_path);
if !*is_tuple {
if matches!(is_tuple, IsTuple::No) {
path_expr
} else {
let fields = fields

View File

@ -62,8 +62,8 @@ fn default_struct_substructure(
let default_call = |span| cx.expr_call_global(span, default_ident.clone(), ThinVec::new());
let expr = match summary {
Unnamed(_, false) => cx.expr_ident(trait_span, substr.type_ident),
Unnamed(fields, true) => {
Unnamed(_, IsTuple::No) => cx.expr_ident(trait_span, substr.type_ident),
Unnamed(fields, IsTuple::Yes) => {
let exprs = fields.iter().map(|sp| default_call(*sp)).collect();
cx.expr_call_ident(trait_span, substr.type_ident, exprs)
}

View File

@ -286,10 +286,16 @@ pub struct FieldInfo {
pub other_selflike_exprs: Vec<P<Expr>>,
}
#[derive(Copy, Clone)]
pub enum IsTuple {
No,
Yes,
}
/// Fields for a static method
pub enum StaticFields {
/// Tuple and unit structs/enum variants like this.
Unnamed(Vec<Span>, bool /*is tuple*/),
Unnamed(Vec<Span>, IsTuple),
/// Normal structs/struct variants.
Named(Vec<(Ident, Span)>),
}
@ -1439,7 +1445,10 @@ impl<'a> TraitDef<'a> {
}
}
let is_tuple = matches!(struct_def, ast::VariantData::Tuple(..));
let is_tuple = match struct_def {
ast::VariantData::Tuple(..) => IsTuple::Yes,
_ => IsTuple::No,
};
match (just_spans.is_empty(), named_idents.is_empty()) {
(false, false) => cx
.dcx()

View File

@ -10,6 +10,7 @@ use rustc_ast::{
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::{Applicability, DiagnosticBuilder, MultiSpan, PResult, SingleLabelManySpans};
use rustc_expand::base::{self, *};
use rustc_parse::parser::Recovered;
use rustc_parse_format as parse;
use rustc_span::symbol::{Ident, Symbol};
use rustc_span::{BytePos, InnerSpan, Span};
@ -111,9 +112,8 @@ fn parse_args<'a>(ecx: &mut ExtCtxt<'a>, sp: Span, tts: TokenStream) -> PResult<
_ => return Err(err),
}
}
Ok(recovered) => {
assert!(recovered);
}
Ok(Recovered::Yes) => (),
Ok(Recovered::No) => unreachable!(),
}
}
first = false;

View File

@ -2106,9 +2106,16 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
return Ok(args[0].immediate());
}
#[derive(Copy, Clone)]
enum Sign {
Unsigned,
Signed,
}
use Sign::*;
enum Style {
Float,
Int(/* is signed? */ bool),
Int(Sign),
Unsupported,
}
@ -2116,11 +2123,11 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
// vectors of pointer-sized integers should've been
// disallowed before here, so this unwrap is safe.
ty::Int(i) => (
Style::Int(true),
Style::Int(Signed),
i.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(),
),
ty::Uint(u) => (
Style::Int(false),
Style::Int(Unsigned),
u.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(),
),
ty::Float(f) => (Style::Float, f.bit_width()),
@ -2128,11 +2135,11 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
};
let (out_style, out_width) = match out_elem.kind() {
ty::Int(i) => (
Style::Int(true),
Style::Int(Signed),
i.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(),
),
ty::Uint(u) => (
Style::Int(false),
Style::Int(Unsigned),
u.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(),
),
ty::Float(f) => (Style::Float, f.bit_width()),
@ -2140,31 +2147,31 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
};
match (in_style, out_style) {
(Style::Int(in_is_signed), Style::Int(_)) => {
(Style::Int(sign), Style::Int(_)) => {
return Ok(match in_width.cmp(&out_width) {
Ordering::Greater => bx.trunc(args[0].immediate(), llret_ty),
Ordering::Equal => args[0].immediate(),
Ordering::Less => {
if in_is_signed {
bx.sext(args[0].immediate(), llret_ty)
} else {
bx.zext(args[0].immediate(), llret_ty)
}
}
Ordering::Less => match sign {
Sign::Signed => bx.sext(args[0].immediate(), llret_ty),
Sign::Unsigned => bx.zext(args[0].immediate(), llret_ty),
},
});
}
(Style::Int(in_is_signed), Style::Float) => {
return Ok(if in_is_signed {
bx.sitofp(args[0].immediate(), llret_ty)
} else {
bx.uitofp(args[0].immediate(), llret_ty)
});
(Style::Int(Sign::Signed), Style::Float) => {
return Ok(bx.sitofp(args[0].immediate(), llret_ty));
}
(Style::Float, Style::Int(out_is_signed)) => {
return Ok(match (out_is_signed, name == sym::simd_as) {
(false, false) => bx.fptoui(args[0].immediate(), llret_ty),
(true, false) => bx.fptosi(args[0].immediate(), llret_ty),
(_, true) => bx.cast_float_to_int(out_is_signed, args[0].immediate(), llret_ty),
(Style::Int(Sign::Unsigned), Style::Float) => {
return Ok(bx.uitofp(args[0].immediate(), llret_ty));
}
(Style::Float, Style::Int(sign)) => {
return Ok(match (sign, name == sym::simd_as) {
(Sign::Unsigned, false) => bx.fptoui(args[0].immediate(), llret_ty),
(Sign::Signed, false) => bx.fptosi(args[0].immediate(), llret_ty),
(_, true) => bx.cast_float_to_int(
matches!(sign, Sign::Signed),
args[0].immediate(),
llret_ty,
),
});
}
(Style::Float, Style::Float) => {

View File

@ -107,7 +107,7 @@
use crate::errors;
use crate::mbe::{KleeneToken, TokenTree};
use rustc_ast::token::{Delimiter, Token, TokenKind};
use rustc_ast::token::{Delimiter, IdentIsRaw, Token, TokenKind};
use rustc_ast::{NodeId, DUMMY_NODE_ID};
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::{DiagnosticMessage, MultiSpan};
@ -409,7 +409,7 @@ fn check_nested_occurrences(
match (state, tt) {
(
NestedMacroState::Empty,
&TokenTree::Token(Token { kind: TokenKind::Ident(name, false), .. }),
&TokenTree::Token(Token { kind: TokenKind::Ident(name, IdentIsRaw::No), .. }),
) => {
if name == kw::MacroRules {
state = NestedMacroState::MacroRules;

View File

@ -8,6 +8,7 @@ use crate::mbe::macro_parser::{Error, ErrorReported, Failure, Success, TtParser}
use crate::mbe::macro_parser::{MatchedSeq, MatchedTokenTree, MatcherLoc};
use crate::mbe::transcribe::transcribe;
use ast::token::IdentIsRaw;
use rustc_ast as ast;
use rustc_ast::token::{self, Delimiter, NonterminalKind, Token, TokenKind, TokenKind::*};
use rustc_ast::tokenstream::{DelimSpan, TokenStream};
@ -1302,7 +1303,9 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
match tok {
TokenTree::Token(token) => match token.kind {
FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes,
Ident(name, false) if name == kw::If || name == kw::In => IsInFollow::Yes,
Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
IsInFollow::Yes
}
_ => IsInFollow::No(TOKENS),
},
_ => IsInFollow::No(TOKENS),
@ -1313,7 +1316,9 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
match tok {
TokenTree::Token(token) => match token.kind {
FatArrow | Comma | Eq => IsInFollow::Yes,
Ident(name, false) if name == kw::If || name == kw::In => IsInFollow::Yes,
Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
IsInFollow::Yes
}
_ => IsInFollow::No(TOKENS),
},
_ => IsInFollow::No(TOKENS),
@ -1336,7 +1341,7 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
| BinOp(token::Shr)
| Semi
| BinOp(token::Or) => IsInFollow::Yes,
Ident(name, false) if name == kw::As || name == kw::Where => {
Ident(name, IdentIsRaw::No) if name == kw::As || name == kw::Where => {
IsInFollow::Yes
}
_ => IsInFollow::No(TOKENS),
@ -1364,7 +1369,8 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
match tok {
TokenTree::Token(token) => match token.kind {
Comma => IsInFollow::Yes,
Ident(name, is_raw) if is_raw || name != kw::Priv => IsInFollow::Yes,
Ident(_, IdentIsRaw::Yes) => IsInFollow::Yes,
Ident(name, _) if name != kw::Priv => IsInFollow::Yes,
_ => {
if token.can_begin_type() {
IsInFollow::Yes

View File

@ -1,4 +1,4 @@
use rustc_ast::token::{self, Delimiter};
use rustc_ast::token::{self, Delimiter, IdentIsRaw};
use rustc_ast::tokenstream::{RefTokenTreeCursor, TokenStream, TokenTree};
use rustc_ast::{LitIntType, LitKind};
use rustc_ast_pretty::pprust;
@ -142,7 +142,7 @@ fn parse_ident<'sess>(
if let Some(tt) = iter.next()
&& let TokenTree::Token(token, _) = tt
{
if let Some((elem, false)) = token.ident() {
if let Some((elem, IdentIsRaw::No)) = token.ident() {
return Ok(elem);
}
let token_str = pprust::token_to_string(token);

View File

@ -2,7 +2,7 @@ use crate::errors;
use crate::mbe::macro_parser::count_metavar_decls;
use crate::mbe::{Delimited, KleeneOp, KleeneToken, MetaVarExpr, SequenceRepetition, TokenTree};
use rustc_ast::token::{self, Delimiter, Token};
use rustc_ast::token::{self, Delimiter, IdentIsRaw, Token};
use rustc_ast::{tokenstream, NodeId};
use rustc_ast_pretty::pprust;
use rustc_feature::Features;
@ -222,7 +222,7 @@ fn parse_tree<'a>(
Some(tokenstream::TokenTree::Token(token, _)) if token.is_ident() => {
let (ident, is_raw) = token.ident().unwrap();
let span = ident.span.with_lo(span.lo());
if ident.name == kw::Crate && !is_raw {
if ident.name == kw::Crate && matches!(is_raw, IdentIsRaw::No) {
TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span)
} else {
TokenTree::MetaVar(span, ident)

View File

@ -2,6 +2,7 @@ use crate::tests::{
matches_codepattern, string_to_stream, with_error_checking_parse, with_expected_parse_error,
};
use ast::token::IdentIsRaw;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Token};
use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
@ -74,9 +75,12 @@ fn string_to_tts_macro() {
match tts {
[
TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. }, _),
TokenTree::Token(
Token { kind: token::Ident(name_macro_rules, IdentIsRaw::No), .. },
_,
),
TokenTree::Token(Token { kind: token::Not, .. }, _),
TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. }, _),
TokenTree::Token(Token { kind: token::Ident(name_zip, IdentIsRaw::No), .. }, _),
TokenTree::Delimited(.., macro_delim, macro_tts),
] if name_macro_rules == &kw::MacroRules && name_zip.as_str() == "zip" => {
let tts = &macro_tts.trees().collect::<Vec<_>>();
@ -90,7 +94,10 @@ fn string_to_tts_macro() {
match &tts[..] {
[
TokenTree::Token(Token { kind: token::Dollar, .. }, _),
TokenTree::Token(Token { kind: token::Ident(name, false), .. }, _),
TokenTree::Token(
Token { kind: token::Ident(name, IdentIsRaw::No), .. },
_,
),
] if first_delim == &Delimiter::Parenthesis && name.as_str() == "a" => {
}
_ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
@ -99,7 +106,10 @@ fn string_to_tts_macro() {
match &tts[..] {
[
TokenTree::Token(Token { kind: token::Dollar, .. }, _),
TokenTree::Token(Token { kind: token::Ident(name, false), .. }, _),
TokenTree::Token(
Token { kind: token::Ident(name, IdentIsRaw::No), .. },
_,
),
] if second_delim == &Delimiter::Parenthesis
&& name.as_str() == "a" => {}
_ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
@ -119,8 +129,11 @@ fn string_to_tts_1() {
let tts = string_to_stream("fn a(b: i32) { b; }".to_string());
let expected = TokenStream::new(vec![
TokenTree::token_alone(token::Ident(kw::Fn, false), sp(0, 2)),
TokenTree::token_joint_hidden(token::Ident(Symbol::intern("a"), false), sp(3, 4)),
TokenTree::token_alone(token::Ident(kw::Fn, IdentIsRaw::No), sp(0, 2)),
TokenTree::token_joint_hidden(
token::Ident(Symbol::intern("a"), IdentIsRaw::No),
sp(3, 4),
),
TokenTree::Delimited(
DelimSpan::from_pair(sp(4, 5), sp(11, 12)),
// `JointHidden` because the `(` is followed immediately by
@ -128,10 +141,16 @@ fn string_to_tts_1() {
DelimSpacing::new(Spacing::JointHidden, Spacing::Alone),
Delimiter::Parenthesis,
TokenStream::new(vec![
TokenTree::token_joint(token::Ident(Symbol::intern("b"), false), sp(5, 6)),
TokenTree::token_joint(
token::Ident(Symbol::intern("b"), IdentIsRaw::No),
sp(5, 6),
),
TokenTree::token_alone(token::Colon, sp(6, 7)),
// `JointHidden` because the `i32` is immediately followed by the `)`.
TokenTree::token_joint_hidden(token::Ident(sym::i32, false), sp(8, 11)),
TokenTree::token_joint_hidden(
token::Ident(sym::i32, IdentIsRaw::No),
sp(8, 11),
),
])
.into(),
),
@ -143,7 +162,10 @@ fn string_to_tts_1() {
DelimSpacing::new(Spacing::Alone, Spacing::Alone),
Delimiter::Brace,
TokenStream::new(vec![
TokenTree::token_joint(token::Ident(Symbol::intern("b"), false), sp(15, 16)),
TokenTree::token_joint(
token::Ident(Symbol::intern("b"), IdentIsRaw::No),
sp(15, 16),
),
// `Alone` because the `;` is followed by whitespace.
TokenTree::token_alone(token::Semi, sp(16, 17)),
])

View File

@ -1,4 +1,5 @@
use crate::base::ExtCtxt;
use ast::token::IdentIsRaw;
use pm::bridge::{
server, DelimSpan, Diagnostic, ExpnGlobals, Group, Ident, LitKind, Literal, Punct, TokenTree,
};
@ -216,7 +217,9 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
Question => op("?"),
SingleQuote => op("'"),
Ident(sym, is_raw) => trees.push(TokenTree::Ident(Ident { sym, is_raw, span })),
Ident(sym, is_raw) => {
trees.push(TokenTree::Ident(Ident { sym, is_raw: is_raw.into(), span }))
}
Lifetime(name) => {
let ident = symbol::Ident::new(name, span).without_first_quote();
trees.extend([
@ -238,7 +241,7 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
escaped.extend(ch.escape_debug());
}
let stream = [
Ident(sym::doc, false),
Ident(sym::doc, IdentIsRaw::No),
Eq,
TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
]
@ -259,7 +262,7 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
Interpolated(ref nt) if let NtIdent(ident, is_raw) = &nt.0 => {
trees.push(TokenTree::Ident(Ident {
sym: ident.name,
is_raw: *is_raw,
is_raw: matches!(is_raw, IdentIsRaw::Yes),
span: ident.span,
}))
}
@ -352,7 +355,7 @@ impl ToInternal<SmallVec<[tokenstream::TokenTree; 2]>>
}
TokenTree::Ident(self::Ident { sym, is_raw, span }) => {
rustc.sess().symbol_gallery.insert(sym, span);
smallvec![tokenstream::TokenTree::token_alone(Ident(sym, is_raw), span)]
smallvec![tokenstream::TokenTree::token_alone(Ident(sym, is_raw.into()), span)]
}
TokenTree::Literal(self::Literal {
kind: self::LitKind::Integer,
@ -570,7 +573,7 @@ impl server::TokenStream for Rustc<'_, '_> {
match &expr.kind {
ast::ExprKind::Lit(token_lit) if token_lit.kind == token::Bool => {
Ok(tokenstream::TokenStream::token_alone(
token::Ident(token_lit.symbol, false),
token::Ident(token_lit.symbol, IdentIsRaw::No),
expr.span,
))
}

View File

@ -1,6 +1,6 @@
use crate::tests::string_to_stream;
use rustc_ast::token;
use rustc_ast::token::{self, IdentIsRaw};
use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_span::create_default_session_globals_then;
use rustc_span::{BytePos, Span, Symbol};
@ -86,7 +86,8 @@ fn test_diseq_1() {
fn test_is_empty() {
create_default_session_globals_then(|| {
let test0 = TokenStream::default();
let test1 = TokenStream::token_alone(token::Ident(Symbol::intern("a"), false), sp(0, 1));
let test1 =
TokenStream::token_alone(token::Ident(Symbol::intern("a"), IdentIsRaw::No), sp(0, 1));
let test2 = string_to_ts("foo(bar::baz)");
assert_eq!(test0.is_empty(), true);

View File

@ -1,14 +1,13 @@
//! Orphan checker: every impl either implements a trait defined in this
//! crate or pertains to a type defined in this crate.
use crate::errors;
use rustc_errors::ErrorGuaranteed;
use rustc_hir as hir;
use rustc_middle::ty::{self, AliasKind, Ty, TyCtxt, TypeVisitableExt};
use rustc_span::def_id::LocalDefId;
use rustc_span::Span;
use rustc_trait_selection::traits;
use crate::errors;
use rustc_trait_selection::traits::{self, IsFirstInputType};
#[instrument(skip(tcx), level = "debug")]
pub(crate) fn orphan_check_impl(
@ -288,7 +287,7 @@ fn emit_orphan_check_error<'tcx>(
(Vec::new(), Vec::new(), Vec::new(), Vec::new(), Vec::new());
let mut sugg = None;
for &(mut ty, is_target_ty) in &tys {
let span = if is_target_ty {
let span = if matches!(is_target_ty, IsFirstInputType::Yes) {
// Point at `D<A>` in `impl<A, B> for C<B> in D<A>`
self_ty_span
} else {
@ -321,7 +320,8 @@ fn emit_orphan_check_error<'tcx>(
}
}
let is_foreign = !trait_ref.def_id.is_local() && !is_target_ty;
let is_foreign =
!trait_ref.def_id.is_local() && matches!(is_target_ty, IsFirstInputType::No);
match &ty.kind() {
ty::Slice(_) => {

View File

@ -1825,7 +1825,7 @@ impl KeywordIdents {
match tt {
// Only report non-raw idents.
TokenTree::Token(token, _) => {
if let Some((ident, false)) = token.ident() {
if let Some((ident, token::IdentIsRaw::No)) = token.ident() {
self.check_ident_token(cx, UnderMacro(true), ident);
}
}

View File

@ -4,7 +4,7 @@ use crate::errors;
use crate::lexer::unicode_chars::UNICODE_ARRAY;
use crate::make_unclosed_delims_error;
use rustc_ast::ast::{self, AttrStyle};
use rustc_ast::token::{self, CommentKind, Delimiter, Token, TokenKind};
use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Token, TokenKind};
use rustc_ast::tokenstream::TokenStream;
use rustc_ast::util::unicode::contains_text_flow_control_chars;
use rustc_errors::{codes::*, Applicability, DiagCtxt, DiagnosticBuilder, StashKey};
@ -181,7 +181,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
self.dcx().emit_err(errors::CannotBeRawIdent { span, ident: sym });
}
self.sess.raw_identifier_spans.push(span);
token::Ident(sym, true)
token::Ident(sym, IdentIsRaw::Yes)
}
rustc_lexer::TokenKind::UnknownPrefix => {
self.report_unknown_prefix(start);
@ -201,7 +201,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
let span = self.mk_sp(start, self.pos);
self.sess.bad_unicode_identifiers.borrow_mut().entry(sym).or_default()
.push(span);
token::Ident(sym, false)
token::Ident(sym, IdentIsRaw::No)
}
// split up (raw) c string literals to an ident and a string literal when edition < 2021.
rustc_lexer::TokenKind::Literal {
@ -339,7 +339,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
let sym = nfc_normalize(self.str_from(start));
let span = self.mk_sp(start, self.pos);
self.sess.symbol_gallery.insert(sym, span);
token::Ident(sym, false)
token::Ident(sym, IdentIsRaw::No)
}
/// Detect usages of Unicode codepoints changing the direction of the text on screen and loudly

View File

@ -307,7 +307,7 @@ pub(crate) const UNICODE_ARRAY: &[(char, &str, &str)] = &[
// fancier error recovery to it, as there will be less overall work to do this way.
const ASCII_ARRAY: &[(&str, &str, Option<token::TokenKind>)] = &[
(" ", "Space", None),
("_", "Underscore", Some(token::Ident(kw::Underscore, false))),
("_", "Underscore", Some(token::Ident(kw::Underscore, token::IdentIsRaw::No))),
("-", "Minus/Hyphen", Some(token::BinOp(token::Minus))),
(",", "Comma", Some(token::Comma)),
(";", "Semicolon", Some(token::Semi)),

View File

@ -21,6 +21,8 @@ use crate::errors::{
use crate::fluent_generated as fluent;
use crate::parser;
use crate::parser::attr::InnerAttrPolicy;
use ast::token::IdentIsRaw;
use parser::Recovered;
use rustc_ast as ast;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Lit, LitKind, Token, TokenKind};
@ -264,7 +266,7 @@ impl<'a> Parser<'a> {
pub(super) fn expected_ident_found(
&mut self,
recover: bool,
) -> PResult<'a, (Ident, /* is_raw */ bool)> {
) -> PResult<'a, (Ident, IdentIsRaw)> {
if let TokenKind::DocComment(..) = self.prev_token.kind {
return Err(self.dcx().create_err(DocCommentDoesNotDocumentAnything {
span: self.prev_token.span,
@ -290,11 +292,11 @@ impl<'a> Parser<'a> {
let bad_token = self.token.clone();
// suggest prepending a keyword in identifier position with `r#`
let suggest_raw = if let Some((ident, false)) = self.token.ident()
let suggest_raw = if let Some((ident, IdentIsRaw::No)) = self.token.ident()
&& ident.is_raw_guess()
&& self.look_ahead(1, |t| valid_follow.contains(&t.kind))
{
recovered_ident = Some((ident, true));
recovered_ident = Some((ident, IdentIsRaw::Yes));
// `Symbol::to_string()` is different from `Symbol::into_diagnostic_arg()`,
// which uses `Symbol::to_ident_string()` and "helpfully" adds an implicit `r#`
@ -320,7 +322,7 @@ impl<'a> Parser<'a> {
let help_cannot_start_number = self.is_lit_bad_ident().map(|(len, valid_portion)| {
let (invalid, valid) = self.token.span.split_at(len as u32);
recovered_ident = Some((Ident::new(valid_portion, valid), false));
recovered_ident = Some((Ident::new(valid_portion, valid), IdentIsRaw::No));
HelpIdentifierStartsWithNumber { num_span: invalid }
});
@ -429,7 +431,7 @@ impl<'a> Parser<'a> {
&mut self,
edible: &[TokenKind],
inedible: &[TokenKind],
) -> PResult<'a, bool /* recovered */> {
) -> PResult<'a, Recovered> {
debug!("expected_one_of_not_found(edible: {:?}, inedible: {:?})", edible, inedible);
fn tokens_to_string(tokens: &[TokenType]) -> String {
let mut i = tokens.iter();
@ -532,7 +534,7 @@ impl<'a> Parser<'a> {
sugg: ExpectedSemiSugg::ChangeToSemi(self.token.span),
});
self.bump();
return Ok(true);
return Ok(Recovered::Yes);
} else if self.look_ahead(0, |t| {
t == &token::CloseDelim(Delimiter::Brace)
|| ((t.can_begin_expr() || t.can_begin_item())
@ -556,7 +558,7 @@ impl<'a> Parser<'a> {
unexpected_token_label: Some(self.token.span),
sugg: ExpectedSemiSugg::AddSemi(span),
});
return Ok(true);
return Ok(Recovered::Yes);
}
}
@ -653,9 +655,9 @@ impl<'a> Parser<'a> {
// positive for a `cr#` that wasn't intended to start a c-string literal, but identifying
// that in the parser requires unbounded lookahead, so we only add a hint to the existing
// error rather than replacing it entirely.
if ((self.prev_token.kind == TokenKind::Ident(sym::c, false)
if ((self.prev_token.kind == TokenKind::Ident(sym::c, IdentIsRaw::No)
&& matches!(&self.token.kind, TokenKind::Literal(token::Lit { kind: token::Str, .. })))
|| (self.prev_token.kind == TokenKind::Ident(sym::cr, false)
|| (self.prev_token.kind == TokenKind::Ident(sym::cr, IdentIsRaw::No)
&& matches!(
&self.token.kind,
TokenKind::Literal(token::Lit { kind: token::Str, .. }) | token::Pound
@ -711,7 +713,7 @@ impl<'a> Parser<'a> {
if self.check_too_many_raw_str_terminators(&mut err) {
if expected.contains(&TokenType::Token(token::Semi)) && self.eat(&token::Semi) {
err.emit();
return Ok(true);
return Ok(Recovered::Yes);
} else {
return Err(err);
}
@ -1223,7 +1225,7 @@ impl<'a> Parser<'a> {
|p| p.parse_generic_arg(None),
);
match x {
Ok((_, _, false)) => {
Ok((_, _, Recovered::No)) => {
if self.eat(&token::Gt) {
// We made sense of it. Improve the error message.
e.span_suggestion_verbose(
@ -1247,7 +1249,7 @@ impl<'a> Parser<'a> {
}
}
}
Ok((_, _, true)) => {}
Ok((_, _, Recovered::Yes)) => {}
Err(err) => {
err.cancel();
}
@ -1286,7 +1288,7 @@ impl<'a> Parser<'a> {
err: &mut ComparisonOperatorsCannotBeChained,
inner_op: &Expr,
outer_op: &Spanned<AssocOp>,
) -> bool /* advanced the cursor */ {
) -> Recovered {
if let ExprKind::Binary(op, l1, r1) = &inner_op.kind {
if let ExprKind::Field(_, ident) = l1.kind
&& ident.as_str().parse::<i32>().is_err()
@ -1294,7 +1296,7 @@ impl<'a> Parser<'a> {
{
// The parser has encountered `foo.bar<baz`, the likelihood of the turbofish
// suggestion being the only one to apply is high.
return false;
return Recovered::No;
}
return match (op.node, &outer_op.node) {
// `x == y == z`
@ -1313,7 +1315,7 @@ impl<'a> Parser<'a> {
span: inner_op.span.shrink_to_hi(),
middle_term: expr_to_str(r1),
});
false // Keep the current parse behavior, where the AST is `(x < y) < z`.
Recovered::No // Keep the current parse behavior, where the AST is `(x < y) < z`.
}
// `x == y < z`
(BinOpKind::Eq, AssocOp::Less | AssocOp::LessEqual | AssocOp::Greater | AssocOp::GreaterEqual) => {
@ -1327,12 +1329,12 @@ impl<'a> Parser<'a> {
left: r1.span.shrink_to_lo(),
right: r2.span.shrink_to_hi(),
});
true
Recovered::Yes
}
Err(expr_err) => {
expr_err.cancel();
self.restore_snapshot(snapshot);
false
Recovered::Yes
}
}
}
@ -1347,19 +1349,19 @@ impl<'a> Parser<'a> {
left: l1.span.shrink_to_lo(),
right: r1.span.shrink_to_hi(),
});
true
Recovered::Yes
}
Err(expr_err) => {
expr_err.cancel();
self.restore_snapshot(snapshot);
false
Recovered::No
}
}
}
_ => false,
_ => Recovered::No,
};
}
false
Recovered::No
}
/// Produces an error if comparison operators are chained (RFC #558).
@ -1487,8 +1489,9 @@ impl<'a> Parser<'a> {
// If it looks like a genuine attempt to chain operators (as opposed to a
// misformatted turbofish, for instance), suggest a correct form.
if self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op)
{
let recovered = self
.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op);
if matches!(recovered, Recovered::Yes) {
self.dcx().emit_err(err);
mk_err_expr(self, inner_op.span.to(self.prev_token.span))
} else {
@ -1500,7 +1503,7 @@ impl<'a> Parser<'a> {
let recover =
self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op);
self.dcx().emit_err(err);
if recover {
if matches!(recover, Recovered::Yes) {
return mk_err_expr(self, inner_op.span.to(self.prev_token.span));
}
}
@ -1840,10 +1843,7 @@ impl<'a> Parser<'a> {
/// Creates a `DiagnosticBuilder` for an unexpected token `t` and tries to recover if it is a
/// closing delimiter.
pub(super) fn unexpected_try_recover(
&mut self,
t: &TokenKind,
) -> PResult<'a, bool /* recovered */> {
pub(super) fn unexpected_try_recover(&mut self, t: &TokenKind) -> PResult<'a, Recovered> {
let token_str = pprust::token_kind_to_string(t);
let this_token_str = super::token_descr(&self.token);
let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) {

View File

@ -3,13 +3,14 @@ use super::diagnostics::SnapshotParser;
use super::pat::{CommaRecoveryMode, Expected, RecoverColon, RecoverComma};
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{
AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions,
SemiColonMode, SeqSep, TokenExpectType, TokenType, TrailingToken,
AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Recovered, Restrictions,
SemiColonMode, SeqSep, TokenExpectType, TokenType, Trailing, TrailingToken,
};
use crate::errors;
use crate::maybe_recover_from_interpolated_ty_qpath;
use ast::mut_visit::{noop_visit_expr, MutVisitor};
use ast::token::IdentIsRaw;
use ast::{CoroutineKind, ForLoopKind, GenBlockKind, Pat, Path, PathSegment};
use core::mem;
use rustc_ast::ptr::P;
@ -126,7 +127,7 @@ impl<'a> Parser<'a> {
match self.parse_expr_res(restrictions, None) {
Ok(expr) => Ok(expr),
Err(err) => match self.token.ident() {
Some((Ident { name: kw::Underscore, .. }, false))
Some((Ident { name: kw::Underscore, .. }, IdentIsRaw::No))
if self.may_recover() && self.look_ahead(1, |t| t == &token::Comma) =>
{
// Special-case handling of `foo(_, _, _)`
@ -457,7 +458,9 @@ impl<'a> Parser<'a> {
return None;
}
(Some(op), _) => (op, self.token.span),
(None, Some((Ident { name: sym::and, span }, false))) if self.may_recover() => {
(None, Some((Ident { name: sym::and, span }, IdentIsRaw::No)))
if self.may_recover() =>
{
self.dcx().emit_err(errors::InvalidLogicalOperator {
span: self.token.span,
incorrect: "and".into(),
@ -465,7 +468,7 @@ impl<'a> Parser<'a> {
});
(AssocOp::LAnd, span)
}
(None, Some((Ident { name: sym::or, span }, false))) if self.may_recover() => {
(None, Some((Ident { name: sym::or, span }, IdentIsRaw::No))) if self.may_recover() => {
self.dcx().emit_err(errors::InvalidLogicalOperator {
span: self.token.span,
incorrect: "or".into(),
@ -742,7 +745,7 @@ impl<'a> Parser<'a> {
(
// `foo: `
ExprKind::Path(None, ast::Path { segments, .. }),
token::Ident(kw::For | kw::Loop | kw::While, false),
token::Ident(kw::For | kw::Loop | kw::While, IdentIsRaw::No),
) if segments.len() == 1 => {
let snapshot = self.create_snapshot_for_diagnostic();
let label = Label {
@ -955,19 +958,20 @@ impl<'a> Parser<'a> {
fn parse_expr_dot_or_call_with_(&mut self, mut e: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
loop {
let has_question = if self.prev_token.kind == TokenKind::Ident(kw::Return, false) {
// we are using noexpect here because we don't expect a `?` directly after a `return`
// which could be suggested otherwise
self.eat_noexpect(&token::Question)
} else {
self.eat(&token::Question)
};
let has_question =
if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
// we are using noexpect here because we don't expect a `?` directly after a `return`
// which could be suggested otherwise
self.eat_noexpect(&token::Question)
} else {
self.eat(&token::Question)
};
if has_question {
// `expr?`
e = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Try(e));
continue;
}
let has_dot = if self.prev_token.kind == TokenKind::Ident(kw::Return, false) {
let has_dot = if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
// we are using noexpect here because we don't expect a `.` directly after a `return`
// which could be suggested otherwise
self.eat_noexpect(&token::Dot)
@ -1126,19 +1130,19 @@ impl<'a> Parser<'a> {
// 1.
DestructuredFloat::TrailingDot(sym, ident_span, dot_span) => {
assert!(suffix.is_none());
self.token = Token::new(token::Ident(sym, false), ident_span);
self.token = Token::new(token::Ident(sym, IdentIsRaw::No), ident_span);
let next_token = (Token::new(token::Dot, dot_span), self.token_spacing);
self.parse_expr_tuple_field_access(lo, base, sym, None, Some(next_token))
}
// 1.2 | 1.2e3
DestructuredFloat::MiddleDot(symbol1, ident1_span, dot_span, symbol2, ident2_span) => {
self.token = Token::new(token::Ident(symbol1, false), ident1_span);
self.token = Token::new(token::Ident(symbol1, IdentIsRaw::No), ident1_span);
// This needs to be `Spacing::Alone` to prevent regressions.
// See issue #76399 and PR #76285 for more details
let next_token1 = (Token::new(token::Dot, dot_span), Spacing::Alone);
let base1 =
self.parse_expr_tuple_field_access(lo, base, symbol1, None, Some(next_token1));
let next_token2 = Token::new(token::Ident(symbol2, false), ident2_span);
let next_token2 = Token::new(token::Ident(symbol2, IdentIsRaw::No), ident2_span);
self.bump_with((next_token2, self.token_spacing)); // `.`
self.parse_expr_tuple_field_access(lo, base1, symbol2, suffix, None)
}
@ -1555,7 +1559,7 @@ impl<'a> Parser<'a> {
return Ok(self.recover_seq_parse_error(Delimiter::Parenthesis, lo, err));
}
};
let kind = if es.len() == 1 && !trailing_comma {
let kind = if es.len() == 1 && matches!(trailing_comma, Trailing::No) {
// `(e)` is parenthesized `e`.
ExprKind::Paren(es.into_iter().next().unwrap())
} else {
@ -1946,7 +1950,7 @@ impl<'a> Parser<'a> {
self.bump(); // `builtin`
self.bump(); // `#`
let Some((ident, false)) = self.token.ident() else {
let Some((ident, IdentIsRaw::No)) = self.token.ident() else {
let err = self.dcx().create_err(errors::ExpectedBuiltinIdent { span: self.token.span });
return Err(err);
};
@ -3087,10 +3091,10 @@ impl<'a> Parser<'a> {
if !require_comma {
arm_body = Some(expr);
this.eat(&token::Comma);
Ok(false)
Ok(Recovered::No)
} else if let Some(body) = this.parse_arm_body_missing_braces(&expr, arrow_span) {
arm_body = Some(body);
Ok(true)
Ok(Recovered::Yes)
} else {
let expr_span = expr.span;
arm_body = Some(expr);
@ -3171,7 +3175,7 @@ impl<'a> Parser<'a> {
this.dcx().emit_err(errors::MissingCommaAfterMatchArm {
span: arm_span.shrink_to_hi(),
});
return Ok(true);
return Ok(Recovered::Yes);
}
Err(err)
});
@ -3574,7 +3578,7 @@ impl<'a> Parser<'a> {
fn find_struct_error_after_field_looking_code(&self) -> Option<ExprField> {
match self.token.ident() {
Some((ident, is_raw))
if (is_raw || !ident.is_reserved())
if (matches!(is_raw, IdentIsRaw::Yes) || !ident.is_reserved())
&& self.look_ahead(1, |t| *t == token::Colon) =>
{
Some(ast::ExprField {

View File

@ -1,8 +1,12 @@
use super::diagnostics::{dummy_arg, ConsumeClosingDelim};
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken};
use super::{
AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Recovered, Trailing,
TrailingToken,
};
use crate::errors::{self, MacroExpandsToAdtField};
use crate::fluent_generated as fluent;
use ast::token::IdentIsRaw;
use rustc_ast::ast::*;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, TokenKind};
@ -1076,7 +1080,7 @@ impl<'a> Parser<'a> {
fn parse_ident_or_underscore(&mut self) -> PResult<'a, Ident> {
match self.token.ident() {
Some((ident @ Ident { name: kw::Underscore, .. }, false)) => {
Some((ident @ Ident { name: kw::Underscore, .. }, IdentIsRaw::No)) => {
self.bump();
Ok(ident)
}
@ -1453,7 +1457,7 @@ impl<'a> Parser<'a> {
let (variants, _) = if self.token == TokenKind::Semi {
self.dcx().emit_err(errors::UseEmptyBlockNotSemi { span: self.token.span });
self.bump();
(thin_vec![], false)
(thin_vec![], Trailing::No)
} else {
self.parse_delim_comma_seq(Delimiter::Brace, |p| p.parse_enum_variant(id.span))
.map_err(|mut err| {
@ -1530,10 +1534,10 @@ impl<'a> Parser<'a> {
err.span_label(span, "while parsing this enum");
err.help(help);
err.emit();
(thin_vec![], true)
(thin_vec![], Recovered::Yes)
}
};
VariantData::Struct { fields, recovered }
VariantData::Struct { fields, recovered: recovered.into() }
} else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) {
let body = match this.parse_tuple_struct_body() {
Ok(body) => body,
@ -1618,7 +1622,7 @@ impl<'a> Parser<'a> {
class_name.span,
generics.where_clause.has_where_token,
)?;
VariantData::Struct { fields, recovered }
VariantData::Struct { fields, recovered: recovered.into() }
}
// No `where` so: `struct Foo<T>;`
} else if self.eat(&token::Semi) {
@ -1630,7 +1634,7 @@ impl<'a> Parser<'a> {
class_name.span,
generics.where_clause.has_where_token,
)?;
VariantData::Struct { fields, recovered }
VariantData::Struct { fields, recovered: recovered.into() }
// Tuple-style struct definition with optional where-clause.
} else if self.token == token::OpenDelim(Delimiter::Parenthesis) {
let body = VariantData::Tuple(self.parse_tuple_struct_body()?, DUMMY_NODE_ID);
@ -1659,14 +1663,14 @@ impl<'a> Parser<'a> {
class_name.span,
generics.where_clause.has_where_token,
)?;
VariantData::Struct { fields, recovered }
VariantData::Struct { fields, recovered: recovered.into() }
} else if self.token == token::OpenDelim(Delimiter::Brace) {
let (fields, recovered) = self.parse_record_struct_body(
"union",
class_name.span,
generics.where_clause.has_where_token,
)?;
VariantData::Struct { fields, recovered }
VariantData::Struct { fields, recovered: recovered.into() }
} else {
let token_str = super::token_descr(&self.token);
let msg = format!("expected `where` or `{{` after union name, found {token_str}");
@ -1683,14 +1687,14 @@ impl<'a> Parser<'a> {
adt_ty: &str,
ident_span: Span,
parsed_where: bool,
) -> PResult<'a, (ThinVec<FieldDef>, /* recovered */ bool)> {
) -> PResult<'a, (ThinVec<FieldDef>, Recovered)> {
let mut fields = ThinVec::new();
let mut recovered = false;
let mut recovered = Recovered::No;
if self.eat(&token::OpenDelim(Delimiter::Brace)) {
while self.token != token::CloseDelim(Delimiter::Brace) {
let field = self.parse_field_def(adt_ty).map_err(|e| {
self.consume_block(Delimiter::Brace, ConsumeClosingDelim::No);
recovered = true;
recovered = Recovered::Yes;
e
});
match field {
@ -1962,7 +1966,7 @@ impl<'a> Parser<'a> {
let (ident, is_raw) = self.ident_or_err(true)?;
if ident.name == kw::Underscore {
self.sess.gated_spans.gate(sym::unnamed_fields, lo);
} else if !is_raw && ident.is_reserved() {
} else if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() {
let snapshot = self.create_snapshot_for_diagnostic();
let err = if self.check_fn_front_matter(false, Case::Sensitive) {
let inherited_vis = Visibility {
@ -2461,8 +2465,8 @@ impl<'a> Parser<'a> {
// `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't
// account for this.
match self.expect_one_of(&[], &[]) {
Ok(true) => {}
Ok(false) => unreachable!(),
Ok(Recovered::Yes) => {}
Ok(Recovered::No) => unreachable!(),
Err(mut err) => {
// Qualifier keywords ordering check
enum WrongKw {
@ -2740,7 +2744,7 @@ impl<'a> Parser<'a> {
fn parse_self_param(&mut self) -> PResult<'a, Option<Param>> {
// Extract an identifier *after* having confirmed that the token is one.
let expect_self_ident = |this: &mut Self| match this.token.ident() {
Some((ident, false)) => {
Some((ident, IdentIsRaw::No)) => {
this.bump();
ident
}

View File

@ -11,6 +11,7 @@ mod stmt;
mod ty;
use crate::lexer::UnmatchedDelim;
use ast::token::IdentIsRaw;
pub use attr_wrapper::AttrWrapper;
pub use diagnostics::AttemptLocalParseRecovery;
pub(crate) use expr::ForbiddenLetReason;
@ -357,6 +358,25 @@ pub enum FollowedByType {
No,
}
/// Whether a function performed recovery
#[derive(Copy, Clone, Debug)]
pub enum Recovered {
No,
Yes,
}
impl From<Recovered> for bool {
fn from(r: Recovered) -> bool {
matches!(r, Recovered::Yes)
}
}
#[derive(Copy, Clone, Debug)]
pub enum Trailing {
No,
Yes,
}
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum TokenDescription {
ReservedIdentifier,
@ -455,11 +475,11 @@ impl<'a> Parser<'a> {
}
/// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, bool /* recovered */> {
pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, Recovered> {
if self.expected_tokens.is_empty() {
if self.token == *t {
self.bump();
Ok(false)
Ok(Recovered::No)
} else {
self.unexpected_try_recover(t)
}
@ -475,13 +495,13 @@ impl<'a> Parser<'a> {
&mut self,
edible: &[TokenKind],
inedible: &[TokenKind],
) -> PResult<'a, bool /* recovered */> {
) -> PResult<'a, Recovered> {
if edible.contains(&self.token.kind) {
self.bump();
Ok(false)
Ok(Recovered::No)
} else if inedible.contains(&self.token.kind) {
// leave it in the input
Ok(false)
Ok(Recovered::No)
} else if self.token.kind != token::Eof
&& self.last_unexpected_token_span == Some(self.token.span)
{
@ -499,7 +519,7 @@ impl<'a> Parser<'a> {
fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> {
let (ident, is_raw) = self.ident_or_err(recover)?;
if !is_raw && ident.is_reserved() {
if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() {
let err = self.expected_ident_found_err();
if recover {
err.emit();
@ -511,7 +531,7 @@ impl<'a> Parser<'a> {
Ok(ident)
}
fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, /* is_raw */ bool)> {
fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, IdentIsRaw)> {
match self.token.ident() {
Some(ident) => Ok(ident),
None => self.expected_ident_found(recover),
@ -568,7 +588,7 @@ impl<'a> Parser<'a> {
}
if case == Case::Insensitive
&& let Some((ident, /* is_raw */ false)) = self.token.ident()
&& let Some((ident, IdentIsRaw::No)) = self.token.ident()
&& ident.as_str().to_lowercase() == kw.as_str().to_lowercase()
{
true
@ -598,7 +618,7 @@ impl<'a> Parser<'a> {
}
if case == Case::Insensitive
&& let Some((ident, /* is_raw */ false)) = self.token.ident()
&& let Some((ident, IdentIsRaw::No)) = self.token.ident()
&& ident.as_str().to_lowercase() == kw.as_str().to_lowercase()
{
self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw: kw.as_str() });
@ -783,10 +803,10 @@ impl<'a> Parser<'a> {
sep: SeqSep,
expect: TokenExpectType,
mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (ThinVec<T>, bool /* trailing */, bool /* recovered */)> {
) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
let mut first = true;
let mut recovered = false;
let mut trailing = false;
let mut recovered = Recovered::No;
let mut trailing = Trailing::No;
let mut v = ThinVec::new();
while !self.expect_any_with_type(kets, expect) {
@ -800,12 +820,12 @@ impl<'a> Parser<'a> {
} else {
// check for separator
match self.expect(t) {
Ok(false) /* not recovered */ => {
Ok(Recovered::No) => {
self.current_closure.take();
}
Ok(true) /* recovered */ => {
Ok(Recovered::Yes) => {
self.current_closure.take();
recovered = true;
recovered = Recovered::Yes;
break;
}
Err(mut expect_err) => {
@ -900,7 +920,7 @@ impl<'a> Parser<'a> {
}
}
if sep.trailing_sep_allowed && self.expect_any_with_type(kets, expect) {
trailing = true;
trailing = Trailing::Yes;
break;
}
@ -978,7 +998,7 @@ impl<'a> Parser<'a> {
ket: &TokenKind,
sep: SeqSep,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (ThinVec<T>, bool /* trailing */, bool /* recovered */)> {
) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
}
@ -990,9 +1010,9 @@ impl<'a> Parser<'a> {
ket: &TokenKind,
sep: SeqSep,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> {
) -> PResult<'a, (ThinVec<T>, Trailing)> {
let (val, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
if !recovered {
if matches!(recovered, Recovered::No) {
self.eat(ket);
}
Ok((val, trailing))
@ -1007,7 +1027,7 @@ impl<'a> Parser<'a> {
ket: &TokenKind,
sep: SeqSep,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> {
) -> PResult<'a, (ThinVec<T>, Trailing)> {
self.expect(bra)?;
self.parse_seq_to_end(ket, sep, f)
}
@ -1019,7 +1039,7 @@ impl<'a> Parser<'a> {
&mut self,
delim: Delimiter,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> {
) -> PResult<'a, (ThinVec<T>, Trailing)> {
self.parse_unspanned_seq(
&token::OpenDelim(delim),
&token::CloseDelim(delim),
@ -1034,7 +1054,7 @@ impl<'a> Parser<'a> {
fn parse_paren_comma_seq<T>(
&mut self,
f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> {
) -> PResult<'a, (ThinVec<T>, Trailing)> {
self.parse_delim_comma_seq(Delimiter::Parenthesis, f)
}

View File

@ -201,6 +201,6 @@ impl<'a> Parser<'a> {
/// The token is an identifier, but not `_`.
/// We prohibit passing `_` to macros expecting `ident` for now.
fn get_macro_ident(token: &Token) -> Option<(Ident, bool)> {
fn get_macro_ident(token: &Token) -> Option<(Ident, token::IdentIsRaw)> {
token.ident().filter(|(ident, _)| ident.name != kw::Underscore)
}

View File

@ -1,4 +1,4 @@
use super::{ForceCollect, Parser, PathStyle, Restrictions, TrailingToken};
use super::{ForceCollect, Parser, PathStyle, Restrictions, Trailing, TrailingToken};
use crate::errors::{
self, AmbiguousRangePattern, DotDotDotForRemainingFields, DotDotDotRangeToPatternNotAllowed,
DotDotDotRestPattern, EnumPatternInsteadOfIdentifier, ExpectedBindingLeftOfAt,
@ -311,7 +311,7 @@ impl<'a> Parser<'a> {
matches!(
&token.uninterpolate().kind,
token::FatArrow // e.g. `a | => 0,`.
| token::Ident(kw::If, false) // e.g. `a | if expr`.
| token::Ident(kw::If, token::IdentIsRaw::No) // e.g. `a | if expr`.
| token::Eq // e.g. `let a | = 0`.
| token::Semi // e.g. `let a |;`.
| token::Colon // e.g. `let a | :`.
@ -696,7 +696,9 @@ impl<'a> Parser<'a> {
// Here, `(pat,)` is a tuple pattern.
// For backward compatibility, `(..)` is a tuple pattern as well.
Ok(if fields.len() == 1 && !(trailing_comma || fields[0].is_rest()) {
let paren_pattern =
fields.len() == 1 && !(matches!(trailing_comma, Trailing::Yes) || fields[0].is_rest());
if paren_pattern {
let pat = fields.into_iter().next().unwrap();
let close_paren = self.prev_token.span;
@ -714,7 +716,7 @@ impl<'a> Parser<'a> {
},
});
self.parse_pat_range_begin_with(begin.clone(), form)?
self.parse_pat_range_begin_with(begin.clone(), form)
}
// recover ranges with parentheses around the `(start)..`
PatKind::Err(_)
@ -729,15 +731,15 @@ impl<'a> Parser<'a> {
},
});
self.parse_pat_range_begin_with(self.mk_expr(pat.span, ExprKind::Err), form)?
self.parse_pat_range_begin_with(self.mk_expr(pat.span, ExprKind::Err), form)
}
// (pat) with optional parentheses
_ => PatKind::Paren(pat),
_ => Ok(PatKind::Paren(pat)),
}
} else {
PatKind::Tuple(fields)
})
Ok(PatKind::Tuple(fields))
}
}
/// Parse a mutable binding with the `mut` token already eaten.

View File

@ -2,6 +2,7 @@ use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{Parser, Restrictions, TokenType};
use crate::errors::PathSingleColon;
use crate::{errors, maybe_whole};
use ast::token::IdentIsRaw;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
use rustc_ast::{
@ -390,7 +391,7 @@ impl<'a> Parser<'a> {
pub(super) fn parse_path_segment_ident(&mut self) -> PResult<'a, Ident> {
match self.token.ident() {
Some((ident, false)) if ident.is_path_segment_keyword() => {
Some((ident, IdentIsRaw::No)) if ident.is_path_segment_keyword() => {
self.bump();
Ok(ident)
}

View File

@ -11,6 +11,7 @@ use crate::errors;
use crate::maybe_whole;
use crate::errors::MalformedLoopLabel;
use crate::parser::Recovered;
use ast::Label;
use rustc_ast as ast;
use rustc_ast::ptr::P;
@ -661,7 +662,6 @@ impl<'a> Parser<'a> {
if self.token != token::Eof && classify::expr_requires_semi_to_be_stmt(expr) =>
{
// Just check for errors and recover; do not eat semicolon yet.
// `expect_one_of` returns PResult<'a, bool /* recovered */>
let expect_result =
self.expect_one_of(&[], &[token::Semi, token::CloseDelim(Delimiter::Brace)]);
@ -669,7 +669,7 @@ impl<'a> Parser<'a> {
let replace_with_err = 'break_recover: {
match expect_result {
// Recover from parser, skip type error to avoid extra errors.
Ok(true) => true,
Ok(Recovered::Yes) => true,
Err(e) => {
if self.recover_colon_as_semi() {
// recover_colon_as_semi has already emitted a nicer error.
@ -691,7 +691,7 @@ impl<'a> Parser<'a> {
token.kind,
token::Ident(
kw::For | kw::Loop | kw::While,
false
token::IdentIsRaw::No
) | token::OpenDelim(Delimiter::Brace)
)
})
@ -735,7 +735,7 @@ impl<'a> Parser<'a> {
true
}
Ok(false) => false,
Ok(Recovered::No) => false,
}
};

View File

@ -1,4 +1,4 @@
use super::{Parser, PathStyle, TokenType};
use super::{Parser, PathStyle, TokenType, Trailing};
use crate::errors::{
self, DynAfterMut, ExpectedFnPathFoundFnKeyword, ExpectedMutOrConstInRawPointerType,
@ -415,7 +415,7 @@ impl<'a> Parser<'a> {
Ok(ty)
})?;
if ts.len() == 1 && !trailing {
if ts.len() == 1 && matches!(trailing, Trailing::No) {
let ty = ts.into_iter().next().unwrap().into_inner();
let maybe_bounds = allow_plus == AllowPlus::Yes && self.token.is_like_plus();
match ty.kind {

View File

@ -1111,7 +1111,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
suggestions.extend(
tmp_suggestions
.into_iter()
.filter(|s| use_prelude || this.is_builtin_macro(s.res)),
.filter(|s| use_prelude.into() || this.is_builtin_macro(s.res)),
);
}
}

View File

@ -23,6 +23,18 @@ use Namespace::*;
type Visibility = ty::Visibility<LocalDefId>;
#[derive(Copy, Clone)]
pub enum UsePrelude {
No,
Yes,
}
impl From<UsePrelude> for bool {
fn from(up: UsePrelude) -> bool {
matches!(up, UsePrelude::Yes)
}
}
impl<'a, 'tcx> Resolver<'a, 'tcx> {
/// A generic scope visitor.
/// Visits scopes in order to resolve some identifier in them or perform other actions.
@ -32,12 +44,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
scope_set: ScopeSet<'a>,
parent_scope: &ParentScope<'a>,
ctxt: SyntaxContext,
mut visitor: impl FnMut(
&mut Self,
Scope<'a>,
/*use_prelude*/ bool,
SyntaxContext,
) -> Option<T>,
mut visitor: impl FnMut(&mut Self, Scope<'a>, UsePrelude, SyntaxContext) -> Option<T>,
) -> Option<T> {
// General principles:
// 1. Not controlled (user-defined) names should have higher priority than controlled names
@ -133,6 +140,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
};
if visit {
let use_prelude = if use_prelude { UsePrelude::Yes } else { UsePrelude::No };
if let break_result @ Some(..) = visitor(self, scope, use_prelude, ctxt) {
return break_result;
}
@ -579,7 +587,9 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
None,
ignore_binding,
) {
if use_prelude || this.is_builtin_macro(binding.res()) {
if matches!(use_prelude, UsePrelude::Yes)
|| this.is_builtin_macro(binding.res())
{
result = Ok((binding, Flags::MISC_FROM_PRELUDE));
}
}

View File

@ -601,9 +601,24 @@ pub fn trait_ref_is_local_or_fundamental<'tcx>(
trait_ref.def_id.krate == LOCAL_CRATE || tcx.has_attr(trait_ref.def_id, sym::fundamental)
}
#[derive(Debug, Copy, Clone)]
pub enum IsFirstInputType {
No,
Yes,
}
impl From<bool> for IsFirstInputType {
fn from(b: bool) -> IsFirstInputType {
match b {
false => IsFirstInputType::No,
true => IsFirstInputType::Yes,
}
}
}
#[derive(Debug)]
pub enum OrphanCheckErr<'tcx> {
NonLocalInputType(Vec<(Ty<'tcx>, bool /* Is this the first input type? */)>),
NonLocalInputType(Vec<(Ty<'tcx>, IsFirstInputType)>),
UncoveredTy(Ty<'tcx>, Option<Ty<'tcx>>),
}
@ -754,7 +769,7 @@ struct OrphanChecker<'tcx, F> {
/// Ignore orphan check failures and exclusively search for the first
/// local type.
search_first_local_ty: bool,
non_local_tys: Vec<(Ty<'tcx>, bool)>,
non_local_tys: Vec<(Ty<'tcx>, IsFirstInputType)>,
}
impl<'tcx, F, E> OrphanChecker<'tcx, F>
@ -772,7 +787,7 @@ where
}
fn found_non_local_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<OrphanCheckEarlyExit<'tcx, E>> {
self.non_local_tys.push((t, self.in_self_ty));
self.non_local_tys.push((t, self.in_self_ty.into()));
ControlFlow::Continue(())
}

View File

@ -42,7 +42,7 @@ use std::fmt::Debug;
use std::ops::ControlFlow;
pub use self::coherence::{add_placeholder_note, orphan_check, overlapping_impls};
pub use self::coherence::{OrphanCheckErr, OverlapResult};
pub use self::coherence::{IsFirstInputType, OrphanCheckErr, OverlapResult};
pub use self::engine::{ObligationCtxt, TraitEngineExt};
pub use self::fulfill::{FulfillmentContext, PendingPredicateObligation};
pub use self::normalize::NormalizeExt;

View File

@ -1,4 +1,4 @@
use rustc_ast::token::{self, BinOpToken, Delimiter};
use rustc_ast::token::{self, BinOpToken, Delimiter, IdentIsRaw};
use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_ast_pretty::pprust::state::State as Printer;
use rustc_ast_pretty::pprust::PrintState;
@ -148,7 +148,7 @@ fn print_tts(printer: &mut Printer<'_>, tts: &TokenStream) {
(false, Other)
}
(Pound, token::Not) => (false, PoundBang),
(_, token::Ident(symbol, /* is_raw */ false))
(_, token::Ident(symbol, IdentIsRaw::No))
if !usually_needs_space_between_keyword_and_open_delim(*symbol, tt.span) =>
{
(true, Ident)