Auto merge of #122830 - matthiaskrgr:rollup-uk2by3f, r=matthiaskrgr

Rollup of 8 pull requests

Successful merges:

 - #122402 (Make `#[diagnostic::on_unimplemented]` format string parsing more robust)
 - #122644 (pattern analysis: add a custom test harness)
 - #122733 (Strip placeholders from hidden types before remapping generic parameter)
 - #122752 (Interpolated cleanups)
 - #122771 (add some comments to hir::ModuleItems)
 - #122793 (Implement macro-based deref!() syntax for deref patterns)
 - #122810 (Remove `target_override`)
 - #122827 (Remove unnecessary braces from `bug`/`span_bug`)

r? `@ghost`
`@rustbot` modify labels: rollup
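
Of the merges above, #122793 is the most user-visible: `deref!(pat)` becomes the temporary, macro-based surface syntax for deref patterns. A minimal sketch of how it is meant to be used, assuming a nightly toolchain where the unstable `deref!` macro is reachable under `feature(deref_patterns)` (its exact availability is an assumption, not stated in this commit):

```rust
// Sketch only: deref_patterns is an incomplete nightly feature.
#![feature(deref_patterns)]
#![allow(incomplete_features)]

fn main() {
    let boxed: Box<Option<i32>> = Box::new(Some(1));
    // `deref!(pat)` matches through the `Deref` impl of the scrutinee (here `Box`).
    if let deref!(Some(n)) = boxed {
        assert_eq!(n, 1);
    }
}
```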
bors 2024-03-21 17:53:57 +00:00
commit a0569fa8f9
83 changed files with 1479 additions and 421 deletions

@@ -4440,6 +4440,8 @@ dependencies = [
  "rustc_target",
  "smallvec",
  "tracing",
+ "tracing-subscriber",
+ "tracing-tree",
 ]

 [[package]]

@@ -621,7 +621,9 @@ impl Pat {
             | PatKind::Or(s) => s.iter().for_each(|p| p.walk(it)),

             // Trivial wrappers over inner patterns.
-            PatKind::Box(s) | PatKind::Ref(s, _) | PatKind::Paren(s) => s.walk(it),
+            PatKind::Box(s) | PatKind::Deref(s) | PatKind::Ref(s, _) | PatKind::Paren(s) => {
+                s.walk(it)
+            }

             // These patterns do not contain subpatterns, skip.
             PatKind::Wild
@@ -792,6 +794,9 @@ pub enum PatKind {
     /// A `box` pattern.
     Box(P<Pat>),

+    /// A `deref` pattern (currently `deref!()` macro-based syntax).
+    Deref(P<Pat>),
+
     /// A reference pattern (e.g., `&mut (a, b)`).
     Ref(P<Pat>, Mutability),

@@ -1295,6 +1295,7 @@ pub fn noop_visit_pat<T: MutVisitor>(pat: &mut P<Pat>, vis: &mut T) {
             fields.flat_map_in_place(|field| vis.flat_map_pat_field(field));
         }
         PatKind::Box(inner) => vis.visit_pat(inner),
+        PatKind::Deref(inner) => vis.visit_pat(inner),
         PatKind::Ref(inner, _mutbl) => vis.visit_pat(inner),
         PatKind::Range(e1, e2, Spanned { span: _, node: _ }) => {
             visit_opt(e1, |e| vis.visit_expr(e));

@@ -105,7 +105,7 @@ impl Lit {
         }
     }

-    /// Keep this in sync with `Token::can_begin_literal_or_bool` excluding unary negation.
+    /// Keep this in sync with `Token::can_begin_literal_maybe_minus` excluding unary negation.
     pub fn from_token(token: &Token) -> Option<Lit> {
         match token.uninterpolate().kind {
             Ident(name, IdentIsRaw::No) if name.is_bool_lit() => Some(Lit::new(Bool, name, None)),
@@ -664,7 +664,7 @@ impl Token {
     }

     /// Returns `true` if the token is an interpolated path.
-    fn is_path(&self) -> bool {
+    fn is_whole_path(&self) -> bool {
        if let Interpolated(nt) = &self.kind
            && let NtPath(..) = &nt.0
        {
@@ -710,7 +710,7 @@
    pub fn is_path_start(&self) -> bool {
        self == &ModSep
            || self.is_qpath_start()
-            || self.is_path()
+            || self.is_whole_path()
            || self.is_path_segment_keyword()
            || self.is_ident() && !self.is_reserved_ident()
    }

@@ -28,18 +28,7 @@ use smallvec::{smallvec, SmallVec};
 use std::borrow::Cow;
 use std::{cmp, fmt, iter};

-/// When the main Rust parser encounters a syntax-extension invocation, it
-/// parses the arguments to the invocation as a token tree. This is a very
-/// loose structure, such that all sorts of different AST fragments can
-/// be passed to syntax extensions using a uniform type.
-///
-/// If the syntax extension is an MBE macro, it will attempt to match its
-/// LHS token tree against the provided token tree, and if it finds a
-/// match, will transcribe the RHS token tree, splicing in any captured
-/// `macro_parser::matched_nonterminals` into the `SubstNt`s it finds.
-///
-/// The RHS of an MBE macro is the only place `SubstNt`s are substituted.
-/// Nothing special happens to misnamed or misplaced `SubstNt`s.
+/// Part of a `TokenStream`.
 #[derive(Debug, Clone, PartialEq, Encodable, Decodable, HashStable_Generic)]
 pub enum TokenTree {
     /// A single token. Should never be `OpenDelim` or `CloseDelim`, because

@@ -576,7 +576,10 @@ pub fn walk_pat<'a, V: Visitor<'a>>(visitor: &mut V, pattern: &'a Pat) -> V::Res
             try_visit!(visitor.visit_path(path, pattern.id));
             walk_list!(visitor, visit_pat_field, fields);
         }
-        PatKind::Box(subpattern) | PatKind::Ref(subpattern, _) | PatKind::Paren(subpattern) => {
+        PatKind::Box(subpattern)
+        | PatKind::Deref(subpattern)
+        | PatKind::Ref(subpattern, _)
+        | PatKind::Paren(subpattern) => {
             try_visit!(visitor.visit_pat(subpattern));
         }
         PatKind::Ident(_, ident, optional_subpattern) => {

@@ -91,6 +91,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
             PatKind::Box(inner) => {
                 break hir::PatKind::Box(self.lower_pat(inner));
             }
+            PatKind::Deref(inner) => {
+                break hir::PatKind::Deref(self.lower_pat(inner));
+            }
             PatKind::Ref(inner, mutbl) => {
                 break hir::PatKind::Ref(self.lower_pat(inner), *mutbl);
             }

@@ -413,10 +413,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
                 }
             }
             PatKind::Box(..) => {
-                if !self.features.deref_patterns {
-                    // Allow box patterns under `deref_patterns`.
-                    gate!(&self, box_patterns, pattern.span, "box pattern syntax is experimental");
-                }
+                gate!(&self, box_patterns, pattern.span, "box pattern syntax is experimental");
             }
             PatKind::Range(_, Some(_), Spanned { node: RangeEnd::Excluded, .. }) => {
                 gate!(
@@ -610,10 +607,7 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session, features: &Features) {
         };
     }

-    if !visitor.features.deref_patterns {
-        // Allow box patterns under `deref_patterns`.
-        gate_all_legacy_dont_use!(box_patterns, "box pattern syntax is experimental");
-    }
+    gate_all_legacy_dont_use!(box_patterns, "box pattern syntax is experimental");
     gate_all_legacy_dont_use!(trait_alias, "trait aliases are experimental");
     // Despite being a new feature, `where T: Trait<Assoc(): Sized>`, which is RTN syntax now,
     // used to be gated under associated_type_bounds, which are right above, so RTN needs to

@@ -1626,6 +1626,12 @@ impl<'a> State<'a> {
                 self.word("box ");
                 self.print_pat(inner);
             }
+            PatKind::Deref(inner) => {
+                self.word("deref!");
+                self.popen();
+                self.print_pat(inner);
+                self.pclose();
+            }
             PatKind::Ref(inner, mutbl) => {
                 self.word("&");
                 if mutbl.is_mut() {

@@ -192,6 +192,12 @@ impl<'tcx> RegionInferenceContext<'tcx> {
                     .find(|ur_vid| self.eval_equal(vid, **ur_vid))
                     .and_then(|ur_vid| self.definitions[*ur_vid].external_name)
                     .unwrap_or(infcx.tcx.lifetimes.re_erased),
+                ty::RePlaceholder(_) => ty::Region::new_error_with_message(
+                    infcx.tcx,
+                    concrete_type.span,
+                    "hidden type contains placeholders, we don't support higher kinded opaques yet",
+                ),
                 _ => region,
             });
             debug!(?universal_concrete_type);

@@ -21,7 +21,6 @@ use rustc_session::{
 };
 use rustc_span::symbol::Symbol;
 use rustc_target::abi::call::FnAbi;
-use rustc_target::spec::Target;

 use std::fmt;
@@ -70,12 +69,6 @@
     fn print_passes(&self) {}
     fn print_version(&self) {}

-    /// If this plugin provides additional builtin targets, provide the one enabled by the options here.
-    /// Be careful: this is called *before* init() is called.
-    fn target_override(&self, _opts: &config::Options) -> Option<Target> {
-        None
-    }
-
     /// The metadata loader used to load rlib and dylib metadata.
     ///
     /// Alternative codegen backends may want to use different rlib or dylib formats than the

@@ -890,7 +890,7 @@ pub fn version_at_macro_invocation(
        let backend_name = debug_flags.iter().find_map(|x| x.strip_prefix("codegen-backend="));

        let opts = config::Options::default();
        let sysroot = filesearch::materialize_sysroot(opts.maybe_sysroot.clone());
-        let target = config::build_target_config(early_dcx, &opts, None, &sysroot);
+        let target = config::build_target_config(early_dcx, &opts, &sysroot);
        get_codegen_backend(early_dcx, &sysroot, backend_name, &target).print_version();
    }
@@ -1100,7 +1100,7 @@ pub fn describe_flag_categories(early_dcx: &EarlyDiagCtxt, matches: &Matches) ->
        let opts = config::Options::default();
        let sysroot = filesearch::materialize_sysroot(opts.maybe_sysroot.clone());
-        let target = config::build_target_config(early_dcx, &opts, None, &sysroot);
+        let target = config::build_target_config(early_dcx, &opts, &sysroot);

        get_codegen_backend(early_dcx, &sysroot, backend_name, &target).print_passes();
        return true;

@@ -392,12 +392,7 @@ pub(super) fn count_metavar_decls(matcher: &[TokenTree]) -> usize {
 #[derive(Debug, Clone)]
 pub(crate) enum NamedMatch {
     MatchedSeq(Vec<NamedMatch>),
-
-    // A metavar match of type `tt`.
-    MatchedTokenTree(rustc_ast::tokenstream::TokenTree),
-
-    // A metavar match of any type other than `tt`.
-    MatchedNonterminal(Lrc<(Nonterminal, rustc_span::Span)>),
+    MatchedSingle(ParseNtResult<Lrc<(Nonterminal, Span)>>),
 }

 /// Performs a token equality check, ignoring syntax context (that is, an unhygienic comparison)
@@ -691,11 +686,11 @@ impl TtParser {
                        }
                        Ok(nt) => nt,
                    };
-                    let m = match nt {
-                        ParseNtResult::Nt(nt) => MatchedNonterminal(Lrc::new((nt, span))),
-                        ParseNtResult::Tt(tt) => MatchedTokenTree(tt),
-                    };
-                    mp.push_match(next_metavar, seq_depth, m);
+                    mp.push_match(
+                        next_metavar,
+                        seq_depth,
+                        MatchedSingle(nt.map_nt(|nt| (Lrc::new((nt, span))))),
+                    );
                    mp.idx += 1;
                } else {
                    unreachable!()

@@ -5,7 +5,7 @@ use crate::mbe;
 use crate::mbe::diagnostics::{annotate_doc_comment, parse_failure_msg};
 use crate::mbe::macro_check;
 use crate::mbe::macro_parser::{Error, ErrorReported, Failure, Success, TtParser};
-use crate::mbe::macro_parser::{MatchedSeq, MatchedTokenTree, MatcherLoc};
+use crate::mbe::macro_parser::{MatcherLoc, NamedMatch::*};
 use crate::mbe::transcribe::transcribe;

 use ast::token::IdentIsRaw;
@@ -22,7 +22,7 @@ use rustc_lint_defs::builtin::{
     RUST_2021_INCOMPATIBLE_OR_PATTERNS, SEMICOLON_IN_EXPRESSIONS_FROM_MACROS,
 };
 use rustc_lint_defs::BuiltinLintDiag;
-use rustc_parse::parser::{Parser, Recovery};
+use rustc_parse::parser::{ParseNtResult, Parser, Recovery};
 use rustc_session::parse::ParseSess;
 use rustc_session::Session;
 use rustc_span::edition::Edition;
@@ -479,7 +479,7 @@ pub fn compile_declarative_macro(
        MatchedSeq(s) => s
            .iter()
            .map(|m| {
-                if let MatchedTokenTree(tt) = m {
+                if let MatchedSingle(ParseNtResult::Tt(tt)) = m {
                    let tt = mbe::quoted::parse(
                        &TokenStream::new(vec![tt.clone()]),
                        true,
@@ -505,7 +505,7 @@ pub fn compile_declarative_macro(
        MatchedSeq(s) => s
            .iter()
            .map(|m| {
-                if let MatchedTokenTree(tt) = m {
+                if let MatchedSingle(ParseNtResult::Tt(tt)) = m {
                    return mbe::quoted::parse(
                        &TokenStream::new(vec![tt.clone()]),
                        false,

@@ -3,14 +3,14 @@ use crate::errors::{
     CountRepetitionMisplaced, MetaVarExprUnrecognizedVar, MetaVarsDifSeqMatchers, MustRepeatOnce,
     NoSyntaxVarsExprRepeat, VarStillRepeating,
 };
-use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, MatchedTokenTree, NamedMatch};
+use crate::mbe::macro_parser::{NamedMatch, NamedMatch::*};
 use crate::mbe::{self, KleeneOp, MetaVarExpr};
 use rustc_ast::mut_visit::{self, MutVisitor};
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
 use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
 use rustc_data_structures::fx::FxHashMap;
-use rustc_errors::Diag;
-use rustc_errors::{pluralize, PResult};
+use rustc_errors::{pluralize, Diag, PResult};
+use rustc_parse::parser::ParseNtResult;
 use rustc_span::hygiene::{LocalExpnId, Transparency};
 use rustc_span::symbol::{sym, Ident, MacroRulesNormalizedIdent};
 use rustc_span::{with_metavar_spans, Span, SyntaxContext};
@@ -250,26 +250,25 @@ pub(super) fn transcribe<'a>(
                // the meta-var.
                let ident = MacroRulesNormalizedIdent::new(original_ident);
                if let Some(cur_matched) = lookup_cur_matched(ident, interp, &repeats) {
-                    match cur_matched {
-                        MatchedTokenTree(tt) => {
+                    let tt = match cur_matched {
+                        MatchedSingle(ParseNtResult::Tt(tt)) => {
                            // `tt`s are emitted into the output stream directly as "raw tokens",
                            // without wrapping them into groups.
-                            let tt = maybe_use_metavar_location(cx, &stack, sp, tt, &mut marker);
-                            result.push(tt);
+                            maybe_use_metavar_location(cx, &stack, sp, tt, &mut marker)
                        }
-                        MatchedNonterminal(nt) => {
+                        MatchedSingle(ParseNtResult::Nt(nt)) => {
                            // Other variables are emitted into the output stream as groups with
                            // `Delimiter::Invisible` to maintain parsing priorities.
                            // `Interpolated` is currently used for such groups in rustc parser.
                            marker.visit_span(&mut sp);
-                            result
-                                .push(TokenTree::token_alone(token::Interpolated(nt.clone()), sp));
+                            TokenTree::token_alone(token::Interpolated(nt.clone()), sp)
                        }
                        MatchedSeq(..) => {
                            // We were unable to descend far enough. This is an error.
                            return Err(cx.dcx().create_err(VarStillRepeating { span: sp, ident }));
                        }
-                    }
+                    };
+                    result.push(tt)
                } else {
                    // If we aren't able to match the meta-var, we push it back into the result but
                    // with modified syntax context. (I believe this supports nested macros).
@@ -424,7 +423,7 @@ fn lookup_cur_matched<'a>(
    interpolations.get(&ident).map(|mut matched| {
        for &(idx, _) in repeats {
            match matched {
-                MatchedTokenTree(_) | MatchedNonterminal(_) => break,
+                MatchedSingle(_) => break,
                MatchedSeq(ads) => matched = ads.get(idx).unwrap(),
            }
        }
@@ -514,7 +513,7 @@ fn lockstep_iter_size(
            let name = MacroRulesNormalizedIdent::new(*name);
            match lookup_cur_matched(name, interpolations, repeats) {
                Some(matched) => match matched {
-                    MatchedTokenTree(_) | MatchedNonterminal(_) => LockstepIterSize::Unconstrained,
+                    MatchedSingle(_) => LockstepIterSize::Unconstrained,
                    MatchedSeq(ads) => LockstepIterSize::Constraint(ads.len(), name),
                },
                _ => LockstepIterSize::Unconstrained,
@@ -557,7 +556,7 @@ fn count_repetitions<'a>(
    // (or at the top-level of `matched` if no depth is given).
    fn count<'a>(depth_curr: usize, depth_max: usize, matched: &NamedMatch) -> PResult<'a, usize> {
        match matched {
-            MatchedTokenTree(_) | MatchedNonterminal(_) => Ok(1),
+            MatchedSingle(_) => Ok(1),
            MatchedSeq(named_matches) => {
                if depth_curr == depth_max {
                    Ok(named_matches.len())
@@ -571,7 +570,7 @@ fn count_repetitions<'a>(
    /// Maximum depth
    fn depth(counter: usize, matched: &NamedMatch) -> usize {
        match matched {
-            MatchedTokenTree(_) | MatchedNonterminal(_) => counter,
+            MatchedSingle(_) => counter,
            MatchedSeq(named_matches) => {
                let rslt = counter + 1;
                if let Some(elem) = named_matches.first() { depth(rslt, elem) } else { rslt }
@@ -599,7 +598,7 @@ fn count_repetitions<'a>(
        }
    }

-    if let MatchedTokenTree(_) | MatchedNonterminal(_) = matched {
+    if let MatchedSingle(_) = matched {
        return Err(cx.dcx().create_err(CountRepetitionMisplaced { span: sp.entire() }));
    }

@@ -1015,7 +1015,7 @@ impl<'hir> Pat<'hir> {
        use PatKind::*;
        match self.kind {
            Wild | Never | Lit(_) | Range(..) | Binding(.., None) | Path(_) | Err(_) => true,
-            Box(s) | Ref(s, _) | Binding(.., Some(s)) => s.walk_short_(it),
+            Box(s) | Deref(s) | Ref(s, _) | Binding(.., Some(s)) => s.walk_short_(it),
            Struct(_, fields, _) => fields.iter().all(|field| field.pat.walk_short_(it)),
            TupleStruct(_, s, _) | Tuple(s, _) | Or(s) => s.iter().all(|p| p.walk_short_(it)),
            Slice(before, slice, after) => {
@@ -1042,7 +1042,7 @@ impl<'hir> Pat<'hir> {
        use PatKind::*;
        match self.kind {
            Wild | Never | Lit(_) | Range(..) | Binding(.., None) | Path(_) | Err(_) => {}
-            Box(s) | Ref(s, _) | Binding(.., Some(s)) => s.walk_(it),
+            Box(s) | Deref(s) | Ref(s, _) | Binding(.., Some(s)) => s.walk_(it),
            Struct(_, fields, _) => fields.iter().for_each(|field| field.pat.walk_(it)),
            TupleStruct(_, s, _) | Tuple(s, _) | Or(s) => s.iter().for_each(|p| p.walk_(it)),
            Slice(before, slice, after) => {
@@ -1185,6 +1185,9 @@ pub enum PatKind<'hir> {
    /// A `box` pattern.
    Box(&'hir Pat<'hir>),

+    /// A `deref` pattern (currently `deref!()` macro-based syntax).
+    Deref(&'hir Pat<'hir>),
+
    /// A reference pattern (e.g., `&mut (a, b)`).
    Ref(&'hir Pat<'hir>, Mutability),

@@ -660,7 +660,9 @@ pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat<'v>) -> V:
        PatKind::Tuple(tuple_elements, _) => {
            walk_list!(visitor, visit_pat, tuple_elements);
        }
-        PatKind::Box(ref subpattern) | PatKind::Ref(ref subpattern, _) => {
+        PatKind::Box(ref subpattern)
+        | PatKind::Deref(ref subpattern)
+        | PatKind::Ref(ref subpattern, _) => {
            try_visit!(visitor.visit_pat(subpattern));
        }
        PatKind::Binding(_, _hir_id, ident, ref optional_subpattern) => {

@@ -668,7 +668,7 @@ fn resolve_local<'tcx>(
        | PatKind::TupleStruct(_, subpats, _)
        | PatKind::Tuple(subpats, _) => subpats.iter().any(|p| is_binding_pat(p)),

-        PatKind::Box(subpat) => is_binding_pat(subpat),
+        PatKind::Box(subpat) | PatKind::Deref(subpat) => is_binding_pat(subpat),

        PatKind::Ref(_, _)
        | PatKind::Binding(hir::BindingAnnotation(hir::ByRef::No, _), ..)

@@ -1808,6 +1808,12 @@ impl<'a> State<'a> {
                    self.pclose();
                }
            }
+            PatKind::Deref(inner) => {
+                self.word("deref!");
+                self.popen();
+                self.print_pat(inner);
+                self.pclose();
+            }
            PatKind::Ref(inner, mutbl) => {
                let is_range_inner = matches!(inner.kind, PatKind::Range(..));
                self.word("&");

@@ -463,6 +463,7 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
            }
            PatKind::Or(_)
            | PatKind::Box(_)
+            | PatKind::Deref(_)
            | PatKind::Ref(..)
            | PatKind::Wild
            | PatKind::Err(_) => {

@@ -719,7 +719,7 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> {
                self.cat_pattern_(place_with_id, subpat, op)?;
            }

-            PatKind::Box(subpat) | PatKind::Ref(subpat, _) => {
+            PatKind::Box(subpat) | PatKind::Ref(subpat, _) | PatKind::Deref(subpat) => {
                // box p1, &p1, &mut p1.  we can ignore the mutability of
                // PatKind::Ref since that information is already contained
                // in the type.

@@ -210,10 +210,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
            PatKind::Tuple(elements, ddpos) => {
                self.check_pat_tuple(pat.span, elements, ddpos, expected, pat_info)
            }
-            PatKind::Box(inner) if self.tcx.features().deref_patterns => {
-                self.check_pat_deref(pat.span, inner, expected, pat_info)
-            }
            PatKind::Box(inner) => self.check_pat_box(pat.span, inner, expected, pat_info),
+            PatKind::Deref(inner) => self.check_pat_deref(pat.span, inner, expected, pat_info),
            PatKind::Ref(inner, mutbl) => self.check_pat_ref(pat, inner, mutbl, expected, pat_info),
            PatKind::Slice(before, slice, after) => {
                self.check_pat_slice(pat.span, before, slice, after, expected, pat_info)
@@ -297,6 +295,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
            | PatKind::TupleStruct(..)
            | PatKind::Tuple(..)
            | PatKind::Box(_)
+            | PatKind::Deref(_)
            | PatKind::Range(..)
            | PatKind::Slice(..) => AdjustMode::Peel,
            // A never pattern behaves somewhat like a literal or unit variant.
@@ -762,6 +761,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
            | PatKind::Binding(..)
            | PatKind::Path(..)
            | PatKind::Box(..)
+            | PatKind::Deref(_)
            | PatKind::Ref(..)
            | PatKind::Lit(..)
            | PatKind::Range(..)

@@ -148,7 +148,7 @@ pub fn check_dirty_clean_annotations(tcx: TyCtxt<'_>) {
    let crate_items = tcx.hir_crate_items(());

-    for id in crate_items.items() {
+    for id in crate_items.free_items() {
        dirty_clean_visitor.check_item(id.owner_id.def_id);
    }

@@ -341,51 +341,22 @@ pub fn run_compiler<R: Send>(config: Config, f: impl FnOnce(&Compiler) -> R + Se
        let sysroot = filesearch::materialize_sysroot(config.opts.maybe_sysroot.clone());

-        let (codegen_backend, target_override) = match config.make_codegen_backend {
-            None => {
-                // Build a target without override, so that it can override the backend if needed
-                let target =
-                    config::build_target_config(&early_dcx, &config.opts, None, &sysroot);
-
-                let backend = util::get_codegen_backend(
-                    &early_dcx,
-                    &sysroot,
-                    config.opts.unstable_opts.codegen_backend.as_deref(),
-                    &target,
-                );
-
-                // target_override is documented to be called before init(), so this is okay
-                let target_override = backend.target_override(&config.opts);
-
-                // Assert that we don't use target's override of the backend and
-                // backend's override of the target at the same time
-                if config.opts.unstable_opts.codegen_backend.is_none()
-                    && target.default_codegen_backend.is_some()
-                    && target_override.is_some()
-                {
-                    rustc_middle::bug!(
-                        "Codegen backend requested target override even though the target requested the backend"
-                    );
-                }
-
-                (backend, target_override)
-            }
+        let target = config::build_target_config(&early_dcx, &config.opts, &sysroot);
+
+        let codegen_backend = match config.make_codegen_backend {
+            None => util::get_codegen_backend(
+                &early_dcx,
+                &sysroot,
+                config.opts.unstable_opts.codegen_backend.as_deref(),
+                &target,
+            ),
            Some(make_codegen_backend) => {
-                // N.B. `make_codegen_backend` takes precedence over `target.default_codegen_backend`,
-                // which is ignored in this case.
-                let backend = make_codegen_backend(&config.opts);
-
-                // target_override is documented to be called before init(), so this is okay
-                let target_override = backend.target_override(&config.opts);
-
-                (backend, target_override)
+                // N.B. `make_codegen_backend` takes precedence over
+                // `target.default_codegen_backend`, which is ignored in this case.
+                make_codegen_backend(&config.opts)
            }
        };

-        // Re-build target with the (potential) override
-        let target_cfg =
-            config::build_target_config(&early_dcx, &config.opts, target_override, &sysroot);
-
        let temps_dir = config.opts.unstable_opts.temps_dir.as_deref().map(PathBuf::from);

        let bundle = match rustc_errors::fluent_bundle(
@@ -418,7 +389,7 @@ pub fn run_compiler<R: Send>(config: Config, f: impl FnOnce(&Compiler) -> R + Se
            locale_resources,
            config.lint_caps,
            config.file_loader,
-            target_cfg,
+            target,
            sysroot,
            util::rustc_version_str().unwrap_or("unknown"),
            config.ice_file,

@@ -41,8 +41,7 @@ fn mk_session(matches: getopts::Matches) -> (Session, Cfg) {
    let sysroot = filesearch::materialize_sysroot(sessopts.maybe_sysroot.clone());

-    let target_cfg =
-        rustc_session::config::build_target_config(&early_dcx, &sessopts, None, &sysroot);
+    let target = rustc_session::config::build_target_config(&early_dcx, &sessopts, &sysroot);

    let sess = build_session(
        early_dcx,
@@ -53,7 +52,7 @@ fn mk_session(matches: getopts::Matches) -> (Session, Cfg) {
        vec![],
        Default::default(),
        None,
-        target_cfg,
+        target,
        sysroot,
        "",
        None,

@@ -1183,7 +1183,7 @@ impl EarlyLintPass for UnusedParens {
                self.check_unused_parens_pat(cx, &f.pat, false, false, keep_space);
            },
            // Avoid linting on `i @ (p0 | .. | pn)` and `box (p0 | .. | pn)`, #64106.
-            Ident(.., Some(p)) | Box(p) => self.check_unused_parens_pat(cx, p, true, false, keep_space),
+            Ident(.., Some(p)) | Box(p) | Deref(p) => self.check_unused_parens_pat(cx, p, true, false, keep_space),
            // Avoid linting on `&(mut x)` as `&mut x` has a different meaning, #55342.
            // Also avoid linting on `& mut? (p0 | .. | pn)`, #64106.
            Ref(p, m) => self.check_unused_parens_pat(cx, p, true, *m == Mutability::Not, keep_space),

@@ -166,12 +166,12 @@ impl<'hir> Map<'hir> {
    #[inline]
    pub fn items(self) -> impl Iterator<Item = ItemId> + 'hir {
-        self.tcx.hir_crate_items(()).items.iter().copied()
+        self.tcx.hir_crate_items(()).free_items.iter().copied()
    }

    #[inline]
    pub fn module_items(self, module: LocalModDefId) -> impl Iterator<Item = ItemId> + 'hir {
-        self.tcx.hir_module_items(module).items()
+        self.tcx.hir_module_items(module).free_items()
    }

    pub fn def_key(self, def_id: LocalDefId) -> DefKey {
@@ -418,7 +418,7 @@ impl<'hir> Map<'hir> {
        V: Visitor<'hir>,
    {
        let krate = self.tcx.hir_crate_items(());
-        walk_list!(visitor, visit_item, krate.items().map(|id| self.item(id)));
+        walk_list!(visitor, visit_item, krate.free_items().map(|id| self.item(id)));
        walk_list!(visitor, visit_trait_item, krate.trait_items().map(|id| self.trait_item(id)));
        walk_list!(visitor, visit_impl_item, krate.impl_items().map(|id| self.impl_item(id)));
        walk_list!(
@@ -436,7 +436,7 @@ impl<'hir> Map<'hir> {
        V: Visitor<'hir>,
    {
        let module = self.tcx.hir_module_items(module);
-        walk_list!(visitor, visit_item, module.items().map(|id| self.item(id)));
+        walk_list!(visitor, visit_item, module.free_items().map(|id| self.item(id)));
        walk_list!(visitor, visit_trait_item, module.trait_items().map(|id| self.trait_item(id)));
        walk_list!(visitor, visit_impl_item, module.impl_items().map(|id| self.impl_item(id)));
        walk_list!(
@@ -1197,7 +1197,7 @@ pub(super) fn hir_module_items(tcx: TyCtxt<'_>, module_id: LocalModDefId) -> Mod
    } = collector;

    return ModuleItems {
        submodules: submodules.into_boxed_slice(),
-        items: items.into_boxed_slice(),
+        free_items: items.into_boxed_slice(),
        trait_items: trait_items.into_boxed_slice(),
        impl_items: impl_items.into_boxed_slice(),
        foreign_items: foreign_items.into_boxed_slice(),
@@ -1226,7 +1226,7 @@ pub(crate) fn hir_crate_items(tcx: TyCtxt<'_>, _: ()) -> ModuleItems {
    return ModuleItems {
        submodules: submodules.into_boxed_slice(),
-        items: items.into_boxed_slice(),
+        free_items: items.into_boxed_slice(),
        trait_items: trait_items.into_boxed_slice(),
        impl_items: impl_items.into_boxed_slice(),
        foreign_items: foreign_items.into_boxed_slice(),

@@ -22,7 +22,7 @@ use rustc_span::{ErrorGuaranteed, ExpnId};
 #[derive(Debug, HashStable, Encodable, Decodable)]
 pub struct ModuleItems {
     submodules: Box<[OwnerId]>,
-    items: Box<[ItemId]>,
+    free_items: Box<[ItemId]>,
     trait_items: Box<[TraitItemId]>,
     impl_items: Box<[ImplItemId]>,
     foreign_items: Box<[ForeignItemId]>,
@@ -30,14 +30,22 @@
 }

 impl ModuleItems {
-    pub fn items(&self) -> impl Iterator<Item = ItemId> + '_ {
-        self.items.iter().copied()
+    /// Returns all non-associated locally defined items in all modules.
+    ///
+    /// Note that this does *not* include associated items of `impl` blocks! It also does not
+    /// include foreign items. If you want to e.g. get all functions, use `definitions()` below.
+    ///
+    /// However, this does include the `impl` blocks themselves.
+    pub fn free_items(&self) -> impl Iterator<Item = ItemId> + '_ {
+        self.free_items.iter().copied()
     }

     pub fn trait_items(&self) -> impl Iterator<Item = TraitItemId> + '_ {
         self.trait_items.iter().copied()
     }

+    /// Returns all items that are associated with some `impl` block (both inherent and trait impl
+    /// blocks).
     pub fn impl_items(&self) -> impl Iterator<Item = ImplItemId> + '_ {
         self.impl_items.iter().copied()
     }
@@ -47,7 +55,7 @@
     }

     pub fn owners(&self) -> impl Iterator<Item = OwnerId> + '_ {
-        self.items
+        self.free_items
             .iter()
             .map(|id| id.owner_id)
             .chain(self.trait_items.iter().map(|id| id.owner_id))
@@ -63,7 +71,7 @@
         &self,
         f: impl Fn(ItemId) -> Result<(), ErrorGuaranteed> + DynSend + DynSync,
     ) -> Result<(), ErrorGuaranteed> {
-        try_par_for_each_in(&self.items[..], |&id| f(id))
+        try_par_for_each_in(&self.free_items[..], |&id| f(id))
     }

     pub fn par_trait_items(
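
As a standalone illustration of the naming introduced above (example code assumed, not taken from the compiler), here is which items of a module would land in `free_items`, `trait_items`, and `impl_items`:

```rust
// Free items: top-level items, including the `trait` and `impl` blocks themselves.
// Associated items inside those blocks are reported separately.
struct S; // free item

trait Greet {
    fn hello(&self); // trait item (`trait_items`), not a free item
}

impl Greet for S {
    // the surrounding `impl` block is itself a free item
    fn hello(&self) {} // associated item (`impl_items`), not a free item
}

fn standalone() {} // free item

fn main() {
    S.hello();
    standalone();
}
```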

@@ -11,12 +11,18 @@
 /// [`span_bug`]: crate::span_bug
 #[macro_export]
 macro_rules! bug {
-    () => ( $crate::bug!("impossible case reached") );
-    ($msg:expr) => ({ $crate::util::bug::bug_fmt(::std::format_args!($msg)) });
-    ($msg:expr,) => ({ $crate::bug!($msg) });
-    ($fmt:expr, $($arg:tt)+) => ({
+    () => (
+        $crate::bug!("impossible case reached")
+    );
+    ($msg:expr) => (
+        $crate::util::bug::bug_fmt(::std::format_args!($msg))
+    );
+    ($msg:expr,) => (
+        $crate::bug!($msg)
+    );
+    ($fmt:expr, $($arg:tt)+) => (
         $crate::util::bug::bug_fmt(::std::format_args!($fmt, $($arg)+))
-    });
+    );
 }

 /// A macro for triggering an ICE with a span.
@@ -30,11 +36,15 @@ macro_rules! bug {
 /// [`DiagCtxt::span_delayed_bug`]: rustc_errors::DiagCtxt::span_delayed_bug
 #[macro_export]
 macro_rules! span_bug {
-    ($span:expr, $msg:expr) => ({ $crate::util::bug::span_bug_fmt($span, ::std::format_args!($msg)) });
-    ($span:expr, $msg:expr,) => ({ $crate::span_bug!($span, $msg) });
-    ($span:expr, $fmt:expr, $($arg:tt)+) => ({
+    ($span:expr, $msg:expr) => (
+        $crate::util::bug::span_bug_fmt($span, ::std::format_args!($msg))
+    );
+    ($span:expr, $msg:expr,) => (
+        $crate::span_bug!($span, $msg)
+    );
+    ($span:expr, $fmt:expr, $($arg:tt)+) => (
         $crate::util::bug::span_bug_fmt($span, ::std::format_args!($fmt, $($arg)+))
-    });
+    );
 }

 ///////////////////////////////////////////////////////////////////////////
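
The reformatting above only restyles the macro arms; their structure is unchanged. A self-contained mirror of that arm structure (an assumed example, not rustc code) shows how the four arms relate: no arguments, a single message, a trailing comma that delegates, and a format string with arguments.

```rust
use std::fmt;

// Assumed stand-in for rustc's `bug_fmt`: report a formatted internal error.
fn report(args: fmt::Arguments<'_>) -> ! {
    panic!("{args}");
}

macro_rules! my_bug {
    () => (
        my_bug!("impossible case reached")
    );
    ($msg:expr) => (
        report(::std::format_args!($msg))
    );
    ($msg:expr,) => (
        my_bug!($msg)
    );
    ($fmt:expr, $($arg:tt)+) => (
        report(::std::format_args!($fmt, $($arg)+))
    );
}

fn main() {
    let value = 3;
    if value > 10 {
        // Aborts with the formatted message, the way `bug!` reports an ICE.
        my_bug!("unexpected value: {}", value);
    }
}
```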

@@ -1179,7 +1179,7 @@ impl<'tcx> fmt::Display for Pat<'tcx> {
                write!(f, "{subpattern}")
            }
            PatKind::DerefPattern { ref subpattern } => {
-                write!(f, "k#deref {subpattern}")
+                write!(f, "deref!({subpattern})")
            }
            PatKind::Constant { value } => write!(f, "{value}"),
            PatKind::InlineConstant { def: _, ref subpattern } => {

@@ -257,7 +257,7 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
                return self.lower_path(qpath, pat.hir_id, pat.span);
            }

-            hir::PatKind::Box(subpattern) if self.tcx.features().deref_patterns => {
+            hir::PatKind::Deref(subpattern) => {
                PatKind::DerefPattern { subpattern: self.lower_pattern(subpattern) }
            }

            hir::PatKind::Ref(subpattern, _) | hir::PatKind::Box(subpattern) => {

@@ -1526,7 +1526,7 @@ fn collect_roots(tcx: TyCtxt<'_>, mode: MonoItemCollectionStrategy) -> Vec<MonoI
    let crate_items = tcx.hir_crate_items(());

-    for id in crate_items.items() {
+    for id in crate_items.free_items() {
        collector.process_item(id);
    }

@@ -390,8 +390,6 @@ parse_invalid_dyn_keyword = invalid `dyn` keyword
 parse_invalid_expression_in_let_else = a `{$operator}` expression cannot be directly assigned in `let...else`

 parse_invalid_identifier_with_leading_number = identifiers cannot start with a number

-parse_invalid_interpolated_expression = invalid interpolated expression
-
 parse_invalid_literal_suffix_on_tuple_index = suffixes on a tuple index are invalid
     .label = invalid suffix `{$suffix}`
     .tuple_exception_line_1 = `{$suffix}` is *temporarily* accepted on tuple index fields as it was incorrectly accepted on stable for a few releases

@@ -850,13 +850,6 @@ pub(crate) struct StructLiteralNotAllowedHereSugg {
    pub right: Span,
 }

-#[derive(Diagnostic)]
-#[diag(parse_invalid_interpolated_expression)]
-pub(crate) struct InvalidInterpolatedExpression {
-    #[primary_span]
-    pub span: Span,
-}
-
 #[derive(Diagnostic)]
 #[diag(parse_invalid_literal_suffix_on_tuple_index)]
 pub(crate) struct InvalidLiteralSuffixOnTupleIndex {

@@ -3,11 +3,12 @@ use crate::errors::{
     SuffixedLiteralInAttribute,
 };
 use crate::fluent_generated as fluent;
+use crate::maybe_whole;

 use super::{AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, PathStyle};
 use rustc_ast as ast;
 use rustc_ast::attr;
-use rustc_ast::token::{self, Delimiter, Nonterminal};
+use rustc_ast::token::{self, Delimiter};
 use rustc_errors::{codes::*, Diag, PResult};
 use rustc_span::{sym, BytePos, Span};
 use thin_vec::ThinVec;
@@ -251,25 +252,15 @@ impl<'a> Parser<'a> {
    /// PATH `=` UNSUFFIXED_LIT
    /// The delimiters or `=` are still put into the resulting token stream.
    pub fn parse_attr_item(&mut self, capture_tokens: bool) -> PResult<'a, ast::AttrItem> {
-        let item = match &self.token.kind {
-            token::Interpolated(nt) => match &nt.0 {
-                Nonterminal::NtMeta(item) => Some(item.clone().into_inner()),
-                _ => None,
-            },
-            _ => None,
-        };
-        Ok(if let Some(item) = item {
-            self.bump();
-            item
-        } else {
-            let do_parse = |this: &mut Self| {
-                let path = this.parse_path(PathStyle::Mod)?;
-                let args = this.parse_attr_args()?;
-                Ok(ast::AttrItem { path, args, tokens: None })
-            };
-            // Attr items don't have attributes
-            if capture_tokens { self.collect_tokens_no_attrs(do_parse) } else { do_parse(self) }?
-        })
+        maybe_whole!(self, NtMeta, |attr| attr.into_inner());
+
+        let do_parse = |this: &mut Self| {
+            let path = this.parse_path(PathStyle::Mod)?;
+            let args = this.parse_attr_args()?;
+            Ok(ast::AttrItem { path, args, tokens: None })
+        };
+        // Attr items don't have attributes
+        if capture_tokens { self.collect_tokens_no_attrs(do_parse) } else { do_parse(self) }
    }

    /// Parses attributes that appear after the opening of an item. These should
@@ -371,22 +362,18 @@ impl<'a> Parser<'a> {
    /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
    /// ```
    pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
-        let nt_meta = match &self.token.kind {
-            token::Interpolated(nt) => match &nt.0 {
-                token::NtMeta(e) => Some(e.clone()),
-                _ => None,
-            },
-            _ => None,
-        };
-
-        if let Some(item) = nt_meta {
-            match item.meta(item.path.span) {
+        // We can't use `maybe_whole` here because it would bump in the `None`
+        // case, which we don't want.
+        if let token::Interpolated(nt) = &self.token.kind
+            && let token::NtMeta(attr_item) = &nt.0
+        {
+            match attr_item.meta(attr_item.path.span) {
                Some(meta) => {
                    self.bump();
                    return Ok(meta);
                }
                None => self.unexpected()?,
-            };
+            }
        }

        let lo = self.token.span;

@@ -2053,16 +2053,6 @@ impl<'a> Parser<'a> {
        &mut self,
        mk_lit_char: impl FnOnce(Symbol, Span) -> L,
    ) -> PResult<'a, L> {
-        if let token::Interpolated(nt) = &self.token.kind
-            && let token::NtExpr(e) | token::NtLiteral(e) = &nt.0
-            && matches!(e.kind, ExprKind::Err(_))
-        {
-            let mut err = self
-                .dcx()
-                .create_err(errors::InvalidInterpolatedExpression { span: self.token.span });
-            err.downgrade_to_delayed_bug();
-            return Err(err);
-        }
        let token = self.token.clone();
        let err = |self_: &Self| {
            let msg = format!("unexpected token: {}", super::token_descr(&token));

@@ -6,6 +6,7 @@ use super::{
 };
 use crate::errors::{self, MacroExpandsToAdtField};
 use crate::fluent_generated as fluent;
+use crate::maybe_whole;
 use ast::token::IdentIsRaw;
 use rustc_ast::ast::*;
 use rustc_ast::ptr::P;
@@ -115,17 +116,10 @@ impl<'a> Parser<'a> {
        fn_parse_mode: FnParseMode,
        force_collect: ForceCollect,
    ) -> PResult<'a, Option<Item>> {
-        // Don't use `maybe_whole` so that we have precise control
-        // over when we bump the parser
-        if let token::Interpolated(nt) = &self.token.kind
-            && let token::NtItem(item) = &nt.0
-        {
-            let mut item = item.clone();
-            self.bump();
-
+        maybe_whole!(self, NtItem, |item| {
            attrs.prepend_to_nt_inner(&mut item.attrs);
-            return Ok(Some(item.into_inner()));
-        };
+            Some(item.into_inner())
+        });

        let item =
            self.collect_tokens_trailing_token(attrs, force_collect, |this: &mut Self, attrs| {

@@ -20,7 +20,7 @@ pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
 pub use path::PathStyle;

 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, Delimiter, Nonterminal, Token, TokenKind};
+use rustc_ast::token::{self, Delimiter, Token, TokenKind};
 use rustc_ast::tokenstream::{AttributesData, DelimSpacing, DelimSpan, Spacing};
 use rustc_ast::tokenstream::{TokenStream, TokenTree, TokenTreeCursor};
 use rustc_ast::util::case::Case;
@@ -93,12 +93,13 @@ pub enum TrailingToken {
 #[macro_export]
 macro_rules! maybe_whole {
     ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
-        if let token::Interpolated(nt) = &$p.token.kind {
-            if let token::$constructor(x) = &nt.0 {
-                let $x = x.clone();
-                $p.bump();
-                return Ok($e);
-            }
+        if let token::Interpolated(nt) = &$p.token.kind
+            && let token::$constructor(x) = &nt.0
+        {
+            #[allow(unused_mut)]
+            let mut $x = x.clone();
+            $p.bump();
+            return Ok($e);
         }
     };
 }
@@ -1407,7 +1408,7 @@
    /// so emit a proper diagnostic.
    // Public for rustfmt usage.
    pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> {
-        maybe_whole!(self, NtVis, |x| x.into_inner());
+        maybe_whole!(self, NtVis, |vis| vis.into_inner());

        if !self.eat_keyword(kw::Pub) {
            // We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
@@ -1584,8 +1585,21 @@ pub enum FlatToken {
     Empty,
 }

-#[derive(Debug)]
-pub enum ParseNtResult {
-    Nt(Nonterminal),
+// Metavar captures of various kinds.
+#[derive(Clone, Debug)]
+pub enum ParseNtResult<NtType> {
     Tt(TokenTree),
+    Nt(NtType),
+}
+
+impl<T> ParseNtResult<T> {
+    pub fn map_nt<F, U>(self, mut f: F) -> ParseNtResult<U>
+    where
+        F: FnMut(T) -> U,
+    {
+        match self {
+            ParseNtResult::Tt(tt) => ParseNtResult::Tt(tt),
+            ParseNtResult::Nt(nt) => ParseNtResult::Nt(f(nt)),
+        }
+    }
 }
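
For context, a stripped-down mirror of the generic `ParseNtResult` shape introduced above (an assumed example, not compiler code): making the `Nt` payload a type parameter is what lets `map_nt` rewrap the nonterminal (e.g. in an `Lrc`) while passing `Tt` through unchanged.

```rust
// Standalone mirror of the shape above; `String` stands in for a token tree.
#[derive(Clone, Debug, PartialEq)]
enum NtResult<T> {
    Tt(String),
    Nt(T),
}

impl<T> NtResult<T> {
    // Convert only the `Nt` payload, leaving `Tt` untouched.
    fn map_nt<U>(self, f: impl FnOnce(T) -> U) -> NtResult<U> {
        match self {
            NtResult::Tt(tt) => NtResult::Tt(tt),
            NtResult::Nt(nt) => NtResult::Nt(f(nt)),
        }
    }
}

fn main() {
    let r = NtResult::Nt(42u32);
    assert_eq!(r.map_nt(|n| n.to_string()), NtResult::Nt("42".to_string()));
}
```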

@@ -1,5 +1,5 @@
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, Delimiter, Nonterminal::*, NonterminalKind, Token};
+use rustc_ast::token::{self, Delimiter, Nonterminal, Nonterminal::*, NonterminalKind, Token};
 use rustc_ast::HasTokens;
 use rustc_ast_pretty::pprust;
 use rustc_errors::PResult;
@@ -66,15 +66,14 @@ impl<'a> Parser<'a> {
                token::Interpolated(nt) => may_be_ident(&nt.0),
                _ => false,
            },
-            NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr => {
-                match &token.kind {
+            NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr => match &token.kind {
                token::Ident(..) | // box, ref, mut, and other identifiers (can stricten)
                token::OpenDelim(Delimiter::Parenthesis) | // tuple pattern
                token::OpenDelim(Delimiter::Bracket) | // slice pattern
                token::BinOp(token::And) | // reference
                token::BinOp(token::Minus) | // negative literal
                token::AndAnd | // double reference
                token::Literal(_) | // literal
                token::DotDot | // range pattern (future compat)
                token::DotDotDot | // range pattern (future compat)
                token::ModSep | // path
@@ -84,8 +83,7 @@ impl<'a> Parser<'a> {
                token::BinOp(token::Or) => matches!(kind, NonterminalKind::PatWithOr),
                token::Interpolated(nt) => may_be_ident(&nt.0),
                _ => false,
-                }
-            }
+            },
            NonterminalKind::Lifetime => match &token.kind {
                token::Lifetime(_) => true,
                token::Interpolated(nt) => {
@@ -102,7 +100,10 @@ impl<'a> Parser<'a> {
    /// Parse a non-terminal (e.g. MBE `:pat` or `:ident`). Inlined because there is only one call
    /// site.
    #[inline]
-    pub fn parse_nonterminal(&mut self, kind: NonterminalKind) -> PResult<'a, ParseNtResult> {
+    pub fn parse_nonterminal(
+        &mut self,
+        kind: NonterminalKind,
+    ) -> PResult<'a, ParseNtResult<Nonterminal>> {
        // A `macro_rules!` invocation may pass a captured item/expr to a proc-macro,
        // which requires having captured tokens available. Since we cannot determine
        // in advance whether or not a proc-macro will be (transitively) invoked,

@@ -435,7 +435,7 @@ impl<'a> Parser<'a> {
        syntax_loc: Option<PatternLocation>,
    ) -> PResult<'a, P<Pat>> {
        maybe_recover_from_interpolated_ty_qpath!(self, true);
-        maybe_whole!(self, NtPat, |x| x);
+        maybe_whole!(self, NtPat, |pat| pat);

        let mut lo = self.token.span;
@@ -498,11 +498,14 @@ impl<'a> Parser<'a> {
            } else {
                PatKind::Lit(const_expr)
            }
+        } else if self.is_builtin() {
+            self.parse_pat_builtin()?
+        }
        // Don't eagerly error on semantically invalid tokens when matching
        // declarative macros, as the input to those doesn't have to be
        // semantically valid. For attribute/derive proc macros this is not the
        // case, so doing the recovery for them is fine.
-        } else if self.can_be_ident_pat()
+        else if self.can_be_ident_pat()
            || (self.is_lit_bad_ident().is_some() && self.may_recover())
        {
            // Parse `ident @ pat`
@@ -1119,6 +1122,21 @@ impl<'a> Parser<'a> {
        .contains(&self.token.kind)
    }

+    fn parse_pat_builtin(&mut self) -> PResult<'a, PatKind> {
+        self.parse_builtin(|self_, _lo, ident| {
+            Ok(match ident.name {
+                // builtin#deref(PAT)
+                sym::deref => Some(ast::PatKind::Deref(self_.parse_pat_allow_top_alt(
+                    None,
+                    RecoverComma::Yes,
+                    RecoverColon::Yes,
+                    CommaRecoveryMode::LikelyTuple,
+                )?)),
+                _ => None,
+            })
+        })
+    }
+
    /// Parses `box pat`
    fn parse_pat_box(&mut self) -> PResult<'a, PatKind> {
        let box_span = self.prev_token.span;

@@ -40,8 +40,8 @@ impl<'a> Parser<'a> {
        }))
    }

-    /// If `force_collect` is [`ForceCollect::Yes`], forces collection of tokens regardless of whether
-    /// or not we have attributes
+    /// If `force_collect` is [`ForceCollect::Yes`], forces collection of tokens regardless of
+    /// whether or not we have attributes.
    // Public for `cfg_eval` macro expansion.
    pub fn parse_stmt_without_recovery(
        &mut self,
@@ -51,18 +51,12 @@ impl<'a> Parser<'a> {
        let attrs = self.parse_outer_attributes()?;
        let lo = self.token.span;

-        // Don't use `maybe_whole` so that we have precise control
-        // over when we bump the parser
-        if let token::Interpolated(nt) = &self.token.kind
-            && let token::NtStmt(stmt) = &nt.0
-        {
-            let mut stmt = stmt.clone();
-            self.bump();
+        maybe_whole!(self, NtStmt, |stmt| {
            stmt.visit_attrs(|stmt_attrs| {
                attrs.prepend_to_nt_inner(stmt_attrs);
            });
-            return Ok(Some(stmt.into_inner()));
-        }
+            Some(stmt.into_inner())
+        });

        if self.token.is_keyword(kw::Mut) && self.is_keyword_ahead(1, &[kw::Let]) {
            self.bump();
@@ -539,7 +533,7 @@ impl<'a> Parser<'a> {
        blk_mode: BlockCheckMode,
        can_be_struct_literal: bool,
    ) -> PResult<'a, (AttrVec, P<Block>)> {
-        maybe_whole!(self, NtBlock, |x| (AttrVec::new(), x));
+        maybe_whole!(self, NtBlock, |block| (AttrVec::new(), block));

        let maybe_ident = self.prev_token.clone();
        self.maybe_recover_unexpected_block_label();
@@ -643,7 +637,7 @@ impl<'a> Parser<'a> {
        recover: AttemptLocalParseRecovery,
    ) -> PResult<'a, Option<Stmt>> {
        // Skip looking for a trailing semicolon when we have an interpolated statement.
-        maybe_whole!(self, NtStmt, |x| Some(x.into_inner()));
+        maybe_whole!(self, NtStmt, |stmt| Some(stmt.into_inner()));

        let Some(mut stmt) = self.parse_stmt_without_recovery(true, ForceCollect::No)? else {
            return Ok(None);

View File

@ -250,7 +250,7 @@ impl<'a> Parser<'a> {
) -> PResult<'a, P<Ty>> { ) -> PResult<'a, P<Ty>> {
let allow_qpath_recovery = recover_qpath == RecoverQPath::Yes; let allow_qpath_recovery = recover_qpath == RecoverQPath::Yes;
maybe_recover_from_interpolated_ty_qpath!(self, allow_qpath_recovery); maybe_recover_from_interpolated_ty_qpath!(self, allow_qpath_recovery);
maybe_whole!(self, NtTy, |x| x); maybe_whole!(self, NtTy, |ty| ty);
let lo = self.token.span; let lo = self.token.span;
let mut impl_dyn_multi = false; let mut impl_dyn_multi = false;

View File

@ -831,7 +831,7 @@ fn create_and_seed_worklist(
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let crate_items = tcx.hir_crate_items(()); let crate_items = tcx.hir_crate_items(());
for id in crate_items.items() { for id in crate_items.free_items() {
check_item(tcx, &mut worklist, &mut struct_constructors, &mut unsolved_impl_item, id); check_item(tcx, &mut worklist, &mut struct_constructors, &mut unsolved_impl_item, id);
} }
@ -1084,7 +1084,7 @@ fn check_mod_deathness(tcx: TyCtxt<'_>, module: LocalModDefId) {
let module_items = tcx.hir_module_items(module); let module_items = tcx.hir_module_items(module);
for item in module_items.items() { for item in module_items.free_items() {
let def_kind = tcx.def_kind(item.owner_id); let def_kind = tcx.def_kind(item.owner_id);
let mut dead_codes = Vec::new(); let mut dead_codes = Vec::new();

View File

@ -300,6 +300,7 @@ impl<'v> hir_visit::Visitor<'v> for StatCollector<'v> {
Path, Path,
Tuple, Tuple,
Box, Box,
Deref,
Ref, Ref,
Lit, Lit,
Range, Range,
@ -566,6 +567,7 @@ impl<'v> ast_visit::Visitor<'v> for StatCollector<'v> {
Path, Path,
Tuple, Tuple,
Box, Box,
Deref,
Ref, Ref,
Lit, Lit,
Range, Range,

View File

@ -437,7 +437,7 @@ fn reachable_set(tcx: TyCtxt<'_>, (): ()) -> LocalDefIdSet {
// trait is a lang item. // trait is a lang item.
let crate_items = tcx.hir_crate_items(()); let crate_items = tcx.hir_crate_items(());
for id in crate_items.items() { for id in crate_items.free_items() {
check_item(tcx, id, &mut reachable_context.worklist, effective_visibilities); check_item(tcx, id, &mut reachable_context.worklist, effective_visibilities);
} }

View File

@ -22,6 +22,10 @@ smallvec = { version = "1.8.1", features = ["union"] }
tracing = "0.1" tracing = "0.1"
# tidy-alphabetical-end # tidy-alphabetical-end
[dev-dependencies]
tracing-subscriber = { version = "0.3.3", default-features = false, features = ["fmt", "env-filter", "ansi"] }
tracing-tree = "0.2.0"
[features] [features]
default = ["rustc"] default = ["rustc"]
rustc = [ rustc = [

View File

@ -819,6 +819,81 @@ impl<Cx: PatCx> Constructor<Cx> {
} }
}) })
} }
pub(crate) fn fmt_fields(
&self,
f: &mut fmt::Formatter<'_>,
ty: &Cx::Ty,
mut fields: impl Iterator<Item = impl fmt::Debug>,
) -> fmt::Result {
let mut first = true;
let mut start_or_continue = |s| {
if first {
first = false;
""
} else {
s
}
};
let mut start_or_comma = || start_or_continue(", ");
match self {
Struct | Variant(_) | UnionField => {
Cx::write_variant_name(f, self, ty)?;
// Without `cx`, we can't know which field corresponds to which, so we can't
// get the names of the fields. Instead we just display everything as a tuple
// struct, which should be good enough.
write!(f, "(")?;
for p in fields {
write!(f, "{}{:?}", start_or_comma(), p)?;
}
write!(f, ")")?;
}
// Note: given the expansion of `&str` patterns done in `expand_pattern`, we should
// be careful to detect strings here. However a string literal pattern will never
// be reported as a non-exhaustiveness witness, so we can ignore this issue.
Ref => {
write!(f, "&{:?}", &fields.next().unwrap())?;
}
Slice(slice) => {
write!(f, "[")?;
match slice.kind {
SliceKind::FixedLen(_) => {
for p in fields {
write!(f, "{}{:?}", start_or_comma(), p)?;
}
}
SliceKind::VarLen(prefix_len, _) => {
for p in fields.by_ref().take(prefix_len) {
write!(f, "{}{:?}", start_or_comma(), p)?;
}
write!(f, "{}..", start_or_comma())?;
for p in fields {
write!(f, "{}{:?}", start_or_comma(), p)?;
}
}
}
write!(f, "]")?;
}
Bool(b) => write!(f, "{b}")?,
// Best-effort, will render signed ranges incorrectly
IntRange(range) => write!(f, "{range:?}")?,
F32Range(lo, hi, end) => write!(f, "{lo}{end}{hi}")?,
F64Range(lo, hi, end) => write!(f, "{lo}{end}{hi}")?,
Str(value) => write!(f, "{value:?}")?,
Opaque(..) => write!(f, "<constant pattern>")?,
Or => {
for pat in fields {
write!(f, "{}{:?}", start_or_continue(" | "), pat)?;
}
}
Never => write!(f, "!")?,
Wildcard | Missing | NonExhaustive | Hidden | PrivateUninhabited => {
write!(f, "_ : {:?}", ty)?
}
}
Ok(())
}
} }
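// Hedged illustration of the shared rendering above (values are illustrative, not
// taken from this diff): a struct-like constructor prints as a tuple struct such as
// `Foo(1, _)`, `Ref` prints as `&<field>`, a variable-length slice with prefix
// `[true]` prints as `[true, ..]`, and the wildcard-like constructors print as
// `_ : <ty>`. Variant names come from `PatCx::write_variant_name`, which is why that
// method's signature changes below to take the constructor and type directly.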
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]

View File

@ -49,6 +49,12 @@ pub mod index {
} }
} }
impl<V> FromIterator<V> for IdxContainer<usize, V> {
fn from_iter<T: IntoIterator<Item = V>>(iter: T) -> Self {
Self(iter.into_iter().enumerate().collect())
}
}
#[derive(Debug)] #[derive(Debug)]
pub struct IdxSet<T>(pub rustc_hash::FxHashSet<T>); pub struct IdxSet<T>(pub rustc_hash::FxHashSet<T>);
impl<T: Idx> IdxSet<T> { impl<T: Idx> IdxSet<T> {
@ -120,7 +126,8 @@ pub trait PatCx: Sized + fmt::Debug {
/// `DeconstructedPat`. Only invoked when `pat.ctor()` is `Struct | Variant(_) | UnionField`. /// `DeconstructedPat`. Only invoked when `pat.ctor()` is `Struct | Variant(_) | UnionField`.
fn write_variant_name( fn write_variant_name(
f: &mut fmt::Formatter<'_>, f: &mut fmt::Formatter<'_>,
pat: &crate::pat::DeconstructedPat<Self>, ctor: &crate::constructor::Constructor<Self>,
ty: &Self::Ty,
) -> fmt::Result; ) -> fmt::Result;
/// Raise a bug. /// Raise a bug.

View File

@ -138,81 +138,11 @@ impl<Cx: PatCx> DeconstructedPat<Cx> {
/// This is best effort and not good enough for a `Display` impl. /// This is best effort and not good enough for a `Display` impl.
impl<Cx: PatCx> fmt::Debug for DeconstructedPat<Cx> { impl<Cx: PatCx> fmt::Debug for DeconstructedPat<Cx> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let pat = self;
let mut first = true;
let mut start_or_continue = |s| {
if first {
first = false;
""
} else {
s
}
};
let mut start_or_comma = || start_or_continue(", ");
let mut fields: Vec<_> = (0..self.arity).map(|_| PatOrWild::Wild).collect(); let mut fields: Vec<_> = (0..self.arity).map(|_| PatOrWild::Wild).collect();
for ipat in self.iter_fields() { for ipat in self.iter_fields() {
fields[ipat.idx] = PatOrWild::Pat(&ipat.pat); fields[ipat.idx] = PatOrWild::Pat(&ipat.pat);
} }
self.ctor().fmt_fields(f, self.ty(), fields.into_iter())
match pat.ctor() {
Struct | Variant(_) | UnionField => {
Cx::write_variant_name(f, pat)?;
// Without `cx`, we can't know which field corresponds to which, so we can't
// get the names of the fields. Instead we just display everything as a tuple
// struct, which should be good enough.
write!(f, "(")?;
for p in fields {
write!(f, "{}", start_or_comma())?;
write!(f, "{p:?}")?;
}
write!(f, ")")
}
// Note: given the expansion of `&str` patterns done in `expand_pattern`, we should
// be careful to detect strings here. However a string literal pattern will never
// be reported as a non-exhaustiveness witness, so we can ignore this issue.
Ref => {
write!(f, "&{:?}", &fields[0])
}
Slice(slice) => {
write!(f, "[")?;
match slice.kind {
SliceKind::FixedLen(_) => {
for p in fields {
write!(f, "{}{:?}", start_or_comma(), p)?;
}
}
SliceKind::VarLen(prefix_len, _) => {
for p in &fields[..prefix_len] {
write!(f, "{}{:?}", start_or_comma(), p)?;
}
write!(f, "{}", start_or_comma())?;
write!(f, "..")?;
for p in &fields[prefix_len..] {
write!(f, "{}{:?}", start_or_comma(), p)?;
}
}
}
write!(f, "]")
}
Bool(b) => write!(f, "{b}"),
// Best-effort, will render signed ranges incorrectly
IntRange(range) => write!(f, "{range:?}"),
F32Range(lo, hi, end) => write!(f, "{lo}{end}{hi}"),
F64Range(lo, hi, end) => write!(f, "{lo}{end}{hi}"),
Str(value) => write!(f, "{value:?}"),
Opaque(..) => write!(f, "<constant pattern>"),
Or => {
for pat in fields {
write!(f, "{}{:?}", start_or_continue(" | "), pat)?;
}
Ok(())
}
Never => write!(f, "!"),
Wildcard | Missing | NonExhaustive | Hidden | PrivateUninhabited => {
write!(f, "_ : {:?}", pat.ty())
}
}
} }
} }
@ -295,7 +225,6 @@ impl<'p, Cx: PatCx> fmt::Debug for PatOrWild<'p, Cx> {
/// Same idea as `DeconstructedPat`, except this is a fictitious pattern built up for diagnostics /// Same idea as `DeconstructedPat`, except this is a fictitious pattern built up for diagnostics
/// purposes. As such they don't use interning and can be cloned. /// purposes. As such they don't use interning and can be cloned.
#[derive(Debug)]
pub struct WitnessPat<Cx: PatCx> { pub struct WitnessPat<Cx: PatCx> {
ctor: Constructor<Cx>, ctor: Constructor<Cx>,
pub(crate) fields: Vec<WitnessPat<Cx>>, pub(crate) fields: Vec<WitnessPat<Cx>>,
@ -353,3 +282,10 @@ impl<Cx: PatCx> WitnessPat<Cx> {
self.fields.iter() self.fields.iter()
} }
} }
/// This is best effort and not good enough for a `Display` impl.
impl<Cx: PatCx> fmt::Debug for WitnessPat<Cx> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.ctor().fmt_fields(f, self.ty(), self.fields.iter())
}
}

View File

@ -880,13 +880,14 @@ impl<'p, 'tcx: 'p> PatCx for RustcPatCtxt<'p, 'tcx> {
fn write_variant_name( fn write_variant_name(
f: &mut fmt::Formatter<'_>, f: &mut fmt::Formatter<'_>,
pat: &crate::pat::DeconstructedPat<Self>, ctor: &crate::constructor::Constructor<Self>,
ty: &Self::Ty,
) -> fmt::Result { ) -> fmt::Result {
if let ty::Adt(adt, _) = pat.ty().kind() { if let ty::Adt(adt, _) = ty.kind() {
if adt.is_box() { if adt.is_box() {
write!(f, "Box")? write!(f, "Box")?
} else { } else {
let variant = adt.variant(Self::variant_index_for_adt(pat.ctor(), *adt)); let variant = adt.variant(Self::variant_index_for_adt(ctor, *adt));
write!(f, "{}", variant.name)?; write!(f, "{}", variant.name)?;
} }
} }

View File

@ -1042,7 +1042,7 @@ struct MatrixRow<'p, Cx: PatCx> {
is_under_guard: bool, is_under_guard: bool,
/// When we specialize, we remember which row of the original matrix produced a given row of the /// When we specialize, we remember which row of the original matrix produced a given row of the
/// specialized matrix. When we unspecialize, we use this to propagate usefulness back up the /// specialized matrix. When we unspecialize, we use this to propagate usefulness back up the
/// callstack. /// callstack. On creation, this stores the index of the original match arm.
parent_row: usize, parent_row: usize,
/// False when the matrix is just built. This is set to `true` by /// False when the matrix is just built. This is set to `true` by
/// [`compute_exhaustiveness_and_usefulness`] if the arm is found to be useful. /// [`compute_exhaustiveness_and_usefulness`] if the arm is found to be useful.
@ -1163,10 +1163,10 @@ impl<'p, Cx: PatCx> Matrix<'p, Cx> {
place_info: smallvec![place_info], place_info: smallvec![place_info],
wildcard_row_is_relevant: true, wildcard_row_is_relevant: true,
}; };
for (row_id, arm) in arms.iter().enumerate() { for (arm_id, arm) in arms.iter().enumerate() {
let v = MatrixRow { let v = MatrixRow {
pats: PatStack::from_pattern(arm.pat), pats: PatStack::from_pattern(arm.pat),
parent_row: row_id, // dummy, we don't read it parent_row: arm_id,
is_under_guard: arm.has_guard, is_under_guard: arm.has_guard,
useful: false, useful: false,
intersects: BitSet::new_empty(0), // Initialized in `Matrix::expand_and_push`. intersects: BitSet::new_empty(0), // Initialized in `Matrix::expand_and_push`.
@ -1738,6 +1738,9 @@ pub struct UsefulnessReport<'p, Cx: PatCx> {
/// If the match is exhaustive, this is empty. If not, this contains witnesses for the lack of /// If the match is exhaustive, this is empty. If not, this contains witnesses for the lack of
/// exhaustiveness. /// exhaustiveness.
pub non_exhaustiveness_witnesses: Vec<WitnessPat<Cx>>, pub non_exhaustiveness_witnesses: Vec<WitnessPat<Cx>>,
/// For each arm, a set of indices of arms above it that have non-empty intersection, i.e. there
/// is a value matched by both arms. This may miss real intersections.
pub arm_intersections: Vec<BitSet<usize>>,
} }
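// Hedged illustration (mirrors `tests/intersection.rs` added in this rollup): for a
// `u8` scrutinee with arms `0..=100` and `100..`, both arms match `100`, so the
// report contains `arm_intersections == [{}, {0}]`; with `0..100` and `100..` the
// ranges are disjoint and both sets are empty.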
/// Computes whether a match is exhaustive and which of its arms are useful. /// Computes whether a match is exhaustive and which of its arms are useful.
@ -1769,5 +1772,19 @@ pub fn compute_match_usefulness<'p, Cx: PatCx>(
}) })
.collect(); .collect();
Ok(UsefulnessReport { arm_usefulness, non_exhaustiveness_witnesses }) let mut arm_intersections: Vec<_> =
arms.iter().enumerate().map(|(i, _)| BitSet::new_empty(i)).collect();
for row in matrix.rows() {
let arm_id = row.parent_row;
for intersection in row.intersects.iter() {
// Convert the matrix row ids into arm ids (they can differ because we expand or-patterns).
let arm_intersection = matrix.rows[intersection].parent_row;
// Note: self-intersection can happen with or-patterns.
if arm_intersection != arm_id {
arm_intersections[arm_id].insert(arm_intersection);
}
}
}
Ok(UsefulnessReport { arm_usefulness, non_exhaustiveness_witnesses, arm_intersections })
} }

View File

@ -0,0 +1,315 @@
use rustc_pattern_analysis::{
constructor::{
Constructor, ConstructorSet, IntRange, MaybeInfiniteInt, RangeEnd, VariantVisibility,
},
usefulness::{PlaceValidity, UsefulnessReport},
Captures, MatchArm, PatCx, PrivateUninhabitedField,
};
/// Sets up `tracing` for easier debugging. Tries to look like the `rustc` setup.
pub fn init_tracing() {
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;
use tracing_subscriber::Layer;
let _ = tracing_tree::HierarchicalLayer::default()
.with_writer(std::io::stderr)
.with_indent_lines(true)
.with_ansi(true)
.with_targets(true)
.with_indent_amount(2)
.with_subscriber(
tracing_subscriber::Registry::default()
.with(tracing_subscriber::EnvFilter::from_default_env()),
)
.try_init();
}
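// Hedged usage note: the filter above comes from the default environment
// (`EnvFilter::from_default_env`), so an invocation along the lines of
// `RUST_LOG=rustc_pattern_analysis=debug cargo test -p rustc_pattern_analysis`
// should enable the hierarchical trace output; the exact in-tree invocation
// (e.g. through `./x.py test`) may differ.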
/// A simple set of types.
#[allow(dead_code)]
#[derive(Debug, Copy, Clone)]
pub enum Ty {
/// Booleans
Bool,
/// 8-bit unsigned integers
U8,
/// Tuples.
Tuple(&'static [Ty]),
/// A struct with `arity` fields of type `ty`.
BigStruct { arity: usize, ty: &'static Ty },
/// An enum with `arity` variants of type `ty`.
BigEnum { arity: usize, ty: &'static Ty },
}
/// The important logic.
impl Ty {
pub fn sub_tys(&self, ctor: &Constructor<Cx>) -> Vec<Self> {
use Constructor::*;
match (ctor, *self) {
(Struct, Ty::Tuple(tys)) => tys.iter().copied().collect(),
(Struct, Ty::BigStruct { arity, ty }) => (0..arity).map(|_| *ty).collect(),
(Variant(_), Ty::BigEnum { ty, .. }) => vec![*ty],
(Bool(..) | IntRange(..) | NonExhaustive | Missing | Wildcard, _) => vec![],
_ => panic!("Unexpected ctor {ctor:?} for type {self:?}"),
}
}
pub fn ctor_set(&self) -> ConstructorSet<Cx> {
match *self {
Ty::Bool => ConstructorSet::Bool,
Ty::U8 => ConstructorSet::Integers {
range_1: IntRange::from_range(
MaybeInfiniteInt::new_finite_uint(0),
MaybeInfiniteInt::new_finite_uint(255),
RangeEnd::Included,
),
range_2: None,
},
Ty::Tuple(..) | Ty::BigStruct { .. } => ConstructorSet::Struct { empty: false },
Ty::BigEnum { arity, .. } => ConstructorSet::Variants {
variants: (0..arity).map(|_| VariantVisibility::Visible).collect(),
non_exhaustive: false,
},
}
}
pub fn write_variant_name(
&self,
f: &mut std::fmt::Formatter<'_>,
ctor: &Constructor<Cx>,
) -> std::fmt::Result {
match (*self, ctor) {
(Ty::Tuple(..), _) => Ok(()),
(Ty::BigStruct { .. }, _) => write!(f, "BigStruct"),
(Ty::BigEnum { .. }, Constructor::Variant(i)) => write!(f, "BigEnum::Variant{i}"),
_ => write!(f, "{:?}::{:?}", self, ctor),
}
}
}
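// Hedged illustration: with these rules a missing-variant witness for
// `Ty::BigEnum { arity: 4, ty: &Ty::Bool }` debug-prints roughly as
// `BigEnum::Variant3(_)`, while tuples print with no constructor name, e.g. `(_, true)`.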
/// Compute usefulness in our simple context (and set up tracing for easier debugging).
pub fn compute_match_usefulness<'p>(
arms: &[MatchArm<'p, Cx>],
ty: Ty,
scrut_validity: PlaceValidity,
complexity_limit: Option<usize>,
) -> Result<UsefulnessReport<'p, Cx>, ()> {
init_tracing();
rustc_pattern_analysis::usefulness::compute_match_usefulness(
&Cx,
arms,
ty,
scrut_validity,
complexity_limit,
)
}
#[derive(Debug)]
pub struct Cx;
/// The context for pattern analysis. Forwards anything interesting to `Ty` methods.
impl PatCx for Cx {
type Ty = Ty;
type Error = ();
type VariantIdx = usize;
type StrLit = ();
type ArmData = ();
type PatData = ();
fn is_exhaustive_patterns_feature_on(&self) -> bool {
false
}
fn is_min_exhaustive_patterns_feature_on(&self) -> bool {
false
}
fn ctor_arity(&self, ctor: &Constructor<Self>, ty: &Self::Ty) -> usize {
ty.sub_tys(ctor).len()
}
fn ctor_sub_tys<'a>(
&'a self,
ctor: &'a Constructor<Self>,
ty: &'a Self::Ty,
) -> impl Iterator<Item = (Self::Ty, PrivateUninhabitedField)> + ExactSizeIterator + Captures<'a>
{
ty.sub_tys(ctor).into_iter().map(|ty| (ty, PrivateUninhabitedField(false)))
}
fn ctors_for_ty(&self, ty: &Self::Ty) -> Result<ConstructorSet<Self>, Self::Error> {
Ok(ty.ctor_set())
}
fn write_variant_name(
f: &mut std::fmt::Formatter<'_>,
ctor: &Constructor<Self>,
ty: &Self::Ty,
) -> std::fmt::Result {
ty.write_variant_name(f, ctor)
}
fn bug(&self, fmt: std::fmt::Arguments<'_>) -> Self::Error {
panic!("{}", fmt)
}
/// Abort when reaching the complexity limit. This is what we'll check in tests.
fn complexity_exceeded(&self) -> Result<(), Self::Error> {
Err(())
}
}
/// Construct a single pattern; see `pats!()`.
#[allow(unused_macros)]
macro_rules! pat {
($($rest:tt)*) => {{
let mut vec = pats!($($rest)*);
vec.pop().unwrap()
}};
}
/// A macro to construct patterns. Called like `pats!(type_expr; pattern, pattern, ..)` and returns
/// a `Vec<DeconstructedPat>`. A pattern can be nested and looks like `Constructor(pat, pat)` or
/// `Constructor { .i: pat, .j: pat }`, where `Constructor` can be `Struct`, `Variant.i` (with index
/// `i`), a boolean, or an integer range.
///
/// The general structure of the macro is a tt-muncher with several stages identified with
/// `@something(args)`. The args are a key-value list (the keys ensure we don't mix the arguments
/// around) which is passed down and modified as needed. We then parse token-trees from
/// left-to-right. Non-trivial recursion happens when we parse the arguments to a pattern: we
/// recurse to parse the tokens inside `{..}`/`(..)`, and then we continue parsing anything that
/// follows.
macro_rules! pats {
// Entrypoint
// Parse `type; ..`
($ty:expr; $($rest:tt)*) => {{
#[allow(unused_imports)]
use rustc_pattern_analysis::{
constructor::{Constructor, IntRange, MaybeInfiniteInt, RangeEnd},
pat::DeconstructedPat,
};
let ty = $ty;
// The heart of the macro is designed to push `IndexedPat`s into a `Vec`, so we work around
// that.
let sub_tys = ::std::iter::repeat(&ty);
let mut vec = Vec::new();
pats!(@ctor(vec:vec, sub_tys:sub_tys, idx:0) $($rest)*);
vec.into_iter().map(|ipat| ipat.pat).collect::<Vec<_>>()
}};
// Parse `constructor ..`
(@ctor($($args:tt)*) true $($rest:tt)*) => {{
let ctor = Constructor::Bool(true);
pats!(@pat($($args)*, ctor:ctor) $($rest)*)
}};
(@ctor($($args:tt)*) false $($rest:tt)*) => {{
let ctor = Constructor::Bool(false);
pats!(@pat($($args)*, ctor:ctor) $($rest)*)
}};
(@ctor($($args:tt)*) Struct $($rest:tt)*) => {{
let ctor = Constructor::Struct;
pats!(@pat($($args)*, ctor:ctor) $($rest)*)
}};
(@ctor($($args:tt)*) ( $($fields:tt)* ) $($rest:tt)*) => {{
let ctor = Constructor::Struct; // tuples
pats!(@pat($($args)*, ctor:ctor) ( $($fields)* ) $($rest)*)
}};
(@ctor($($args:tt)*) Variant.$variant:ident $($rest:tt)*) => {{
let ctor = Constructor::Variant($variant);
pats!(@pat($($args)*, ctor:ctor) $($rest)*)
}};
(@ctor($($args:tt)*) Variant.$variant:literal $($rest:tt)*) => {{
let ctor = Constructor::Variant($variant);
pats!(@pat($($args)*, ctor:ctor) $($rest)*)
}};
(@ctor($($args:tt)*) _ $($rest:tt)*) => {{
let ctor = Constructor::Wildcard;
pats!(@pat($($args)*, ctor:ctor) $($rest)*)
}};
// Integers and int ranges
(@ctor($($args:tt)*) $($start:literal)?..$end:literal $($rest:tt)*) => {{
let ctor = Constructor::IntRange(IntRange::from_range(
pats!(@rangeboundary- $($start)?),
pats!(@rangeboundary+ $end),
RangeEnd::Excluded,
));
pats!(@pat($($args)*, ctor:ctor) $($rest)*)
}};
(@ctor($($args:tt)*) $($start:literal)?.. $($rest:tt)*) => {{
let ctor = Constructor::IntRange(IntRange::from_range(
pats!(@rangeboundary- $($start)?),
pats!(@rangeboundary+),
RangeEnd::Excluded,
));
pats!(@pat($($args)*, ctor:ctor) $($rest)*)
}};
(@ctor($($args:tt)*) $($start:literal)?..=$end:literal $($rest:tt)*) => {{
let ctor = Constructor::IntRange(IntRange::from_range(
pats!(@rangeboundary- $($start)?),
pats!(@rangeboundary+ $end),
RangeEnd::Included,
));
pats!(@pat($($args)*, ctor:ctor) $($rest)*)
}};
(@ctor($($args:tt)*) $int:literal $($rest:tt)*) => {{
let ctor = Constructor::IntRange(IntRange::from_range(
pats!(@rangeboundary- $int),
pats!(@rangeboundary+ $int),
RangeEnd::Included,
));
pats!(@pat($($args)*, ctor:ctor) $($rest)*)
}};
// Utility to manage range boundaries.
(@rangeboundary $sign:tt $int:literal) => { MaybeInfiniteInt::new_finite_uint($int) };
(@rangeboundary -) => { MaybeInfiniteInt::NegInfinity };
(@rangeboundary +) => { MaybeInfiniteInt::PosInfinity };
// Parse subfields: `(..)` or `{..}`
// Constructor with no fields, e.g. `bool` or `Variant.1`.
(@pat($($args:tt)*) $(,)?) => {
pats!(@pat($($args)*) {})
};
(@pat($($args:tt)*) , $($rest:tt)*) => {
pats!(@pat($($args)*) {}, $($rest)*)
};
// `(..)` and `{..}` are treated the same.
(@pat($($args:tt)*) ( $($subpat:tt)* ) $($rest:tt)*) => {{
pats!(@pat($($args)*) { $($subpat)* } $($rest)*)
}};
(@pat(vec:$vec:expr, sub_tys:$sub_tys:expr, idx:$idx:expr, ctor:$ctor:expr) { $($fields:tt)* } $($rest:tt)*) => {{
let sub_tys = $sub_tys;
let index = $idx;
// Silly dance to work with both a vec and `iter::repeat()`.
let ty = *(&sub_tys).clone().into_iter().nth(index).unwrap();
let ctor = $ctor;
let ctor_sub_tys = &ty.sub_tys(&ctor);
#[allow(unused_mut)]
let mut fields = Vec::new();
// Parse subpatterns (note the leading comma).
pats!(@fields(idx:0, vec:fields, sub_tys:ctor_sub_tys) ,$($fields)*);
let arity = ctor_sub_tys.len();
let pat = DeconstructedPat::new(ctor, fields, arity, ty, ()).at_index(index);
$vec.push(pat);
// Continue parsing further patterns.
pats!(@fields(idx:index+1, vec:$vec, sub_tys:sub_tys) $($rest)*);
}};
// Parse fields one by one.
// No fields left.
(@fields($($args:tt)*) $(,)?) => {};
// `.i: pat` sets the current index to `i`.
(@fields(idx:$_idx:expr, $($args:tt)*) , .$idx:literal : $($rest:tt)*) => {{
pats!(@ctor($($args)*, idx:$idx) $($rest)*);
}};
(@fields(idx:$_idx:expr, $($args:tt)*) , .$idx:ident : $($rest:tt)*) => {{
pats!(@ctor($($args)*, idx:$idx) $($rest)*);
}};
// Field without an explicit index; we use the current index which gets incremented above.
(@fields(idx:$idx:expr, $($args:tt)*) , $($rest:tt)*) => {{
pats!(@ctor($($args)*, idx:$idx) $($rest)*);
}};
}
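// Hedged usage sketch (both forms are exercised by the test files below): for a
// two-field boolean tuple type, `pats!(ty; (true, _), (_, true))` builds two
// `DeconstructedPat`s, and `pat!(struct_ty; Struct { .0: true })` builds a single
// pattern whose field 0 is `true`.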

View File

@ -0,0 +1,109 @@
//! Test the pattern complexity limit.
use common::*;
use rustc_pattern_analysis::{pat::DeconstructedPat, usefulness::PlaceValidity, MatchArm};
#[macro_use]
mod common;
/// Analyze a match made of these patterns. Ignore the report; we only care whether we exceeded the
/// limit or not.
fn check(patterns: &[DeconstructedPat<Cx>], complexity_limit: usize) -> Result<(), ()> {
let ty = *patterns[0].ty();
let arms: Vec<_> =
patterns.iter().map(|pat| MatchArm { pat, has_guard: false, arm_data: () }).collect();
compute_match_usefulness(arms.as_slice(), ty, PlaceValidity::ValidOnly, Some(complexity_limit))
.map(|_report| ())
}
/// Asserts that analyzing this match takes exactly `complexity` steps.
#[track_caller]
fn assert_complexity(patterns: Vec<DeconstructedPat<Cx>>, complexity: usize) {
assert!(check(&patterns, complexity).is_ok());
assert!(check(&patterns, complexity - 1).is_err());
}
/// Construct a match like:
/// ```ignore(illustrative)
/// match ... {
/// BigStruct { field01: true, .. } => {}
/// BigStruct { field02: true, .. } => {}
/// BigStruct { field03: true, .. } => {}
/// BigStruct { field04: true, .. } => {}
/// ...
/// _ => {}
/// }
/// ```
fn diagonal_match(arity: usize) -> Vec<DeconstructedPat<Cx>> {
let struct_ty = Ty::BigStruct { arity, ty: &Ty::Bool };
let mut patterns = vec![];
for i in 0..arity {
patterns.push(pat!(struct_ty; Struct { .i: true }));
}
patterns.push(pat!(struct_ty; _));
patterns
}
/// Construct a match like:
/// ```ignore(illustrative)
/// match ... {
/// BigStruct { field01: true, .. } => {}
/// BigStruct { field02: true, .. } => {}
/// BigStruct { field03: true, .. } => {}
/// BigStruct { field04: true, .. } => {}
/// ...
/// BigStruct { field01: false, .. } => {}
/// BigStruct { field02: false, .. } => {}
/// BigStruct { field03: false, .. } => {}
/// BigStruct { field04: false, .. } => {}
/// ...
/// _ => {}
/// }
/// ```
fn diagonal_exponential_match(arity: usize) -> Vec<DeconstructedPat<Cx>> {
let struct_ty = Ty::BigStruct { arity, ty: &Ty::Bool };
let mut patterns = vec![];
for i in 0..arity {
patterns.push(pat!(struct_ty; Struct { .i: true }));
}
for i in 0..arity {
patterns.push(pat!(struct_ty; Struct { .i: false }));
}
patterns.push(pat!(struct_ty; _));
patterns
}
#[test]
fn test_diagonal_struct_match() {
// These cases are nicely linear: we check `arity` patterns with exactly one `true`, matching
// in 2 branches each, and a final pattern with all `false`, matching only the `_` branch.
assert_complexity(diagonal_match(20), 41);
assert_complexity(diagonal_match(30), 61);
// This case goes exponential.
assert!(check(&diagonal_exponential_match(10), 10000).is_err());
}
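// Hedged arithmetic for the linear cases above: `arity` diagonal patterns visited in
// 2 branches each, plus 1 step for the final all-`false` value reaching only the `_`
// arm, gives 2 * 20 + 1 = 41 and 2 * 30 + 1 = 61, matching the asserted complexities.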
/// Construct a match like:
/// ```ignore(illustrative)
/// match ... {
/// BigEnum::Variant1(_) => {}
/// BigEnum::Variant2(_) => {}
/// BigEnum::Variant3(_) => {}
/// ...
/// _ => {}
/// }
/// ```
fn big_enum(arity: usize) -> Vec<DeconstructedPat<Cx>> {
let enum_ty = Ty::BigEnum { arity, ty: &Ty::Bool };
let mut patterns = vec![];
for i in 0..arity {
patterns.push(pat!(enum_ty; Variant.i));
}
patterns.push(pat!(enum_ty; _));
patterns
}
#[test]
fn test_big_enum() {
// We try 2 branches per variant.
assert_complexity(big_enum(20), 40);
}

View File

@ -0,0 +1,77 @@
//! Test exhaustiveness checking.
use common::*;
use rustc_pattern_analysis::{
pat::{DeconstructedPat, WitnessPat},
usefulness::PlaceValidity,
MatchArm,
};
#[macro_use]
mod common;
/// Analyze a match made of these patterns.
fn check(patterns: Vec<DeconstructedPat<Cx>>) -> Vec<WitnessPat<Cx>> {
let ty = *patterns[0].ty();
let arms: Vec<_> =
patterns.iter().map(|pat| MatchArm { pat, has_guard: false, arm_data: () }).collect();
let report =
compute_match_usefulness(arms.as_slice(), ty, PlaceValidity::ValidOnly, None).unwrap();
report.non_exhaustiveness_witnesses
}
#[track_caller]
fn assert_exhaustive(patterns: Vec<DeconstructedPat<Cx>>) {
let witnesses = check(patterns);
if !witnesses.is_empty() {
panic!("non-exaustive match: missing {witnesses:?}");
}
}
#[track_caller]
fn assert_non_exhaustive(patterns: Vec<DeconstructedPat<Cx>>) {
let witnesses = check(patterns);
assert!(!witnesses.is_empty())
}
#[test]
fn test_int_ranges() {
let ty = Ty::U8;
assert_exhaustive(pats!(ty;
0..=255,
));
assert_exhaustive(pats!(ty;
0..,
));
assert_non_exhaustive(pats!(ty;
0..255,
));
assert_exhaustive(pats!(ty;
0..255,
255,
));
assert_exhaustive(pats!(ty;
..10,
10..
));
}
#[test]
fn test_nested() {
let ty = Ty::BigStruct { arity: 2, ty: &Ty::BigEnum { arity: 2, ty: &Ty::Bool } };
assert_non_exhaustive(pats!(ty;
Struct(Variant.0, _),
));
assert_exhaustive(pats!(ty;
Struct(Variant.0, _),
Struct(Variant.1, _),
));
assert_non_exhaustive(pats!(ty;
Struct(Variant.0, _),
Struct(_, Variant.0),
));
assert_exhaustive(pats!(ty;
Struct(Variant.0, _),
Struct(_, Variant.0),
Struct(Variant.1, Variant.1),
));
}

View File

@ -0,0 +1,69 @@
//! Test the computation of arm intersections.
use common::*;
use rustc_pattern_analysis::{pat::DeconstructedPat, usefulness::PlaceValidity, MatchArm};
#[macro_use]
mod common;
/// Analyze a match made of these patterns and return the computed arm intersections.
fn check(patterns: Vec<DeconstructedPat<Cx>>) -> Vec<Vec<usize>> {
let ty = *patterns[0].ty();
let arms: Vec<_> =
patterns.iter().map(|pat| MatchArm { pat, has_guard: false, arm_data: () }).collect();
let report =
compute_match_usefulness(arms.as_slice(), ty, PlaceValidity::ValidOnly, None).unwrap();
report.arm_intersections.into_iter().map(|bitset| bitset.iter().collect()).collect()
}
#[track_caller]
fn assert_intersects(patterns: Vec<DeconstructedPat<Cx>>, intersects: &[&[usize]]) {
let computed_intersects = check(patterns);
assert_eq!(computed_intersects, intersects);
}
#[test]
fn test_int_ranges() {
let ty = Ty::U8;
assert_intersects(
pats!(ty;
0..=100,
100..,
),
&[&[], &[0]],
);
assert_intersects(
pats!(ty;
0..=101,
100..,
),
&[&[], &[0]],
);
assert_intersects(
pats!(ty;
0..100,
100..,
),
&[&[], &[]],
);
}
#[test]
fn test_nested() {
let ty = Ty::Tuple(&[Ty::Bool; 2]);
assert_intersects(
pats!(ty;
(true, true),
(true, _),
(_, true),
),
&[&[], &[0], &[0, 1]],
);
// Here we shortcut because `(true, true)` is irrelevant, so we fail to detect the intersection.
assert_intersects(
pats!(ty;
(true, _),
(_, true),
),
&[&[], &[]],
);
}

View File

@ -1696,7 +1696,7 @@ fn check_mod_privacy(tcx: TyCtxt<'_>, module_def_id: LocalModDefId) {
} }
} }
for id in module.items() { for id in module.free_items() {
if let ItemKind::Impl(i) = tcx.hir().item(id).kind { if let ItemKind::Impl(i) = tcx.hir().item(id).kind {
if let Some(item) = i.of_trait { if let Some(item) = i.of_trait {
let trait_ref = tcx.impl_trait_ref(id.owner_id.def_id).unwrap(); let trait_ref = tcx.impl_trait_ref(id.owner_id.def_id).unwrap();

View File

@ -1592,18 +1592,23 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
match (res, source) { match (res, source) {
( (
Res::Def(DefKind::Macro(MacroKind::Bang), _), Res::Def(DefKind::Macro(MacroKind::Bang), def_id),
PathSource::Expr(Some(Expr { PathSource::Expr(Some(Expr {
kind: ExprKind::Index(..) | ExprKind::Call(..), .. kind: ExprKind::Index(..) | ExprKind::Call(..), ..
})) }))
| PathSource::Struct, | PathSource::Struct,
) => { ) => {
// Don't suggest macro if it's unstable.
let suggestable = def_id.is_local()
|| self.r.tcx.lookup_stability(def_id).map_or(true, |s| s.is_stable());
err.span_label(span, fallback_label.to_string()); err.span_label(span, fallback_label.to_string());
// Don't suggest `!` for a macro invocation if there are generic args // Don't suggest `!` for a macro invocation if there are generic args
if path if path
.last() .last()
.is_some_and(|segment| !segment.has_generic_args && !segment.has_lifetime_args) .is_some_and(|segment| !segment.has_generic_args && !segment.has_lifetime_args)
&& suggestable
{ {
err.span_suggestion_verbose( err.span_suggestion_verbose(
span.shrink_to_hi(), span.shrink_to_hi(),

View File

@ -22,7 +22,7 @@ use rustc_span::{FileName, FileNameDisplayPreference, RealFileName, SourceFileHa
use rustc_target::abi::Align; use rustc_target::abi::Align;
use rustc_target::spec::LinkSelfContainedComponents; use rustc_target::spec::LinkSelfContainedComponents;
use rustc_target::spec::{PanicStrategy, RelocModel, SanitizerSet, SplitDebuginfo}; use rustc_target::spec::{PanicStrategy, RelocModel, SanitizerSet, SplitDebuginfo};
use rustc_target::spec::{Target, TargetTriple, TargetWarnings, TARGETS}; use rustc_target::spec::{Target, TargetTriple, TARGETS};
use std::collections::btree_map::{ use std::collections::btree_map::{
Iter as BTreeMapIter, Keys as BTreeMapKeysIter, Values as BTreeMapValuesIter, Iter as BTreeMapIter, Keys as BTreeMapKeysIter, Values as BTreeMapValuesIter,
}; };
@ -1549,34 +1549,25 @@ pub fn build_configuration(sess: &Session, mut user_cfg: Cfg) -> Cfg {
user_cfg user_cfg
} }
pub fn build_target_config( pub fn build_target_config(early_dcx: &EarlyDiagCtxt, opts: &Options, sysroot: &Path) -> Target {
early_dcx: &EarlyDiagCtxt, match Target::search(&opts.target_triple, sysroot) {
opts: &Options, Ok((target, warnings)) => {
target_override: Option<Target>, for warning in warnings.warning_messages() {
sysroot: &Path, early_dcx.early_warn(warning)
) -> Target { }
let target_result = target_override.map_or_else( if !matches!(target.pointer_width, 16 | 32 | 64) {
|| Target::search(&opts.target_triple, sysroot), early_dcx.early_fatal(format!(
|t| Ok((t, TargetWarnings::empty())), "target specification was invalid: unrecognized target-pointer-width {}",
); target.pointer_width
let (target, target_warnings) = target_result.unwrap_or_else(|e| { ))
early_dcx.early_fatal(format!( }
target
}
Err(e) => early_dcx.early_fatal(format!(
"Error loading target specification: {e}. \ "Error loading target specification: {e}. \
Run `rustc --print target-list` for a list of built-in targets" Run `rustc --print target-list` for a list of built-in targets"
)) )),
});
for warning in target_warnings.warning_messages() {
early_dcx.early_warn(warning)
} }
if !matches!(target.pointer_width, 16 | 32 | 64) {
early_dcx.early_fatal(format!(
"target specification was invalid: unrecognized target-pointer-width {}",
target.pointer_width
))
}
target
} }
#[derive(Copy, Clone, PartialEq, Eq, Debug)] #[derive(Copy, Clone, PartialEq, Eq, Debug)]

View File

@ -1008,7 +1008,7 @@ pub fn build_session(
fluent_resources: Vec<&'static str>, fluent_resources: Vec<&'static str>,
driver_lint_caps: FxHashMap<lint::LintId, lint::Level>, driver_lint_caps: FxHashMap<lint::LintId, lint::Level>,
file_loader: Option<Box<dyn FileLoader + Send + Sync + 'static>>, file_loader: Option<Box<dyn FileLoader + Send + Sync + 'static>>,
target_cfg: Target, target: Target,
sysroot: PathBuf, sysroot: PathBuf,
cfg_version: &'static str, cfg_version: &'static str,
ice_file: Option<PathBuf>, ice_file: Option<PathBuf>,
@ -1036,7 +1036,7 @@ pub fn build_session(
let loader = file_loader.unwrap_or_else(|| Box::new(RealFileLoader)); let loader = file_loader.unwrap_or_else(|| Box::new(RealFileLoader));
let hash_kind = sopts.unstable_opts.src_hash_algorithm.unwrap_or_else(|| { let hash_kind = sopts.unstable_opts.src_hash_algorithm.unwrap_or_else(|| {
if target_cfg.is_like_msvc { if target.is_like_msvc {
SourceFileHashAlgorithm::Sha256 SourceFileHashAlgorithm::Sha256
} else { } else {
SourceFileHashAlgorithm::Md5 SourceFileHashAlgorithm::Md5
@ -1117,11 +1117,10 @@ pub fn build_session(
_ => CtfeBacktrace::Disabled, _ => CtfeBacktrace::Disabled,
}); });
let asm_arch = let asm_arch = if target.allow_asm { InlineAsmArch::from_str(&target.arch).ok() } else { None };
if target_cfg.allow_asm { InlineAsmArch::from_str(&target_cfg.arch).ok() } else { None };
let sess = Session { let sess = Session {
target: target_cfg, target,
host, host,
opts: sopts, opts: sopts,
host_tlib_path, host_tlib_path,

View File

@ -25,7 +25,7 @@ pub fn report_symbol_names(tcx: TyCtxt<'_>) {
let mut symbol_names = SymbolNamesTest { tcx }; let mut symbol_names = SymbolNamesTest { tcx };
let crate_items = tcx.hir_crate_items(()); let crate_items = tcx.hir_crate_items(());
for id in crate_items.items() { for id in crate_items.free_items() {
symbol_names.process_attrs(id.owner_id.def_id); symbol_names.process_attrs(id.owner_id.def_id);
} }

View File

@ -2092,6 +2092,9 @@ pub struct TargetOptions {
/// compiling `rustc` will be used instead (or llvm if it is not set). /// compiling `rustc` will be used instead (or llvm if it is not set).
/// ///
/// N.B. when *using* the compiler, backend can always be overridden with `-Zcodegen-backend`. /// N.B. when *using* the compiler, backend can always be overridden with `-Zcodegen-backend`.
///
/// This was added by WaffleLapkin in #116793. The motivation is a rustc fork that requires a
/// custom codegen backend for a particular target.
pub default_codegen_backend: Option<StaticCow<str>>, pub default_codegen_backend: Option<StaticCow<str>>,
/// Whether to generate trap instructions in places where optimization would /// Whether to generate trap instructions in places where optimization would

View File

@ -19,6 +19,9 @@ trait_selection_closure_kind_mismatch = expected a closure that implements the `
trait_selection_closure_kind_requirement = the requirement to implement `{$trait_prefix}{$expected}` derives from here trait_selection_closure_kind_requirement = the requirement to implement `{$trait_prefix}{$expected}` derives from here
trait_selection_disallowed_positional_argument = positional format arguments are not allowed here
.help = only named format arguments with the name of one of the generic types are allowed in this context
trait_selection_dump_vtable_entries = vtable entries for `{$trait_ref}`: {$entries} trait_selection_dump_vtable_entries = vtable entries for `{$trait_ref}`: {$entries}
trait_selection_empty_on_clause_in_rustc_on_unimplemented = empty `on`-clause in `#[rustc_on_unimplemented]` trait_selection_empty_on_clause_in_rustc_on_unimplemented = empty `on`-clause in `#[rustc_on_unimplemented]`
@ -30,6 +33,9 @@ trait_selection_ignored_diagnostic_option = `{$option_name}` is ignored due to p
trait_selection_inherent_projection_normalization_overflow = overflow evaluating associated type `{$ty}` trait_selection_inherent_projection_normalization_overflow = overflow evaluating associated type `{$ty}`
trait_selection_invalid_format_specifier = invalid format specifier
.help = no format specifiers are supported in this position
trait_selection_invalid_on_clause_in_rustc_on_unimplemented = invalid `on`-clause in `#[rustc_on_unimplemented]` trait_selection_invalid_on_clause_in_rustc_on_unimplemented = invalid `on`-clause in `#[rustc_on_unimplemented]`
.label = invalid on-clause here .label = invalid on-clause here
@ -60,3 +66,6 @@ trait_selection_unable_to_construct_constant_value = unable to construct a const
trait_selection_unknown_format_parameter_for_on_unimplemented_attr = there is no parameter `{$argument_name}` on trait `{$trait_name}` trait_selection_unknown_format_parameter_for_on_unimplemented_attr = there is no parameter `{$argument_name}` on trait `{$trait_name}`
.help = expect either a generic argument name or {"`{Self}`"} as format argument .help = expect either a generic argument name or {"`{Self}`"} as format argument
trait_selection_wrapped_parser_error = {$description}
.label = {$label}

View File

@ -367,6 +367,23 @@ pub struct UnknownFormatParameterForOnUnimplementedAttr {
trait_name: Symbol, trait_name: Symbol,
} }
#[derive(LintDiagnostic)]
#[diag(trait_selection_disallowed_positional_argument)]
#[help]
pub struct DisallowedPositionalArgument;
#[derive(LintDiagnostic)]
#[diag(trait_selection_invalid_format_specifier)]
#[help]
pub struct InvalidFormatSpecifier;
#[derive(LintDiagnostic)]
#[diag(trait_selection_wrapped_parser_error)]
pub struct WrappedParserError {
description: String,
label: String,
}
impl<'tcx> OnUnimplementedDirective { impl<'tcx> OnUnimplementedDirective {
fn parse( fn parse(
tcx: TyCtxt<'tcx>, tcx: TyCtxt<'tcx>,
@ -758,64 +775,108 @@ impl<'tcx> OnUnimplementedFormatString {
let trait_name = tcx.item_name(trait_def_id); let trait_name = tcx.item_name(trait_def_id);
let generics = tcx.generics_of(item_def_id); let generics = tcx.generics_of(item_def_id);
let s = self.symbol.as_str(); let s = self.symbol.as_str();
let parser = Parser::new(s, None, None, false, ParseMode::Format); let mut parser = Parser::new(s, None, None, false, ParseMode::Format);
let mut result = Ok(()); let mut result = Ok(());
for token in parser { for token in &mut parser {
match token { match token {
Piece::String(_) => (), // Normal string, no need to check it Piece::String(_) => (), // Normal string, no need to check it
Piece::NextArgument(a) => match a.position { Piece::NextArgument(a) => {
Position::ArgumentNamed(s) => { let format_spec = a.format;
match Symbol::intern(s) { if self.is_diagnostic_namespace_variant
// `{ThisTraitsName}` is allowed && (format_spec.ty_span.is_some()
s if s == trait_name && !self.is_diagnostic_namespace_variant => (), || format_spec.width_span.is_some()
s if ALLOWED_FORMAT_SYMBOLS.contains(&s) || format_spec.precision_span.is_some()
&& !self.is_diagnostic_namespace_variant => || format_spec.fill_span.is_some())
{ {
() tcx.emit_node_span_lint(
} UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES,
// So is `{A}` if A is a type parameter tcx.local_def_id_to_hir_id(item_def_id.expect_local()),
s if generics.params.iter().any(|param| param.name == s) => (), self.span,
s => { InvalidFormatSpecifier,
if self.is_diagnostic_namespace_variant { );
tcx.emit_node_span_lint( }
UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES, match a.position {
tcx.local_def_id_to_hir_id(item_def_id.expect_local()), Position::ArgumentNamed(s) => {
self.span, match Symbol::intern(s) {
UnknownFormatParameterForOnUnimplementedAttr { // `{ThisTraitsName}` is allowed
argument_name: s, s if s == trait_name && !self.is_diagnostic_namespace_variant => (),
trait_name, s if ALLOWED_FORMAT_SYMBOLS.contains(&s)
}, && !self.is_diagnostic_namespace_variant =>
); {
} else { ()
result = Err(struct_span_code_err!( }
tcx.dcx(), // So is `{A}` if A is a type parameter
self.span, s if generics.params.iter().any(|param| param.name == s) => (),
E0230, s => {
"there is no parameter `{}` on {}", if self.is_diagnostic_namespace_variant {
s, tcx.emit_node_span_lint(
if trait_def_id == item_def_id { UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES,
format!("trait `{trait_name}`") tcx.local_def_id_to_hir_id(item_def_id.expect_local()),
} else { self.span,
"impl".to_string() UnknownFormatParameterForOnUnimplementedAttr {
} argument_name: s,
) trait_name,
.emit()); },
);
} else {
result = Err(struct_span_code_err!(
tcx.dcx(),
self.span,
E0230,
"there is no parameter `{}` on {}",
s,
if trait_def_id == item_def_id {
format!("trait `{trait_name}`")
} else {
"impl".to_string()
}
)
.emit());
}
} }
} }
} }
// `{:1}` and `{}` are not to be used
Position::ArgumentIs(..) | Position::ArgumentImplicitlyIs(_) => {
if self.is_diagnostic_namespace_variant {
tcx.emit_node_span_lint(
UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES,
tcx.local_def_id_to_hir_id(item_def_id.expect_local()),
self.span,
DisallowedPositionalArgument,
);
} else {
let reported = struct_span_code_err!(
tcx.dcx(),
self.span,
E0231,
"only named generic parameters are allowed"
)
.emit();
result = Err(reported);
}
}
} }
// `{:1}` and `{}` are not to be used }
Position::ArgumentIs(..) | Position::ArgumentImplicitlyIs(_) => { }
let reported = struct_span_code_err!( }
tcx.dcx(), // we cannot return errors from processing the format string as hard error here
self.span, // as the diagnostic namespace gurantees that malformed input cannot cause an error
E0231, //
"only named generic parameters are allowed" // if we encounter any error while processing we nevertheless want to show it as warning
) // so that users are aware that something is not correct
.emit(); for e in parser.errors {
result = Err(reported); if self.is_diagnostic_namespace_variant {
} tcx.emit_node_span_lint(
}, UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES,
tcx.local_def_id_to_hir_id(item_def_id.expect_local()),
self.span,
WrappedParserError { description: e.description, label: e.label },
);
} else {
let reported =
struct_span_code_err!(tcx.dcx(), self.span, E0231, "{}", e.description,).emit();
result = Err(reported);
} }
} }
@ -853,9 +914,9 @@ impl<'tcx> OnUnimplementedFormatString {
let empty_string = String::new(); let empty_string = String::new();
let s = self.symbol.as_str(); let s = self.symbol.as_str();
let parser = Parser::new(s, None, None, false, ParseMode::Format); let mut parser = Parser::new(s, None, None, false, ParseMode::Format);
let item_context = (options.get(&sym::ItemContext)).unwrap_or(&empty_string); let item_context = (options.get(&sym::ItemContext)).unwrap_or(&empty_string);
parser let constructed_message = (&mut parser)
.map(|p| match p { .map(|p| match p {
Piece::String(s) => s.to_owned(), Piece::String(s) => s.to_owned(),
Piece::NextArgument(a) => match a.position { Piece::NextArgument(a) => match a.position {
@ -895,9 +956,29 @@ impl<'tcx> OnUnimplementedFormatString {
} }
} }
} }
Position::ArgumentImplicitlyIs(_) if self.is_diagnostic_namespace_variant => {
String::from("{}")
}
Position::ArgumentIs(idx) if self.is_diagnostic_namespace_variant => {
format!("{{{idx}}}")
}
_ => bug!("broken on_unimplemented {:?} - bad format arg", self.symbol), _ => bug!("broken on_unimplemented {:?} - bad format arg", self.symbol),
}, },
}) })
.collect() .collect();
// we cannot return errors from processing the format string as a hard error here
// as the diagnostic namespace guarantees that malformed input cannot cause an error
//
// if we encounter any error while processing the format string
// we don't want to show the potentially half-assembled formatted string,
// therefore we fall back to just showing the input string in this case
//
// The actual parser errors are emitted earlier
// as lint warnings in OnUnimplementedFormatString::verify
if self.is_diagnostic_namespace_variant && !parser.errors.is_empty() {
String::from(s)
} else {
constructed_message
}
} }
} }
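// Hedged illustration of the fallback above (taken from the `broken_format` test
// added in this rollup): for
// `#[diagnostic::on_unimplemented(message = "Test {Self:!}")]` the parser records
// errors, so the emitted message falls back to the verbatim input `Test {Self:!}`
// instead of a half-assembled formatted string, while the parse problems surface as
// `unknown_or_malformed_diagnostic_attributes` lint warnings.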

View File

@ -1714,6 +1714,18 @@ pub(crate) mod builtin {
builtin # type_ascribe($expr, $ty) builtin # type_ascribe($expr, $ty)
} }
#[cfg(not(bootstrap))]
/// Unstable placeholder for deref patterns.
#[allow_internal_unstable(builtin_syntax)]
#[unstable(
feature = "deref_patterns",
issue = "87121",
reason = "placeholder syntax for deref patterns"
)]
pub macro deref($pat:pat) {
builtin # deref($pat)
}
/// Unstable implementation detail of the `rustc` compiler, do not use. /// Unstable implementation detail of the `rustc` compiler, do not use.
#[rustc_builtin_macro] #[rustc_builtin_macro]
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]

View File

@ -103,3 +103,11 @@ pub use crate::macros::builtin::cfg_eval;
reason = "placeholder syntax for type ascription" reason = "placeholder syntax for type ascription"
)] )]
pub use crate::macros::builtin::type_ascribe; pub use crate::macros::builtin::type_ascribe;
#[cfg(not(bootstrap))]
#[unstable(
feature = "deref_patterns",
issue = "87121",
reason = "placeholder syntax for deref patterns"
)]
pub use crate::macros::builtin::deref;

View File

@ -91,6 +91,15 @@ pub use core::prelude::v1::cfg_eval;
)] )]
pub use core::prelude::v1::type_ascribe; pub use core::prelude::v1::type_ascribe;
#[cfg(not(bootstrap))]
// Do not `doc(no_inline)` either.
#[unstable(
feature = "deref_patterns",
issue = "87121",
reason = "placeholder syntax for deref patterns"
)]
pub use core::prelude::v1::deref;
// The file so far is equivalent to core/src/prelude/v1.rs. It is duplicated // The file so far is equivalent to core/src/prelude/v1.rs. It is duplicated
// rather than glob imported because we want docs to show these re-exports as // rather than glob imported because we want docs to show these re-exports as
// pointing to within `std`. // pointing to within `std`.

View File

@ -329,6 +329,7 @@ pub(crate) fn name_from_pat(p: &hir::Pat<'_>) -> Symbol {
elts.iter().map(|p| name_from_pat(p).to_string()).collect::<Vec<String>>().join(", ") elts.iter().map(|p| name_from_pat(p).to_string()).collect::<Vec<String>>().join(", ")
), ),
PatKind::Box(p) => return name_from_pat(&*p), PatKind::Box(p) => return name_from_pat(&*p),
PatKind::Deref(p) => format!("deref!({})", name_from_pat(&*p)),
PatKind::Ref(p, _) => return name_from_pat(&*p), PatKind::Ref(p, _) => return name_from_pat(&*p),
PatKind::Lit(..) => { PatKind::Lit(..) => {
warn!( warn!(

View File

@ -55,7 +55,7 @@ fn unary_pattern(pat: &Pat<'_>) -> bool {
| PatKind::Err(_) => false, | PatKind::Err(_) => false,
PatKind::Struct(_, a, etc) => !etc && a.iter().all(|x| unary_pattern(x.pat)), PatKind::Struct(_, a, etc) => !etc && a.iter().all(|x| unary_pattern(x.pat)),
PatKind::Tuple(a, etc) | PatKind::TupleStruct(_, a, etc) => etc.as_opt_usize().is_none() && array_rec(a), PatKind::Tuple(a, etc) | PatKind::TupleStruct(_, a, etc) => etc.as_opt_usize().is_none() && array_rec(a),
PatKind::Ref(x, _) | PatKind::Box(x) => unary_pattern(x), PatKind::Ref(x, _) | PatKind::Box(x) | PatKind::Deref(x) => unary_pattern(x),
PatKind::Path(_) | PatKind::Lit(_) => true, PatKind::Path(_) | PatKind::Lit(_) => true,
} }
} }

View File

@ -243,7 +243,7 @@ impl<'a> NormalizedPat<'a> {
fn from_pat(cx: &LateContext<'_>, arena: &'a DroplessArena, pat: &'a Pat<'_>) -> Self { fn from_pat(cx: &LateContext<'_>, arena: &'a DroplessArena, pat: &'a Pat<'_>) -> Self {
match pat.kind { match pat.kind {
PatKind::Wild | PatKind::Binding(.., None) => Self::Wild, PatKind::Wild | PatKind::Binding(.., None) => Self::Wild,
PatKind::Binding(.., Some(pat)) | PatKind::Box(pat) | PatKind::Ref(pat, _) => { PatKind::Binding(.., Some(pat)) | PatKind::Box(pat) | PatKind::Deref(pat) | PatKind::Ref(pat, _) => {
Self::from_pat(cx, arena, pat) Self::from_pat(cx, arena, pat)
}, },
PatKind::Never => Self::Never, PatKind::Never => Self::Never,

View File

@ -242,6 +242,8 @@ fn transform_with_focus_on_idx(alternatives: &mut ThinVec<P<Pat>>, focus_idx: us
|k| matches!(k, Box(_)), |k| matches!(k, Box(_)),
|k| always_pat!(k, Box(p) => p), |k| always_pat!(k, Box(p) => p),
), ),
// FIXME(deref_patterns): Should we merge patterns here?
Deref(_) => false,
// Transform `&mut x | ... | &mut y` into `&mut (x | y)`. // Transform `&mut x | ... | &mut y` into `&mut (x | y)`.
Ref(target, Mutability::Mut) => extend_with_matching( Ref(target, Mutability::Mut) => extend_with_matching(
target, start, alternatives, target, start, alternatives,

View File

@ -689,6 +689,11 @@ impl<'a, 'tcx> PrintVisitor<'a, 'tcx> {
kind!("Box({pat})"); kind!("Box({pat})");
self.pat(pat); self.pat(pat);
}, },
PatKind::Deref(pat) => {
bind!(self, pat);
kind!("Deref({pat})");
self.pat(pat);
},
PatKind::Ref(pat, muta) => { PatKind::Ref(pat, muta) => {
bind!(self, pat); bind!(self, pat);
kind!("Ref({pat}, Mutability::{muta:?})"); kind!("Ref({pat}, Mutability::{muta:?})");

View File

@ -955,6 +955,7 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> {
} }
}, },
PatKind::Box(pat) => self.hash_pat(pat), PatKind::Box(pat) => self.hash_pat(pat),
PatKind::Deref(pat) => self.hash_pat(pat),
PatKind::Lit(expr) => self.hash_expr(expr), PatKind::Lit(expr) => self.hash_expr(expr),
PatKind::Or(pats) => { PatKind::Or(pats) => {
for pat in pats { for pat in pats {

View File

@ -1678,7 +1678,7 @@ pub fn is_refutable(cx: &LateContext<'_>, pat: &Pat<'_>) -> bool {
match pat.kind { match pat.kind {
PatKind::Wild | PatKind::Never => false, // If `!` typechecked then the type is empty, so not refutable. PatKind::Wild | PatKind::Never => false, // If `!` typechecked then the type is empty, so not refutable.
PatKind::Binding(_, _, _, pat) => pat.map_or(false, |pat| is_refutable(cx, pat)), PatKind::Binding(_, _, _, pat) => pat.map_or(false, |pat| is_refutable(cx, pat)),
PatKind::Box(pat) | PatKind::Ref(pat, _) => is_refutable(cx, pat), PatKind::Box(pat) | PatKind::Deref(pat) | PatKind::Ref(pat, _) => is_refutable(cx, pat),
PatKind::Path(ref qpath) => is_enum_variant(cx, qpath, pat.hir_id), PatKind::Path(ref qpath) => is_enum_variant(cx, qpath, pat.hir_id),
PatKind::Or(pats) => { PatKind::Or(pats) => {
// TODO: should be the honest check, that pats is exhaustive set // TODO: should be the honest check, that pats is exhaustive set

View File

@ -55,9 +55,10 @@ fn is_short_pattern_inner(pat: &ast::Pat) -> bool {
ast::PatKind::TupleStruct(_, ref path, ref subpats) => { ast::PatKind::TupleStruct(_, ref path, ref subpats) => {
path.segments.len() <= 1 && subpats.len() <= 1 path.segments.len() <= 1 && subpats.len() <= 1
} }
ast::PatKind::Box(ref p) | ast::PatKind::Ref(ref p, _) | ast::PatKind::Paren(ref p) => { ast::PatKind::Box(ref p)
is_short_pattern_inner(&*p) | PatKind::Deref(ref p)
} | ast::PatKind::Ref(ref p, _)
| ast::PatKind::Paren(ref p) => is_short_pattern_inner(&*p),
PatKind::Or(ref pats) => pats.iter().all(|p| is_short_pattern_inner(p)), PatKind::Or(ref pats) => pats.iter().all(|p| is_short_pattern_inner(p)),
} }
} }
@ -277,6 +278,7 @@ impl Rewrite for Pat {
.rewrite(context, shape.offset_left(1)?.sub_width(1)?) .rewrite(context, shape.offset_left(1)?.sub_width(1)?)
.map(|inner_pat| format!("({})", inner_pat)), .map(|inner_pat| format!("({})", inner_pat)),
PatKind::Err(_) => None, PatKind::Err(_) => None,
PatKind::Deref(_) => None,
} }
} }
} }

View File

@ -68,7 +68,7 @@ impl rustc_driver::Callbacks for CompilerCalls {
let mut bodies = Vec::new(); let mut bodies = Vec::new();
let crate_items = tcx.hir_crate_items(()); let crate_items = tcx.hir_crate_items(());
for id in crate_items.items() { for id in crate_items.free_items() {
if matches!(tcx.def_kind(id.owner_id), DefKind::Fn) { if matches!(tcx.def_kind(id.owner_id), DefKind::Fn) {
bodies.push(id.owner_id); bodies.push(id.owner_id);
} }

View File

@ -0,0 +1,45 @@
#[diagnostic::on_unimplemented(message = "{{Test } thing")]
//~^WARN unmatched `}` found
//~|WARN unmatched `}` found
trait ImportantTrait1 {}
#[diagnostic::on_unimplemented(message = "Test {}")]
//~^WARN positional format arguments are not allowed here
//~|WARN positional format arguments are not allowed here
trait ImportantTrait2 {}
#[diagnostic::on_unimplemented(message = "Test {1:}")]
//~^WARN positional format arguments are not allowed here
//~|WARN positional format arguments are not allowed here
trait ImportantTrait3 {}
#[diagnostic::on_unimplemented(message = "Test {Self:123}")]
//~^WARN invalid format specifier
//~|WARN invalid format specifier
trait ImportantTrait4 {}
#[diagnostic::on_unimplemented(message = "Test {Self:!}")]
//~^WARN expected `'}'`, found `'!'`
//~|WARN expected `'}'`, found `'!'`
//~|WARN unmatched `}` found
//~|WARN unmatched `}` found
trait ImportantTrait5 {}
fn check_1(_: impl ImportantTrait1) {}
fn check_2(_: impl ImportantTrait2) {}
fn check_3(_: impl ImportantTrait3) {}
fn check_4(_: impl ImportantTrait4) {}
fn check_5(_: impl ImportantTrait5) {}
fn main() {
check_1(());
//~^ERROR {{Test } thing
check_2(());
//~^ERROR Test {}
check_3(());
//~^ERROR Test {1}
check_4(());
//~^ERROR Test ()
check_5(());
//~^ERROR Test {Self:!}
}

View File

@ -0,0 +1,193 @@
warning: unmatched `}` found
--> $DIR/broken_format.rs:1:32
|
LL | #[diagnostic::on_unimplemented(message = "{{Test } thing")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: `#[warn(unknown_or_malformed_diagnostic_attributes)]` on by default
warning: positional format arguments are not allowed here
--> $DIR/broken_format.rs:6:32
|
LL | #[diagnostic::on_unimplemented(message = "Test {}")]
| ^^^^^^^^^^^^^^^^^^^
|
= help: only named format arguments with the name of one of the generic types are allowed in this context
warning: positional format arguments are not allowed here
--> $DIR/broken_format.rs:11:32
|
LL | #[diagnostic::on_unimplemented(message = "Test {1:}")]
| ^^^^^^^^^^^^^^^^^^^^^
|
= help: only named format arguments with the name of one of the generic types are allowed in this context
warning: invalid format specifier
--> $DIR/broken_format.rs:16:32
|
LL | #[diagnostic::on_unimplemented(message = "Test {Self:123}")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: no format specifiers are supported in this position
warning: expected `'}'`, found `'!'`
--> $DIR/broken_format.rs:21:32
|
LL | #[diagnostic::on_unimplemented(message = "Test {Self:!}")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^
warning: unmatched `}` found
--> $DIR/broken_format.rs:21:32
|
LL | #[diagnostic::on_unimplemented(message = "Test {Self:!}")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^
warning: unmatched `}` found
--> $DIR/broken_format.rs:1:32
|
LL | #[diagnostic::on_unimplemented(message = "{{Test } thing")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
error[E0277]: {{Test } thing
--> $DIR/broken_format.rs:35:13
|
LL | check_1(());
| ------- ^^ the trait `ImportantTrait1` is not implemented for `()`
| |
| required by a bound introduced by this call
|
help: this trait has no implementations, consider adding one
--> $DIR/broken_format.rs:4:1
|
LL | trait ImportantTrait1 {}
| ^^^^^^^^^^^^^^^^^^^^^
note: required by a bound in `check_1`
--> $DIR/broken_format.rs:28:20
|
LL | fn check_1(_: impl ImportantTrait1) {}
| ^^^^^^^^^^^^^^^ required by this bound in `check_1`
warning: positional format arguments are not allowed here
--> $DIR/broken_format.rs:6:32
|
LL | #[diagnostic::on_unimplemented(message = "Test {}")]
| ^^^^^^^^^^^^^^^^^^^
|
= help: only named format arguments with the name of one of the generic types are allowed in this context
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
error[E0277]: Test {}
--> $DIR/broken_format.rs:37:13
|
LL | check_2(());
| ------- ^^ the trait `ImportantTrait2` is not implemented for `()`
| |
| required by a bound introduced by this call
|
help: this trait has no implementations, consider adding one
--> $DIR/broken_format.rs:9:1
|
LL | trait ImportantTrait2 {}
| ^^^^^^^^^^^^^^^^^^^^^
note: required by a bound in `check_2`
--> $DIR/broken_format.rs:29:20
|
LL | fn check_2(_: impl ImportantTrait2) {}
| ^^^^^^^^^^^^^^^ required by this bound in `check_2`
warning: positional format arguments are not allowed here
--> $DIR/broken_format.rs:11:32
|
LL | #[diagnostic::on_unimplemented(message = "Test {1:}")]
| ^^^^^^^^^^^^^^^^^^^^^
|
= help: only named format arguments with the name of one of the generic types are allowed in this context
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
error[E0277]: Test {1}
--> $DIR/broken_format.rs:39:13
|
LL | check_3(());
| ------- ^^ the trait `ImportantTrait3` is not implemented for `()`
| |
| required by a bound introduced by this call
|
help: this trait has no implementations, consider adding one
--> $DIR/broken_format.rs:14:1
|
LL | trait ImportantTrait3 {}
| ^^^^^^^^^^^^^^^^^^^^^
note: required by a bound in `check_3`
--> $DIR/broken_format.rs:30:20
|
LL | fn check_3(_: impl ImportantTrait3) {}
| ^^^^^^^^^^^^^^^ required by this bound in `check_3`
warning: invalid format specifier
--> $DIR/broken_format.rs:16:32
|
LL | #[diagnostic::on_unimplemented(message = "Test {Self:123}")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: no format specifier are supported in this position
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
error[E0277]: Test ()
--> $DIR/broken_format.rs:41:13
|
LL | check_4(());
| ------- ^^ the trait `ImportantTrait4` is not implemented for `()`
| |
| required by a bound introduced by this call
|
help: this trait has no implementations, consider adding one
--> $DIR/broken_format.rs:19:1
|
LL | trait ImportantTrait4 {}
| ^^^^^^^^^^^^^^^^^^^^^
note: required by a bound in `check_4`
--> $DIR/broken_format.rs:31:20
|
LL | fn check_4(_: impl ImportantTrait4) {}
| ^^^^^^^^^^^^^^^ required by this bound in `check_4`
warning: expected `'}'`, found `'!'`
--> $DIR/broken_format.rs:21:32
|
LL | #[diagnostic::on_unimplemented(message = "Test {Self:!}")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
warning: unmatched `}` found
--> $DIR/broken_format.rs:21:32
|
LL | #[diagnostic::on_unimplemented(message = "Test {Self:!}")]
| ^^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
error[E0277]: Test {Self:!}
--> $DIR/broken_format.rs:43:13
|
LL | check_5(());
| ------- ^^ the trait `ImportantTrait5` is not implemented for `()`
| |
| required by a bound introduced by this call
|
help: this trait has no implementations, consider adding one
--> $DIR/broken_format.rs:26:1
|
LL | trait ImportantTrait5 {}
| ^^^^^^^^^^^^^^^^^^^^^
note: required by a bound in `check_5`
--> $DIR/broken_format.rs:32:20
|
LL | fn check_5(_: impl ImportantTrait5) {}
| ^^^^^^^^^^^^^^^ required by this bound in `check_5`
error: aborting due to 5 previous errors; 12 warnings emitted
For more information about this error, try `rustc --explain E0277`.

View File

@ -0,0 +1,12 @@
macro_rules! mac {
($attr_item: meta) => {
#[cfg($attr_item)]
//~^ ERROR expected unsuffixed literal or identifier, found `an(arbitrary token stream)`
//~| ERROR expected unsuffixed literal or identifier, found `an(arbitrary token stream)`
struct S;
}
}
mac!(an(arbitrary token stream));
fn main() {}
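For contrast, a hedged sketch (not part of this commit) of meta-item forms that `#[cfg]` does accept through the same kind of macro: bare identifiers, `key = "literal"` pairs, and `any`/`all`/`not` predicate lists all parse without the error above. The macro and struct names are invented for illustration.

// Hypothetical example; names are illustrative, not taken from this commit.
macro_rules! gate {
    ($attr_item: meta) => {
        #[cfg($attr_item)]
        struct Gated;
    };
}

// A well-formed predicate list is a valid meta item, so this expands cleanly.
gate!(any(unix, windows));

fn main() {}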

View File

@ -0,0 +1,25 @@
error: expected unsuffixed literal or identifier, found `an(arbitrary token stream)`
--> $DIR/attr-bad-meta-4.rs:3:15
|
LL | #[cfg($attr_item)]
| ^^^^^^^^^^
...
LL | mac!(an(arbitrary token stream));
| -------------------------------- in this macro invocation
|
= note: this error originates in the macro `mac` (in Nightly builds, run with -Z macro-backtrace for more info)
error: expected unsuffixed literal or identifier, found `an(arbitrary token stream)`
--> $DIR/attr-bad-meta-4.rs:3:15
|
LL | #[cfg($attr_item)]
| ^^^^^^^^^^
...
LL | mac!(an(arbitrary token stream));
| -------------------------------- in this macro invocation
|
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
= note: this error originates in the macro `mac` (in Nightly builds, run with -Z macro-backtrace for more info)
error: aborting due to 2 previous errors

View File

@ -7,19 +7,19 @@ use std::rc::Rc;
 fn main() {
     let vec: Vec<u32> = Vec::new();
     match vec {
-        box [..] => {}
+        deref!([..]) => {}
         _ => {}
     }
     match Box::new(true) {
-        box true => {}
+        deref!(true) => {}
         _ => {}
     }
     match &Box::new(true) {
-        box true => {}
+        deref!(true) => {}
         _ => {}
     }
     match &Rc::new(0) {
-        box (1..) => {}
+        deref!(1..) => {}
         _ => {}
     }
     // FIXME(deref_patterns): fails to typecheck because `"foo"` has type &str but deref creates a
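A hedged sketch of the new surface syntax shown in this hunk, assuming a nightly toolchain with the unstable `deref_patterns` feature: the `deref!()` macro-based pattern matches through a smart pointer much as the old `box` pattern did. The feature is experimental and its syntax may change.

// Nightly-only sketch; `deref_patterns` is unstable and incomplete.
#![feature(deref_patterns)]
#![allow(incomplete_features)]

fn main() {
    match Box::new(true) {
        // Matches the value behind the Box via its Deref impl.
        deref!(true) => println!("matched through the Box"),
        _ => println!("no match"),
    }
}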

View File

@ -0,0 +1,25 @@
//! This test used to ICE because, while an error was emitted,
//! we still tried to remap generic params used in the hidden type
//! to the ones of the opaque type definition.
//@ edition: 2021
#![feature(type_alias_impl_trait)]
use std::future::Future;
type FutNothing<'a> = impl 'a + Future<Output = ()>;
//~^ ERROR: unconstrained opaque type
async fn operation(_: &mut ()) -> () {
//~^ ERROR: concrete type differs from previous
call(operation).await
}
async fn call<F>(_f: F)
where
for<'any> F: FnMut(&'any mut ()) -> FutNothing<'any>,
{
//~^ ERROR: expected generic lifetime parameter, found `'any`
}
fn main() {}
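By contrast, a minimal sketch (not part of this commit, and assuming the unstable `type_alias_impl_trait` feature) of an opaque type alias that is constrained by a defining use in the same module, so neither the `unconstrained opaque type` error nor the parameter-remapping problem above arises. The names are illustrative, and the rules for defining uses of such aliases are still evolving.

// Nightly-only sketch; `type_alias_impl_trait` is unstable.
#![feature(type_alias_impl_trait)]

type Answer = impl std::fmt::Debug;

// This defining use constrains `Answer`'s hidden type to `i32`.
fn produce() -> Answer {
    42
}

fn main() {
    println!("{:?}", produce());
}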

View File

@ -0,0 +1,34 @@
error: unconstrained opaque type
--> $DIR/hkl_forbidden4.rs:10:23
|
LL | type FutNothing<'a> = impl 'a + Future<Output = ()>;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: `FutNothing` must be used in combination with a concrete type within the same module
error: concrete type differs from previous defining opaque type use
--> $DIR/hkl_forbidden4.rs:13:1
|
LL | async fn operation(_: &mut ()) -> () {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected `FutNothing<'_>`, got `{async fn body@$DIR/hkl_forbidden4.rs:13:38: 16:2}`
|
note: previous use here
--> $DIR/hkl_forbidden4.rs:15:5
|
LL | call(operation).await
| ^^^^^^^^^^^^^^^
error[E0792]: expected generic lifetime parameter, found `'any`
--> $DIR/hkl_forbidden4.rs:21:1
|
LL | type FutNothing<'a> = impl 'a + Future<Output = ()>;
| -- this generic parameter must be used with a generic lifetime parameter
...
LL | / {
LL | |
LL | | }
| |_^
error: aborting due to 3 previous errors
For more information about this error, try `rustc --explain E0792`.

View File

@ -26,11 +26,11 @@ note: closure parameter defined here
 LL |     let mut closure = expect_sig(|p, y| *p = y);
    |                                  ^
-error[E0425]: cannot find function `deref` in this scope
+error[E0423]: expected function, found macro `deref`
    --> $DIR/unboxed-closures-type-mismatch-closure-from-another-scope.rs:13:5
    |
 LL |     deref(p);
-   |     ^^^^^ not found in this scope
+   |     ^^^^^ not a function
    |
 help: use the `.` operator to call the method `Deref::deref` on `&&()`
    |
@ -40,5 +40,5 @@ LL +     p.deref();
 error: aborting due to 4 previous errors
-Some errors have detailed explanations: E0308, E0425.
+Some errors have detailed explanations: E0308, E0423, E0425.
 For more information about an error, try `rustc --explain E0308`.
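A hedged sketch of the fix this diagnostic suggests: calling `Deref::deref` as a method sidesteps the bare path `deref`, which after this change resolves to the `deref!` pattern macro rather than to a function. The binding name is illustrative.

// Hypothetical example; uses only the stable Deref trait.
use std::ops::Deref;

fn main() {
    let p = &&();
    // The method call resolves via the Deref trait, not the path `deref`.
    let _ = p.deref();
}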