Auto merge of #118842 - Nadrieril:librarify-further, r=compiler-errors
Make exhaustiveness usable outside of rustc

With this PR, `rustc_pattern_analysis` compiles on stable (with the `stable` feature)! `rust-analyzer` will be able to use it to provide match-related diagnostics and refactors.

Two questions:
- Should I name the feature `nightly` instead of `rustc` for consistency with other crates? `rustc` makes more sense imo.
- `typed-arena` is an optional dependency, but tidy made me add it to the allow-list anyway. Can I avoid that somehow?

r? `@compiler-errors`
Commit: 3a539c0889
@@ -4354,6 +4354,7 @@ dependencies = [
 "rustc_target",
 "smallvec",
 "tracing",
+"typed-arena",
 ]
 
 [[package]]

@@ -5689,6 +5690,12 @@ dependencies = [
 "rustc-hash",
 ]
 
+[[package]]
+name = "typed-arena"
+version = "2.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6af6ae20167a9ece4bcb41af5b80f8a1f1df981f6391189ce00fd257af04126a"
+
 [[package]]
 name = "typenum"
 version = "1.16.0"
@@ -9,6 +9,7 @@ use std::slice;
 use arrayvec::ArrayVec;
 use smallvec::{smallvec, SmallVec};
 
+#[cfg(feature = "nightly")]
 use rustc_macros::{Decodable, Encodable};
 
 use crate::{Idx, IndexVec};

@@ -111,7 +112,8 @@ macro_rules! bit_relations_inherent_impls {
 /// to or greater than the domain size. All operations that involve two bitsets
 /// will panic if the bitsets have differing domain sizes.
 ///
-#[derive(Eq, PartialEq, Hash, Decodable, Encodable)]
+#[cfg_attr(feature = "nightly", derive(Decodable, Encodable))]
+#[derive(Eq, PartialEq, Hash)]
 pub struct BitSet<T> {
 domain_size: usize,
 words: SmallVec<[Word; 2]>,

@@ -491,10 +493,21 @@ impl<T: Idx> ChunkedBitSet<T> {
 match *chunk {
 Zeros(chunk_domain_size) => {
 if chunk_domain_size > 1 {
-// We take some effort to avoid copying the words.
-let words = Rc::<[Word; CHUNK_WORDS]>::new_zeroed();
-// SAFETY: `words` can safely be all zeroes.
-let mut words = unsafe { words.assume_init() };
+#[cfg(feature = "nightly")]
+let mut words = {
+// We take some effort to avoid copying the words.
+let words = Rc::<[Word; CHUNK_WORDS]>::new_zeroed();
+// SAFETY: `words` can safely be all zeroes.
+unsafe { words.assume_init() }
+};
+#[cfg(not(feature = "nightly"))]
+let mut words = {
+let words = mem::MaybeUninit::<[Word; CHUNK_WORDS]>::zeroed();
+// SAFETY: `words` can safely be all zeroes.
+let words = unsafe { words.assume_init() };
+// Unfortunate possibly-large copy
+Rc::new(words)
+};
 let words_ref = Rc::get_mut(&mut words).unwrap();
 
 let (word_index, mask) = chunk_word_index_and_mask(elem);

@@ -545,10 +558,21 @@ impl<T: Idx> ChunkedBitSet<T> {
 Zeros(_) => false,
 Ones(chunk_domain_size) => {
 if chunk_domain_size > 1 {
-// We take some effort to avoid copying the words.
-let words = Rc::<[Word; CHUNK_WORDS]>::new_zeroed();
-// SAFETY: `words` can safely be all zeroes.
-let mut words = unsafe { words.assume_init() };
+#[cfg(feature = "nightly")]
+let mut words = {
+// We take some effort to avoid copying the words.
+let words = Rc::<[Word; CHUNK_WORDS]>::new_zeroed();
+// SAFETY: `words` can safely be all zeroes.
+unsafe { words.assume_init() }
+};
+#[cfg(not(feature = "nightly"))]
+let mut words = {
+let words = mem::MaybeUninit::<[Word; CHUNK_WORDS]>::zeroed();
+// SAFETY: `words` can safely be all zeroes.
+let words = unsafe { words.assume_init() };
+// Unfortunate possibly-large copy
+Rc::new(words)
+};
 let words_ref = Rc::get_mut(&mut words).unwrap();
 
 // Set only the bits in use.
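The two `ChunkedBitSet` hunks above use the same trick twice: on `nightly`, `Rc::new_zeroed` allocates the chunk's words pre-zeroed in place; otherwise a zeroed `MaybeUninit` array is built first and copied into a fresh `Rc`. A minimal standalone sketch of the stable fallback path — the `Word`/`CHUNK_WORDS` names mirror the diff, but the size here is illustrative rather than the compiler's actual constant:

```rust
use std::mem::MaybeUninit;
use std::rc::Rc;

type Word = u64;
const CHUNK_WORDS: usize = 32; // illustrative size, not necessarily rustc's value

fn zeroed_chunk() -> Rc<[Word; CHUNK_WORDS]> {
    // SAFETY: an all-zero bit pattern is a valid `[u64; CHUNK_WORDS]`.
    let words = unsafe { MaybeUninit::<[Word; CHUNK_WORDS]>::zeroed().assume_init() };
    // The "unfortunate possibly-large copy" from the diff: the array is moved into
    // the new Rc allocation. The nightly path avoids it by zeroing in place.
    Rc::new(words)
}

fn main() {
    let chunk = zeroed_chunk();
    assert!(chunk.iter().all(|&w| w == 0));
}
```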
@@ -1564,7 +1588,8 @@ impl<T: Idx> From<BitSet<T>> for GrowableBitSet<T> {
 ///
 /// All operations that involve a row and/or column index will panic if the
 /// index exceeds the relevant bound.
-#[derive(Clone, Eq, PartialEq, Hash, Decodable, Encodable)]
+#[cfg_attr(feature = "nightly", derive(Decodable, Encodable))]
+#[derive(Clone, Eq, PartialEq, Hash)]
 pub struct BitMatrix<R: Idx, C: Idx> {
 num_rows: usize,
 num_columns: usize,

@@ -1993,7 +2018,8 @@ impl std::fmt::Debug for FiniteBitSet<u32> {
 
 /// A fixed-sized bitset type represented by an integer type. Indices outwith than the range
 /// representable by `T` are considered set.
-#[derive(Copy, Clone, Eq, PartialEq, Decodable, Encodable)]
+#[cfg_attr(feature = "nightly", derive(Decodable, Encodable))]
+#[derive(Copy, Clone, Eq, PartialEq)]
 pub struct FiniteBitSet<T: FiniteBitSetTy>(pub T);
 
 impl<T: FiniteBitSetTy> FiniteBitSet<T> {

@@ -14,7 +14,6 @@
 )]
 #![cfg_attr(feature = "nightly", allow(internal_features))]
 
-#[cfg(feature = "nightly")]
 pub mod bit_set;
 #[cfg(feature = "nightly")]
 pub mod interval;
@@ -6,7 +6,7 @@ use rustc_errors::{
 };
 use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic};
 use rustc_middle::ty::{self, Ty};
-use rustc_pattern_analysis::{cx::MatchCheckCtxt, errors::Uncovered};
+use rustc_pattern_analysis::{errors::Uncovered, rustc::RustcMatchCheckCtxt};
 use rustc_span::symbol::Symbol;
 use rustc_span::Span;
 

@@ -454,7 +454,7 @@ pub enum UnusedUnsafeEnclosing {
 }
 
 pub(crate) struct NonExhaustivePatternsTypeNotEmpty<'p, 'tcx, 'm> {
-pub cx: &'m MatchCheckCtxt<'p, 'tcx>,
+pub cx: &'m RustcMatchCheckCtxt<'p, 'tcx>,
 pub expr_span: Span,
 pub span: Span,
 pub ty: Ty<'tcx>,

@@ -1,13 +1,13 @@
-use rustc_pattern_analysis::constructor::Constructor;
-use rustc_pattern_analysis::cx::MatchCheckCtxt;
 use rustc_pattern_analysis::errors::Uncovered;
-use rustc_pattern_analysis::pat::{DeconstructedPat, WitnessPat};
-use rustc_pattern_analysis::usefulness::{Usefulness, UsefulnessReport};
+use rustc_pattern_analysis::rustc::{
+Constructor, DeconstructedPat, RustcMatchCheckCtxt as MatchCheckCtxt, Usefulness,
+UsefulnessReport, WitnessPat,
+};
 use rustc_pattern_analysis::{analyze_match, MatchArm};
 
 use crate::errors::*;
 
-use rustc_arena::TypedArena;
+use rustc_arena::{DroplessArena, TypedArena};
 use rustc_ast::Mutability;
 use rustc_data_structures::fx::FxIndexSet;
 use rustc_data_structures::stack::ensure_sufficient_stack;
@@ -31,6 +31,7 @@ pub(crate) fn check_match(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Result<(), Err
 let (thir, expr) = tcx.thir_body(def_id)?;
 let thir = thir.borrow();
 let pattern_arena = TypedArena::default();
+let dropless_arena = DroplessArena::default();
 let mut visitor = MatchVisitor {
 tcx,
 thir: &*thir,

@@ -38,6 +39,7 @@ pub(crate) fn check_match(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Result<(), Err
 lint_level: tcx.local_def_id_to_hir_id(def_id),
 let_source: LetSource::None,
 pattern_arena: &pattern_arena,
+dropless_arena: &dropless_arena,
 error: Ok(()),
 };
 visitor.visit_expr(&thir[expr]);

@@ -82,6 +84,7 @@ struct MatchVisitor<'thir, 'p, 'tcx> {
 lint_level: HirId,
 let_source: LetSource,
 pattern_arena: &'p TypedArena<DeconstructedPat<'p, 'tcx>>,
+dropless_arena: &'p DroplessArena,
 /// Tracks if we encountered an error while checking this body. That the first function to
 /// report it stores it here. Some functions return `Result` to allow callers to short-circuit
 /// on error, but callers don't need to store it here again.

@@ -382,6 +385,7 @@ impl<'thir, 'p, 'tcx> MatchVisitor<'thir, 'p, 'tcx> {
 param_env: self.param_env,
 module: self.tcx.parent_module(self.lint_level).to_def_id(),
 pattern_arena: self.pattern_arena,
+dropless_arena: self.dropless_arena,
 match_lint_level: self.lint_level,
 whole_match_span,
 scrut_span,

@@ -425,7 +429,8 @@ impl<'thir, 'p, 'tcx> MatchVisitor<'thir, 'p, 'tcx> {
 let arm = &self.thir.arms[arm];
 let got_error = self.with_lint_level(arm.lint_level, |this| {
 let Ok(pat) = this.lower_pattern(&cx, &arm.pattern) else { return true };
-let arm = MatchArm { pat, hir_id: this.lint_level, has_guard: arm.guard.is_some() };
+let arm =
+MatchArm { pat, arm_data: this.lint_level, has_guard: arm.guard.is_some() };
 tarms.push(arm);
 false
 });

@@ -548,7 +553,7 @@ impl<'thir, 'p, 'tcx> MatchVisitor<'thir, 'p, 'tcx> {
 ) -> Result<(MatchCheckCtxt<'p, 'tcx>, UsefulnessReport<'p, 'tcx>), ErrorGuaranteed> {
 let cx = self.new_cx(refutability, None, scrut, pat.span);
 let pat = self.lower_pattern(&cx, pat)?;
-let arms = [MatchArm { pat, hir_id: self.lint_level, has_guard: false }];
+let arms = [MatchArm { pat, arm_data: self.lint_level, has_guard: false }];
 let report = analyze_match(&cx, &arms, pat.ty());
 Ok((cx, report))
 }

@@ -847,34 +852,34 @@ fn report_arm_reachability<'p, 'tcx>(
 );
 };
 
-use Usefulness::*;
 let mut catchall = None;
 for (arm, is_useful) in report.arm_usefulness.iter() {
 match is_useful {
-Redundant => report_unreachable_pattern(arm.pat.span(), arm.hir_id, catchall),
-Useful(redundant_spans) if redundant_spans.is_empty() => {}
+Usefulness::Redundant => {
+report_unreachable_pattern(*arm.pat.data(), arm.arm_data, catchall)
+}
+Usefulness::Useful(redundant_subpats) if redundant_subpats.is_empty() => {}
 // The arm is reachable, but contains redundant subpatterns (from or-patterns).
-Useful(redundant_spans) => {
-let mut redundant_spans = redundant_spans.clone();
+Usefulness::Useful(redundant_subpats) => {
+let mut redundant_subpats = redundant_subpats.clone();
 // Emit lints in the order in which they occur in the file.
-redundant_spans.sort_unstable();
-for span in redundant_spans {
-report_unreachable_pattern(span, arm.hir_id, None);
+redundant_subpats.sort_unstable_by_key(|pat| pat.data());
+for pat in redundant_subpats {
+report_unreachable_pattern(*pat.data(), arm.arm_data, None);
 }
 }
 }
 if !arm.has_guard && catchall.is_none() && pat_is_catchall(arm.pat) {
-catchall = Some(arm.pat.span());
+catchall = Some(*arm.pat.data());
 }
 }
 }
 
 /// Checks for common cases of "catchall" patterns that may not be intended as such.
 fn pat_is_catchall(pat: &DeconstructedPat<'_, '_>) -> bool {
-use Constructor::*;
 match pat.ctor() {
-Wildcard => true,
-Single => pat.iter_fields().all(|pat| pat_is_catchall(pat)),
+Constructor::Wildcard => true,
+Constructor::Struct | Constructor::Ref => pat.iter_fields().all(|pat| pat_is_catchall(pat)),
 _ => false,
 }
 }
@@ -885,7 +890,7 @@ fn report_non_exhaustive_match<'p, 'tcx>(
 thir: &Thir<'tcx>,
 scrut_ty: Ty<'tcx>,
 sp: Span,
-witnesses: Vec<WitnessPat<'tcx>>,
+witnesses: Vec<WitnessPat<'p, 'tcx>>,
 arms: &[ArmId],
 expr_span: Span,
 ) -> ErrorGuaranteed {

@@ -1082,10 +1087,10 @@ fn report_non_exhaustive_match<'p, 'tcx>(
 
 fn joined_uncovered_patterns<'p, 'tcx>(
 cx: &MatchCheckCtxt<'p, 'tcx>,
-witnesses: &[WitnessPat<'tcx>],
+witnesses: &[WitnessPat<'p, 'tcx>],
 ) -> String {
 const LIMIT: usize = 3;
-let pat_to_str = |pat: &WitnessPat<'tcx>| cx.hoist_witness_pat(pat).to_string();
+let pat_to_str = |pat: &WitnessPat<'p, 'tcx>| cx.hoist_witness_pat(pat).to_string();
 match witnesses {
 [] => bug!(),
 [witness] => format!("`{}`", cx.hoist_witness_pat(witness)),

@@ -1103,7 +1108,7 @@ fn joined_uncovered_patterns<'p, 'tcx>(
 
 fn collect_non_exhaustive_tys<'tcx>(
 cx: &MatchCheckCtxt<'_, 'tcx>,
-pat: &WitnessPat<'tcx>,
+pat: &WitnessPat<'_, 'tcx>,
 non_exhaustive_tys: &mut FxIndexSet<Ty<'tcx>>,
 ) {
 if matches!(pat.ctor(), Constructor::NonExhaustive) {

@@ -1122,7 +1127,7 @@ fn collect_non_exhaustive_tys<'tcx>(
 fn report_adt_defined_here<'tcx>(
 tcx: TyCtxt<'tcx>,
 ty: Ty<'tcx>,
-witnesses: &[WitnessPat<'tcx>],
+witnesses: &[WitnessPat<'_, 'tcx>],
 point_at_non_local_ty: bool,
 ) -> Option<AdtDefinedHere<'tcx>> {
 let ty = ty.peel_refs();

@@ -1144,15 +1149,14 @@ fn report_adt_defined_here<'tcx>(
 Some(AdtDefinedHere { adt_def_span, ty, variants })
 }
 
-fn maybe_point_at_variant<'a, 'tcx: 'a>(
+fn maybe_point_at_variant<'a, 'p: 'a, 'tcx: 'p>(
 tcx: TyCtxt<'tcx>,
 def: AdtDef<'tcx>,
-patterns: impl Iterator<Item = &'a WitnessPat<'tcx>>,
+patterns: impl Iterator<Item = &'a WitnessPat<'p, 'tcx>>,
 ) -> Vec<Span> {
-use Constructor::*;
 let mut covered = vec![];
 for pattern in patterns {
-if let Variant(variant_index) = pattern.ctor() {
+if let Constructor::Variant(variant_index) = pattern.ctor() {
 if let ty::Adt(this_def, _) = pattern.ty().kind()
 && this_def.did() != def.did()
 {
@@ -6,17 +6,40 @@ edition = "2021"
 [dependencies]
 # tidy-alphabetical-start
 rustc_apfloat = "0.2.0"
-rustc_arena = { path = "../rustc_arena" }
-rustc_data_structures = { path = "../rustc_data_structures" }
-rustc_errors = { path = "../rustc_errors" }
-rustc_fluent_macro = { path = "../rustc_fluent_macro" }
-rustc_hir = { path = "../rustc_hir" }
-rustc_index = { path = "../rustc_index" }
-rustc_macros = { path = "../rustc_macros" }
-rustc_middle = { path = "../rustc_middle" }
-rustc_session = { path = "../rustc_session" }
-rustc_span = { path = "../rustc_span" }
-rustc_target = { path = "../rustc_target" }
-smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
+rustc_arena = { path = "../rustc_arena", optional = true }
+rustc_data_structures = { path = "../rustc_data_structures", optional = true }
+rustc_errors = { path = "../rustc_errors", optional = true }
+rustc_fluent_macro = { path = "../rustc_fluent_macro", optional = true }
+rustc_hir = { path = "../rustc_hir", optional = true }
+rustc_index = { path = "../rustc_index", default-features = false }
+rustc_macros = { path = "../rustc_macros", optional = true }
+rustc_middle = { path = "../rustc_middle", optional = true }
+rustc_session = { path = "../rustc_session", optional = true }
+rustc_span = { path = "../rustc_span", optional = true }
+rustc_target = { path = "../rustc_target", optional = true }
+smallvec = { version = "1.8.1", features = ["union"] }
 tracing = "0.1"
+typed-arena = { version = "2.0.2", optional = true }
 # tidy-alphabetical-end
+
+[features]
+default = ["rustc"]
+# It's not possible to only enable the `typed_arena` dependency when the `rustc` feature is off, so
+# we use another feature instead. The crate won't compile if one of these isn't enabled.
+rustc = [
+"dep:rustc_arena",
+"dep:rustc_data_structures",
+"dep:rustc_errors",
+"dep:rustc_fluent_macro",
+"dep:rustc_hir",
+"dep:rustc_macros",
+"dep:rustc_middle",
+"dep:rustc_session",
+"dep:rustc_span",
+"dep:rustc_target",
+"smallvec/may_dangle",
+"rustc_index/nightly",
+]
+stable = [
+"dep:typed-arena",
+]
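These `rustc`/`stable` features are consumed in the crate's `lib.rs` (a later hunk in this diff) through `cfg`-gated items, so exactly one arena implementation is compiled in. A condensed sketch of that pattern, lifted from the lib.rs hunk below rather than new API:

```rust
// The crate expects exactly one of the two features to be enabled.
#[cfg(feature = "rustc")]
pub(crate) use rustc_arena::TypedArena;
#[cfg(feature = "stable")]
pub(crate) use typed_arena::Arena as TypedArena;

// rustc-only modules are gated the same way:
#[cfg(feature = "rustc")]
pub mod rustc;
```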
@@ -40,7 +40,7 @@
 //! - That have no non-trivial intersection with any of the constructors in the column (i.e. they're
 //! each either disjoint with or covered by any given column constructor).
 //!
-//! We compute this in two steps: first [`crate::cx::MatchCheckCtxt::ctors_for_ty`] determines the
+//! We compute this in two steps: first [`TypeCx::ctors_for_ty`] determines the
 //! set of all possible constructors for the type. Then [`ConstructorSet::split`] looks at the
 //! column of constructors and splits the set into groups accordingly. The precise invariants of
 //! [`ConstructorSet::split`] is described in [`SplitConstructorSet`].

@@ -136,7 +136,7 @@
 //! the algorithm can't distinguish them from a nonempty constructor. The only known case where this
 //! could happen is the `[..]` pattern on `[!; N]` with `N > 0` so we must take care to not emit it.
 //!
-//! This is all handled by [`crate::cx::MatchCheckCtxt::ctors_for_ty`] and
+//! This is all handled by [`TypeCx::ctors_for_ty`] and
 //! [`ConstructorSet::split`]. The invariants of [`SplitConstructorSet`] are also of interest.
 //!
 //!

@@ -155,17 +155,15 @@ use std::iter::once;
 use smallvec::SmallVec;
 
 use rustc_apfloat::ieee::{DoubleS, IeeeFloat, SingleS};
-use rustc_data_structures::fx::FxHashSet;
-use rustc_hir::RangeEnd;
+use rustc_index::bit_set::{BitSet, GrowableBitSet};
 use rustc_index::IndexVec;
-use rustc_middle::mir::Const;
-use rustc_target::abi::VariantIdx;
 
 use self::Constructor::*;
 use self::MaybeInfiniteInt::*;
 use self::SliceKind::*;
 
-use crate::usefulness::PatCtxt;
+use crate::usefulness::PlaceCtxt;
+use crate::TypeCx;
 
 /// Whether we have seen a constructor in the column or not.
 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
@@ -174,6 +172,21 @@ enum Presence {
 Seen,
 }
 
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub enum RangeEnd {
+Included,
+Excluded,
+}
+
+impl fmt::Display for RangeEnd {
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+f.write_str(match self {
+RangeEnd::Included => "..=",
+RangeEnd::Excluded => "..",
+})
+}
+}
+
 /// A possibly infinite integer. Values are encoded such that the ordering on `u128` matches the
 /// natural order on the original type. For example, `-128i8` is encoded as `0` and `127i8` as
 /// `255`. See `signed_bias` for details.
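The new crate-local `RangeEnd` replaces the `rustc_hir::RangeEnd` import dropped earlier, and its `Display` impl lets range constructors be printed without any rustc dependency. A tiny usage sketch, assuming the path `rustc_pattern_analysis::constructor::RangeEnd` as declared in this diff:

```rust
use rustc_pattern_analysis::constructor::RangeEnd;

fn main() {
    // `Display` mirrors Rust's range syntax.
    assert_eq!(format!("0{}5", RangeEnd::Included), "0..=5");
    assert_eq!(format!("0{}5", RangeEnd::Excluded), "0..5");
}
```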
@@ -221,7 +234,7 @@ impl MaybeInfiniteInt {
 match self {
 Finite(n) => match n.checked_sub(1) {
 Some(m) => Finite(m),
-None => bug!(),
+None => panic!("Called `MaybeInfiniteInt::minus_one` on 0"),
 },
 JustAfterMax => Finite(u128::MAX),
 x => x,

@@ -234,7 +247,7 @@ impl MaybeInfiniteInt {
 Some(m) => Finite(m),
 None => JustAfterMax,
 },
-JustAfterMax => bug!(),
+JustAfterMax => panic!("Called `MaybeInfiniteInt::plus_one` on u128::MAX+1"),
 x => x,
 }
 }

@@ -253,7 +266,7 @@ pub struct IntRange {
 
 impl IntRange {
 /// Best effort; will not know that e.g. `255u8..` is a singleton.
-pub(crate) fn is_singleton(&self) -> bool {
+pub fn is_singleton(&self) -> bool {
 // Since `lo` and `hi` can't be the same `Infinity` and `plus_one` never changes from finite
 // to infinite, this correctly only detects ranges that contain exacly one `Finite(x)`.
 self.lo.plus_one() == self.hi

@@ -271,7 +284,7 @@ impl IntRange {
 }
 if lo >= hi {
 // This should have been caught earlier by E0030.
-bug!("malformed range pattern: {lo:?}..{hi:?}");
+panic!("malformed range pattern: {lo:?}..{hi:?}");
 }
 IntRange { lo, hi }
 }

@@ -432,7 +445,7 @@ impl Slice {
 let kind = match (array_len, kind) {
 // If the middle `..` has length 0, we effectively have a fixed-length pattern.
 (Some(len), VarLen(prefix, suffix)) if prefix + suffix == len => FixedLen(len),
-(Some(len), VarLen(prefix, suffix)) if prefix + suffix > len => bug!(
+(Some(len), VarLen(prefix, suffix)) if prefix + suffix > len => panic!(
 "Slice pattern of length {} longer than its array length {len}",
 prefix + suffix
 ),

@@ -532,7 +545,7 @@ impl Slice {
 // therefore `Presence::Seen` in the column.
 let mut min_var_len = usize::MAX;
 // Tracks the fixed-length slices we've seen, to mark them as `Presence::Seen`.
-let mut seen_fixed_lens = FxHashSet::default();
+let mut seen_fixed_lens = GrowableBitSet::new_empty();
 match &mut max_slice {
 VarLen(max_prefix_len, max_suffix_len) => {
 // A length larger than any fixed-length slice encountered.

@@ -600,7 +613,7 @@ impl Slice {
 
 smaller_lengths.map(FixedLen).chain(once(max_slice)).map(move |kind| {
 let arity = kind.arity();
-let seen = if min_var_len <= arity || seen_fixed_lens.contains(&arity) {
+let seen = if min_var_len <= arity || seen_fixed_lens.contains(arity) {
 Presence::Seen
 } else {
 Presence::Unseen

@@ -630,12 +643,17 @@ impl OpaqueId {
 /// constructor. `Constructor::apply` reconstructs the pattern from a pair of `Constructor` and
 /// `Fields`.
 #[derive(Clone, Debug, PartialEq)]
-pub enum Constructor<'tcx> {
-/// The constructor for patterns that have a single constructor, like tuples, struct patterns,
-/// and references. Fixed-length arrays are treated separately with `Slice`.
-Single,
+pub enum Constructor<Cx: TypeCx> {
+/// Tuples and structs.
+Struct,
 /// Enum variants.
-Variant(VariantIdx),
+Variant(Cx::VariantIdx),
+/// References
+Ref,
+/// Array and slice patterns.
+Slice(Slice),
+/// Union field accesses.
+UnionField,
 /// Booleans
 Bool(bool),
 /// Ranges of integer literal values (`2`, `2..=5` or `2..5`).
@@ -644,9 +662,7 @@ pub enum Constructor<'tcx> {
 F32Range(IeeeFloat<SingleS>, IeeeFloat<SingleS>, RangeEnd),
 F64Range(IeeeFloat<DoubleS>, IeeeFloat<DoubleS>, RangeEnd),
 /// String literals. Strings are not quite the same as `&[u8]` so we treat them separately.
-Str(Const<'tcx>),
-/// Array and slice patterns.
-Slice(Slice),
+Str(Cx::StrLit),
 /// Constants that must not be matched structurally. They are treated as black boxes for the
 /// purposes of exhaustiveness: we must not inspect them, and they don't count towards making a
 /// match exhaustive.

@@ -669,12 +685,12 @@ pub enum Constructor<'tcx> {
 Missing,
 }
 
-impl<'tcx> Constructor<'tcx> {
+impl<Cx: TypeCx> Constructor<Cx> {
 pub(crate) fn is_non_exhaustive(&self) -> bool {
 matches!(self, NonExhaustive)
 }
 
-pub(crate) fn as_variant(&self) -> Option<VariantIdx> {
+pub(crate) fn as_variant(&self) -> Option<Cx::VariantIdx> {
 match self {
 Variant(i) => Some(*i),
 _ => None,

@@ -701,8 +717,8 @@ impl<'tcx> Constructor<'tcx> {
 
 /// The number of fields for this constructor. This must be kept in sync with
 /// `Fields::wildcards`.
-pub(crate) fn arity(&self, pcx: &PatCtxt<'_, '_, 'tcx>) -> usize {
-pcx.cx.ctor_arity(self, pcx.ty)
+pub(crate) fn arity(&self, pcx: &PlaceCtxt<'_, '_, Cx>) -> usize {
+pcx.ctor_arity(self)
 }
 
 /// Returns whether `self` is covered by `other`, i.e. whether `self` is a subset of `other`.

@@ -710,20 +726,20 @@ impl<'tcx> Constructor<'tcx> {
 /// this checks for inclusion.
 // We inline because this has a single call site in `Matrix::specialize_constructor`.
 #[inline]
-pub(crate) fn is_covered_by<'p>(&self, pcx: &PatCtxt<'_, 'p, 'tcx>, other: &Self) -> bool {
+pub(crate) fn is_covered_by<'p>(&self, pcx: &PlaceCtxt<'_, 'p, Cx>, other: &Self) -> bool {
 match (self, other) {
-(Wildcard, _) => {
-span_bug!(
-pcx.cx.scrut_span,
-"Constructor splitting should not have returned `Wildcard`"
-)
-}
+(Wildcard, _) => pcx
+.mcx
+.tycx
+.bug(format_args!("Constructor splitting should not have returned `Wildcard`")),
 // Wildcards cover anything
 (_, Wildcard) => true,
 // Only a wildcard pattern can match these special constructors.
 (Missing { .. } | NonExhaustive | Hidden, _) => false,
 
-(Single, Single) => true,
+(Struct, Struct) => true,
+(Ref, Ref) => true,
+(UnionField, UnionField) => true,
 (Variant(self_id), Variant(other_id)) => self_id == other_id,
 (Bool(self_b), Bool(other_b)) => self_b == other_b,
 

@@ -756,12 +772,9 @@ impl<'tcx> Constructor<'tcx> {
 (Opaque(self_id), Opaque(other_id)) => self_id == other_id,
 (Opaque(..), _) | (_, Opaque(..)) => false,
 
-_ => span_bug!(
-pcx.cx.scrut_span,
-"trying to compare incompatible constructors {:?} and {:?}",
-self,
-other
-),
+_ => pcx.mcx.tycx.bug(format_args!(
+"trying to compare incompatible constructors {self:?} and {other:?}"
+)),
 }
 }
 }
@@ -785,13 +798,16 @@ pub enum VariantVisibility {
 /// In terms of division of responsibility, [`ConstructorSet::split`] handles all of the
 /// `exhaustive_patterns` feature.
 #[derive(Debug)]
-pub enum ConstructorSet {
-/// The type has a single constructor, e.g. `&T` or a struct. `empty` tracks whether the
-/// constructor is empty.
-Single { empty: bool },
+pub enum ConstructorSet<Cx: TypeCx> {
+/// The type is a tuple or struct. `empty` tracks whether the type is empty.
+Struct { empty: bool },
 /// This type has the following list of constructors. If `variants` is empty and
 /// `non_exhaustive` is false, don't use this; use `NoConstructors` instead.
-Variants { variants: IndexVec<VariantIdx, VariantVisibility>, non_exhaustive: bool },
+Variants { variants: IndexVec<Cx::VariantIdx, VariantVisibility>, non_exhaustive: bool },
+/// The type is `&T`.
+Ref,
+/// The type is a union.
+Union,
 /// Booleans.
 Bool,
 /// The type is spanned by integer values. The range or ranges give the set of allowed values.

@@ -830,25 +846,25 @@ pub enum ConstructorSet {
 /// of the `ConstructorSet` for the type, yet if we forgot to include them in `present` we would be
 /// ignoring any row with `Opaque`s in the algorithm. Hence the importance of point 4.
 #[derive(Debug)]
-pub(crate) struct SplitConstructorSet<'tcx> {
-pub(crate) present: SmallVec<[Constructor<'tcx>; 1]>,
-pub(crate) missing: Vec<Constructor<'tcx>>,
-pub(crate) missing_empty: Vec<Constructor<'tcx>>,
+pub(crate) struct SplitConstructorSet<Cx: TypeCx> {
+pub(crate) present: SmallVec<[Constructor<Cx>; 1]>,
+pub(crate) missing: Vec<Constructor<Cx>>,
+pub(crate) missing_empty: Vec<Constructor<Cx>>,
 }
 
-impl ConstructorSet {
+impl<Cx: TypeCx> ConstructorSet<Cx> {
 /// This analyzes a column of constructors to 1/ determine which constructors of the type (if
 /// any) are missing; 2/ split constructors to handle non-trivial intersections e.g. on ranges
 /// or slices. This can get subtle; see [`SplitConstructorSet`] for details of this operation
 /// and its invariants.
 #[instrument(level = "debug", skip(self, pcx, ctors), ret)]
-pub(crate) fn split<'a, 'tcx>(
+pub(crate) fn split<'a>(
 &self,
-pcx: &PatCtxt<'_, '_, 'tcx>,
-ctors: impl Iterator<Item = &'a Constructor<'tcx>> + Clone,
-) -> SplitConstructorSet<'tcx>
+pcx: &PlaceCtxt<'_, '_, Cx>,
+ctors: impl Iterator<Item = &'a Constructor<Cx>> + Clone,
+) -> SplitConstructorSet<Cx>
 where
-'tcx: 'a,
+Cx: 'a,
 {
 let mut present: SmallVec<[_; 1]> = SmallVec::new();
 // Empty constructors found missing.

@@ -866,22 +882,39 @@ impl ConstructorSet {
 }
 
 match self {
-ConstructorSet::Single { empty } => {
+ConstructorSet::Struct { empty } => {
 if !seen.is_empty() {
-present.push(Single);
+present.push(Struct);
 } else if *empty {
-missing_empty.push(Single);
+missing_empty.push(Struct);
 } else {
-missing.push(Single);
+missing.push(Struct);
+}
+}
+ConstructorSet::Ref => {
+if !seen.is_empty() {
+present.push(Ref);
+} else {
+missing.push(Ref);
+}
+}
+ConstructorSet::Union => {
+if !seen.is_empty() {
+present.push(UnionField);
+} else {
+missing.push(UnionField);
 }
 }
 ConstructorSet::Variants { variants, non_exhaustive } => {
-let seen_set: FxHashSet<_> = seen.iter().map(|c| c.as_variant().unwrap()).collect();
+let mut seen_set: BitSet<_> = BitSet::new_empty(variants.len());
+for idx in seen.iter().map(|c| c.as_variant().unwrap()) {
+seen_set.insert(idx);
+}
 let mut skipped_a_hidden_variant = false;
 
 for (idx, visibility) in variants.iter_enumerated() {
 let ctor = Variant(idx);
-if seen_set.contains(&idx) {
+if seen_set.contains(idx) {
 present.push(ctor);
 } else {
 // We only put visible variants directly into `missing`.

@@ -975,8 +1008,8 @@ impl ConstructorSet {
 // We have now grouped all the constructors into 3 buckets: present, missing, missing_empty.
 // In the absence of the `exhaustive_patterns` feature however, we don't count nested empty
 // types as empty. Only non-nested `!` or `enum Foo {}` are considered empty.
-if !pcx.cx.tcx.features().exhaustive_patterns
-&& !(pcx.is_top_level && matches!(self, Self::NoConstructors))
+if !pcx.mcx.tycx.is_exhaustive_patterns_feature_on()
+&& !(pcx.is_scrutinee && matches!(self, Self::NoConstructors))
 {
 // Treat all missing constructors as nonempty.
 // This clears `missing_empty`.
@@ -1,11 +1,11 @@
-use crate::{cx::MatchCheckCtxt, pat::WitnessPat};
-
 use rustc_errors::{AddToDiagnostic, Diagnostic, SubdiagnosticMessage};
 use rustc_macros::{LintDiagnostic, Subdiagnostic};
 use rustc_middle::thir::Pat;
 use rustc_middle::ty::Ty;
 use rustc_span::Span;
 
+use crate::rustc::{RustcMatchCheckCtxt, WitnessPat};
+
 #[derive(Subdiagnostic)]
 #[label(pattern_analysis_uncovered)]
 pub struct Uncovered<'tcx> {

@@ -21,8 +21,8 @@ pub struct Uncovered<'tcx> {
 impl<'tcx> Uncovered<'tcx> {
 pub fn new<'p>(
 span: Span,
-cx: &MatchCheckCtxt<'p, 'tcx>,
-witnesses: Vec<WitnessPat<'tcx>>,
+cx: &RustcMatchCheckCtxt<'p, 'tcx>,
+witnesses: Vec<WitnessPat<'p, 'tcx>>,
 ) -> Self {
 let witness_1 = cx.hoist_witness_pat(witnesses.get(0).unwrap());
 Self {
@@ -1,54 +1,133 @@
 //! Analysis of patterns, notably match exhaustiveness checking.
 
 pub mod constructor;
-pub mod cx;
+#[cfg(feature = "rustc")]
 pub mod errors;
+#[cfg(feature = "rustc")]
 pub(crate) mod lints;
 pub mod pat;
+#[cfg(feature = "rustc")]
+pub mod rustc;
 pub mod usefulness;
 
 #[macro_use]
 extern crate tracing;
+#[cfg(feature = "rustc")]
 #[macro_use]
 extern crate rustc_middle;
 
+#[cfg(feature = "rustc")]
 rustc_fluent_macro::fluent_messages! { "../messages.ftl" }
 
-use lints::PatternColumn;
-use rustc_hir::HirId;
-use rustc_middle::ty::Ty;
-use usefulness::{compute_match_usefulness, UsefulnessReport};
+use std::fmt;
 
-use crate::cx::MatchCheckCtxt;
-use crate::lints::{lint_nonexhaustive_missing_variants, lint_overlapping_range_endpoints};
+use rustc_index::Idx;
+#[cfg(feature = "rustc")]
+use rustc_middle::ty::Ty;
+
+use crate::constructor::{Constructor, ConstructorSet};
+#[cfg(feature = "rustc")]
+use crate::lints::{
+lint_nonexhaustive_missing_variants, lint_overlapping_range_endpoints, PatternColumn,
+};
 use crate::pat::DeconstructedPat;
+#[cfg(feature = "rustc")]
+use crate::rustc::RustcMatchCheckCtxt;
+#[cfg(feature = "rustc")]
+use crate::usefulness::{compute_match_usefulness, ValidityConstraint};
+
+// It's not possible to only enable the `typed_arena` dependency when the `rustc` feature is off, so
+// we use another feature instead. The crate won't compile if one of these isn't enabled.
+#[cfg(feature = "rustc")]
+pub(crate) use rustc_arena::TypedArena;
+#[cfg(feature = "stable")]
+pub(crate) use typed_arena::Arena as TypedArena;
+
+pub trait Captures<'a> {}
+impl<'a, T: ?Sized> Captures<'a> for T {}
+
+/// Context that provides type information about constructors.
+///
+/// Most of the crate is parameterized on a type that implements this trait.
+pub trait TypeCx: Sized + Clone + fmt::Debug {
+/// The type of a pattern.
+type Ty: Copy + Clone + fmt::Debug; // FIXME: remove Copy
+/// The index of an enum variant.
+type VariantIdx: Clone + Idx;
+/// A string literal
+type StrLit: Clone + PartialEq + fmt::Debug;
+/// Extra data to store in a match arm.
+type ArmData: Copy + Clone + fmt::Debug;
+/// Extra data to store in a pattern. `Default` needed when we create fictitious wildcard
+/// patterns during analysis.
+type PatData: Clone + Default;
+
+fn is_opaque_ty(ty: Self::Ty) -> bool;
+fn is_exhaustive_patterns_feature_on(&self) -> bool;
+
+/// The number of fields for this constructor.
+fn ctor_arity(&self, ctor: &Constructor<Self>, ty: Self::Ty) -> usize;
+
+/// The types of the fields for this constructor. The result must have a length of
+/// `ctor_arity()`.
+fn ctor_sub_tys(&self, ctor: &Constructor<Self>, ty: Self::Ty) -> &[Self::Ty];
+
+/// The set of all the constructors for `ty`.
+///
+/// This must follow the invariants of `ConstructorSet`
+fn ctors_for_ty(&self, ty: Self::Ty) -> ConstructorSet<Self>;
+
+/// Best-effort `Debug` implementation.
+fn debug_pat(f: &mut fmt::Formatter<'_>, pat: &DeconstructedPat<'_, Self>) -> fmt::Result;
+
+/// Raise a bug.
+fn bug(&self, fmt: fmt::Arguments<'_>) -> !;
+}
+
+/// Context that provides information global to a match.
+#[derive(Clone)]
+pub struct MatchCtxt<'a, 'p, Cx: TypeCx> {
+/// The context for type information.
+pub tycx: &'a Cx,
+/// An arena to store the wildcards we produce during analysis.
+pub wildcard_arena: &'a TypedArena<DeconstructedPat<'p, Cx>>,
+}
+
+impl<'a, 'p, Cx: TypeCx> Copy for MatchCtxt<'a, 'p, Cx> {}
 
 /// The arm of a match expression.
-#[derive(Clone, Copy, Debug)]
-pub struct MatchArm<'p, 'tcx> {
-/// The pattern must have been lowered through `check_match::MatchVisitor::lower_pattern`.
-pub pat: &'p DeconstructedPat<'p, 'tcx>,
-pub hir_id: HirId,
+#[derive(Clone, Debug)]
+pub struct MatchArm<'p, Cx: TypeCx> {
+pub pat: &'p DeconstructedPat<'p, Cx>,
 pub has_guard: bool,
+pub arm_data: Cx::ArmData,
 }
 
+impl<'p, Cx: TypeCx> Copy for MatchArm<'p, Cx> {}
+
 /// The entrypoint for this crate. Computes whether a match is exhaustive and which of its arms are
 /// useful, and runs some lints.
+#[cfg(feature = "rustc")]
 pub fn analyze_match<'p, 'tcx>(
-cx: &MatchCheckCtxt<'p, 'tcx>,
-arms: &[MatchArm<'p, 'tcx>],
+tycx: &RustcMatchCheckCtxt<'p, 'tcx>,
+arms: &[rustc::MatchArm<'p, 'tcx>],
 scrut_ty: Ty<'tcx>,
-) -> UsefulnessReport<'p, 'tcx> {
-let pat_column = PatternColumn::new(arms);
+) -> rustc::UsefulnessReport<'p, 'tcx> {
+// Arena to store the extra wildcards we construct during analysis.
+let wildcard_arena = tycx.pattern_arena;
+let scrut_validity = ValidityConstraint::from_bool(tycx.known_valid_scrutinee);
+let cx = MatchCtxt { tycx, wildcard_arena };
 
-let report = compute_match_usefulness(cx, arms, scrut_ty);
+let report = compute_match_usefulness(cx, arms, scrut_ty, scrut_validity);
+
+let pat_column = PatternColumn::new(arms);
 
 // Lint on ranges that overlap on their endpoints, which is likely a mistake.
 lint_overlapping_range_endpoints(cx, &pat_column);
 
 // Run the non_exhaustive_omitted_patterns lint. Only run on refutable patterns to avoid hitting
 // `if let`s. Only run if the match is exhaustive otherwise the error is redundant.
-if cx.refutable && report.non_exhaustiveness_witnesses.is_empty() {
+if tycx.refutable && report.non_exhaustiveness_witnesses.is_empty() {
 lint_nonexhaustive_missing_variants(cx, arms, &pat_column, scrut_ty)
 }
 
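The `TypeCx` trait introduced above is the seam that lets a non-rustc consumer such as `rust-analyzer` drive the exhaustiveness algorithm with its own types. A hypothetical, heavily stubbed implementation sketch — `MiniCx` and `MiniTy` are invented names, and only the trait surface shown in this hunk is assumed:

```rust
use std::fmt;

use rustc_pattern_analysis::constructor::{Constructor, ConstructorSet};
use rustc_pattern_analysis::pat::DeconstructedPat;
use rustc_pattern_analysis::TypeCx;

#[derive(Clone, Debug)]
struct MiniCx;

#[derive(Copy, Clone, Debug)]
enum MiniTy {
    Bool,
}

impl TypeCx for MiniCx {
    type Ty = MiniTy;
    type VariantIdx = usize; // `usize` implements `rustc_index::Idx`
    type StrLit = String;
    type ArmData = ();
    type PatData = ();

    fn is_opaque_ty(_ty: MiniTy) -> bool {
        false
    }
    fn is_exhaustive_patterns_feature_on(&self) -> bool {
        false
    }
    fn ctor_arity(&self, _ctor: &Constructor<Self>, _ty: MiniTy) -> usize {
        0 // booleans have no fields
    }
    fn ctor_sub_tys(&self, _ctor: &Constructor<Self>, _ty: MiniTy) -> &[MiniTy] {
        &[]
    }
    fn ctors_for_ty(&self, ty: MiniTy) -> ConstructorSet<Self> {
        match ty {
            MiniTy::Bool => ConstructorSet::Bool,
        }
    }
    fn debug_pat(f: &mut fmt::Formatter<'_>, _pat: &DeconstructedPat<'_, Self>) -> fmt::Result {
        f.write_str("<pat>")
    }
    fn bug(&self, fmt: fmt::Arguments<'_>) -> ! {
        panic!("{fmt}")
    }
}
```

With a context like this and `DeconstructedPat` values built for it, the usefulness computation can run without any rustc internals, which is what the `stable` feature is for.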
@@ -6,15 +6,16 @@ use rustc_session::lint;
 use rustc_session::lint::builtin::NON_EXHAUSTIVE_OMITTED_PATTERNS;
 use rustc_span::Span;
 
-use crate::constructor::{Constructor, IntRange, MaybeInfiniteInt, SplitConstructorSet};
-use crate::cx::MatchCheckCtxt;
+use crate::constructor::{IntRange, MaybeInfiniteInt};
 use crate::errors::{
 NonExhaustiveOmittedPattern, NonExhaustiveOmittedPatternLintOnArm, Overlap,
 OverlappingRangeEndpoints, Uncovered,
 };
-use crate::pat::{DeconstructedPat, WitnessPat};
-use crate::usefulness::PatCtxt;
-use crate::MatchArm;
+use crate::rustc::{
+Constructor, DeconstructedPat, MatchArm, MatchCtxt, PlaceCtxt, RustcMatchCheckCtxt,
+SplitConstructorSet, WitnessPat,
+};
+use crate::TypeCx;
 
 /// A column of patterns in the matrix, where a column is the intuitive notion of "subpatterns that
 /// inspect the same subvalue/place".

@@ -27,11 +28,11 @@ use crate::MatchArm;
 ///
 /// This is not used in the main algorithm; only in lints.
 #[derive(Debug)]
-pub(crate) struct PatternColumn<'p, 'tcx> {
-patterns: Vec<&'p DeconstructedPat<'p, 'tcx>>,
+pub(crate) struct PatternColumn<'a, 'p, 'tcx> {
+patterns: Vec<&'a DeconstructedPat<'p, 'tcx>>,
 }
 
-impl<'p, 'tcx> PatternColumn<'p, 'tcx> {
+impl<'a, 'p, 'tcx> PatternColumn<'a, 'p, 'tcx> {
 pub(crate) fn new(arms: &[MatchArm<'p, 'tcx>]) -> Self {
 let mut patterns = Vec::with_capacity(arms.len());
 for arm in arms {

@@ -53,12 +54,11 @@ impl<'p, 'tcx> PatternColumn<'p, 'tcx> {
 }
 // If the type is opaque and it is revealed anywhere in the column, we take the revealed
 // version. Otherwise we could encounter constructors for the revealed type and crash.
-let is_opaque = |ty: Ty<'tcx>| matches!(ty.kind(), ty::Alias(ty::Opaque, ..));
 let first_ty = self.patterns[0].ty();
-if is_opaque(first_ty) {
+if RustcMatchCheckCtxt::is_opaque_ty(first_ty) {
 for pat in &self.patterns {
 let ty = pat.ty();
-if !is_opaque(ty) {
+if !RustcMatchCheckCtxt::is_opaque_ty(ty) {
 return Some(ty);
 }
 }

@@ -67,12 +67,12 @@ impl<'p, 'tcx> PatternColumn<'p, 'tcx> {
 }
 
 /// Do constructor splitting on the constructors of the column.
-fn analyze_ctors(&self, pcx: &PatCtxt<'_, 'p, 'tcx>) -> SplitConstructorSet<'tcx> {
+fn analyze_ctors(&self, pcx: &PlaceCtxt<'_, 'p, 'tcx>) -> SplitConstructorSet<'p, 'tcx> {
 let column_ctors = self.patterns.iter().map(|p| p.ctor());
-pcx.cx.ctors_for_ty(pcx.ty).split(pcx, column_ctors)
+pcx.ctors_for_ty().split(pcx, column_ctors)
 }
 
-fn iter<'a>(&'a self) -> impl Iterator<Item = &'p DeconstructedPat<'p, 'tcx>> + Captures<'a> {
+fn iter<'b>(&'b self) -> impl Iterator<Item = &'a DeconstructedPat<'p, 'tcx>> + Captures<'b> {
 self.patterns.iter().copied()
 }
 

@@ -81,7 +81,11 @@ impl<'p, 'tcx> PatternColumn<'p, 'tcx> {
 /// This returns one column per field of the constructor. They usually all have the same length
 /// (the number of patterns in `self` that matched `ctor`), except that we expand or-patterns
 /// which may change the lengths.
-fn specialize(&self, pcx: &PatCtxt<'_, 'p, 'tcx>, ctor: &Constructor<'tcx>) -> Vec<Self> {
+fn specialize(
+&self,
+pcx: &PlaceCtxt<'a, 'p, 'tcx>,
+ctor: &Constructor<'p, 'tcx>,
+) -> Vec<PatternColumn<'a, 'p, 'tcx>> {
 let arity = ctor.arity(pcx);
 if arity == 0 {
 return Vec::new();

@@ -117,14 +121,14 @@
 /// Traverse the patterns to collect any variants of a non_exhaustive enum that fail to be mentioned
 /// in a given column.
 #[instrument(level = "debug", skip(cx), ret)]
-fn collect_nonexhaustive_missing_variants<'p, 'tcx>(
-cx: &MatchCheckCtxt<'p, 'tcx>,
-column: &PatternColumn<'p, 'tcx>,
-) -> Vec<WitnessPat<'tcx>> {
+fn collect_nonexhaustive_missing_variants<'a, 'p, 'tcx>(
+cx: MatchCtxt<'a, 'p, 'tcx>,
+column: &PatternColumn<'a, 'p, 'tcx>,
+) -> Vec<WitnessPat<'p, 'tcx>> {
 let Some(ty) = column.head_ty() else {
 return Vec::new();
 };
-let pcx = &PatCtxt::new_dummy(cx, ty);
+let pcx = &PlaceCtxt::new_dummy(cx, ty);
 
 let set = column.analyze_ctors(pcx);
 if set.present.is_empty() {

@@ -135,7 +139,7 @@ fn collect_nonexhaustive_missing_variants<'p, 'tcx>(
 }
 
 let mut witnesses = Vec::new();
-if cx.is_foreign_non_exhaustive_enum(ty) {
+if cx.tycx.is_foreign_non_exhaustive_enum(ty) {
 witnesses.extend(
 set.missing
 .into_iter()

@@ -164,14 +168,15 @@ fn collect_nonexhaustive_missing_variants<'p, 'tcx>(
 witnesses
 }
 
-pub(crate) fn lint_nonexhaustive_missing_variants<'p, 'tcx>(
-cx: &MatchCheckCtxt<'p, 'tcx>,
+pub(crate) fn lint_nonexhaustive_missing_variants<'a, 'p, 'tcx>(
+cx: MatchCtxt<'a, 'p, 'tcx>,
 arms: &[MatchArm<'p, 'tcx>],
-pat_column: &PatternColumn<'p, 'tcx>,
+pat_column: &PatternColumn<'a, 'p, 'tcx>,
 scrut_ty: Ty<'tcx>,
 ) {
+let rcx: &RustcMatchCheckCtxt<'_, '_> = cx.tycx;
 if !matches!(
-cx.tcx.lint_level_at_node(NON_EXHAUSTIVE_OMITTED_PATTERNS, cx.match_lint_level).0,
+rcx.tcx.lint_level_at_node(NON_EXHAUSTIVE_OMITTED_PATTERNS, rcx.match_lint_level).0,
 rustc_session::lint::Level::Allow
 ) {
 let witnesses = collect_nonexhaustive_missing_variants(cx, pat_column);

@@ -180,13 +185,13 @@ pub(crate) fn lint_nonexhaustive_missing_variants<'p, 'tcx>(
 // is not exhaustive enough.
 //
 // NB: The partner lint for structs lives in `compiler/rustc_hir_analysis/src/check/pat.rs`.
|
// NB: The partner lint for structs lives in `compiler/rustc_hir_analysis/src/check/pat.rs`.
|
||||||
cx.tcx.emit_spanned_lint(
|
rcx.tcx.emit_spanned_lint(
|
||||||
NON_EXHAUSTIVE_OMITTED_PATTERNS,
|
NON_EXHAUSTIVE_OMITTED_PATTERNS,
|
||||||
cx.match_lint_level,
|
rcx.match_lint_level,
|
||||||
cx.scrut_span,
|
rcx.scrut_span,
|
||||||
NonExhaustiveOmittedPattern {
|
NonExhaustiveOmittedPattern {
|
||||||
scrut_ty,
|
scrut_ty,
|
||||||
uncovered: Uncovered::new(cx.scrut_span, cx, witnesses),
|
uncovered: Uncovered::new(rcx.scrut_span, rcx, witnesses),
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@ -196,17 +201,17 @@ pub(crate) fn lint_nonexhaustive_missing_variants<'p, 'tcx>(
|
|||||||
// usage of the lint.
|
// usage of the lint.
|
||||||
for arm in arms {
|
for arm in arms {
|
||||||
let (lint_level, lint_level_source) =
|
let (lint_level, lint_level_source) =
|
||||||
cx.tcx.lint_level_at_node(NON_EXHAUSTIVE_OMITTED_PATTERNS, arm.hir_id);
|
rcx.tcx.lint_level_at_node(NON_EXHAUSTIVE_OMITTED_PATTERNS, arm.arm_data);
|
||||||
if !matches!(lint_level, rustc_session::lint::Level::Allow) {
|
if !matches!(lint_level, rustc_session::lint::Level::Allow) {
|
||||||
let decorator = NonExhaustiveOmittedPatternLintOnArm {
|
let decorator = NonExhaustiveOmittedPatternLintOnArm {
|
||||||
lint_span: lint_level_source.span(),
|
lint_span: lint_level_source.span(),
|
||||||
suggest_lint_on_match: cx.whole_match_span.map(|span| span.shrink_to_lo()),
|
suggest_lint_on_match: rcx.whole_match_span.map(|span| span.shrink_to_lo()),
|
||||||
lint_level: lint_level.as_str(),
|
lint_level: lint_level.as_str(),
|
||||||
lint_name: "non_exhaustive_omitted_patterns",
|
lint_name: "non_exhaustive_omitted_patterns",
|
||||||
};
|
};
|
||||||
|
|
||||||
use rustc_errors::DecorateLint;
|
use rustc_errors::DecorateLint;
|
||||||
let mut err = cx.tcx.sess.struct_span_warn(arm.pat.span(), "");
|
let mut err = rcx.tcx.sess.struct_span_warn(*arm.pat.data(), "");
|
||||||
err.set_primary_message(decorator.msg());
|
err.set_primary_message(decorator.msg());
|
||||||
decorator.decorate_lint(&mut err);
|
decorator.decorate_lint(&mut err);
|
||||||
err.emit();
|
err.emit();
|
||||||
@ -217,28 +222,29 @@ pub(crate) fn lint_nonexhaustive_missing_variants<'p, 'tcx>(
|
|||||||
|
|
||||||
/// Traverse the patterns to warn the user about ranges that overlap on their endpoints.
|
/// Traverse the patterns to warn the user about ranges that overlap on their endpoints.
|
||||||
#[instrument(level = "debug", skip(cx))]
|
#[instrument(level = "debug", skip(cx))]
|
||||||
pub(crate) fn lint_overlapping_range_endpoints<'p, 'tcx>(
|
pub(crate) fn lint_overlapping_range_endpoints<'a, 'p, 'tcx>(
|
||||||
cx: &MatchCheckCtxt<'p, 'tcx>,
|
cx: MatchCtxt<'a, 'p, 'tcx>,
|
||||||
column: &PatternColumn<'p, 'tcx>,
|
column: &PatternColumn<'a, 'p, 'tcx>,
|
||||||
) {
|
) {
|
||||||
let Some(ty) = column.head_ty() else {
|
let Some(ty) = column.head_ty() else {
|
||||||
return;
|
return;
|
||||||
};
|
};
|
||||||
let pcx = &PatCtxt::new_dummy(cx, ty);
|
let pcx = &PlaceCtxt::new_dummy(cx, ty);
|
||||||
|
let rcx: &RustcMatchCheckCtxt<'_, '_> = cx.tycx;
|
||||||
|
|
||||||
let set = column.analyze_ctors(pcx);
|
let set = column.analyze_ctors(pcx);
|
||||||
|
|
||||||
if matches!(ty.kind(), ty::Char | ty::Int(_) | ty::Uint(_)) {
|
if matches!(ty.kind(), ty::Char | ty::Int(_) | ty::Uint(_)) {
|
||||||
let emit_lint = |overlap: &IntRange, this_span: Span, overlapped_spans: &[Span]| {
|
let emit_lint = |overlap: &IntRange, this_span: Span, overlapped_spans: &[Span]| {
|
||||||
let overlap_as_pat = cx.hoist_pat_range(overlap, ty);
|
let overlap_as_pat = rcx.hoist_pat_range(overlap, ty);
|
||||||
let overlaps: Vec<_> = overlapped_spans
|
let overlaps: Vec<_> = overlapped_spans
|
||||||
.iter()
|
.iter()
|
||||||
.copied()
|
.copied()
|
||||||
.map(|span| Overlap { range: overlap_as_pat.clone(), span })
|
.map(|span| Overlap { range: overlap_as_pat.clone(), span })
|
||||||
.collect();
|
.collect();
|
||||||
cx.tcx.emit_spanned_lint(
|
rcx.tcx.emit_spanned_lint(
|
||||||
lint::builtin::OVERLAPPING_RANGE_ENDPOINTS,
|
lint::builtin::OVERLAPPING_RANGE_ENDPOINTS,
|
||||||
cx.match_lint_level,
|
rcx.match_lint_level,
|
||||||
this_span,
|
this_span,
|
||||||
OverlappingRangeEndpoints { overlap: overlaps, range: this_span },
|
OverlappingRangeEndpoints { overlap: overlaps, range: this_span },
|
||||||
);
|
);
|
||||||
@ -255,7 +261,7 @@ pub(crate) fn lint_overlapping_range_endpoints<'p, 'tcx>(
|
|||||||
let mut suffixes: SmallVec<[_; 1]> = Default::default();
|
let mut suffixes: SmallVec<[_; 1]> = Default::default();
|
||||||
// Iterate on patterns that contained `overlap`.
|
// Iterate on patterns that contained `overlap`.
|
||||||
for pat in column.iter() {
|
for pat in column.iter() {
|
||||||
let this_span = pat.span();
|
let this_span = *pat.data();
|
||||||
let Constructor::IntRange(this_range) = pat.ctor() else { continue };
|
let Constructor::IntRange(this_range) = pat.ctor() else { continue };
|
||||||
if this_range.is_singleton() {
|
if this_range.is_singleton() {
|
||||||
// Don't lint when one of the ranges is a singleton.
|
// Don't lint when one of the ranges is a singleton.
|
||||||
|
@@ -5,16 +5,11 @@ use std::fmt;
 
 use smallvec::{smallvec, SmallVec};
 
-use rustc_data_structures::captures::Captures;
+use crate::constructor::{Constructor, Slice, SliceKind};
-use rustc_middle::ty::{self, Ty};
+use crate::usefulness::PlaceCtxt;
-use rustc_span::{Span, DUMMY_SP};
+use crate::{Captures, TypeCx};
 
 use self::Constructor::*;
-use self::SliceKind::*;
 
-use crate::constructor::{Constructor, SliceKind};
-use crate::cx::MatchCheckCtxt;
-use crate::usefulness::PatCtxt;
-
 /// Values and patterns can be represented as a constructor applied to some fields. This represents
 /// a pattern in this form.
@@ -27,34 +22,34 @@ use crate::usefulness::PatCtxt;
 /// This happens if a private or `non_exhaustive` field is uninhabited, because the code mustn't
 /// observe that it is uninhabited. In that case that field is not included in `fields`. Care must
 /// be taken when converting to/from `thir::Pat`.
-pub struct DeconstructedPat<'p, 'tcx> {
+pub struct DeconstructedPat<'p, Cx: TypeCx> {
-ctor: Constructor<'tcx>,
+ctor: Constructor<Cx>,
-fields: &'p [DeconstructedPat<'p, 'tcx>],
+fields: &'p [DeconstructedPat<'p, Cx>],
-ty: Ty<'tcx>,
+ty: Cx::Ty,
-span: Span,
+data: Cx::PatData,
 /// Whether removing this arm would change the behavior of the match expression.
 useful: Cell<bool>,
 }
 
-impl<'p, 'tcx> DeconstructedPat<'p, 'tcx> {
+impl<'p, Cx: TypeCx> DeconstructedPat<'p, Cx> {
-pub fn wildcard(ty: Ty<'tcx>, span: Span) -> Self {
+pub fn wildcard(ty: Cx::Ty, data: Cx::PatData) -> Self {
-Self::new(Wildcard, &[], ty, span)
+Self::new(Wildcard, &[], ty, data)
 }
 
 pub fn new(
-ctor: Constructor<'tcx>,
+ctor: Constructor<Cx>,
-fields: &'p [DeconstructedPat<'p, 'tcx>],
+fields: &'p [DeconstructedPat<'p, Cx>],
-ty: Ty<'tcx>,
+ty: Cx::Ty,
-span: Span,
+data: Cx::PatData,
 ) -> Self {
-DeconstructedPat { ctor, fields, ty, span, useful: Cell::new(false) }
+DeconstructedPat { ctor, fields, ty, data, useful: Cell::new(false) }
 }
 
 pub(crate) fn is_or_pat(&self) -> bool {
 matches!(self.ctor, Or)
 }
 /// Expand this (possibly-nested) or-pattern into its alternatives.
-pub(crate) fn flatten_or_pat(&'p self) -> SmallVec<[&'p Self; 1]> {
+pub(crate) fn flatten_or_pat(&self) -> SmallVec<[&Self; 1]> {
 if self.is_or_pat() {
 self.iter_fields().flat_map(|p| p.flatten_or_pat()).collect()
 } else {
@@ -62,66 +57,64 @@ impl<'p, 'tcx> DeconstructedPat<'p, 'tcx> {
 }
 }
 
-pub fn ctor(&self) -> &Constructor<'tcx> {
+pub fn ctor(&self) -> &Constructor<Cx> {
 &self.ctor
 }
-pub fn ty(&self) -> Ty<'tcx> {
+pub fn ty(&self) -> Cx::Ty {
 self.ty
 }
-pub fn span(&self) -> Span {
+pub fn data(&self) -> &Cx::PatData {
-self.span
+&self.data
 }
 
 pub fn iter_fields<'a>(
 &'a self,
-) -> impl Iterator<Item = &'p DeconstructedPat<'p, 'tcx>> + Captures<'a> {
+) -> impl Iterator<Item = &'p DeconstructedPat<'p, Cx>> + Captures<'a> {
 self.fields.iter()
 }
 
 /// Specialize this pattern with a constructor.
 /// `other_ctor` can be different from `self.ctor`, but must be covered by it.
 pub(crate) fn specialize<'a>(
-&'a self,
+&self,
-pcx: &PatCtxt<'_, 'p, 'tcx>,
+pcx: &PlaceCtxt<'a, 'p, Cx>,
-other_ctor: &Constructor<'tcx>,
+other_ctor: &Constructor<Cx>,
-) -> SmallVec<[&'p DeconstructedPat<'p, 'tcx>; 2]> {
+) -> SmallVec<[&'a DeconstructedPat<'p, Cx>; 2]> {
+let wildcard_sub_tys = || {
+let tys = pcx.ctor_sub_tys(other_ctor);
+tys.iter()
+.map(|ty| DeconstructedPat::wildcard(*ty, Cx::PatData::default()))
+.map(|pat| pcx.mcx.wildcard_arena.alloc(pat) as &_)
+.collect()
+};
 match (&self.ctor, other_ctor) {
-(Wildcard, _) => {
+// Return a wildcard for each field of `other_ctor`.
-// We return a wildcard for each field of `other_ctor`.
+(Wildcard, _) => wildcard_sub_tys(),
-pcx.cx.ctor_wildcard_fields(other_ctor, pcx.ty).iter().collect()
+// The only non-trivial case: two slices of different arity. `other_slice` is
-}
+// guaranteed to have a larger arity, so we fill the middle part with enough
-(Slice(self_slice), Slice(other_slice))
+// wildcards to reach the length of the new, larger slice.
-if self_slice.arity() != other_slice.arity() =>
+(
-{
+&Slice(self_slice @ Slice { kind: SliceKind::VarLen(prefix, suffix), .. }),
-// The only tricky case: two slices of different arity. Since `self_slice` covers
+&Slice(other_slice),
-// `other_slice`, `self_slice` must be `VarLen`, i.e. of the form
+) if self_slice.arity() != other_slice.arity() => {
-// `[prefix, .., suffix]`. Moreover `other_slice` is guaranteed to have a larger
+// Start with a slice of wildcards of the appropriate length.
-// arity. So we fill the middle part with enough wildcards to reach the length of
+let mut fields: SmallVec<[_; 2]> = wildcard_sub_tys();
-// the new, larger slice.
+// Fill in the fields from both ends.
-match self_slice.kind {
+let new_arity = fields.len();
-FixedLen(_) => bug!("{:?} doesn't cover {:?}", self_slice, other_slice),
+for i in 0..prefix {
-VarLen(prefix, suffix) => {
+fields[i] = &self.fields[i];
-let (ty::Slice(inner_ty) | ty::Array(inner_ty, _)) = *self.ty.kind() else {
-bug!("bad slice pattern {:?} {:?}", self.ctor, self.ty);
-};
-let prefix = &self.fields[..prefix];
-let suffix = &self.fields[self_slice.arity() - suffix..];
-let wildcard: &_ = pcx
-.cx
-.pattern_arena
-.alloc(DeconstructedPat::wildcard(inner_ty, DUMMY_SP));
-let extra_wildcards = other_slice.arity() - self_slice.arity();
-let extra_wildcards = (0..extra_wildcards).map(|_| wildcard);
-prefix.iter().chain(extra_wildcards).chain(suffix).collect()
-}
-}
 }
+for i in 0..suffix {
+fields[new_arity - 1 - i] = &self.fields[self.fields.len() - 1 - i];
+}
+fields
 }
 _ => self.fields.iter().collect(),
 }
 }
 
-/// We keep track for each pattern if it was ever useful during the analysis. This is used
+/// We keep track for each pattern if it was ever useful during the analysis. This is used with
-/// with `redundant_spans` to report redundant subpatterns arising from or patterns.
+/// `redundant_subpatterns` to report redundant subpatterns arising from or patterns.
 pub(crate) fn set_useful(&self) {
 self.useful.set(true)
 }
@@ -139,19 +132,19 @@ impl<'p, 'tcx> DeconstructedPat<'p, 'tcx> {
 }
 }
 
-/// Report the spans of subpatterns that were not useful, if any.
+/// Report the subpatterns that were not useful, if any.
-pub(crate) fn redundant_spans(&self) -> Vec<Span> {
+pub(crate) fn redundant_subpatterns(&self) -> Vec<&Self> {
-let mut spans = Vec::new();
+let mut subpats = Vec::new();
-self.collect_redundant_spans(&mut spans);
+self.collect_redundant_subpatterns(&mut subpats);
-spans
+subpats
 }
-fn collect_redundant_spans(&self, spans: &mut Vec<Span>) {
+fn collect_redundant_subpatterns<'a>(&'a self, subpats: &mut Vec<&'a Self>) {
 // We don't look at subpatterns if we already reported the whole pattern as redundant.
 if !self.is_useful() {
-spans.push(self.span);
+subpats.push(self);
 } else {
 for p in self.iter_fields() {
-p.collect_redundant_spans(spans);
+p.collect_redundant_subpatterns(subpats);
 }
 }
 }
@@ -159,47 +152,46 @@ impl<'p, 'tcx> DeconstructedPat<'p, 'tcx> {
 
 /// This is mostly copied from the `Pat` impl. This is best effort and not good enough for a
 /// `Display` impl.
-impl<'p, 'tcx> fmt::Debug for DeconstructedPat<'p, 'tcx> {
+impl<'p, Cx: TypeCx> fmt::Debug for DeconstructedPat<'p, Cx> {
 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-MatchCheckCtxt::debug_pat(f, self)
+Cx::debug_pat(f, self)
 }
 }
 
 /// Same idea as `DeconstructedPat`, except this is a fictitious pattern built up for diagnostics
 /// purposes. As such they don't use interning and can be cloned.
 #[derive(Debug, Clone)]
-pub struct WitnessPat<'tcx> {
+pub struct WitnessPat<Cx: TypeCx> {
-ctor: Constructor<'tcx>,
+ctor: Constructor<Cx>,
-pub(crate) fields: Vec<WitnessPat<'tcx>>,
+pub(crate) fields: Vec<WitnessPat<Cx>>,
-ty: Ty<'tcx>,
+ty: Cx::Ty,
 }
 
-impl<'tcx> WitnessPat<'tcx> {
+impl<Cx: TypeCx> WitnessPat<Cx> {
-pub(crate) fn new(ctor: Constructor<'tcx>, fields: Vec<Self>, ty: Ty<'tcx>) -> Self {
+pub(crate) fn new(ctor: Constructor<Cx>, fields: Vec<Self>, ty: Cx::Ty) -> Self {
 Self { ctor, fields, ty }
 }
-pub(crate) fn wildcard(ty: Ty<'tcx>) -> Self {
+pub(crate) fn wildcard(ty: Cx::Ty) -> Self {
 Self::new(Wildcard, Vec::new(), ty)
 }
 
 /// Construct a pattern that matches everything that starts with this constructor.
 /// For example, if `ctor` is a `Constructor::Variant` for `Option::Some`, we get the pattern
 /// `Some(_)`.
-pub(crate) fn wild_from_ctor(pcx: &PatCtxt<'_, '_, 'tcx>, ctor: Constructor<'tcx>) -> Self {
+pub(crate) fn wild_from_ctor(pcx: &PlaceCtxt<'_, '_, Cx>, ctor: Constructor<Cx>) -> Self {
-let field_tys =
+let field_tys = pcx.ctor_sub_tys(&ctor);
-pcx.cx.ctor_wildcard_fields(&ctor, pcx.ty).iter().map(|deco_pat| deco_pat.ty());
+let fields = field_tys.iter().map(|ty| Self::wildcard(*ty)).collect();
-let fields = field_tys.map(|ty| Self::wildcard(ty)).collect();
 Self::new(ctor, fields, pcx.ty)
 }
 
-pub fn ctor(&self) -> &Constructor<'tcx> {
+pub fn ctor(&self) -> &Constructor<Cx> {
 &self.ctor
 }
-pub fn ty(&self) -> Ty<'tcx> {
+pub fn ty(&self) -> Cx::Ty {
 self.ty
 }
 
-pub fn iter_fields<'a>(&'a self) -> impl Iterator<Item = &'a WitnessPat<'tcx>> {
+pub fn iter_fields<'a>(&'a self) -> impl Iterator<Item = &'a WitnessPat<Cx>> {
 self.fields.iter()
 }
 }
@ -1,15 +1,15 @@
|
|||||||
use std::fmt;
|
use std::fmt;
|
||||||
use std::iter::once;
|
use std::iter::once;
|
||||||
|
|
||||||
use rustc_arena::TypedArena;
|
use rustc_arena::{DroplessArena, TypedArena};
|
||||||
use rustc_data_structures::captures::Captures;
|
use rustc_data_structures::captures::Captures;
|
||||||
use rustc_hir::def_id::DefId;
|
use rustc_hir::def_id::DefId;
|
||||||
use rustc_hir::{HirId, RangeEnd};
|
use rustc_hir::HirId;
|
||||||
use rustc_index::Idx;
|
use rustc_index::Idx;
|
||||||
use rustc_index::IndexVec;
|
use rustc_index::IndexVec;
|
||||||
use rustc_middle::middle::stability::EvalResult;
|
use rustc_middle::middle::stability::EvalResult;
|
||||||
use rustc_middle::mir;
|
|
||||||
use rustc_middle::mir::interpret::Scalar;
|
use rustc_middle::mir::interpret::Scalar;
|
||||||
|
use rustc_middle::mir::{self, Const};
|
||||||
use rustc_middle::thir::{FieldPat, Pat, PatKind, PatRange, PatRangeBoundary};
|
use rustc_middle::thir::{FieldPat, Pat, PatKind, PatRange, PatRangeBoundary};
|
||||||
use rustc_middle::ty::layout::IntegerExt;
|
use rustc_middle::ty::layout::IntegerExt;
|
||||||
use rustc_middle::ty::{self, Ty, TyCtxt, VariantDef};
|
use rustc_middle::ty::{self, Ty, TyCtxt, VariantDef};
|
||||||
@ -18,14 +18,31 @@ use rustc_target::abi::{FieldIdx, Integer, VariantIdx, FIRST_VARIANT};
|
|||||||
use smallvec::SmallVec;
|
use smallvec::SmallVec;
|
||||||
|
|
||||||
use crate::constructor::{
|
use crate::constructor::{
|
||||||
Constructor, ConstructorSet, IntRange, MaybeInfiniteInt, OpaqueId, Slice, SliceKind,
|
IntRange, MaybeInfiniteInt, OpaqueId, RangeEnd, Slice, SliceKind, VariantVisibility,
|
||||||
VariantVisibility,
|
|
||||||
};
|
};
|
||||||
use crate::pat::{DeconstructedPat, WitnessPat};
|
use crate::TypeCx;
|
||||||
|
|
||||||
use Constructor::*;
|
use crate::constructor::Constructor::*;
|
||||||
|
|
||||||
pub struct MatchCheckCtxt<'p, 'tcx> {
|
// Re-export rustc-specific versions of all these types.
|
||||||
|
pub type Constructor<'p, 'tcx> = crate::constructor::Constructor<RustcMatchCheckCtxt<'p, 'tcx>>;
|
||||||
|
pub type ConstructorSet<'p, 'tcx> =
|
||||||
|
crate::constructor::ConstructorSet<RustcMatchCheckCtxt<'p, 'tcx>>;
|
||||||
|
pub type DeconstructedPat<'p, 'tcx> =
|
||||||
|
crate::pat::DeconstructedPat<'p, RustcMatchCheckCtxt<'p, 'tcx>>;
|
||||||
|
pub type MatchArm<'p, 'tcx> = crate::MatchArm<'p, RustcMatchCheckCtxt<'p, 'tcx>>;
|
||||||
|
pub type MatchCtxt<'a, 'p, 'tcx> = crate::MatchCtxt<'a, 'p, RustcMatchCheckCtxt<'p, 'tcx>>;
|
||||||
|
pub(crate) type PlaceCtxt<'a, 'p, 'tcx> =
|
||||||
|
crate::usefulness::PlaceCtxt<'a, 'p, RustcMatchCheckCtxt<'p, 'tcx>>;
|
||||||
|
pub(crate) type SplitConstructorSet<'p, 'tcx> =
|
||||||
|
crate::constructor::SplitConstructorSet<RustcMatchCheckCtxt<'p, 'tcx>>;
|
||||||
|
pub type Usefulness<'p, 'tcx> = crate::usefulness::Usefulness<'p, RustcMatchCheckCtxt<'p, 'tcx>>;
|
||||||
|
pub type UsefulnessReport<'p, 'tcx> =
|
||||||
|
crate::usefulness::UsefulnessReport<'p, RustcMatchCheckCtxt<'p, 'tcx>>;
|
||||||
|
pub type WitnessPat<'p, 'tcx> = crate::pat::WitnessPat<RustcMatchCheckCtxt<'p, 'tcx>>;
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct RustcMatchCheckCtxt<'p, 'tcx> {
|
||||||
pub tcx: TyCtxt<'tcx>,
|
pub tcx: TyCtxt<'tcx>,
|
||||||
/// The module in which the match occurs. This is necessary for
|
/// The module in which the match occurs. This is necessary for
|
||||||
/// checking inhabited-ness of types because whether a type is (visibly)
|
/// checking inhabited-ness of types because whether a type is (visibly)
|
||||||
@ -35,6 +52,7 @@ pub struct MatchCheckCtxt<'p, 'tcx> {
|
|||||||
pub module: DefId,
|
pub module: DefId,
|
||||||
pub param_env: ty::ParamEnv<'tcx>,
|
pub param_env: ty::ParamEnv<'tcx>,
|
||||||
pub pattern_arena: &'p TypedArena<DeconstructedPat<'p, 'tcx>>,
|
pub pattern_arena: &'p TypedArena<DeconstructedPat<'p, 'tcx>>,
|
||||||
|
pub dropless_arena: &'p DroplessArena,
|
||||||
/// Lint level at the match.
|
/// Lint level at the match.
|
||||||
pub match_lint_level: HirId,
|
pub match_lint_level: HirId,
|
||||||
/// The span of the whole match, if applicable.
|
/// The span of the whole match, if applicable.
|
||||||
@ -48,8 +66,14 @@ pub struct MatchCheckCtxt<'p, 'tcx> {
|
|||||||
pub known_valid_scrutinee: bool,
|
pub known_valid_scrutinee: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> {
|
impl<'p, 'tcx> fmt::Debug for RustcMatchCheckCtxt<'p, 'tcx> {
|
||||||
pub(super) fn is_uninhabited(&self, ty: Ty<'tcx>) -> bool {
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
f.debug_struct("RustcMatchCheckCtxt").finish()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'p, 'tcx> RustcMatchCheckCtxt<'p, 'tcx> {
|
||||||
|
pub(crate) fn is_uninhabited(&self, ty: Ty<'tcx>) -> bool {
|
||||||
!ty.is_inhabited_from(self.tcx, self.module, self.param_env)
|
!ty.is_inhabited_from(self.tcx, self.module, self.param_env)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -63,12 +87,18 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn alloc_wildcard_slice(
|
/// Whether the range denotes the fictitious values before `isize::MIN` or after
|
||||||
&self,
|
/// `usize::MAX`/`isize::MAX` (see doc of [`IntRange::split`] for why these exist).
|
||||||
tys: impl IntoIterator<Item = Ty<'tcx>>,
|
pub fn is_range_beyond_boundaries(&self, range: &IntRange, ty: Ty<'tcx>) -> bool {
|
||||||
) -> &'p [DeconstructedPat<'p, 'tcx>] {
|
ty.is_ptr_sized_integral() && {
|
||||||
self.pattern_arena
|
// The two invalid ranges are `NegInfinity..isize::MIN` (represented as
|
||||||
.alloc_from_iter(tys.into_iter().map(|ty| DeconstructedPat::wildcard(ty, DUMMY_SP)))
|
// `NegInfinity..0`), and `{u,i}size::MAX+1..PosInfinity`. `hoist_pat_range_bdy`
|
||||||
|
// converts `MAX+1` to `PosInfinity`, and we couldn't have `PosInfinity` in `range.lo`
|
||||||
|
// otherwise.
|
||||||
|
let lo = self.hoist_pat_range_bdy(range.lo, ty);
|
||||||
|
matches!(lo, PatRangeBoundary::PosInfinity)
|
||||||
|
|| matches!(range.hi, MaybeInfiniteInt::Finite(0))
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// In the cases of either a `#[non_exhaustive]` field list or a non-public field, we hide
|
// In the cases of either a `#[non_exhaustive]` field list or a non-public field, we hide
|
||||||
@ -100,12 +130,12 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn variant_index_for_adt(
|
pub(crate) fn variant_index_for_adt(
|
||||||
ctor: &Constructor<'tcx>,
|
ctor: &Constructor<'p, 'tcx>,
|
||||||
adt: ty::AdtDef<'tcx>,
|
adt: ty::AdtDef<'tcx>,
|
||||||
) -> VariantIdx {
|
) -> VariantIdx {
|
||||||
match *ctor {
|
match *ctor {
|
||||||
Variant(idx) => idx,
|
Variant(idx) => idx,
|
||||||
Single => {
|
Struct | UnionField => {
|
||||||
assert!(!adt.is_enum());
|
assert!(!adt.is_enum());
|
||||||
FIRST_VARIANT
|
FIRST_VARIANT
|
||||||
}
|
}
|
||||||
@ -113,37 +143,36 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Creates a new list of wildcard fields for a given constructor. The result must have a length
|
/// Returns the types of the fields for a given constructor. The result must have a length of
|
||||||
/// of `ctor.arity()`.
|
/// `ctor.arity()`.
|
||||||
#[instrument(level = "trace", skip(self))]
|
#[instrument(level = "trace", skip(self))]
|
||||||
pub(crate) fn ctor_wildcard_fields(
|
pub(crate) fn ctor_sub_tys(&self, ctor: &Constructor<'p, 'tcx>, ty: Ty<'tcx>) -> &[Ty<'tcx>] {
|
||||||
&self,
|
|
||||||
ctor: &Constructor<'tcx>,
|
|
||||||
ty: Ty<'tcx>,
|
|
||||||
) -> &'p [DeconstructedPat<'p, 'tcx>] {
|
|
||||||
let cx = self;
|
let cx = self;
|
||||||
match ctor {
|
match ctor {
|
||||||
Single | Variant(_) => match ty.kind() {
|
Struct | Variant(_) | UnionField => match ty.kind() {
|
||||||
ty::Tuple(fs) => cx.alloc_wildcard_slice(fs.iter()),
|
ty::Tuple(fs) => cx.dropless_arena.alloc_from_iter(fs.iter()),
|
||||||
ty::Ref(_, rty, _) => cx.alloc_wildcard_slice(once(*rty)),
|
|
||||||
ty::Adt(adt, args) => {
|
ty::Adt(adt, args) => {
|
||||||
if adt.is_box() {
|
if adt.is_box() {
|
||||||
// The only legal patterns of type `Box` (outside `std`) are `_` and box
|
// The only legal patterns of type `Box` (outside `std`) are `_` and box
|
||||||
// patterns. If we're here we can assume this is a box pattern.
|
// patterns. If we're here we can assume this is a box pattern.
|
||||||
cx.alloc_wildcard_slice(once(args.type_at(0)))
|
cx.dropless_arena.alloc_from_iter(once(args.type_at(0)))
|
||||||
} else {
|
} else {
|
||||||
let variant =
|
let variant =
|
||||||
&adt.variant(MatchCheckCtxt::variant_index_for_adt(&ctor, *adt));
|
&adt.variant(RustcMatchCheckCtxt::variant_index_for_adt(&ctor, *adt));
|
||||||
let tys = cx.list_variant_nonhidden_fields(ty, variant).map(|(_, ty)| ty);
|
let tys = cx.list_variant_nonhidden_fields(ty, variant).map(|(_, ty)| ty);
|
||||||
cx.alloc_wildcard_slice(tys)
|
cx.dropless_arena.alloc_from_iter(tys)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
_ => bug!("Unexpected type for `Single` constructor: {:?}", ty),
|
_ => bug!("Unexpected type for constructor `{ctor:?}`: {ty:?}"),
|
||||||
|
},
|
||||||
|
Ref => match ty.kind() {
|
||||||
|
ty::Ref(_, rty, _) => cx.dropless_arena.alloc_from_iter(once(*rty)),
|
||||||
|
_ => bug!("Unexpected type for `Ref` constructor: {ty:?}"),
|
||||||
},
|
},
|
||||||
Slice(slice) => match *ty.kind() {
|
Slice(slice) => match *ty.kind() {
|
||||||
ty::Slice(ty) | ty::Array(ty, _) => {
|
ty::Slice(ty) | ty::Array(ty, _) => {
|
||||||
let arity = slice.arity();
|
let arity = slice.arity();
|
||||||
cx.alloc_wildcard_slice((0..arity).map(|_| ty))
|
cx.dropless_arena.alloc_from_iter((0..arity).map(|_| ty))
|
||||||
}
|
}
|
||||||
_ => bug!("bad slice pattern {:?} {:?}", ctor, ty),
|
_ => bug!("bad slice pattern {:?} {:?}", ctor, ty),
|
||||||
},
|
},
|
||||||
@ -163,13 +192,11 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The number of fields for this constructor. This must be kept in sync with
|
/// The number of fields for this constructor.
|
||||||
/// `Fields::wildcards`.
|
pub(crate) fn ctor_arity(&self, ctor: &Constructor<'p, 'tcx>, ty: Ty<'tcx>) -> usize {
|
||||||
pub(crate) fn ctor_arity(&self, ctor: &Constructor<'tcx>, ty: Ty<'tcx>) -> usize {
|
|
||||||
match ctor {
|
match ctor {
|
||||||
Single | Variant(_) => match ty.kind() {
|
Struct | Variant(_) | UnionField => match ty.kind() {
|
||||||
ty::Tuple(fs) => fs.len(),
|
ty::Tuple(fs) => fs.len(),
|
||||||
ty::Ref(..) => 1,
|
|
||||||
ty::Adt(adt, ..) => {
|
ty::Adt(adt, ..) => {
|
||||||
if adt.is_box() {
|
if adt.is_box() {
|
||||||
// The only legal patterns of type `Box` (outside `std`) are `_` and box
|
// The only legal patterns of type `Box` (outside `std`) are `_` and box
|
||||||
@ -177,12 +204,13 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> {
|
|||||||
1
|
1
|
||||||
} else {
|
} else {
|
||||||
let variant =
|
let variant =
|
||||||
&adt.variant(MatchCheckCtxt::variant_index_for_adt(&ctor, *adt));
|
&adt.variant(RustcMatchCheckCtxt::variant_index_for_adt(&ctor, *adt));
|
||||||
self.list_variant_nonhidden_fields(ty, variant).count()
|
self.list_variant_nonhidden_fields(ty, variant).count()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
_ => bug!("Unexpected type for `Single` constructor: {:?}", ty),
|
_ => bug!("Unexpected type for constructor `{ctor:?}`: {ty:?}"),
|
||||||
},
|
},
|
||||||
|
Ref => 1,
|
||||||
Slice(slice) => slice.arity(),
|
Slice(slice) => slice.arity(),
|
||||||
Bool(..)
|
Bool(..)
|
||||||
| IntRange(..)
|
| IntRange(..)
|
||||||
@ -202,7 +230,7 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> {
|
|||||||
///
|
///
|
||||||
/// See [`crate::constructor`] for considerations of emptiness.
|
/// See [`crate::constructor`] for considerations of emptiness.
|
||||||
#[instrument(level = "debug", skip(self), ret)]
|
#[instrument(level = "debug", skip(self), ret)]
|
||||||
pub fn ctors_for_ty(&self, ty: Ty<'tcx>) -> ConstructorSet {
|
pub fn ctors_for_ty(&self, ty: Ty<'tcx>) -> ConstructorSet<'p, 'tcx> {
|
||||||
let cx = self;
|
let cx = self;
|
||||||
let make_uint_range = |start, end| {
|
let make_uint_range = |start, end| {
|
||||||
IntRange::from_range(
|
IntRange::from_range(
|
||||||
@ -298,9 +326,9 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> {
|
|||||||
ConstructorSet::Variants { variants, non_exhaustive: is_declared_nonexhaustive }
|
ConstructorSet::Variants { variants, non_exhaustive: is_declared_nonexhaustive }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
ty::Adt(..) | ty::Tuple(..) | ty::Ref(..) => {
|
ty::Adt(def, _) if def.is_union() => ConstructorSet::Union,
|
||||||
ConstructorSet::Single { empty: cx.is_uninhabited(ty) }
|
ty::Adt(..) | ty::Tuple(..) => ConstructorSet::Struct { empty: cx.is_uninhabited(ty) },
|
||||||
}
|
ty::Ref(..) => ConstructorSet::Ref,
|
||||||
ty::Never => ConstructorSet::NoConstructors,
|
ty::Never => ConstructorSet::NoConstructors,
|
||||||
// This type is one for which we cannot list constructors, like `str` or `f64`.
|
// This type is one for which we cannot list constructors, like `str` or `f64`.
|
||||||
// FIXME(Nadrieril): which of these are actually allowed?
|
// FIXME(Nadrieril): which of these are actually allowed?
|
||||||
@ -359,13 +387,18 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> {
|
|||||||
fields = &[];
|
fields = &[];
|
||||||
}
|
}
|
||||||
PatKind::Deref { subpattern } => {
|
PatKind::Deref { subpattern } => {
|
||||||
ctor = Single;
|
|
||||||
fields = singleton(self.lower_pat(subpattern));
|
fields = singleton(self.lower_pat(subpattern));
|
||||||
|
ctor = match pat.ty.kind() {
|
||||||
|
// This is a box pattern.
|
||||||
|
ty::Adt(adt, ..) if adt.is_box() => Struct,
|
||||||
|
ty::Ref(..) => Ref,
|
||||||
|
_ => bug!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, pat.ty),
|
||||||
|
};
|
||||||
}
|
}
|
||||||
PatKind::Leaf { subpatterns } | PatKind::Variant { subpatterns, .. } => {
|
PatKind::Leaf { subpatterns } | PatKind::Variant { subpatterns, .. } => {
|
||||||
match pat.ty.kind() {
|
match pat.ty.kind() {
|
||||||
ty::Tuple(fs) => {
|
ty::Tuple(fs) => {
|
||||||
ctor = Single;
|
ctor = Struct;
|
||||||
let mut wilds: SmallVec<[_; 2]> =
|
let mut wilds: SmallVec<[_; 2]> =
|
||||||
fs.iter().map(|ty| DeconstructedPat::wildcard(ty, pat.span)).collect();
|
fs.iter().map(|ty| DeconstructedPat::wildcard(ty, pat.span)).collect();
|
||||||
for pat in subpatterns {
|
for pat in subpatterns {
|
||||||
@ -380,7 +413,7 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> {
|
|||||||
// _)` or a box pattern. As a hack to avoid an ICE with the former, we
|
// _)` or a box pattern. As a hack to avoid an ICE with the former, we
|
||||||
// ignore other fields than the first one. This will trigger an error later
|
// ignore other fields than the first one. This will trigger an error later
|
||||||
// anyway.
|
// anyway.
|
||||||
// See https://github.com/rust-lang/rust/issues/82772 ,
|
// See https://github.com/rust-lang/rust/issues/82772,
|
||||||
// explanation: https://github.com/rust-lang/rust/pull/82789#issuecomment-796921977
|
// explanation: https://github.com/rust-lang/rust/pull/82789#issuecomment-796921977
|
||||||
// The problem is that we can't know from the type whether we'll match
|
// The problem is that we can't know from the type whether we'll match
|
||||||
// normally or through box-patterns. We'll have to figure out a proper
|
// normally or through box-patterns. We'll have to figure out a proper
|
||||||
@ -392,17 +425,18 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> {
|
|||||||
} else {
|
} else {
|
||||||
DeconstructedPat::wildcard(args.type_at(0), pat.span)
|
DeconstructedPat::wildcard(args.type_at(0), pat.span)
|
||||||
};
|
};
|
||||||
ctor = Single;
|
ctor = Struct;
|
||||||
fields = singleton(pat);
|
fields = singleton(pat);
|
||||||
}
|
}
|
||||||
ty::Adt(adt, _) => {
|
ty::Adt(adt, _) => {
|
||||||
ctor = match pat.kind {
|
ctor = match pat.kind {
|
||||||
PatKind::Leaf { .. } => Single,
|
PatKind::Leaf { .. } if adt.is_union() => UnionField,
|
||||||
|
PatKind::Leaf { .. } => Struct,
|
||||||
PatKind::Variant { variant_index, .. } => Variant(variant_index),
|
PatKind::Variant { variant_index, .. } => Variant(variant_index),
|
||||||
_ => bug!(),
|
_ => bug!(),
|
||||||
};
|
};
|
||||||
let variant =
|
let variant =
|
||||||
&adt.variant(MatchCheckCtxt::variant_index_for_adt(&ctor, *adt));
|
&adt.variant(RustcMatchCheckCtxt::variant_index_for_adt(&ctor, *adt));
|
||||||
// For each field in the variant, we store the relevant index into `self.fields` if any.
|
// For each field in the variant, we store the relevant index into `self.fields` if any.
|
||||||
let mut field_id_to_id: Vec<Option<usize>> =
|
let mut field_id_to_id: Vec<Option<usize>> =
|
||||||
(0..variant.fields.len()).map(|_| None).collect();
|
(0..variant.fields.len()).map(|_| None).collect();
|
||||||
@ -477,11 +511,11 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> {
|
|||||||
// with other `Deref` patterns. This could have been done in `const_to_pat`,
|
// with other `Deref` patterns. This could have been done in `const_to_pat`,
|
||||||
// but that causes issues with the rest of the matching code.
|
// but that causes issues with the rest of the matching code.
|
||||||
// So here, the constructor for a `"foo"` pattern is `&` (represented by
|
// So here, the constructor for a `"foo"` pattern is `&` (represented by
|
||||||
// `Single`), and has one field. That field has constructor `Str(value)` and no
|
// `Ref`), and has one field. That field has constructor `Str(value)` and no
|
||||||
// fields.
|
// subfields.
|
||||||
// Note: `t` is `str`, not `&str`.
|
// Note: `t` is `str`, not `&str`.
|
||||||
let subpattern = DeconstructedPat::new(Str(*value), &[], *t, pat.span);
|
let subpattern = DeconstructedPat::new(Str(*value), &[], *t, pat.span);
|
||||||
ctor = Single;
|
ctor = Ref;
|
||||||
fields = singleton(subpattern)
|
fields = singleton(subpattern)
|
||||||
}
|
}
|
||||||
// All constants that can be structurally matched have already been expanded
|
// All constants that can be structurally matched have already been expanded
|
||||||
@ -495,12 +529,16 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> {
|
|||||||
}
|
}
|
||||||
PatKind::Range(patrange) => {
|
PatKind::Range(patrange) => {
|
||||||
let PatRange { lo, hi, end, .. } = patrange.as_ref();
|
let PatRange { lo, hi, end, .. } = patrange.as_ref();
|
||||||
|
let end = match end {
|
||||||
|
rustc_hir::RangeEnd::Included => RangeEnd::Included,
|
||||||
|
rustc_hir::RangeEnd::Excluded => RangeEnd::Excluded,
|
||||||
|
};
|
||||||
let ty = pat.ty;
|
let ty = pat.ty;
|
||||||
ctor = match ty.kind() {
|
ctor = match ty.kind() {
|
||||||
ty::Char | ty::Int(_) | ty::Uint(_) => {
|
ty::Char | ty::Int(_) | ty::Uint(_) => {
|
||||||
let lo = cx.lower_pat_range_bdy(*lo, ty);
|
let lo = cx.lower_pat_range_bdy(*lo, ty);
|
||||||
let hi = cx.lower_pat_range_bdy(*hi, ty);
|
let hi = cx.lower_pat_range_bdy(*hi, ty);
|
||||||
IntRange(IntRange::from_range(lo, hi, *end))
|
IntRange(IntRange::from_range(lo, hi, end))
|
||||||
}
|
}
|
||||||
ty::Float(fty) => {
|
ty::Float(fty) => {
|
||||||
use rustc_apfloat::Float;
|
use rustc_apfloat::Float;
|
||||||
@ -511,13 +549,13 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> {
|
|||||||
use rustc_apfloat::ieee::Single;
|
use rustc_apfloat::ieee::Single;
|
||||||
let lo = lo.map(Single::from_bits).unwrap_or(-Single::INFINITY);
|
let lo = lo.map(Single::from_bits).unwrap_or(-Single::INFINITY);
|
||||||
let hi = hi.map(Single::from_bits).unwrap_or(Single::INFINITY);
|
let hi = hi.map(Single::from_bits).unwrap_or(Single::INFINITY);
|
||||||
F32Range(lo, hi, *end)
|
F32Range(lo, hi, end)
|
||||||
}
|
}
|
||||||
ty::FloatTy::F64 => {
|
ty::FloatTy::F64 => {
|
||||||
use rustc_apfloat::ieee::Double;
|
use rustc_apfloat::ieee::Double;
|
||||||
let lo = lo.map(Double::from_bits).unwrap_or(-Double::INFINITY);
|
let lo = lo.map(Double::from_bits).unwrap_or(-Double::INFINITY);
|
||||||
let hi = hi.map(Double::from_bits).unwrap_or(Double::INFINITY);
|
let hi = hi.map(Double::from_bits).unwrap_or(Double::INFINITY);
|
||||||
F64Range(lo, hi, *end)
|
F64Range(lo, hi, end)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -597,20 +635,6 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Whether the range denotes the fictitious values before `isize::MIN` or after
|
|
||||||
/// `usize::MAX`/`isize::MAX` (see doc of [`IntRange::split`] for why these exist).
|
|
||||||
pub fn is_range_beyond_boundaries(&self, range: &IntRange, ty: Ty<'tcx>) -> bool {
|
|
||||||
ty.is_ptr_sized_integral() && {
|
|
||||||
// The two invalid ranges are `NegInfinity..isize::MIN` (represented as
|
|
||||||
// `NegInfinity..0`), and `{u,i}size::MAX+1..PosInfinity`. `hoist_pat_range_bdy`
|
|
||||||
// converts `MAX+1` to `PosInfinity`, and we couldn't have `PosInfinity` in `range.lo`
|
|
||||||
// otherwise.
|
|
||||||
let lo = self.hoist_pat_range_bdy(range.lo, ty);
|
|
||||||
matches!(lo, PatRangeBoundary::PosInfinity)
|
|
||||||
|| matches!(range.hi, MaybeInfiniteInt::Finite(0))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Convert back to a `thir::Pat` for diagnostic purposes.
|
/// Convert back to a `thir::Pat` for diagnostic purposes.
|
||||||
pub(crate) fn hoist_pat_range(&self, range: &IntRange, ty: Ty<'tcx>) -> Pat<'tcx> {
|
pub(crate) fn hoist_pat_range(&self, range: &IntRange, ty: Ty<'tcx>) -> Pat<'tcx> {
|
||||||
use MaybeInfiniteInt::*;
|
use MaybeInfiniteInt::*;
|
||||||
@ -623,7 +647,7 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> {
|
|||||||
PatKind::Constant { value }
|
PatKind::Constant { value }
|
||||||
} else {
|
} else {
|
||||||
// We convert to an inclusive range for diagnostics.
|
// We convert to an inclusive range for diagnostics.
|
||||||
let mut end = RangeEnd::Included;
|
let mut end = rustc_hir::RangeEnd::Included;
|
||||||
let mut lo = cx.hoist_pat_range_bdy(range.lo, ty);
|
let mut lo = cx.hoist_pat_range_bdy(range.lo, ty);
|
||||||
if matches!(lo, PatRangeBoundary::PosInfinity) {
|
if matches!(lo, PatRangeBoundary::PosInfinity) {
|
||||||
// The only reason to get `PosInfinity` here is the special case where
|
// The only reason to get `PosInfinity` here is the special case where
|
||||||
@ -637,7 +661,7 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> {
|
|||||||
}
|
}
|
||||||
let hi = if matches!(range.hi, Finite(0)) {
|
let hi = if matches!(range.hi, Finite(0)) {
|
||||||
// The range encodes `..ty::MIN`, so we can't convert it to an inclusive range.
|
// The range encodes `..ty::MIN`, so we can't convert it to an inclusive range.
|
||||||
end = RangeEnd::Excluded;
|
end = rustc_hir::RangeEnd::Excluded;
|
||||||
range.hi
|
range.hi
|
||||||
} else {
|
} else {
|
||||||
range.hi.minus_one()
|
range.hi.minus_one()
|
||||||
@ -650,14 +674,14 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> {
|
|||||||
}
|
}
|
||||||
/// Convert back to a `thir::Pat` for diagnostic purposes. This panics for patterns that don't
|
/// Convert back to a `thir::Pat` for diagnostic purposes. This panics for patterns that don't
|
||||||
/// appear in diagnostics, like float ranges.
|
/// appear in diagnostics, like float ranges.
|
||||||
pub fn hoist_witness_pat(&self, pat: &WitnessPat<'tcx>) -> Pat<'tcx> {
|
pub fn hoist_witness_pat(&self, pat: &WitnessPat<'p, 'tcx>) -> Pat<'tcx> {
|
||||||
let cx = self;
|
let cx = self;
|
||||||
let is_wildcard = |pat: &Pat<'_>| matches!(pat.kind, PatKind::Wild);
|
let is_wildcard = |pat: &Pat<'_>| matches!(pat.kind, PatKind::Wild);
|
||||||
let mut subpatterns = pat.iter_fields().map(|p| Box::new(cx.hoist_witness_pat(p)));
|
let mut subpatterns = pat.iter_fields().map(|p| Box::new(cx.hoist_witness_pat(p)));
|
||||||
let kind = match pat.ctor() {
|
let kind = match pat.ctor() {
|
||||||
Bool(b) => PatKind::Constant { value: mir::Const::from_bool(cx.tcx, *b) },
|
Bool(b) => PatKind::Constant { value: mir::Const::from_bool(cx.tcx, *b) },
|
||||||
IntRange(range) => return self.hoist_pat_range(range, pat.ty()),
|
IntRange(range) => return self.hoist_pat_range(range, pat.ty()),
|
||||||
Single | Variant(_) => match pat.ty().kind() {
|
Struct | Variant(_) | UnionField => match pat.ty().kind() {
|
||||||
ty::Tuple(..) => PatKind::Leaf {
|
ty::Tuple(..) => PatKind::Leaf {
|
||||||
subpatterns: subpatterns
|
subpatterns: subpatterns
|
||||||
.enumerate()
|
.enumerate()
|
||||||
@ -672,7 +696,7 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> {
|
|||||||
}
|
}
|
||||||
ty::Adt(adt_def, args) => {
|
ty::Adt(adt_def, args) => {
|
||||||
let variant_index =
|
let variant_index =
|
||||||
MatchCheckCtxt::variant_index_for_adt(&pat.ctor(), *adt_def);
|
RustcMatchCheckCtxt::variant_index_for_adt(&pat.ctor(), *adt_def);
|
||||||
let variant = &adt_def.variant(variant_index);
|
let variant = &adt_def.variant(variant_index);
|
||||||
let subpatterns = cx
|
let subpatterns = cx
|
||||||
.list_variant_nonhidden_fields(pat.ty(), variant)
|
.list_variant_nonhidden_fields(pat.ty(), variant)
|
||||||
@ -686,13 +710,13 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> {
|
|||||||
PatKind::Leaf { subpatterns }
|
PatKind::Leaf { subpatterns }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// Note: given the expansion of `&str` patterns done in `expand_pattern`, we should
|
|
||||||
// be careful to reconstruct the correct constant pattern here. However a string
|
|
||||||
// literal pattern will never be reported as a non-exhaustiveness witness, so we
|
|
||||||
// ignore this issue.
|
|
||||||
ty::Ref(..) => PatKind::Deref { subpattern: subpatterns.next().unwrap() },
|
|
||||||
_ => bug!("unexpected ctor for type {:?} {:?}", pat.ctor(), pat.ty()),
|
_ => bug!("unexpected ctor for type {:?} {:?}", pat.ctor(), pat.ty()),
|
||||||
},
|
},
|
||||||
|
// Note: given the expansion of `&str` patterns done in `expand_pattern`, we should
|
||||||
|
// be careful to reconstruct the correct constant pattern here. However a string
|
||||||
|
// literal pattern will never be reported as a non-exhaustiveness witness, so we
|
||||||
|
// ignore this issue.
|
||||||
|
Ref => PatKind::Deref { subpattern: subpatterns.next().unwrap() },
|
||||||
Slice(slice) => {
|
Slice(slice) => {
|
||||||
match slice.kind {
|
match slice.kind {
|
||||||
SliceKind::FixedLen(_) => PatKind::Slice {
|
SliceKind::FixedLen(_) => PatKind::Slice {
|
||||||
@ -744,7 +768,7 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> {
|
|||||||
/// Best-effort `Debug` implementation.
|
/// Best-effort `Debug` implementation.
|
||||||
pub(crate) fn debug_pat(
|
pub(crate) fn debug_pat(
|
||||||
f: &mut fmt::Formatter<'_>,
|
f: &mut fmt::Formatter<'_>,
|
||||||
pat: &DeconstructedPat<'p, 'tcx>,
|
pat: &crate::pat::DeconstructedPat<'_, Self>,
|
||||||
) -> fmt::Result {
|
) -> fmt::Result {
|
||||||
let mut first = true;
|
let mut first = true;
|
||||||
let mut start_or_continue = |s| {
|
let mut start_or_continue = |s| {
|
||||||
@ -758,7 +782,7 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> {
|
|||||||
let mut start_or_comma = || start_or_continue(", ");
|
let mut start_or_comma = || start_or_continue(", ");
|
||||||
|
|
||||||
match pat.ctor() {
|
match pat.ctor() {
|
||||||
Single | Variant(_) => match pat.ty().kind() {
|
Struct | Variant(_) | UnionField => match pat.ty().kind() {
|
||||||
ty::Adt(def, _) if def.is_box() => {
|
ty::Adt(def, _) if def.is_box() => {
|
||||||
// Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside
|
// Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside
|
||||||
// of `std`). So this branch is only reachable when the feature is enabled and
|
// of `std`). So this branch is only reachable when the feature is enabled and
|
||||||
@ -767,13 +791,14 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> {
|
|||||||
write!(f, "box {subpattern:?}")
|
write!(f, "box {subpattern:?}")
|
||||||
}
|
}
|
||||||
ty::Adt(..) | ty::Tuple(..) => {
|
ty::Adt(..) | ty::Tuple(..) => {
|
||||||
let variant = match pat.ty().kind() {
|
let variant =
|
||||||
ty::Adt(adt, _) => Some(
|
match pat.ty().kind() {
|
||||||
adt.variant(MatchCheckCtxt::variant_index_for_adt(pat.ctor(), *adt)),
|
ty::Adt(adt, _) => Some(adt.variant(
|
||||||
),
|
RustcMatchCheckCtxt::variant_index_for_adt(pat.ctor(), *adt),
|
||||||
ty::Tuple(_) => None,
|
)),
|
||||||
_ => unreachable!(),
|
ty::Tuple(_) => None,
|
||||||
};
|
_ => unreachable!(),
|
||||||
|
};
|
||||||
|
|
||||||
if let Some(variant) = variant {
|
if let Some(variant) = variant {
|
||||||
write!(f, "{}", variant.name)?;
|
write!(f, "{}", variant.name)?;
|
||||||
@ -789,15 +814,15 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> {
|
|||||||
}
|
}
|
||||||
write!(f, ")")
|
write!(f, ")")
|
||||||
}
|
}
|
||||||
// Note: given the expansion of `&str` patterns done in `expand_pattern`, we should
|
|
||||||
// be careful to detect strings here. However a string literal pattern will never
|
|
||||||
// be reported as a non-exhaustiveness witness, so we can ignore this issue.
|
|
||||||
ty::Ref(_, _, mutbl) => {
|
|
||||||
let subpattern = pat.iter_fields().next().unwrap();
|
|
||||||
write!(f, "&{}{:?}", mutbl.prefix_str(), subpattern)
|
|
||||||
}
|
|
||||||
_ => write!(f, "_"),
|
_ => write!(f, "_"),
|
||||||
},
|
},
|
||||||
|
// Note: given the expansion of `&str` patterns done in `expand_pattern`, we should
|
||||||
|
// be careful to detect strings here. However a string literal pattern will never
|
||||||
|
// be reported as a non-exhaustiveness witness, so we can ignore this issue.
|
||||||
|
Ref => {
|
||||||
|
let subpattern = pat.iter_fields().next().unwrap();
|
||||||
|
write!(f, "&{:?}", subpattern)
|
||||||
|
}
|
||||||
Slice(slice) => {
|
Slice(slice) => {
|
||||||
let mut subpatterns = pat.iter_fields();
|
let mut subpatterns = pat.iter_fields();
|
||||||
write!(f, "[")?;
|
write!(f, "[")?;
|
||||||
@ -838,6 +863,45 @@ impl<'p, 'tcx> MatchCheckCtxt<'p, 'tcx> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl<'p, 'tcx> TypeCx for RustcMatchCheckCtxt<'p, 'tcx> {
|
||||||
|
type Ty = Ty<'tcx>;
|
||||||
|
type VariantIdx = VariantIdx;
|
||||||
|
type StrLit = Const<'tcx>;
|
||||||
|
type ArmData = HirId;
|
||||||
|
type PatData = Span;
|
||||||
|
|
||||||
|
fn is_exhaustive_patterns_feature_on(&self) -> bool {
|
||||||
|
self.tcx.features().exhaustive_patterns
|
||||||
|
}
|
||||||
|
fn is_opaque_ty(ty: Self::Ty) -> bool {
|
||||||
|
matches!(ty.kind(), ty::Alias(ty::Opaque, ..))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn ctor_arity(&self, ctor: &crate::constructor::Constructor<Self>, ty: Self::Ty) -> usize {
|
||||||
|
self.ctor_arity(ctor, ty)
|
||||||
|
}
|
||||||
|
fn ctor_sub_tys(
|
||||||
|
&self,
|
||||||
|
ctor: &crate::constructor::Constructor<Self>,
|
||||||
|
ty: Self::Ty,
|
||||||
|
) -> &[Self::Ty] {
|
||||||
|
self.ctor_sub_tys(ctor, ty)
|
||||||
|
}
|
||||||
|
fn ctors_for_ty(&self, ty: Self::Ty) -> crate::constructor::ConstructorSet<Self> {
|
||||||
|
self.ctors_for_ty(ty)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn debug_pat(
|
||||||
|
f: &mut fmt::Formatter<'_>,
|
||||||
|
pat: &crate::pat::DeconstructedPat<'_, Self>,
|
||||||
|
) -> fmt::Result {
|
||||||
|
Self::debug_pat(f, pat)
|
||||||
|
}
|
||||||
|
fn bug(&self, fmt: fmt::Arguments<'_>) -> ! {
|
||||||
|
span_bug!(self.scrut_span, "{}", fmt)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Recursively expand this pattern into its subpatterns. Only useful for or-patterns.
|
/// Recursively expand this pattern into its subpatterns. Only useful for or-patterns.
|
||||||
fn expand_or_pat<'p, 'tcx>(pat: &'p Pat<'tcx>) -> Vec<&'p Pat<'tcx>> {
|
fn expand_or_pat<'p, 'tcx>(pat: &'p Pat<'tcx>) -> Vec<&'p Pat<'tcx>> {
|
||||||
fn expand<'p, 'tcx>(pat: &'p Pat<'tcx>, vec: &mut Vec<&'p Pat<'tcx>>) {
|
fn expand<'p, 'tcx>(pat: &'p Pat<'tcx>, vec: &mut Vec<&'p Pat<'tcx>>) {
|
@ -242,7 +242,7 @@
|
|||||||
//! Therefore `usefulness(tp_1, tp_2, tq)` returns the single witness-tuple `[Variant2(Some(true), 0)]`.
|
//! Therefore `usefulness(tp_1, tp_2, tq)` returns the single witness-tuple `[Variant2(Some(true), 0)]`.
|
||||||
//!
|
//!
|
||||||
//!
|
//!
|
||||||
//! Computing the set of constructors for a type is done in [`MatchCheckCtxt::ctors_for_ty`]. See
|
//! Computing the set of constructors for a type is done in [`TypeCx::ctors_for_ty`]. See
|
||||||
//! the following sections for more accurate versions of the algorithm and corresponding links.
|
//! the following sections for more accurate versions of the algorithm and corresponding links.
|
||||||
//!
|
//!
|
||||||
//!
|
//!
|
||||||
@@ -555,37 +555,52 @@
 use smallvec::{smallvec, SmallVec};
 use std::fmt;
 
-use rustc_data_structures::{captures::Captures, stack::ensure_sufficient_stack};
-use rustc_middle::ty::{self, Ty};
-use rustc_span::{Span, DUMMY_SP};
-
 use crate::constructor::{Constructor, ConstructorSet};
-use crate::cx::MatchCheckCtxt;
 use crate::pat::{DeconstructedPat, WitnessPat};
-use crate::MatchArm;
+use crate::{Captures, MatchArm, MatchCtxt, TypeCx, TypedArena};
 
 use self::ValidityConstraint::*;
 
-#[derive(Copy, Clone)]
-pub(crate) struct PatCtxt<'a, 'p, 'tcx> {
-    pub(crate) cx: &'a MatchCheckCtxt<'p, 'tcx>,
-    /// Type of the current column under investigation.
-    pub(crate) ty: Ty<'tcx>,
-    /// Whether the current pattern is the whole pattern as found in a match arm, or if it's a
-    /// subpattern.
-    pub(crate) is_top_level: bool,
+#[cfg(feature = "rustc")]
+use rustc_data_structures::stack::ensure_sufficient_stack;
+#[cfg(not(feature = "rustc"))]
+pub fn ensure_sufficient_stack<R>(f: impl FnOnce() -> R) -> R {
+    f()
 }
 
-impl<'a, 'p, 'tcx> PatCtxt<'a, 'p, 'tcx> {
-    /// A `PatCtxt` when code other than `is_useful` needs one.
-    pub(crate) fn new_dummy(cx: &'a MatchCheckCtxt<'p, 'tcx>, ty: Ty<'tcx>) -> Self {
-        PatCtxt { cx, ty, is_top_level: false }
+/// Context that provides information local to a place under investigation.
+#[derive(Clone)]
+pub(crate) struct PlaceCtxt<'a, 'p, Cx: TypeCx> {
+    pub(crate) mcx: MatchCtxt<'a, 'p, Cx>,
+    /// Type of the place under investigation.
+    pub(crate) ty: Cx::Ty,
+    /// Whether the place is the original scrutinee place, as opposed to a subplace of it.
+    pub(crate) is_scrutinee: bool,
+}
+
+impl<'a, 'p, Cx: TypeCx> PlaceCtxt<'a, 'p, Cx> {
+    /// A `PlaceCtxt` when code other than `is_useful` needs one.
+    #[cfg_attr(not(feature = "rustc"), allow(dead_code))]
+    pub(crate) fn new_dummy(mcx: MatchCtxt<'a, 'p, Cx>, ty: Cx::Ty) -> Self {
+        PlaceCtxt { mcx, ty, is_scrutinee: false }
+    }
+
+    pub(crate) fn ctor_arity(&self, ctor: &Constructor<Cx>) -> usize {
+        self.mcx.tycx.ctor_arity(ctor, self.ty)
+    }
+    pub(crate) fn ctor_sub_tys(&self, ctor: &Constructor<Cx>) -> &[Cx::Ty] {
+        self.mcx.tycx.ctor_sub_tys(ctor, self.ty)
+    }
+    pub(crate) fn ctors_for_ty(&self) -> ConstructorSet<Cx> {
+        self.mcx.tycx.ctors_for_ty(self.ty)
     }
 }
 
-impl<'a, 'p, 'tcx> fmt::Debug for PatCtxt<'a, 'p, 'tcx> {
+impl<'a, 'p, Cx: TypeCx> Copy for PlaceCtxt<'a, 'p, Cx> {}
+
+impl<'a, 'p, Cx: TypeCx> fmt::Debug for PlaceCtxt<'a, 'p, Cx> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_struct("PatCtxt").field("ty", &self.ty).finish()
+        f.debug_struct("PlaceCtxt").field("ty", &self.ty).finish()
     }
 }
 
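The block above makes the file buildable on stable by cfg-gating the nightly-only recursion guard behind the `rustc` feature and by funnelling every type-dependent query through the caller-supplied `TypeCx` trait, with `PlaceCtxt` doing nothing but delegation. The following standalone sketch mirrors that delegation pattern with invented toy names (`TypeOracle`, `Place`, `TrivialOracle`); it is not the crate's real trait:

    /// Caller-provided "type oracle": everything the core algorithm needs to know
    /// about types and constructors, and nothing else. (Toy stand-in; names invented.)
    trait TypeOracle {
        type Ty: Copy;
        /// Number of fields a constructor of this type has.
        fn ctor_arity(&self, ty: Self::Ty) -> usize;
    }

    /// Per-place context: a shared oracle plus the type of the place under
    /// investigation. It only delegates.
    struct Place<'a, Cx: TypeOracle> {
        cx: &'a Cx,
        ty: Cx::Ty,
    }

    impl<'a, Cx: TypeOracle> Place<'a, Cx> {
        fn ctor_arity(&self) -> usize {
            // Pure delegation: the algorithm never inspects `Cx::Ty` itself.
            self.cx.ctor_arity(self.ty)
        }
    }

    /// A trivial implementation where a "type" is just its field count.
    struct TrivialOracle;

    impl TypeOracle for TrivialOracle {
        type Ty = usize;
        fn ctor_arity(&self, ty: usize) -> usize {
            ty
        }
    }

    fn main() {
        let place = Place { cx: &TrivialOracle, ty: 3 };
        println!("arity = {}", place.ctor_arity());
    }

This is the shape that lets rustc and an external consumer such as rust-analyzer each plug in their own type representation.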
@@ -595,7 +610,7 @@ impl<'a, 'p, 'tcx> fmt::Debug for PatCtxt<'a, 'p, 'tcx> {
 /// - in the matrix, track whether a given place (aka column) is known to contain a valid value or
 /// not.
 #[derive(Debug, Copy, Clone, PartialEq, Eq)]
-enum ValidityConstraint {
+pub enum ValidityConstraint {
     ValidOnly,
     MaybeInvalid,
     /// Option for backwards compatibility: the place is not known to be valid but we allow omitting
@@ -604,7 +619,7 @@ enum ValidityConstraint {
 }
 
 impl ValidityConstraint {
-    fn from_bool(is_valid_only: bool) -> Self {
+    pub fn from_bool(is_valid_only: bool) -> Self {
         if is_valid_only { ValidOnly } else { MaybeInvalid }
     }
 
@@ -629,12 +644,9 @@ impl ValidityConstraint {
     ///
     /// Pending further opsem decisions, the current behavior is: validity is preserved, except
     /// inside `&` and union fields where validity is reset to `MaybeInvalid`.
-    fn specialize<'tcx>(self, pcx: &PatCtxt<'_, '_, 'tcx>, ctor: &Constructor<'tcx>) -> Self {
+    fn specialize<Cx: TypeCx>(self, ctor: &Constructor<Cx>) -> Self {
         // We preserve validity except when we go inside a reference or a union field.
-        if matches!(ctor, Constructor::Single)
-            && (matches!(pcx.ty.kind(), ty::Ref(..))
-                || matches!(pcx.ty.kind(), ty::Adt(def, ..) if def.is_union()))
-        {
+        if matches!(ctor, Constructor::Ref | Constructor::UnionField) {
             // Validity of `x: &T` does not imply validity of `*x: T`.
            MaybeInvalid
         } else {
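The new `specialize` above expresses the validity rule purely in terms of constructors: stepping through `Ref` or `UnionField` resets validity, everything else preserves it, so no type inspection is needed anymore. A minimal standalone sketch of the same rule, using an invented toy constructor enum rather than the crate's `Constructor`:

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    enum Validity {
        ValidOnly,
        MaybeInvalid,
    }

    #[derive(Debug, Clone, Copy)]
    enum Ctor {
        Struct,
        Ref,
        UnionField,
    }

    impl Validity {
        fn specialize(self, ctor: Ctor) -> Self {
            match ctor {
                // Validity of `x: &T` does not imply validity of `*x: T`,
                // and likewise for reading a union field.
                Ctor::Ref | Ctor::UnionField => Validity::MaybeInvalid,
                _ => self,
            }
        }
    }

    fn main() {
        let v = Validity::ValidOnly;
        assert_eq!(v.specialize(Ctor::Struct), Validity::ValidOnly);
        assert_eq!(v.specialize(Ctor::Ref), Validity::MaybeInvalid);
        println!("{:?}", v.specialize(Ctor::UnionField));
    }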
@@ -654,14 +666,18 @@ impl fmt::Display for ValidityConstraint {
 }
 
 /// Represents a pattern-tuple under investigation.
+// The three lifetimes are:
+// - 'a allocated by us
+// - 'p coming from the input
+// - Cx global compilation context
 #[derive(Clone)]
-struct PatStack<'p, 'tcx> {
+struct PatStack<'a, 'p, Cx: TypeCx> {
     // Rows of len 1 are very common, which is why `SmallVec[_; 2]` works well.
-    pats: SmallVec<[&'p DeconstructedPat<'p, 'tcx>; 2]>,
+    pats: SmallVec<[&'a DeconstructedPat<'p, Cx>; 2]>,
 }
 
-impl<'p, 'tcx> PatStack<'p, 'tcx> {
-    fn from_pattern(pat: &'p DeconstructedPat<'p, 'tcx>) -> Self {
+impl<'a, 'p, Cx: TypeCx> PatStack<'a, 'p, Cx> {
+    fn from_pattern(pat: &'a DeconstructedPat<'p, Cx>) -> Self {
         PatStack { pats: smallvec![pat] }
     }
 
@@ -673,17 +689,17 @@ impl<'p, 'tcx> PatStack<'p, 'tcx> {
         self.pats.len()
     }
 
-    fn head(&self) -> &'p DeconstructedPat<'p, 'tcx> {
+    fn head(&self) -> &'a DeconstructedPat<'p, Cx> {
         self.pats[0]
     }
 
-    fn iter(&self) -> impl Iterator<Item = &DeconstructedPat<'p, 'tcx>> {
+    fn iter<'b>(&'b self) -> impl Iterator<Item = &'a DeconstructedPat<'p, Cx>> + Captures<'b> {
         self.pats.iter().copied()
     }
 
     // Recursively expand the first or-pattern into its subpatterns. Only useful if the pattern is
     // an or-pattern. Panics if `self` is empty.
-    fn expand_or_pat<'a>(&'a self) -> impl Iterator<Item = PatStack<'p, 'tcx>> + Captures<'a> {
+    fn expand_or_pat<'b>(&'b self) -> impl Iterator<Item = PatStack<'a, 'p, Cx>> + Captures<'b> {
         self.head().flatten_or_pat().into_iter().map(move |pat| {
             let mut new = self.clone();
             new.pats[0] = pat;
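The `iter` and `expand_or_pat` signatures above use the `Captures` trick: a do-nothing trait bound that lets the opaque return type mention the borrow lifetime `'b` in addition to the data lifetimes. After this change `Captures` is imported from the crate root (`use crate::{Captures, ...}` above) instead of `rustc_data_structures`. A self-contained sketch of the trick with toy types:

    trait Captures<'a> {}
    impl<'a, T: ?Sized> Captures<'a> for T {}

    struct Stack<'p> {
        items: Vec<&'p str>,
    }

    impl<'p> Stack<'p> {
        // The iterator borrows `self` for 'b but yields `&'p str` items; adding
        // `+ Captures<'b>` records the borrow in the opaque type's bounds.
        fn iter<'b>(&'b self) -> impl Iterator<Item = &'p str> + Captures<'b> {
            self.items.iter().copied()
        }
    }

    fn main() {
        let a = String::from("alpha");
        let b = String::from("beta");
        let stack = Stack { items: vec![&a, &b] };
        for s in stack.iter() {
            println!("{s}");
        }
    }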
@@ -695,9 +711,9 @@ impl<'p, 'tcx> PatStack<'p, 'tcx> {
     /// Only call if `ctor.is_covered_by(self.head().ctor())` is true.
     fn pop_head_constructor(
         &self,
-        pcx: &PatCtxt<'_, 'p, 'tcx>,
-        ctor: &Constructor<'tcx>,
-    ) -> PatStack<'p, 'tcx> {
+        pcx: &PlaceCtxt<'a, 'p, Cx>,
+        ctor: &Constructor<Cx>,
+    ) -> PatStack<'a, 'p, Cx> {
         // We pop the head pattern and push the new fields extracted from the arguments of
         // `self.head()`.
         let mut new_pats = self.head().specialize(pcx, ctor);
@@ -706,7 +722,7 @@ impl<'p, 'tcx> PatStack<'p, 'tcx> {
     }
 }
 
-impl<'p, 'tcx> fmt::Debug for PatStack<'p, 'tcx> {
+impl<'a, 'p, Cx: TypeCx> fmt::Debug for PatStack<'a, 'p, Cx> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         // We pretty-print similarly to the `Debug` impl of `Matrix`.
         write!(f, "+")?;
@@ -719,9 +735,9 @@ impl<'p, 'tcx> fmt::Debug for PatStack<'p, 'tcx> {
 
 /// A row of the matrix.
 #[derive(Clone)]
-struct MatrixRow<'p, 'tcx> {
+struct MatrixRow<'a, 'p, Cx: TypeCx> {
     // The patterns in the row.
-    pats: PatStack<'p, 'tcx>,
+    pats: PatStack<'a, 'p, Cx>,
     /// Whether the original arm had a guard. This is inherited when specializing.
     is_under_guard: bool,
     /// When we specialize, we remember which row of the original matrix produced a given row of the
@@ -734,7 +750,7 @@ struct MatrixRow<'p, 'tcx> {
     useful: bool,
 }
 
-impl<'p, 'tcx> MatrixRow<'p, 'tcx> {
+impl<'a, 'p, Cx: TypeCx> MatrixRow<'a, 'p, Cx> {
     fn is_empty(&self) -> bool {
         self.pats.is_empty()
     }
@@ -743,17 +759,17 @@ impl<'p, 'tcx> MatrixRow<'p, 'tcx> {
         self.pats.len()
     }
 
-    fn head(&self) -> &'p DeconstructedPat<'p, 'tcx> {
+    fn head(&self) -> &'a DeconstructedPat<'p, Cx> {
         self.pats.head()
     }
 
-    fn iter(&self) -> impl Iterator<Item = &DeconstructedPat<'p, 'tcx>> {
+    fn iter<'b>(&'b self) -> impl Iterator<Item = &'a DeconstructedPat<'p, Cx>> + Captures<'b> {
         self.pats.iter()
     }
 
     // Recursively expand the first or-pattern into its subpatterns. Only useful if the pattern is
     // an or-pattern. Panics if `self` is empty.
-    fn expand_or_pat<'a>(&'a self) -> impl Iterator<Item = MatrixRow<'p, 'tcx>> + Captures<'a> {
+    fn expand_or_pat<'b>(&'b self) -> impl Iterator<Item = MatrixRow<'a, 'p, Cx>> + Captures<'b> {
         self.pats.expand_or_pat().map(|patstack| MatrixRow {
             pats: patstack,
             parent_row: self.parent_row,
@@ -766,10 +782,10 @@ impl<'p, 'tcx> MatrixRow<'p, 'tcx> {
     /// Only call if `ctor.is_covered_by(self.head().ctor())` is true.
     fn pop_head_constructor(
         &self,
-        pcx: &PatCtxt<'_, 'p, 'tcx>,
-        ctor: &Constructor<'tcx>,
+        pcx: &PlaceCtxt<'a, 'p, Cx>,
+        ctor: &Constructor<Cx>,
         parent_row: usize,
-    ) -> MatrixRow<'p, 'tcx> {
+    ) -> MatrixRow<'a, 'p, Cx> {
         MatrixRow {
             pats: self.pats.pop_head_constructor(pcx, ctor),
             parent_row,
@@ -779,7 +795,7 @@ impl<'p, 'tcx> MatrixRow<'p, 'tcx> {
     }
 }
 
-impl<'p, 'tcx> fmt::Debug for MatrixRow<'p, 'tcx> {
+impl<'a, 'p, Cx: TypeCx> fmt::Debug for MatrixRow<'a, 'p, Cx> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         self.pats.fmt(f)
     }
@@ -796,22 +812,22 @@ impl<'p, 'tcx> fmt::Debug for MatrixRow<'p, 'tcx> {
 /// specializing `(,)` and `Some` on a pattern of type `(Option<u32>, bool)`, the first column of
 /// the matrix will correspond to `scrutinee.0.Some.0` and the second column to `scrutinee.1`.
 #[derive(Clone)]
-struct Matrix<'p, 'tcx> {
+struct Matrix<'a, 'p, Cx: TypeCx> {
     /// Vector of rows. The rows must form a rectangular 2D array. Moreover, all the patterns of
     /// each column must have the same type. Each column corresponds to a place within the
     /// scrutinee.
-    rows: Vec<MatrixRow<'p, 'tcx>>,
+    rows: Vec<MatrixRow<'a, 'p, Cx>>,
     /// Stores an extra fictitious row full of wildcards. Mostly used to keep track of the type of
     /// each column. This must obey the same invariants as the real rows.
-    wildcard_row: PatStack<'p, 'tcx>,
+    wildcard_row: PatStack<'a, 'p, Cx>,
     /// Track for each column/place whether it contains a known valid value.
     place_validity: SmallVec<[ValidityConstraint; 2]>,
 }
 
-impl<'p, 'tcx> Matrix<'p, 'tcx> {
+impl<'a, 'p, Cx: TypeCx> Matrix<'a, 'p, Cx> {
     /// Pushes a new row to the matrix. If the row starts with an or-pattern, this recursively
     /// expands it. Internal method, prefer [`Matrix::new`].
-    fn expand_and_push(&mut self, row: MatrixRow<'p, 'tcx>) {
+    fn expand_and_push(&mut self, row: MatrixRow<'a, 'p, Cx>) {
         if !row.is_empty() && row.head().is_or_pat() {
             // Expand nested or-patterns.
             for new_row in row.expand_or_pat() {
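`expand_and_push` above flattens or-patterns before rows enter the matrix, so the core algorithm never has to handle an or-pattern at the head of a row. A toy sketch of that expansion with a simplified pattern type (invented for illustration, not the crate's `DeconstructedPat`):

    #[derive(Debug, Clone)]
    enum Pat {
        Wild,
        Lit(i32),
        Or(Vec<Pat>),
    }

    type Row = Vec<Pat>;

    fn expand_and_push(rows: &mut Vec<Row>, row: Row) {
        match row.first() {
            Some(Pat::Or(alts)) => {
                // One new row per or-pattern alternative, keeping the tail as-is.
                for alt in alts {
                    let mut new_row = row.clone();
                    new_row[0] = alt.clone();
                    // Recurse: alternatives may themselves be or-patterns.
                    expand_and_push(rows, new_row);
                }
            }
            _ => rows.push(row),
        }
    }

    fn main() {
        let mut matrix: Vec<Row> = Vec::new();
        // `(1 | 2, _)` becomes the two rows `(1, _)` and `(2, _)`.
        expand_and_push(&mut matrix, vec![Pat::Or(vec![Pat::Lit(1), Pat::Lit(2)]), Pat::Wild]);
        for row in &matrix {
            println!("{row:?}");
        }
    }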
@@ -823,16 +839,14 @@ impl<'p, 'tcx> Matrix<'p, 'tcx> {
     }
 
     /// Build a new matrix from an iterator of `MatchArm`s.
-    fn new<'a>(
-        cx: &MatchCheckCtxt<'p, 'tcx>,
-        arms: &[MatchArm<'p, 'tcx>],
-        scrut_ty: Ty<'tcx>,
+    fn new(
+        wildcard_arena: &'a TypedArena<DeconstructedPat<'p, Cx>>,
+        arms: &'a [MatchArm<'p, Cx>],
+        scrut_ty: Cx::Ty,
         scrut_validity: ValidityConstraint,
-    ) -> Self
-    where
-        'p: 'a,
-    {
-        let wild_pattern = cx.pattern_arena.alloc(DeconstructedPat::wildcard(scrut_ty, DUMMY_SP));
+    ) -> Self {
+        let wild_pattern =
+            wildcard_arena.alloc(DeconstructedPat::wildcard(scrut_ty, Default::default()));
         let wildcard_row = PatStack::from_pattern(wild_pattern);
         let mut matrix = Matrix {
             rows: Vec::with_capacity(arms.len()),
@@ -851,7 +865,7 @@ impl<'p, 'tcx> Matrix<'p, 'tcx> {
         matrix
     }
 
-    fn head_ty(&self) -> Option<Ty<'tcx>> {
+    fn head_ty(&self) -> Option<Cx::Ty> {
         if self.column_count() == 0 {
             return None;
         }
@@ -859,11 +873,10 @@ impl<'p, 'tcx> Matrix<'p, 'tcx> {
         let mut ty = self.wildcard_row.head().ty();
         // If the type is opaque and it is revealed anywhere in the column, we take the revealed
         // version. Otherwise we could encounter constructors for the revealed type and crash.
-        let is_opaque = |ty: Ty<'tcx>| matches!(ty.kind(), ty::Alias(ty::Opaque, ..));
-        if is_opaque(ty) {
+        if Cx::is_opaque_ty(ty) {
             for pat in self.heads() {
                 let pat_ty = pat.ty();
-                if !is_opaque(pat_ty) {
+                if !Cx::is_opaque_ty(pat_ty) {
                     ty = pat_ty;
                     break;
                 }
@@ -875,34 +888,34 @@ impl<'p, 'tcx> Matrix<'p, 'tcx> {
         self.wildcard_row.len()
     }
 
-    fn rows<'a>(
-        &'a self,
-    ) -> impl Iterator<Item = &'a MatrixRow<'p, 'tcx>> + Clone + DoubleEndedIterator + ExactSizeIterator
+    fn rows<'b>(
+        &'b self,
+    ) -> impl Iterator<Item = &'b MatrixRow<'a, 'p, Cx>> + Clone + DoubleEndedIterator + ExactSizeIterator
     {
         self.rows.iter()
     }
-    fn rows_mut<'a>(
-        &'a mut self,
-    ) -> impl Iterator<Item = &'a mut MatrixRow<'p, 'tcx>> + DoubleEndedIterator + ExactSizeIterator
+    fn rows_mut<'b>(
+        &'b mut self,
+    ) -> impl Iterator<Item = &'b mut MatrixRow<'a, 'p, Cx>> + DoubleEndedIterator + ExactSizeIterator
     {
         self.rows.iter_mut()
     }
 
     /// Iterate over the first pattern of each row.
-    fn heads<'a>(
-        &'a self,
-    ) -> impl Iterator<Item = &'p DeconstructedPat<'p, 'tcx>> + Clone + Captures<'a> {
+    fn heads<'b>(
+        &'b self,
+    ) -> impl Iterator<Item = &'b DeconstructedPat<'p, Cx>> + Clone + Captures<'a> {
         self.rows().map(|r| r.head())
     }
 
     /// This computes `specialize(ctor, self)`. See top of the file for explanations.
     fn specialize_constructor(
         &self,
-        pcx: &PatCtxt<'_, 'p, 'tcx>,
-        ctor: &Constructor<'tcx>,
-    ) -> Matrix<'p, 'tcx> {
+        pcx: &PlaceCtxt<'a, 'p, Cx>,
+        ctor: &Constructor<Cx>,
+    ) -> Matrix<'a, 'p, Cx> {
         let wildcard_row = self.wildcard_row.pop_head_constructor(pcx, ctor);
-        let new_validity = self.place_validity[0].specialize(pcx, ctor);
+        let new_validity = self.place_validity[0].specialize(ctor);
         let new_place_validity = std::iter::repeat(new_validity)
             .take(ctor.arity(pcx))
             .chain(self.place_validity[1..].iter().copied())
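`specialize_constructor` above is the core step of the algorithm: the matrix is filtered down to rows compatible with one constructor, the head column is replaced by one column per constructor field, and the place validity is specialized alongside. A toy sketch of the row transformation for a single `Some`-like constructor of arity 1 (simplified patterns, invented for illustration):

    #[derive(Debug, Clone)]
    enum Pat {
        Wild,
        // A "Some" pattern with one field, standing in for an arbitrary ctor.
        Some(Box<Pat>),
        None,
    }

    type Row = Vec<Pat>;

    /// Specialize `rows` by the `Some` constructor (arity 1).
    fn specialize_some(rows: &[Row]) -> Vec<Row> {
        let mut out = Vec::new();
        for row in rows {
            let mut new_row: Row = match &row[0] {
                // A wildcard head matches: it contributes one wildcard per field.
                Pat::Wild => vec![Pat::Wild],
                // A `Some(p)` head matches: its field becomes the new first column.
                Pat::Some(p) => vec![(**p).clone()],
                // A `None` head cannot match `Some(_)`: the row is dropped.
                Pat::None => continue,
            };
            new_row.extend(row[1..].iter().cloned());
            out.push(new_row);
        }
        out
    }

    fn main() {
        let matrix = vec![
            vec![Pat::None, Pat::Wild],
            vec![Pat::Some(Box::new(Pat::None)), Pat::Wild],
            vec![Pat::Wild, Pat::None],
        ];
        for row in specialize_some(&matrix) {
            println!("{row:?}");
        }
    }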
@@ -929,7 +942,7 @@ impl<'p, 'tcx> Matrix<'p, 'tcx> {
 /// + _ + [_, _, tail @ ..] +
 /// | ✓ | ? | // column validity
 /// ```
-impl<'p, 'tcx> fmt::Debug for Matrix<'p, 'tcx> {
+impl<'a, 'p, Cx: TypeCx> fmt::Debug for Matrix<'a, 'p, Cx> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "\n")?;
 
@@ -1020,17 +1033,17 @@ impl<'p, 'tcx> fmt::Debug for Matrix<'p, 'tcx> {
 ///
 /// See the top of the file for more detailed explanations and examples.
 #[derive(Debug, Clone)]
-struct WitnessStack<'tcx>(Vec<WitnessPat<'tcx>>);
+struct WitnessStack<Cx: TypeCx>(Vec<WitnessPat<Cx>>);
 
-impl<'tcx> WitnessStack<'tcx> {
+impl<Cx: TypeCx> WitnessStack<Cx> {
     /// Asserts that the witness contains a single pattern, and returns it.
-    fn single_pattern(self) -> WitnessPat<'tcx> {
+    fn single_pattern(self) -> WitnessPat<Cx> {
         assert_eq!(self.0.len(), 1);
         self.0.into_iter().next().unwrap()
     }
 
     /// Reverses specialization by the `Missing` constructor by pushing a whole new pattern.
-    fn push_pattern(&mut self, pat: WitnessPat<'tcx>) {
+    fn push_pattern(&mut self, pat: WitnessPat<Cx>) {
         self.0.push(pat);
     }
 
@@ -1048,7 +1061,7 @@ impl<'tcx> WitnessStack<'tcx> {
     /// pats: [(false, "foo"), _, true]
     /// result: [Enum::Variant { a: (false, "foo"), b: _ }, true]
     /// ```
-    fn apply_constructor(&mut self, pcx: &PatCtxt<'_, '_, 'tcx>, ctor: &Constructor<'tcx>) {
+    fn apply_constructor(&mut self, pcx: &PlaceCtxt<'_, '_, Cx>, ctor: &Constructor<Cx>) {
         let len = self.0.len();
         let arity = ctor.arity(pcx);
         let fields = self.0.drain((len - arity)..).rev().collect();
@@ -1067,9 +1080,9 @@ impl<'tcx> WitnessStack<'tcx> {
 /// Just as the `Matrix` starts with a single column, by the end of the algorithm, this has a single
 /// column, which contains the patterns that are missing for the match to be exhaustive.
 #[derive(Debug, Clone)]
-struct WitnessMatrix<'tcx>(Vec<WitnessStack<'tcx>>);
+struct WitnessMatrix<Cx: TypeCx>(Vec<WitnessStack<Cx>>);
 
-impl<'tcx> WitnessMatrix<'tcx> {
+impl<Cx: TypeCx> WitnessMatrix<Cx> {
     /// New matrix with no witnesses.
     fn empty() -> Self {
         WitnessMatrix(vec![])
@@ -1084,12 +1097,12 @@ impl<'tcx> WitnessMatrix<'tcx> {
         self.0.is_empty()
     }
     /// Asserts that there is a single column and returns the patterns in it.
-    fn single_column(self) -> Vec<WitnessPat<'tcx>> {
+    fn single_column(self) -> Vec<WitnessPat<Cx>> {
        self.0.into_iter().map(|w| w.single_pattern()).collect()
     }
 
     /// Reverses specialization by the `Missing` constructor by pushing a whole new pattern.
-    fn push_pattern(&mut self, pat: WitnessPat<'tcx>) {
+    fn push_pattern(&mut self, pat: WitnessPat<Cx>) {
         for witness in self.0.iter_mut() {
             witness.push_pattern(pat.clone())
         }
@@ -1098,9 +1111,9 @@ impl<'tcx> WitnessMatrix<'tcx> {
     /// Reverses specialization by `ctor`. See the section on `unspecialize` at the top of the file.
     fn apply_constructor(
         &mut self,
-        pcx: &PatCtxt<'_, '_, 'tcx>,
-        missing_ctors: &[Constructor<'tcx>],
-        ctor: &Constructor<'tcx>,
+        pcx: &PlaceCtxt<'_, '_, Cx>,
+        missing_ctors: &[Constructor<Cx>],
+        ctor: &Constructor<Cx>,
         report_individual_missing_ctors: bool,
     ) {
         if self.is_empty() {
@@ -1160,12 +1173,12 @@ impl<'tcx> WitnessMatrix<'tcx> {
 /// - unspecialization, where we lift the results from the previous step into results for this step
 /// (using `apply_constructor` and by updating `row.useful` for each parent row).
 /// This is all explained at the top of the file.
-#[instrument(level = "debug", skip(cx, is_top_level), ret)]
-fn compute_exhaustiveness_and_usefulness<'p, 'tcx>(
-    cx: &MatchCheckCtxt<'p, 'tcx>,
-    matrix: &mut Matrix<'p, 'tcx>,
+#[instrument(level = "debug", skip(mcx, is_top_level), ret)]
+fn compute_exhaustiveness_and_usefulness<'a, 'p, Cx: TypeCx>(
+    mcx: MatchCtxt<'a, 'p, Cx>,
+    matrix: &mut Matrix<'a, 'p, Cx>,
     is_top_level: bool,
-) -> WitnessMatrix<'tcx> {
+) -> WitnessMatrix<Cx> {
     debug_assert!(matrix.rows().all(|r| r.len() == matrix.column_count()));
 
     let Some(ty) = matrix.head_ty() else {
@@ -1185,7 +1198,7 @@ fn compute_exhaustiveness_and_usefulness<'p, 'tcx>(
     };
 
     debug!("ty: {ty:?}");
-    let pcx = &PatCtxt { cx, ty, is_top_level };
+    let pcx = &PlaceCtxt { mcx, ty, is_scrutinee: is_top_level };
 
     // Whether the place/column we are inspecting is known to contain valid data.
     let place_validity = matrix.place_validity[0];
@@ -1194,7 +1207,7 @@ fn compute_exhaustiveness_and_usefulness<'p, 'tcx>(
 
     // Analyze the constructors present in this column.
     let ctors = matrix.heads().map(|p| p.ctor());
-    let ctors_for_ty = &cx.ctors_for_ty(ty);
+    let ctors_for_ty = pcx.ctors_for_ty();
     let is_integers = matches!(ctors_for_ty, ConstructorSet::Integers { .. }); // For diagnostics.
     let split_set = ctors_for_ty.split(pcx, ctors);
     let all_missing = split_set.present.is_empty();
@@ -1228,7 +1241,7 @@ fn compute_exhaustiveness_and_usefulness<'p, 'tcx>(
         // Dig into rows that match `ctor`.
         let mut spec_matrix = matrix.specialize_constructor(pcx, &ctor);
         let mut witnesses = ensure_sufficient_stack(|| {
-            compute_exhaustiveness_and_usefulness(cx, &mut spec_matrix, false)
+            compute_exhaustiveness_and_usefulness(mcx, &mut spec_matrix, false)
        });
 
         let counts_for_exhaustiveness = match ctor {
@@ -1270,34 +1283,34 @@ fn compute_exhaustiveness_and_usefulness<'p, 'tcx>(
 
 /// Indicates whether or not a given arm is useful.
 #[derive(Clone, Debug)]
-pub enum Usefulness {
+pub enum Usefulness<'p, Cx: TypeCx> {
     /// The arm is useful. This additionally carries a set of or-pattern branches that have been
     /// found to be redundant despite the overall arm being useful. Used only in the presence of
     /// or-patterns, otherwise it stays empty.
-    Useful(Vec<Span>),
+    Useful(Vec<&'p DeconstructedPat<'p, Cx>>),
     /// The arm is redundant and can be removed without changing the behavior of the match
     /// expression.
     Redundant,
 }
 
 /// The output of checking a match for exhaustiveness and arm usefulness.
-pub struct UsefulnessReport<'p, 'tcx> {
+pub struct UsefulnessReport<'p, Cx: TypeCx> {
     /// For each arm of the input, whether that arm is useful after the arms above it.
-    pub arm_usefulness: Vec<(MatchArm<'p, 'tcx>, Usefulness)>,
+    pub arm_usefulness: Vec<(MatchArm<'p, Cx>, Usefulness<'p, Cx>)>,
     /// If the match is exhaustive, this is empty. If not, this contains witnesses for the lack of
     /// exhaustiveness.
-    pub non_exhaustiveness_witnesses: Vec<WitnessPat<'tcx>>,
+    pub non_exhaustiveness_witnesses: Vec<WitnessPat<Cx>>,
 }
 
 /// Computes whether a match is exhaustive and which of its arms are useful.
 #[instrument(skip(cx, arms), level = "debug")]
-pub(crate) fn compute_match_usefulness<'p, 'tcx>(
-    cx: &MatchCheckCtxt<'p, 'tcx>,
-    arms: &[MatchArm<'p, 'tcx>],
-    scrut_ty: Ty<'tcx>,
-) -> UsefulnessReport<'p, 'tcx> {
-    let scrut_validity = ValidityConstraint::from_bool(cx.known_valid_scrutinee);
-    let mut matrix = Matrix::new(cx, arms, scrut_ty, scrut_validity);
+pub fn compute_match_usefulness<'p, Cx: TypeCx>(
+    cx: MatchCtxt<'_, 'p, Cx>,
+    arms: &[MatchArm<'p, Cx>],
+    scrut_ty: Cx::Ty,
+    scrut_validity: ValidityConstraint,
+) -> UsefulnessReport<'p, Cx> {
+    let mut matrix = Matrix::new(cx.wildcard_arena, arms, scrut_ty, scrut_validity);
     let non_exhaustiveness_witnesses = compute_exhaustiveness_and_usefulness(cx, &mut matrix, true);
 
     let non_exhaustiveness_witnesses: Vec<_> = non_exhaustiveness_witnesses.single_column();
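With this change `compute_match_usefulness` becomes a public entry point: the caller supplies a `MatchCtxt`, the arms, the scrutinee type and its `ValidityConstraint`, and gets back a `UsefulnessReport` with per-arm usefulness and the non-exhaustiveness witnesses. To make the shape of that report concrete, here is a brute-force toy version over a two-value domain (invented types, not the crate's API or algorithm):

    #[derive(Debug, Clone, Copy)]
    enum BoolPat {
        Wild,
        Lit(bool),
    }

    impl BoolPat {
        fn covers(self, v: bool) -> bool {
            match self {
                BoolPat::Wild => true,
                BoolPat::Lit(b) => b == v,
            }
        }
    }

    #[derive(Debug)]
    struct Report {
        /// For each arm, whether it is useful after the arms above it.
        arm_usefulness: Vec<bool>,
        /// Values not covered by any arm (witnesses of non-exhaustiveness).
        non_exhaustiveness_witnesses: Vec<bool>,
    }

    fn check(arms: &[BoolPat]) -> Report {
        let domain = [false, true];
        let mut arm_usefulness = Vec::new();
        for (i, arm) in arms.iter().enumerate() {
            // An arm is useful if it covers some value no earlier arm covers.
            let useful = domain
                .iter()
                .any(|&v| arm.covers(v) && !arms[..i].iter().any(|prev| prev.covers(v)));
            arm_usefulness.push(useful);
        }
        let non_exhaustiveness_witnesses = domain
            .iter()
            .copied()
            .filter(|&v| !arms.iter().any(|arm| arm.covers(v)))
            .collect();
        Report { arm_usefulness, non_exhaustiveness_witnesses }
    }

    fn main() {
        // Like `match x { true => .., _ => .., false => .. }`: the last arm is redundant.
        println!("{:?}", check(&[BoolPat::Lit(true), BoolPat::Wild, BoolPat::Lit(false)]));
        // Like `match x { true => .. }`: non-exhaustive, witness `false`.
        println!("{:?}", check(&[BoolPat::Lit(true)]));
    }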
@@ -1308,7 +1321,7 @@ pub(crate) fn compute_match_usefulness<'p, 'tcx>(
         debug!(?arm);
         // We warn when a pattern is not useful.
         let usefulness = if arm.pat.is_useful() {
-            Usefulness::Useful(arm.pat.redundant_spans())
+            Usefulness::Useful(arm.pat.redundant_subpatterns())
         } else {
             Usefulness::Redundant
         };
@@ -357,6 +357,7 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
     "tracing-tree",
     "twox-hash",
     "type-map",
+    "typed-arena",
     "typenum",
     "unic-langid",
     "unic-langid-impl",