Replace `Pat` with a new intermediate representation
commit 71abc9565f (parent fde45e96b8)
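In short: instead of running usefulness checking directly on THIR `Pat`/`PatKind` values (previously pre-processed by `expand_pattern`), the `check_match`, `usefulness`, and `deconstruct_pat` modules now lower each pattern once into an arena-allocated `DeconstructedPat`, which stores a `Constructor` plus its `Fields`, the pattern's type, and its span. A hypothetical, self-contained analogue of that shape (the names mirror the diff; the simplified payload types and the example values are illustrative assumptions, not rustc code):

```rust
// Toy model of the new intermediate representation; rustc's version is
// generic over lifetimes and arena-allocates the field patterns.
#[allow(dead_code)]
#[derive(Debug, Clone)]
enum Constructor {
    Single,               // structs, tuples, references, boxes
    Variant(usize),       // a specific enum variant
    IntRange(u128, u128), // integer and range patterns
    Wildcard,             // `_` and bindings without subpatterns
    Or,                   // or-patterns; the alternatives live in `fields`
}

#[derive(Debug, Clone)]
struct DeconstructedPat {
    ctor: Constructor,
    fields: Vec<DeconstructedPat>, // one entry per field of `ctor`
}

impl DeconstructedPat {
    fn wildcard() -> Self {
        DeconstructedPat { ctor: Constructor::Wildcard, fields: Vec::new() }
    }
}

fn main() {
    // `Some(0..=5)` deconstructed: a Variant constructor wrapping an IntRange.
    let some_range = DeconstructedPat {
        ctor: Constructor::Variant(1),
        fields: vec![DeconstructedPat { ctor: Constructor::IntRange(0, 5), fields: vec![] }],
    };
    println!("{:?}\n{:?}", some_range, DeconstructedPat::wildcard());
}
```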
@@ -1,6 +1,6 @@
use super::deconstruct_pat::{Constructor, DeconstructedPat};
use super::usefulness::{
    compute_match_usefulness, expand_pattern, is_wildcard, MatchArm, MatchCheckCtxt, Reachability,
    UsefulnessReport,
    compute_match_usefulness, MatchArm, MatchCheckCtxt, Reachability, UsefulnessReport,
};
use super::{PatCtxt, PatternError};

@@ -12,14 +12,12 @@ use rustc_hir::def::*;
use rustc_hir::def_id::DefId;
use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor};
use rustc_hir::{HirId, Pat};
use rustc_middle::thir::PatKind;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_middle::ty::{self, AdtDef, Ty, TyCtxt};
use rustc_session::lint::builtin::{
    BINDINGS_WITH_VARIANT_NAME, IRREFUTABLE_LET_PATTERNS, UNREACHABLE_PATTERNS,
};
use rustc_session::Session;
use rustc_span::{DesugaringKind, ExpnKind, Span};
use std::slice;

crate fn check_match(tcx: TyCtxt<'_>, def_id: DefId) {
    let body_id = match def_id.as_local() {
@@ -27,11 +25,12 @@ crate fn check_match(tcx: TyCtxt<'_>, def_id: DefId) {
        Some(id) => tcx.hir().body_owned_by(tcx.hir().local_def_id_to_hir_id(id)),
    };

    let pattern_arena = TypedArena::default();
    let mut visitor = MatchVisitor {
        tcx,
        typeck_results: tcx.typeck_body(body_id),
        param_env: tcx.param_env(def_id),
        pattern_arena: TypedArena::default(),
        pattern_arena: &pattern_arena,
    };
    visitor.visit_body(tcx.hir().body(body_id));
}
|
||||
@ -40,14 +39,14 @@ fn create_e0004(sess: &Session, sp: Span, error_message: String) -> DiagnosticBu
|
||||
struct_span_err!(sess, sp, E0004, "{}", &error_message)
|
||||
}
|
||||
|
||||
struct MatchVisitor<'a, 'tcx> {
|
||||
struct MatchVisitor<'a, 'p, 'tcx> {
|
||||
tcx: TyCtxt<'tcx>,
|
||||
typeck_results: &'a ty::TypeckResults<'tcx>,
|
||||
param_env: ty::ParamEnv<'tcx>,
|
||||
pattern_arena: TypedArena<super::Pat<'tcx>>,
|
||||
pattern_arena: &'p TypedArena<DeconstructedPat<'p, 'tcx>>,
|
||||
}
|
||||
|
||||
impl<'tcx> Visitor<'tcx> for MatchVisitor<'_, 'tcx> {
|
||||
impl<'tcx> Visitor<'tcx> for MatchVisitor<'_, '_, 'tcx> {
|
||||
type Map = intravisit::ErasedMap<'tcx>;
|
||||
|
||||
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
|
||||
@ -113,31 +112,30 @@ impl PatCtxt<'_, '_> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> MatchVisitor<'_, 'tcx> {
|
||||
impl<'p, 'tcx> MatchVisitor<'_, 'p, 'tcx> {
|
||||
fn check_patterns(&self, pat: &Pat<'_>) {
|
||||
pat.walk_always(|pat| check_borrow_conflicts_in_at_patterns(self, pat));
|
||||
check_for_bindings_named_same_as_variants(self, pat);
|
||||
}
|
||||
|
||||
fn lower_pattern<'p>(
|
||||
fn lower_pattern(
|
||||
&self,
|
||||
cx: &mut MatchCheckCtxt<'p, 'tcx>,
|
||||
pat: &'tcx hir::Pat<'tcx>,
|
||||
have_errors: &mut bool,
|
||||
) -> (&'p super::Pat<'tcx>, Ty<'tcx>) {
|
||||
) -> &'p DeconstructedPat<'p, 'tcx> {
|
||||
let mut patcx = PatCtxt::new(self.tcx, self.param_env, self.typeck_results);
|
||||
patcx.include_lint_checks();
|
||||
let pattern = patcx.lower_pattern(pat);
|
||||
let pattern_ty = pattern.ty;
|
||||
let pattern: &_ = cx.pattern_arena.alloc(expand_pattern(pattern));
|
||||
let pattern: &_ = cx.pattern_arena.alloc(DeconstructedPat::from_pat(cx, &pattern));
|
||||
if !patcx.errors.is_empty() {
|
||||
*have_errors = true;
|
||||
patcx.report_inlining_errors();
|
||||
}
|
||||
(pattern, pattern_ty)
|
||||
pattern
|
||||
}
|
||||
|
||||
fn new_cx(&self, hir_id: HirId) -> MatchCheckCtxt<'_, 'tcx> {
|
||||
fn new_cx(&self, hir_id: HirId) -> MatchCheckCtxt<'p, 'tcx> {
|
||||
MatchCheckCtxt {
|
||||
tcx: self.tcx,
|
||||
param_env: self.param_env,
|
||||
@ -149,8 +147,8 @@ impl<'tcx> MatchVisitor<'_, 'tcx> {
|
||||
fn check_let(&mut self, pat: &'tcx hir::Pat<'tcx>, expr: &hir::Expr<'_>, span: Span) {
|
||||
self.check_patterns(pat);
|
||||
let mut cx = self.new_cx(expr.hir_id);
|
||||
let tpat = self.lower_pattern(&mut cx, pat, &mut false).0;
|
||||
check_let_reachability(&mut cx, pat.hir_id, &tpat, span);
|
||||
let tpat = self.lower_pattern(&mut cx, pat, &mut false);
|
||||
check_let_reachability(&mut cx, pat.hir_id, tpat, span);
|
||||
}
|
||||
|
||||
fn check_match(
|
||||
@ -166,8 +164,8 @@ impl<'tcx> MatchVisitor<'_, 'tcx> {
|
||||
self.check_patterns(&arm.pat);
|
||||
if let Some(hir::Guard::IfLet(ref pat, _)) = arm.guard {
|
||||
self.check_patterns(pat);
|
||||
let tpat = self.lower_pattern(&mut cx, pat, &mut false).0;
|
||||
check_let_reachability(&mut cx, pat.hir_id, &tpat, tpat.span);
|
||||
let tpat = self.lower_pattern(&mut cx, pat, &mut false);
|
||||
check_let_reachability(&mut cx, pat.hir_id, tpat, tpat.span());
|
||||
}
|
||||
}
|
||||
|
||||
@ -176,7 +174,7 @@ impl<'tcx> MatchVisitor<'_, 'tcx> {
|
||||
let arms: Vec<_> = arms
|
||||
.iter()
|
||||
.map(|hir::Arm { pat, guard, .. }| MatchArm {
|
||||
pat: self.lower_pattern(&mut cx, pat, &mut have_errors).0,
|
||||
pat: self.lower_pattern(&mut cx, pat, &mut have_errors),
|
||||
hir_id: pat.hir_id,
|
||||
has_guard: guard.is_some(),
|
||||
})
|
||||
@ -210,7 +208,8 @@ impl<'tcx> MatchVisitor<'_, 'tcx> {
|
||||
fn check_irrefutable(&self, pat: &'tcx Pat<'tcx>, origin: &str, sp: Option<Span>) {
|
||||
let mut cx = self.new_cx(pat.hir_id);
|
||||
|
||||
let (pattern, pattern_ty) = self.lower_pattern(&mut cx, pat, &mut false);
|
||||
let pattern = self.lower_pattern(&mut cx, pat, &mut false);
|
||||
let pattern_ty = pattern.ty();
|
||||
let arms = vec![MatchArm { pat: pattern, hir_id: pat.hir_id, has_guard: false }];
|
||||
let report = compute_match_usefulness(&cx, &arms, pat.hir_id, pattern_ty);
|
||||
|
||||
@ -222,7 +221,7 @@ impl<'tcx> MatchVisitor<'_, 'tcx> {
|
||||
return;
|
||||
}
|
||||
|
||||
let joined_patterns = joined_uncovered_patterns(&witnesses);
|
||||
let joined_patterns = joined_uncovered_patterns(&cx, &witnesses);
|
||||
let mut err = struct_span_err!(
|
||||
self.tcx.sess,
|
||||
pat.span,
|
||||
@ -298,7 +297,7 @@ fn const_not_var(
|
||||
}
|
||||
}
|
||||
|
||||
fn check_for_bindings_named_same_as_variants(cx: &MatchVisitor<'_, '_>, pat: &Pat<'_>) {
|
||||
fn check_for_bindings_named_same_as_variants(cx: &MatchVisitor<'_, '_, '_>, pat: &Pat<'_>) {
|
||||
pat.walk_always(|p| {
|
||||
if let hir::PatKind::Binding(_, _, ident, None) = p.kind {
|
||||
if let Some(ty::BindByValue(hir::Mutability::Not)) =
|
||||
@ -340,12 +339,11 @@ fn check_for_bindings_named_same_as_variants(cx: &MatchVisitor<'_, '_>, pat: &Pa
|
||||
}
|
||||
|
||||
/// Checks for common cases of "catchall" patterns that may not be intended as such.
|
||||
fn pat_is_catchall(pat: &super::Pat<'_>) -> bool {
|
||||
use PatKind::*;
|
||||
match &*pat.kind {
|
||||
Binding { subpattern: None, .. } => true,
|
||||
Binding { subpattern: Some(s), .. } | Deref { subpattern: s } => pat_is_catchall(s),
|
||||
Leaf { subpatterns: s } => s.iter().all(|p| pat_is_catchall(&p.pattern)),
|
||||
fn pat_is_catchall(pat: &DeconstructedPat<'_, '_>) -> bool {
|
||||
use Constructor::*;
|
||||
match pat.ctor() {
|
||||
Wildcard => true,
|
||||
Single => pat.iter_fields().all(|pat| pat_is_catchall(pat)),
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
@ -424,11 +422,11 @@ fn irrefutable_let_pattern(tcx: TyCtxt<'_>, id: HirId, span: Span) {
|
||||
fn check_let_reachability<'p, 'tcx>(
|
||||
cx: &mut MatchCheckCtxt<'p, 'tcx>,
|
||||
pat_id: HirId,
|
||||
pat: &'p super::Pat<'tcx>,
|
||||
pat: &'p DeconstructedPat<'p, 'tcx>,
|
||||
span: Span,
|
||||
) {
|
||||
let arms = [MatchArm { pat, hir_id: pat_id, has_guard: false }];
|
||||
let report = compute_match_usefulness(&cx, &arms, pat_id, pat.ty);
|
||||
let report = compute_match_usefulness(&cx, &arms, pat_id, pat.ty());
|
||||
|
||||
// Report if the pattern is unreachable, which can only occur when the type is uninhabited.
|
||||
// This also reports unreachable sub-patterns though, so we can't just replace it with an
|
||||
@ -450,7 +448,7 @@ fn report_arm_reachability<'p, 'tcx>(
|
||||
let mut catchall = None;
|
||||
for (arm, is_useful) in report.arm_usefulness.iter() {
|
||||
match is_useful {
|
||||
Unreachable => unreachable_pattern(cx.tcx, arm.pat.span, arm.hir_id, catchall),
|
||||
Unreachable => unreachable_pattern(cx.tcx, arm.pat.span(), arm.hir_id, catchall),
|
||||
Reachable(unreachables) if unreachables.is_empty() => {}
|
||||
// The arm is reachable, but contains unreachable subpatterns (from or-patterns).
|
||||
Reachable(unreachables) => {
|
||||
@ -463,7 +461,7 @@ fn report_arm_reachability<'p, 'tcx>(
|
||||
}
|
||||
}
|
||||
if !arm.has_guard && catchall.is_none() && pat_is_catchall(arm.pat) {
|
||||
catchall = Some(arm.pat.span);
|
||||
catchall = Some(arm.pat.span());
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -473,7 +471,7 @@ fn non_exhaustive_match<'p, 'tcx>(
|
||||
cx: &MatchCheckCtxt<'p, 'tcx>,
|
||||
scrut_ty: Ty<'tcx>,
|
||||
sp: Span,
|
||||
witnesses: Vec<super::Pat<'tcx>>,
|
||||
witnesses: Vec<DeconstructedPat<'p, 'tcx>>,
|
||||
is_empty_match: bool,
|
||||
) {
|
||||
let non_empty_enum = match scrut_ty.kind() {
|
||||
@ -490,7 +488,7 @@ fn non_exhaustive_match<'p, 'tcx>(
|
||||
format!("non-exhaustive patterns: type `{}` is non-empty", scrut_ty),
|
||||
);
|
||||
} else {
|
||||
let joined_patterns = joined_uncovered_patterns(&witnesses);
|
||||
let joined_patterns = joined_uncovered_patterns(cx, &witnesses);
|
||||
err = create_e0004(
|
||||
cx.tcx.sess,
|
||||
sp,
|
||||
@ -517,7 +515,7 @@ fn non_exhaustive_match<'p, 'tcx>(
|
||||
if (scrut_ty == cx.tcx.types.usize || scrut_ty == cx.tcx.types.isize)
|
||||
&& !is_empty_match
|
||||
&& witnesses.len() == 1
|
||||
&& is_wildcard(&witnesses[0])
|
||||
&& matches!(witnesses[0].ctor(), Constructor::NonExhaustive)
|
||||
{
|
||||
err.note(&format!(
|
||||
"`{}` does not have a fixed maximum value, \
|
||||
@ -540,33 +538,40 @@ fn non_exhaustive_match<'p, 'tcx>(
|
||||
err.emit();
|
||||
}
|
||||
|
||||
crate fn joined_uncovered_patterns(witnesses: &[super::Pat<'_>]) -> String {
|
||||
crate fn joined_uncovered_patterns<'p, 'tcx>(
|
||||
cx: &MatchCheckCtxt<'p, 'tcx>,
|
||||
witnesses: &[DeconstructedPat<'p, 'tcx>],
|
||||
) -> String {
|
||||
const LIMIT: usize = 3;
|
||||
let pat_to_str = |pat: &DeconstructedPat<'p, 'tcx>| pat.to_pat(cx).to_string();
|
||||
match witnesses {
|
||||
[] => bug!(),
|
||||
[witness] => format!("`{}`", witness),
|
||||
[witness] => format!("`{}`", witness.to_pat(cx)),
|
||||
[head @ .., tail] if head.len() < LIMIT => {
|
||||
let head: Vec<_> = head.iter().map(<_>::to_string).collect();
|
||||
format!("`{}` and `{}`", head.join("`, `"), tail)
|
||||
let head: Vec<_> = head.iter().map(pat_to_str).collect();
|
||||
format!("`{}` and `{}`", head.join("`, `"), tail.to_pat(cx))
|
||||
}
|
||||
_ => {
|
||||
let (head, tail) = witnesses.split_at(LIMIT);
|
||||
let head: Vec<_> = head.iter().map(<_>::to_string).collect();
|
||||
let head: Vec<_> = head.iter().map(pat_to_str).collect();
|
||||
format!("`{}` and {} more", head.join("`, `"), tail.len())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
crate fn pattern_not_covered_label(witnesses: &[super::Pat<'_>], joined_patterns: &str) -> String {
|
||||
crate fn pattern_not_covered_label(
|
||||
witnesses: &[DeconstructedPat<'_, '_>],
|
||||
joined_patterns: &str,
|
||||
) -> String {
|
||||
format!("pattern{} {} not covered", rustc_errors::pluralize!(witnesses.len()), joined_patterns)
|
||||
}
|
||||
|
||||
/// Point at the definition of non-covered `enum` variants.
|
||||
fn adt_defined_here(
|
||||
cx: &MatchCheckCtxt<'_, '_>,
|
||||
fn adt_defined_here<'p, 'tcx>(
|
||||
cx: &MatchCheckCtxt<'p, 'tcx>,
|
||||
err: &mut DiagnosticBuilder<'_>,
|
||||
ty: Ty<'_>,
|
||||
witnesses: &[super::Pat<'_>],
|
||||
ty: Ty<'tcx>,
|
||||
witnesses: &[DeconstructedPat<'p, 'tcx>],
|
||||
) {
|
||||
let ty = ty.peel_refs();
|
||||
if let ty::Adt(def, _) = ty.kind() {
|
||||
@ -575,57 +580,42 @@ fn adt_defined_here(
|
||||
}
|
||||
|
||||
if witnesses.len() < 4 {
|
||||
for sp in maybe_point_at_variant(ty, &witnesses) {
|
||||
for sp in maybe_point_at_variant(cx, def, witnesses.iter()) {
|
||||
err.span_label(sp, "not covered");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn maybe_point_at_variant(ty: Ty<'_>, patterns: &[super::Pat<'_>]) -> Vec<Span> {
|
||||
fn maybe_point_at_variant<'a, 'p: 'a, 'tcx: 'a>(
|
||||
cx: &MatchCheckCtxt<'p, 'tcx>,
|
||||
def: &AdtDef,
|
||||
patterns: impl Iterator<Item = &'a DeconstructedPat<'p, 'tcx>>,
|
||||
) -> Vec<Span> {
|
||||
use Constructor::*;
|
||||
let mut covered = vec![];
|
||||
if let ty::Adt(def, _) = ty.kind() {
|
||||
// Don't point at variants that have already been covered due to other patterns to avoid
|
||||
// visual clutter.
|
||||
for pattern in patterns {
|
||||
use PatKind::{AscribeUserType, Deref, Leaf, Or, Variant};
|
||||
match &*pattern.kind {
|
||||
AscribeUserType { subpattern, .. } | Deref { subpattern } => {
|
||||
covered.extend(maybe_point_at_variant(ty, slice::from_ref(&subpattern)));
|
||||
for pattern in patterns {
|
||||
if let Variant(variant_index) = pattern.ctor() {
|
||||
if let ty::Adt(this_def, _) = pattern.ty().kind() {
|
||||
if this_def.did != def.did {
|
||||
continue;
|
||||
}
|
||||
Variant { adt_def, variant_index, subpatterns, .. } if adt_def.did == def.did => {
|
||||
let sp = def.variants[*variant_index].ident.span;
|
||||
if covered.contains(&sp) {
|
||||
continue;
|
||||
}
|
||||
covered.push(sp);
|
||||
|
||||
let pats = subpatterns
|
||||
.iter()
|
||||
.map(|field_pattern| field_pattern.pattern.clone())
|
||||
.collect::<Box<[_]>>();
|
||||
covered.extend(maybe_point_at_variant(ty, &pats));
|
||||
}
|
||||
Leaf { subpatterns } => {
|
||||
let pats = subpatterns
|
||||
.iter()
|
||||
.map(|field_pattern| field_pattern.pattern.clone())
|
||||
.collect::<Box<[_]>>();
|
||||
covered.extend(maybe_point_at_variant(ty, &pats));
|
||||
}
|
||||
Or { pats } => {
|
||||
let pats = pats.iter().cloned().collect::<Box<[_]>>();
|
||||
covered.extend(maybe_point_at_variant(ty, &pats));
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
let sp = def.variants[*variant_index].ident.span;
|
||||
if covered.contains(&sp) {
|
||||
// Don't point at variants that have already been covered due to other patterns to avoid
|
||||
// visual clutter.
|
||||
continue;
|
||||
}
|
||||
covered.push(sp);
|
||||
}
|
||||
covered.extend(maybe_point_at_variant(cx, def, pattern.iter_fields()));
|
||||
}
|
||||
covered
|
||||
}
|
||||
|
||||
/// Check if a by-value binding is by-value. That is, check if the binding's type is not `Copy`.
|
||||
fn is_binding_by_move(cx: &MatchVisitor<'_, '_>, hir_id: HirId, span: Span) -> bool {
|
||||
fn is_binding_by_move(cx: &MatchVisitor<'_, '_, '_>, hir_id: HirId, span: Span) -> bool {
|
||||
!cx.typeck_results.node_type(hir_id).is_copy_modulo_regions(cx.tcx.at(span), cx.param_env)
|
||||
}
|
||||
|
||||
@ -639,7 +629,7 @@ fn is_binding_by_move(cx: &MatchVisitor<'_, '_>, hir_id: HirId, span: Span) -> b
|
||||
/// - `x @ Some(ref mut? y)`.
|
||||
///
|
||||
/// This analysis is *not* subsumed by NLL.
|
||||
fn check_borrow_conflicts_in_at_patterns(cx: &MatchVisitor<'_, '_>, pat: &Pat<'_>) {
|
||||
fn check_borrow_conflicts_in_at_patterns(cx: &MatchVisitor<'_, '_, '_>, pat: &Pat<'_>) {
|
||||
// Extract `sub` in `binding @ sub`.
|
||||
let (name, sub) = match &pat.kind {
|
||||
hir::PatKind::Binding(.., name, Some(sub)) => (*name, sub),
|
||||
|
@ -46,7 +46,7 @@ use self::Constructor::*;
|
||||
use self::SliceKind::*;
|
||||
|
||||
use super::compare_const_vals;
|
||||
use super::usefulness::{is_wildcard, MatchCheckCtxt, PatCtxt};
|
||||
use super::usefulness::{MatchCheckCtxt, PatCtxt};
|
||||
|
||||
use rustc_data_structures::captures::Captures;
|
||||
use rustc_index::vec::Idx;
|
||||
@ -62,10 +62,29 @@ use rustc_span::{Span, DUMMY_SP};
|
||||
use rustc_target::abi::{Integer, Size, VariantIdx};
|
||||
|
||||
use smallvec::{smallvec, SmallVec};
|
||||
use std::borrow::Cow;
|
||||
use std::cmp::{self, max, min, Ordering};
|
||||
use std::fmt;
|
||||
use std::iter::{once, IntoIterator};
|
||||
use std::ops::RangeInclusive;
|
||||
|
||||
/// Recursively expand this pattern into its subpatterns. Only useful for or-patterns.
fn expand_or_pat<'p, 'tcx>(pat: &'p Pat<'tcx>) -> Vec<&'p Pat<'tcx>> {
    fn expand<'p, 'tcx>(pat: &'p Pat<'tcx>, vec: &mut Vec<&'p Pat<'tcx>>) {
        if let PatKind::Or { pats } = pat.kind.as_ref() {
            for pat in pats {
                expand(pat, vec);
            }
        } else {
            vec.push(pat)
        }
    }

    let mut pats = Vec::new();
    expand(pat, &mut pats);
    pats
}
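As an aside, this helper simply flattens arbitrarily nested or-patterns into a flat list of alternatives. A self-contained sketch of the same idea with a toy pattern type, assumed purely for illustration (not the THIR `Pat`):

```rust
#[derive(Debug)]
enum Pat {
    Or(Vec<Pat>),
    Leaf(&'static str),
}

// Flatten nested or-patterns: `A | (B | C)` becomes [A, B, C].
fn expand_or_pat(pat: &Pat) -> Vec<&Pat> {
    fn expand<'p>(pat: &'p Pat, out: &mut Vec<&'p Pat>) {
        match pat {
            Pat::Or(pats) => pats.iter().for_each(|p| expand(p, out)),
            _ => out.push(pat),
        }
    }
    let mut out = Vec::new();
    expand(pat, &mut out);
    out
}

fn main() {
    let pat = Pat::Or(vec![
        Pat::Leaf("A"),
        Pat::Or(vec![Pat::Leaf("B"), Pat::Leaf("C")]),
    ]);
    println!("{:?}", expand_or_pat(&pat)); // [Leaf("A"), Leaf("B"), Leaf("C")]
}
```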
|
||||
|
||||
/// An inclusive interval, used for precise integer exhaustiveness checking.
/// `IntRange`s always store a contiguous range. This means that values are
/// encoded such that `0` encodes the minimum value for the integer,
@@ -76,9 +95,13 @@ use std::ops::RangeInclusive;
///
/// `IntRange` is never used to encode an empty range or a "range" that wraps
/// around the (offset) space: i.e., `range.lo <= range.hi`.
#[derive(Clone, Debug, PartialEq, Eq)]
#[derive(Clone, PartialEq, Eq)]
pub(super) struct IntRange {
    range: RangeInclusive<u128>,
    /// Keeps the bias used for encoding the range. It depends on the type of the range and
    /// possibly the pointer size of the current architecture. The algorithm ensures we never
    /// compare `IntRange`s with different types/architectures.
    bias: u128,
}
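To make the bias encoding concrete: for a signed N-bit integer the bias is assumed to be `1 << (N - 1)` (and `0` for unsigned types), and the `value ^ bias` mapping used later in this file sends the type's minimum value to `0`, so ranges stay contiguous in `u128` space. A toy illustration, not rustc code:

```rust
// Encode an i8 the way the bias scheme above describes: XOR with 1 << 7.
fn encode_i8(v: i8) -> u128 {
    let bias: u128 = 1 << 7;
    (v as u8 as u128) ^ bias
}

fn main() {
    assert_eq!(encode_i8(i8::MIN), 0);    // -128 encodes to 0
    assert_eq!(encode_i8(0), 128);        //    0 encodes to 128
    assert_eq!(encode_i8(i8::MAX), 255);  //  127 encodes to 255
}
```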
|
||||
|
||||
impl IntRange {
|
||||
@ -131,7 +154,7 @@ impl IntRange {
|
||||
value.try_eval_bits(tcx, param_env, ty)
|
||||
})()?;
|
||||
let val = val ^ bias;
|
||||
Some(IntRange { range: val..=val })
|
||||
Some(IntRange { range: val..=val, bias })
|
||||
} else {
|
||||
None
|
||||
}
|
||||
@ -155,7 +178,7 @@ impl IntRange {
|
||||
// This should have been caught earlier by E0030.
|
||||
bug!("malformed range pattern: {}..={}", lo, (hi - offset));
|
||||
}
|
||||
Some(IntRange { range: lo..=(hi - offset) })
|
||||
Some(IntRange { range: lo..=(hi - offset), bias })
|
||||
} else {
|
||||
None
|
||||
}
|
||||
@ -180,7 +203,7 @@ impl IntRange {
|
||||
let (lo, hi) = self.boundaries();
|
||||
let (other_lo, other_hi) = other.boundaries();
|
||||
if lo <= other_hi && other_lo <= hi {
|
||||
Some(IntRange { range: max(lo, other_lo)..=min(hi, other_hi) })
|
||||
Some(IntRange { range: max(lo, other_lo)..=min(hi, other_hi), bias: self.bias })
|
||||
} else {
|
||||
None
|
||||
}
|
||||
@ -203,10 +226,11 @@ impl IntRange {
|
||||
(lo == other_hi || hi == other_lo) && !self.is_singleton() && !other.is_singleton()
|
||||
}
|
||||
|
||||
/// Only used for displaying the range properly.
|
||||
fn to_pat<'tcx>(&self, tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Pat<'tcx> {
|
||||
let (lo, hi) = self.boundaries();
|
||||
|
||||
let bias = IntRange::signed_bias(tcx, ty);
|
||||
let bias = self.bias;
|
||||
let (lo, hi) = (lo ^ bias, hi ^ bias);
|
||||
|
||||
let env = ty::ParamEnv::empty().and(ty);
|
||||
@ -223,10 +247,10 @@ impl IntRange {
|
||||
}
|
||||
|
||||
/// Lint on likely incorrect range patterns (#63987)
|
||||
pub(super) fn lint_overlapping_range_endpoints<'a, 'tcx: 'a>(
|
||||
pub(super) fn lint_overlapping_range_endpoints<'a, 'p: 'a, 'tcx: 'a>(
|
||||
&self,
|
||||
pcx: PatCtxt<'_, '_, 'tcx>,
|
||||
ctors: impl Iterator<Item = (&'a Constructor<'tcx>, Span)>,
|
||||
pcx: PatCtxt<'_, 'p, 'tcx>,
|
||||
pats: impl Iterator<Item = &'a DeconstructedPat<'p, 'tcx>>,
|
||||
column_count: usize,
|
||||
hir_id: HirId,
|
||||
) {
|
||||
@ -248,8 +272,8 @@ impl IntRange {
|
||||
return;
|
||||
}
|
||||
|
||||
let overlaps: Vec<_> = ctors
|
||||
.filter_map(|(ctor, span)| Some((ctor.as_int_range()?, span)))
|
||||
let overlaps: Vec<_> = pats
|
||||
.filter_map(|pat| Some((pat.ctor().as_int_range()?, pat.span())))
|
||||
.filter(|(range, _)| self.suspicious_intersection(range))
|
||||
.map(|(range, span)| (self.intersection(&range).unwrap(), span))
|
||||
.collect();
|
||||
@ -291,6 +315,19 @@ impl IntRange {
|
||||
}
|
||||
}
|
||||
|
||||
/// Note: this is often not what we want: e.g. `false` is converted into the range `0..=0` and
|
||||
/// would be displayed as such. To render properly, convert to a pattern first.
|
||||
impl fmt::Debug for IntRange {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let (lo, hi) = self.boundaries();
|
||||
let bias = self.bias;
|
||||
let (lo, hi) = (lo ^ bias, hi ^ bias);
|
||||
write!(f, "{}", lo)?;
|
||||
write!(f, "{}", RangeEnd::Included)?;
|
||||
write!(f, "{}", hi)
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents a border between 2 integers. Because the intervals spanning borders must be able to
|
||||
/// cover every integer, we need to be able to represent 2^128 + 1 such borders.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
|
||||
@ -375,13 +412,13 @@ impl SplitIntRange {
|
||||
// Skip duplicates.
|
||||
.filter(|(prev_border, border)| prev_border != border)
|
||||
// Finally, convert to ranges.
|
||||
.map(|(prev_border, border)| {
|
||||
.map(move |(prev_border, border)| {
|
||||
let range = match (prev_border, border) {
|
||||
(JustBefore(n), JustBefore(m)) if n < m => n..=(m - 1),
|
||||
(JustBefore(n), AfterMax) => n..=u128::MAX,
|
||||
_ => unreachable!(), // Ruled out by the sorting and filtering we did
|
||||
};
|
||||
IntRange { range }
|
||||
IntRange { range, bias: self.range.bias }
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -611,6 +648,8 @@ pub(super) enum Constructor<'tcx> {
|
||||
Missing { nonexhaustive_enum_missing_real_variants: bool },
|
||||
/// Wildcard pattern.
|
||||
Wildcard,
|
||||
/// Or-pattern.
|
||||
Or,
|
||||
}
|
||||
|
||||
impl<'tcx> Constructor<'tcx> {
|
||||
@ -647,61 +686,34 @@ impl<'tcx> Constructor<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Determines the constructor that the given pattern can be specialized to.
|
||||
pub(super) fn from_pat<'p>(cx: &MatchCheckCtxt<'p, 'tcx>, pat: &'p Pat<'tcx>) -> Self {
|
||||
match pat.kind.as_ref() {
|
||||
PatKind::AscribeUserType { .. } => bug!(), // Handled by `expand_pattern`
|
||||
PatKind::Binding { .. } | PatKind::Wild => Wildcard,
|
||||
PatKind::Leaf { .. } | PatKind::Deref { .. } => Single,
|
||||
&PatKind::Variant { variant_index, .. } => Variant(variant_index),
|
||||
PatKind::Constant { value } => {
|
||||
if let Some(int_range) = IntRange::from_const(cx.tcx, cx.param_env, value) {
|
||||
IntRange(int_range)
|
||||
} else {
|
||||
match pat.ty.kind() {
|
||||
ty::Float(_) => FloatRange(value, value, RangeEnd::Included),
|
||||
// We make `&str` constants behave like `Deref` patterns, to be compatible
|
||||
// with other `Deref` patterns. See also `Fields::extract_pattern_arguments`.
|
||||
ty::Ref(_, t, _) if t.is_str() => Single,
|
||||
// In truth this carries a constant of type `&str`.
|
||||
ty::Str => Str(value),
|
||||
// All constants that can be structurally matched have already been expanded
|
||||
// into the corresponding `Pat`s by `const_to_pat`. Constants that remain are
|
||||
// opaque.
|
||||
_ => Opaque,
|
||||
/// The number of fields for this constructor. This must be kept in sync with
|
||||
/// `Fields::wildcards`.
|
||||
pub(super) fn arity(&self, pcx: PatCtxt<'_, '_, 'tcx>) -> usize {
|
||||
match self {
|
||||
Single | Variant(_) => match pcx.ty.kind() {
|
||||
ty::Tuple(fs) => fs.len(),
|
||||
ty::Ref(..) => 1,
|
||||
ty::Adt(adt, ..) => {
|
||||
if adt.is_box() {
|
||||
// The only legal patterns of type `Box` (outside `std`) are `_` and box
|
||||
// patterns. If we're here we can assume this is a box pattern.
|
||||
1
|
||||
} else {
|
||||
let variant = &adt.variants[self.variant_index_for_adt(adt)];
|
||||
Fields::list_variant_nonhidden_fields(pcx.cx, pcx.ty, variant).count()
|
||||
}
|
||||
}
|
||||
}
|
||||
&PatKind::Range(PatRange { lo, hi, end }) => {
|
||||
let ty = lo.ty;
|
||||
if let Some(int_range) = IntRange::from_range(
|
||||
cx.tcx,
|
||||
lo.eval_bits(cx.tcx, cx.param_env, lo.ty),
|
||||
hi.eval_bits(cx.tcx, cx.param_env, hi.ty),
|
||||
ty,
|
||||
&end,
|
||||
) {
|
||||
IntRange(int_range)
|
||||
} else {
|
||||
FloatRange(lo, hi, end)
|
||||
}
|
||||
}
|
||||
PatKind::Array { prefix, slice, suffix } | PatKind::Slice { prefix, slice, suffix } => {
|
||||
let array_len = match pat.ty.kind() {
|
||||
ty::Array(_, length) => Some(length.eval_usize(cx.tcx, cx.param_env) as usize),
|
||||
ty::Slice(_) => None,
|
||||
_ => span_bug!(pat.span, "bad ty {:?} for slice pattern", pat.ty),
|
||||
};
|
||||
let prefix = prefix.len();
|
||||
let suffix = suffix.len();
|
||||
let kind = if slice.is_some() {
|
||||
VarLen(prefix, suffix)
|
||||
} else {
|
||||
FixedLen(prefix + suffix)
|
||||
};
|
||||
Slice(Slice::new(array_len, kind))
|
||||
}
|
||||
PatKind::Or { .. } => bug!("Or-pattern should have been expanded earlier on."),
|
||||
_ => bug!("Unexpected type for `Single` constructor: {:?}", pcx.ty),
|
||||
},
|
||||
Slice(slice) => slice.arity(),
|
||||
Str(..)
|
||||
| FloatRange(..)
|
||||
| IntRange(..)
|
||||
| NonExhaustive
|
||||
| Opaque
|
||||
| Missing { .. }
|
||||
| Wildcard => 0,
|
||||
Or => bug!("The `Or` constructor doesn't have a fixed arity"),
|
||||
}
|
||||
}
|
||||
|
||||
@ -824,7 +836,7 @@ impl<'tcx> Constructor<'tcx> {
|
||||
match self {
|
||||
// If `self` is `Single`, `used_ctors` cannot contain anything else than `Single`s.
|
||||
Single => !used_ctors.is_empty(),
|
||||
Variant(_) => used_ctors.iter().any(|c| c == self),
|
||||
Variant(vid) => used_ctors.iter().any(|c| matches!(c, Variant(i) if i == vid)),
|
||||
IntRange(range) => used_ctors
|
||||
.iter()
|
||||
.filter_map(|c| c.as_int_range())
|
||||
@ -835,7 +847,7 @@ impl<'tcx> Constructor<'tcx> {
|
||||
.any(|other| slice.is_covered_by(other)),
|
||||
// This constructor is never covered by anything else
|
||||
NonExhaustive => false,
|
||||
Str(..) | FloatRange(..) | Opaque | Missing { .. } | Wildcard => {
|
||||
Str(..) | FloatRange(..) | Opaque | Missing { .. } | Wildcard | Or => {
|
||||
span_bug!(pcx.span, "found unexpected ctor in all_ctors: {:?}", self)
|
||||
}
|
||||
}
|
||||
@ -1097,7 +1109,7 @@ impl<'tcx> SplitWildcard<'tcx> {
|
||||
/// `index_with_declared_idx`.
|
||||
#[derive(Debug, Clone)]
|
||||
pub(super) struct Fields<'p, 'tcx> {
|
||||
fields: SmallVec<[&'p Pat<'tcx>; 2]>,
|
||||
fields: SmallVec<[&'p DeconstructedPat<'p, 'tcx>; 2]>,
|
||||
}
|
||||
|
||||
impl<'p, 'tcx> Fields<'p, 'tcx> {
|
||||
@ -1105,19 +1117,30 @@ impl<'p, 'tcx> Fields<'p, 'tcx> {
|
||||
Fields { fields: SmallVec::new() }
|
||||
}
|
||||
|
||||
fn from_iter(
|
||||
fn singleton(cx: &MatchCheckCtxt<'p, 'tcx>, field: DeconstructedPat<'p, 'tcx>) -> Self {
|
||||
let field: &_ = cx.pattern_arena.alloc(field);
|
||||
Fields { fields: smallvec![field] }
|
||||
}
|
||||
|
||||
pub(super) fn from_iter(
|
||||
cx: &MatchCheckCtxt<'p, 'tcx>,
|
||||
fields: impl IntoIterator<Item = Pat<'tcx>>,
|
||||
fields: impl IntoIterator<Item = DeconstructedPat<'p, 'tcx>>,
|
||||
) -> Self {
|
||||
let fields: &_ = cx.pattern_arena.alloc_from_iter(fields);
|
||||
Fields { fields: fields.iter().collect() }
|
||||
Fields { fields: fields.into_iter().collect() }
|
||||
}
|
||||
|
||||
pub(super) fn from_ref_iter(
|
||||
fields: impl IntoIterator<Item = &'p DeconstructedPat<'p, 'tcx>>,
|
||||
) -> Self {
|
||||
Fields { fields: fields.into_iter().collect() }
|
||||
}
|
||||
|
||||
fn wildcards_from_tys(
|
||||
cx: &MatchCheckCtxt<'p, 'tcx>,
|
||||
tys: impl IntoIterator<Item = Ty<'tcx>>,
|
||||
) -> Self {
|
||||
Fields::from_iter(cx, tys.into_iter().map(Pat::wildcard_from_ty))
|
||||
Fields::from_iter(cx, tys.into_iter().map(DeconstructedPat::wildcard))
|
||||
}
|
||||
|
||||
// In the cases of either a `#[non_exhaustive]` field list or a non-public field, we hide
|
||||
@ -1148,7 +1171,8 @@ impl<'p, 'tcx> Fields<'p, 'tcx> {
|
||||
})
|
||||
}
|
||||
|
||||
/// Creates a new list of wildcard fields for a given constructor.
|
||||
/// Creates a new list of wildcard fields for a given constructor. The result must have a
|
||||
/// length of `constructor.arity()`.
|
||||
pub(super) fn wildcards(
|
||||
cx: &MatchCheckCtxt<'p, 'tcx>,
|
||||
ty: Ty<'tcx>,
|
||||
@ -1156,15 +1180,12 @@ impl<'p, 'tcx> Fields<'p, 'tcx> {
|
||||
) -> Self {
|
||||
let ret = match constructor {
|
||||
Single | Variant(_) => match ty.kind() {
|
||||
ty::Tuple(ref fs) => {
|
||||
Fields::wildcards_from_tys(cx, fs.into_iter().map(|ty| ty.expect_ty()))
|
||||
}
|
||||
ty::Tuple(fs) => Fields::wildcards_from_tys(cx, fs.iter().map(|ty| ty.expect_ty())),
|
||||
ty::Ref(_, rty, _) => Fields::wildcards_from_tys(cx, once(*rty)),
|
||||
ty::Adt(adt, substs) => {
|
||||
if adt.is_box() {
|
||||
// Use T as the sub pattern type of Box<T>.
|
||||
// FIXME(Nadrieril): This is to make box-patterns work even though `Box` is
|
||||
// actually a struct with 2 private fields. Hacky.
|
||||
// The only legal patterns of type `Box` (outside `std`) are `_` and box
|
||||
// patterns. If we're here we can assume this is a box pattern.
|
||||
Fields::wildcards_from_tys(cx, once(substs.type_at(0)))
|
||||
} else {
|
||||
let variant = &adt.variants[constructor.variant_index_for_adt(adt)];
|
||||
@ -1189,47 +1210,193 @@ impl<'p, 'tcx> Fields<'p, 'tcx> {
|
||||
| Opaque
|
||||
| Missing { .. }
|
||||
| Wildcard => Fields::empty(),
|
||||
Or => {
|
||||
bug!("called `Fields::wildcards` on an `Or` ctor")
|
||||
}
|
||||
};
|
||||
debug!("Fields::wildcards({:?}, {:?}) = {:#?}", constructor, ty, ret);
|
||||
ret
|
||||
}
|
||||
|
||||
/// Returns the number of patterns. This is the same as the arity of the constructor used to
|
||||
/// construct `self`.
|
||||
pub(super) fn len(&self) -> usize {
|
||||
self.fields.len()
|
||||
}
|
||||
|
||||
/// Returns the list of patterns.
|
||||
pub(super) fn iter_patterns<'a>(
|
||||
&'a self,
|
||||
) -> impl Iterator<Item = &'p Pat<'tcx>> + Captures<'a> {
|
||||
) -> impl Iterator<Item = &'p DeconstructedPat<'p, 'tcx>> + Captures<'a> {
|
||||
self.fields.iter().copied()
|
||||
}
|
||||
}
|
||||
|
||||
/// Apply a constructor to a list of patterns, yielding a new pattern. `self`
|
||||
/// must have as many elements as this constructor's arity.
|
||||
///
|
||||
/// This is roughly the inverse of `specialize_constructor`.
|
||||
///
|
||||
/// Examples:
|
||||
///
|
||||
/// ```text
|
||||
/// ctor: `Constructor::Single`
|
||||
/// ty: `Foo(u32, u32, u32)`
|
||||
/// self: `[10, 20, _]`
|
||||
/// returns `Foo(10, 20, _)`
|
||||
///
|
||||
/// ctor: `Constructor::Variant(Option::Some)`
|
||||
/// ty: `Option<bool>`
|
||||
/// self: `[false]`
|
||||
/// returns `Some(false)`
|
||||
/// ```
|
||||
pub(super) fn apply(self, pcx: PatCtxt<'_, 'p, 'tcx>, ctor: &Constructor<'tcx>) -> Pat<'tcx> {
|
||||
let mut subpatterns = self.iter_patterns().cloned();
|
||||
#[derive(Clone)]
pub(crate) struct DeconstructedPat<'p, 'tcx> {
    ctor: Constructor<'tcx>,
    fields: Fields<'p, 'tcx>,
    ty: Ty<'tcx>,
    span: Span,
}
|
||||
|
||||
let pat = match ctor {
|
||||
Single | Variant(_) => match pcx.ty.kind() {
|
||||
impl<'p, 'tcx> DeconstructedPat<'p, 'tcx> {
|
||||
pub(super) fn wildcard(ty: Ty<'tcx>) -> Self {
|
||||
Self::new(Wildcard, Fields::empty(), ty)
|
||||
}
|
||||
|
||||
pub(super) fn new(ctor: Constructor<'tcx>, fields: Fields<'p, 'tcx>, ty: Ty<'tcx>) -> Self {
|
||||
DeconstructedPat { ctor, fields, ty, span: DUMMY_SP }
|
||||
}
|
||||
|
||||
pub(crate) fn from_pat(cx: &MatchCheckCtxt<'p, 'tcx>, pat: &Pat<'tcx>) -> Self {
|
||||
let mkpat = |pat| DeconstructedPat::from_pat(cx, pat);
|
||||
let allocpat = |pat| &*cx.pattern_arena.alloc(mkpat(pat));
|
||||
let ctor;
|
||||
let mut fields;
|
||||
match pat.kind.as_ref() {
|
||||
PatKind::AscribeUserType { subpattern, .. } => return mkpat(subpattern),
|
||||
PatKind::Binding { subpattern: Some(subpat), .. } => return mkpat(subpat),
|
||||
PatKind::Binding { subpattern: None, .. } | PatKind::Wild => {
|
||||
ctor = Wildcard;
|
||||
fields = Fields::empty();
|
||||
}
|
||||
PatKind::Deref { subpattern } => {
|
||||
ctor = Single;
|
||||
fields = Fields::singleton(cx, mkpat(subpattern));
|
||||
}
|
||||
PatKind::Leaf { subpatterns } | PatKind::Variant { subpatterns, .. } => {
|
||||
match pat.ty.kind() {
|
||||
ty::Tuple(fs) => {
|
||||
ctor = Single;
|
||||
fields = Fields::wildcards_from_tys(cx, fs.iter().map(|ty| ty.expect_ty()));
|
||||
for pat in subpatterns {
|
||||
fields.fields[pat.field.index()] = allocpat(&pat.pattern);
|
||||
}
|
||||
}
|
||||
ty::Adt(adt, substs) if adt.is_box() => {
|
||||
// The only legal patterns of type `Box` (outside `std`) are `_` and box
|
||||
// patterns. If we're here we can assume this is a box pattern.
|
||||
// FIXME(Nadrieril): A `Box` can in theory be matched either with `Box(_,
|
||||
// _)` or a box pattern. As a hack to avoid an ICE with the former, we
|
||||
// ignore other fields than the first one. This will trigger an error later
|
||||
// anyway.
|
||||
// See https://github.com/rust-lang/rust/issues/82772 ,
|
||||
// explanation: https://github.com/rust-lang/rust/pull/82789#issuecomment-796921977
|
||||
// The problem is that we can't know from the type whether we'll match
|
||||
// normally or through box-patterns. We'll have to figure out a proper
|
||||
// solution when we introduce generalized deref patterns. Also need to
|
||||
// prevent mixing of those two options.
|
||||
let pat = subpatterns.into_iter().find(|pat| pat.field.index() == 0);
|
||||
let pat = if let Some(pat) = pat {
|
||||
mkpat(&pat.pattern)
|
||||
} else {
|
||||
DeconstructedPat::wildcard(substs.type_at(0))
|
||||
};
|
||||
ctor = Single;
|
||||
fields = Fields::singleton(cx, pat);
|
||||
}
|
||||
ty::Adt(adt, _) => {
|
||||
ctor = match pat.kind.as_ref() {
|
||||
PatKind::Leaf { .. } => Single,
|
||||
PatKind::Variant { variant_index, .. } => Variant(*variant_index),
|
||||
_ => bug!(),
|
||||
};
|
||||
let variant = &adt.variants[ctor.variant_index_for_adt(adt)];
|
||||
// For each field in the variant, we store the relevant index into `self.fields` if any.
|
||||
let mut field_id_to_id: Vec<Option<usize>> =
|
||||
(0..variant.fields.len()).map(|_| None).collect();
|
||||
let tys = Fields::list_variant_nonhidden_fields(cx, pat.ty, variant)
|
||||
.enumerate()
|
||||
.map(|(i, (field, ty))| {
|
||||
field_id_to_id[field.index()] = Some(i);
|
||||
ty
|
||||
});
|
||||
fields = Fields::wildcards_from_tys(cx, tys);
|
||||
for pat in subpatterns {
|
||||
if let Some(i) = field_id_to_id[pat.field.index()] {
|
||||
fields.fields[i] = allocpat(&pat.pattern);
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => bug!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, pat.ty),
|
||||
}
|
||||
}
|
||||
PatKind::Constant { value } => {
|
||||
if let Some(int_range) = IntRange::from_const(cx.tcx, cx.param_env, value) {
|
||||
ctor = IntRange(int_range);
|
||||
fields = Fields::empty();
|
||||
} else {
|
||||
match pat.ty.kind() {
|
||||
ty::Float(_) => {
|
||||
ctor = FloatRange(value, value, RangeEnd::Included);
|
||||
fields = Fields::empty();
|
||||
}
|
||||
ty::Ref(_, t, _) if t.is_str() => {
|
||||
// We want a `&str` constant to behave like a `Deref` pattern, to be compatible
|
||||
// with other `Deref` patterns. This could have been done in `const_to_pat`,
|
||||
// but that causes issues with the rest of the matching code.
|
||||
// So here, the constructor for a `"foo"` pattern is `&` (represented by
|
||||
// `Single`), and has one field. That field has constructor `Str(value)` and no
|
||||
// fields.
|
||||
let subpattern = DeconstructedPat {
|
||||
ctor: Str(value),
|
||||
fields: Fields::empty(),
|
||||
ty: t, // `t` is `str`, not `&str`
|
||||
span: pat.span,
|
||||
};
|
||||
ctor = Single;
|
||||
fields = Fields::singleton(cx, subpattern)
|
||||
}
|
||||
// All constants that can be structurally matched have already been expanded
|
||||
// into the corresponding `Pat`s by `const_to_pat`. Constants that remain are
|
||||
// opaque.
|
||||
_ => {
|
||||
ctor = Opaque;
|
||||
fields = Fields::empty();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
&PatKind::Range(PatRange { lo, hi, end }) => {
|
||||
let ty = lo.ty;
|
||||
ctor = if let Some(int_range) = IntRange::from_range(
|
||||
cx.tcx,
|
||||
lo.eval_bits(cx.tcx, cx.param_env, lo.ty),
|
||||
hi.eval_bits(cx.tcx, cx.param_env, hi.ty),
|
||||
ty,
|
||||
&end,
|
||||
) {
|
||||
IntRange(int_range)
|
||||
} else {
|
||||
FloatRange(lo, hi, end)
|
||||
};
|
||||
fields = Fields::empty();
|
||||
}
|
||||
PatKind::Array { prefix, slice, suffix } | PatKind::Slice { prefix, slice, suffix } => {
|
||||
let array_len = match pat.ty.kind() {
|
||||
ty::Array(_, length) => Some(length.eval_usize(cx.tcx, cx.param_env) as usize),
|
||||
ty::Slice(_) => None,
|
||||
_ => span_bug!(pat.span, "bad ty {:?} for slice pattern", pat.ty),
|
||||
};
|
||||
let kind = if slice.is_some() {
|
||||
VarLen(prefix.len(), suffix.len())
|
||||
} else {
|
||||
FixedLen(prefix.len() + suffix.len())
|
||||
};
|
||||
ctor = Slice(Slice::new(array_len, kind));
|
||||
fields = Fields::from_iter(cx, prefix.iter().chain(suffix).map(mkpat));
|
||||
}
|
||||
PatKind::Or { .. } => {
|
||||
ctor = Or;
|
||||
let pats = expand_or_pat(pat);
|
||||
fields = Fields::from_iter(cx, pats.into_iter().map(mkpat));
|
||||
}
|
||||
}
|
||||
DeconstructedPat { ctor, fields, ty: pat.ty, span: pat.span }
|
||||
}
|
||||
|
||||
pub(crate) fn to_pat(&self, cx: &MatchCheckCtxt<'p, 'tcx>) -> Pat<'tcx> {
|
||||
let is_wildcard = |pat: &Pat<'_>| {
|
||||
matches!(*pat.kind, PatKind::Binding { subpattern: None, .. } | PatKind::Wild)
|
||||
};
|
||||
let mut subpatterns = self.iter_fields().map(|p| p.to_pat(cx));
|
||||
let pat = match &self.ctor {
|
||||
Single | Variant(_) => match self.ty.kind() {
|
||||
ty::Tuple(..) => PatKind::Leaf {
|
||||
subpatterns: subpatterns
|
||||
.enumerate()
|
||||
@ -1242,22 +1409,16 @@ impl<'p, 'tcx> Fields<'p, 'tcx> {
|
||||
// the pattern is a box pattern.
|
||||
PatKind::Deref { subpattern: subpatterns.next().unwrap() }
|
||||
}
|
||||
ty::Adt(adt, substs) => {
|
||||
let variant_index = ctor.variant_index_for_adt(adt);
|
||||
let variant = &adt.variants[variant_index];
|
||||
let subpatterns =
|
||||
Fields::list_variant_nonhidden_fields(pcx.cx, pcx.ty, variant)
|
||||
.zip(subpatterns)
|
||||
.map(|((field, _ty), pattern)| FieldPat { field, pattern })
|
||||
.collect();
|
||||
ty::Adt(adt_def, substs) => {
|
||||
let variant_index = self.ctor.variant_index_for_adt(adt_def);
|
||||
let variant = &adt_def.variants[variant_index];
|
||||
let subpatterns = Fields::list_variant_nonhidden_fields(cx, self.ty, variant)
|
||||
.zip(subpatterns)
|
||||
.map(|((field, _ty), pattern)| FieldPat { field, pattern })
|
||||
.collect();
|
||||
|
||||
if adt.is_enum() {
|
||||
PatKind::Variant {
|
||||
adt_def: adt,
|
||||
substs,
|
||||
variant_index: ctor.variant_index_for_adt(adt),
|
||||
subpatterns,
|
||||
}
|
||||
if adt_def.is_enum() {
|
||||
PatKind::Variant { adt_def, substs, variant_index, subpatterns }
|
||||
} else {
|
||||
PatKind::Leaf { subpatterns }
|
||||
}
|
||||
@ -1265,150 +1426,222 @@ impl<'p, 'tcx> Fields<'p, 'tcx> {
|
||||
// Note: given the expansion of `&str` patterns done in `expand_pattern`, we should
|
||||
// be careful to reconstruct the correct constant pattern here. However a string
|
||||
// literal pattern will never be reported as a non-exhaustiveness witness, so we
|
||||
// can ignore this issue.
|
||||
// ignore this issue.
|
||||
ty::Ref(..) => PatKind::Deref { subpattern: subpatterns.next().unwrap() },
|
||||
_ => bug!("unexpected ctor for type {:?} {:?}", ctor, pcx.ty),
|
||||
_ => bug!("unexpected ctor for type {:?} {:?}", self.ctor, self.ty),
|
||||
},
|
||||
Slice(slice) => match slice.kind {
|
||||
FixedLen(_) => {
|
||||
PatKind::Slice { prefix: subpatterns.collect(), slice: None, suffix: vec![] }
|
||||
}
|
||||
VarLen(prefix, _) => {
|
||||
let mut prefix: Vec<_> = subpatterns.by_ref().take(prefix).collect();
|
||||
if slice.array_len.is_some() {
|
||||
// Improves diagnostics a bit: if the type is a known-size array, instead
|
||||
// of reporting `[x, _, .., _, y]`, we prefer to report `[x, .., y]`.
|
||||
// This is incorrect if the size is not known, since `[_, ..]` captures
|
||||
// arrays of lengths `>= 1` whereas `[..]` captures any length.
|
||||
while !prefix.is_empty() && is_wildcard(prefix.last().unwrap()) {
|
||||
prefix.pop();
|
||||
}
|
||||
}
|
||||
let suffix: Vec<_> = if slice.array_len.is_some() {
|
||||
// Same as above.
|
||||
subpatterns.skip_while(is_wildcard).collect()
|
||||
} else {
|
||||
subpatterns.collect()
|
||||
};
|
||||
let wild = Pat::wildcard_from_ty(pcx.ty);
|
||||
PatKind::Slice { prefix, slice: Some(wild), suffix }
|
||||
}
|
||||
},
|
||||
&Str(value) => PatKind::Constant { value },
|
||||
&FloatRange(lo, hi, end) => PatKind::Range(PatRange { lo, hi, end }),
|
||||
IntRange(range) => return range.to_pat(pcx.cx.tcx, pcx.ty),
|
||||
NonExhaustive => PatKind::Wild,
|
||||
Wildcard => return Pat::wildcard_from_ty(pcx.ty),
|
||||
Opaque => bug!("we should not try to apply an opaque constructor"),
|
||||
Missing { .. } => bug!(
|
||||
"trying to apply the `Missing` constructor; this should have been done in `apply_constructors`"
|
||||
),
|
||||
};
|
||||
|
||||
Pat { ty: pcx.ty, span: DUMMY_SP, kind: Box::new(pat) }
|
||||
}
|
||||
|
||||
/// Replaces contained fields with the given list of patterns. There must be `len()` patterns
|
||||
/// in `pats`.
|
||||
pub(super) fn replace_fields(
|
||||
self,
|
||||
cx: &MatchCheckCtxt<'p, 'tcx>,
|
||||
pats: impl IntoIterator<Item = Pat<'tcx>>,
|
||||
) -> Self {
|
||||
Self::from_iter(cx, pats)
|
||||
}
|
||||
|
||||
/// Replaces contained fields with the arguments of the given pattern. Only use on a pattern
|
||||
/// that is compatible with the constructor used to build `self`.
|
||||
/// This is meant to be used on the result of `Fields::wildcards()`. See the comment above
|
||||
/// `Fields` for details
|
||||
/// This is guaranteed to preserve the number of patterns in `self`.
|
||||
pub(super) fn extract_pattern_arguments(
|
||||
mut self,
|
||||
cx: &MatchCheckCtxt<'p, 'tcx>,
|
||||
pat: &'p Pat<'tcx>,
|
||||
) -> Self {
|
||||
match pat.kind.as_ref() {
|
||||
PatKind::Deref { subpattern } => {
|
||||
assert_eq!(self.len(), 1);
|
||||
self.fields[0] = subpattern;
|
||||
}
|
||||
PatKind::Leaf { subpatterns } | PatKind::Variant { subpatterns, .. } => {
|
||||
match pat.ty.kind() {
|
||||
ty::Adt(adt, _) if adt.is_box() => {
|
||||
// FIXME(Nadrieril): A `Box` can in theory be matched either with `Box(_,
|
||||
// _)` or a box pattern. As a hack to avoid an ICE with the former, we
|
||||
// ignore other fields than the first one. This will trigger an error later
|
||||
// anyway.
|
||||
// See https://github.com/rust-lang/rust/issues/82772 ,
|
||||
// explanation: https://github.com/rust-lang/rust/pull/82789#issuecomment-796921977
|
||||
// The problem is that we can't know from the type whether we'll match
|
||||
// normally or through box-patterns. We'll have to figure out a proper
|
||||
// solution when we introduce generalized deref patterns. Also need to
|
||||
// prevent mixing of those two options.
|
||||
assert_eq!(self.len(), 1);
|
||||
let pat = subpatterns.into_iter().find(|pat| pat.field.index() == 0);
|
||||
if let Some(pat) = pat {
|
||||
self.fields[0] = &pat.pattern;
|
||||
}
|
||||
}
|
||||
ty::Adt(adt, _) => {
|
||||
let variant_index = match pat.kind.as_ref() {
|
||||
PatKind::Leaf { .. } => VariantIdx::new(0),
|
||||
PatKind::Variant { variant_index, .. } => *variant_index,
|
||||
_ => bug!(),
|
||||
};
|
||||
let variant = &adt.variants[variant_index];
|
||||
// For each field in the variant, we store the relevant index into `self.fields` if any.
|
||||
let mut field_id_to_id: Vec<Option<usize>> =
|
||||
(0..variant.fields.len()).map(|_| None).collect();
|
||||
for (i, (field, _ty)) in
|
||||
Fields::list_variant_nonhidden_fields(cx, pat.ty, variant).enumerate()
|
||||
{
|
||||
field_id_to_id[field.index()] = Some(i);
|
||||
}
|
||||
for pat in subpatterns {
|
||||
if let Some(i) = field_id_to_id[pat.field.index()] {
|
||||
self.fields[i] = &pat.pattern;
|
||||
Slice(slice) => {
|
||||
match slice.kind {
|
||||
FixedLen(_) => PatKind::Slice {
|
||||
prefix: subpatterns.collect(),
|
||||
slice: None,
|
||||
suffix: vec![],
|
||||
},
|
||||
VarLen(prefix, _) => {
|
||||
let mut subpatterns = subpatterns.peekable();
|
||||
let mut prefix: Vec<_> = subpatterns.by_ref().take(prefix).collect();
|
||||
if slice.array_len.is_some() {
|
||||
// Improves diagnostics a bit: if the type is a known-size array, instead
|
||||
// of reporting `[x, _, .., _, y]`, we prefer to report `[x, .., y]`.
|
||||
// This is incorrect if the size is not known, since `[_, ..]` captures
|
||||
// arrays of lengths `>= 1` whereas `[..]` captures any length.
|
||||
while !prefix.is_empty() && is_wildcard(prefix.last().unwrap()) {
|
||||
prefix.pop();
|
||||
}
|
||||
while subpatterns.peek().is_some()
|
||||
&& is_wildcard(subpatterns.peek().unwrap())
|
||||
{
|
||||
subpatterns.next();
|
||||
}
|
||||
}
|
||||
let suffix: Vec<_> = subpatterns.collect();
|
||||
let wild = Pat::wildcard_from_ty(self.ty);
|
||||
PatKind::Slice { prefix, slice: Some(wild), suffix }
|
||||
}
|
||||
_ => {
|
||||
for pat in subpatterns {
|
||||
self.fields[pat.field.index()] = &pat.pattern;
|
||||
}
|
||||
}
|
||||
&Str(value) => PatKind::Constant { value },
|
||||
&FloatRange(lo, hi, end) => PatKind::Range(PatRange { lo, hi, end }),
|
||||
IntRange(range) => return range.to_pat(cx.tcx, self.ty),
|
||||
Wildcard | NonExhaustive => PatKind::Wild,
|
||||
Missing { .. } => bug!(
|
||||
"trying to convert a `Missing` constructor into a `Pat`; this is probably a bug,
|
||||
`Missing` should have been processed in `apply_constructors`"
|
||||
),
|
||||
Opaque | Or => {
|
||||
bug!("can't convert to pattern: {:?}", self)
|
||||
}
|
||||
};
|
||||
|
||||
Pat { ty: self.ty, span: DUMMY_SP, kind: Box::new(pat) }
|
||||
}
|
||||
|
||||
/// Construct a pattern that matches everything that starts with this constructor.
|
||||
// For example, if `ctor` is a `Constructor::Variant` for `Option::Some`, we get the pattern
|
||||
// `Some(_)`.
|
||||
pub(super) fn wild_from_ctor(pcx: PatCtxt<'_, 'p, 'tcx>, ctor: Constructor<'tcx>) -> Self {
|
||||
let fields = Fields::wildcards(pcx.cx, pcx.ty, &ctor);
|
||||
DeconstructedPat::new(ctor, fields, pcx.ty)
|
||||
}
|
||||
|
||||
pub(super) fn is_or_pat(&self) -> bool {
|
||||
matches!(self.ctor, Or)
|
||||
}
|
||||
|
||||
pub(super) fn ctor(&self) -> &Constructor<'tcx> {
|
||||
&self.ctor
|
||||
}
|
||||
pub(super) fn ty(&self) -> Ty<'tcx> {
|
||||
self.ty
|
||||
}
|
||||
pub(super) fn span(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
|
||||
pub(super) fn iter_fields<'a>(
|
||||
&'a self,
|
||||
) -> impl Iterator<Item = &'p DeconstructedPat<'p, 'tcx>> + Captures<'a> {
|
||||
self.fields.iter_patterns()
|
||||
}
|
||||
|
||||
/// Specialize this pattern with a constructor.
|
||||
/// `other_ctor` can be different from `self.ctor`, but must be covered by it.
|
||||
pub(super) fn specialize<'a>(
|
||||
&'a self,
|
||||
cx: &MatchCheckCtxt<'p, 'tcx>,
|
||||
other_ctor: &Constructor<'tcx>,
|
||||
) -> Cow<'a, Fields<'p, 'tcx>> {
|
||||
match (&self.ctor, other_ctor) {
|
||||
(Wildcard, _) => {
|
||||
// We return a wildcard for each field of `other_ctor`.
|
||||
Cow::Owned(Fields::wildcards(cx, self.ty, other_ctor))
|
||||
}
|
||||
(Slice(self_slice), Slice(other_slice))
|
||||
if self_slice.arity() != other_slice.arity() =>
|
||||
{
|
||||
// The only tricky case: two slices of different arity. Since `self_slice` covers
|
||||
// `other_slice`, `self_slice` must be `VarLen`, i.e. of the form
|
||||
// `[prefix, .., suffix]`. Moreover `other_slice` is guaranteed to have a larger
|
||||
// arity. We fill the middle part with enough wildcards to reach the length of the
|
||||
// new, larger slice.
|
||||
match self_slice.kind {
|
||||
FixedLen(_) => bug!("{:?} doesn't cover {:?}", self_slice, other_slice),
|
||||
VarLen(prefix, suffix) => {
|
||||
let inner_ty = match *self.ty.kind() {
|
||||
ty::Slice(ty) | ty::Array(ty, _) => ty,
|
||||
_ => bug!("bad slice pattern {:?} {:?}", self.ctor, self.ty),
|
||||
};
|
||||
let prefix = self.fields.fields[..prefix].iter().copied();
|
||||
let suffix =
|
||||
self.fields.fields[self_slice.arity() - suffix..].iter().copied();
|
||||
let extra_wildcards = other_slice.arity() - self_slice.arity();
|
||||
let extra_wildcards: &[_] = cx.pattern_arena.alloc_from_iter(
|
||||
(0..extra_wildcards).map(|_| DeconstructedPat::wildcard(inner_ty)),
|
||||
);
|
||||
let fields = prefix.chain(extra_wildcards).chain(suffix);
|
||||
Cow::Owned(Fields::from_ref_iter(fields))
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => Cow::Borrowed(&self.fields),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// This is mostly copied from the `Pat` impl. This is best effort and not good enough for a
|
||||
/// `Display` impl.
|
||||
impl<'p, 'tcx> fmt::Debug for DeconstructedPat<'p, 'tcx> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
// Printing lists is a chore.
|
||||
let mut first = true;
|
||||
let mut start_or_continue = |s| {
|
||||
if first {
|
||||
first = false;
|
||||
""
|
||||
} else {
|
||||
s
|
||||
}
|
||||
};
|
||||
let mut start_or_comma = || start_or_continue(", ");
|
||||
|
||||
match &self.ctor {
|
||||
Single | Variant(_) => match self.ty.kind() {
|
||||
ty::Adt(def, _) if def.is_box() => {
|
||||
// Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside
|
||||
// of `std`). So this branch is only reachable when the feature is enabled and
|
||||
// the pattern is a box pattern.
|
||||
let subpattern = self.iter_fields().next().unwrap();
|
||||
write!(f, "box {:?}", subpattern)
|
||||
}
|
||||
ty::Adt(..) | ty::Tuple(..) => {
|
||||
let variant = match self.ty.kind() {
|
||||
ty::Adt(adt, _) => {
|
||||
Some(&adt.variants[self.ctor.variant_index_for_adt(adt)])
|
||||
}
|
||||
ty::Tuple(_) => None,
|
||||
_ => unreachable!(),
|
||||
};
|
||||
|
||||
if let Some(variant) = variant {
|
||||
write!(f, "{}", variant.ident)?;
|
||||
}
|
||||
|
||||
// Without `cx`, we can't know which field corresponds to which, so we can't
|
||||
// get the names of the fields. Instead we just display everything as a tuple
|
||||
// struct, which should be good enough.
|
||||
write!(f, "(")?;
|
||||
for p in self.iter_fields() {
|
||||
write!(f, "{}", start_or_comma())?;
|
||||
write!(f, "{:?}", p)?;
|
||||
}
|
||||
write!(f, ")")
|
||||
}
|
||||
// Note: given the expansion of `&str` patterns done in `expand_pattern`, we should
|
||||
// be careful to detect strings here. However a string literal pattern will never
|
||||
// be reported as a non-exhaustiveness witness, so we can ignore this issue.
|
||||
ty::Ref(_, _, mutbl) => {
|
||||
let subpattern = self.iter_fields().next().unwrap();
|
||||
write!(f, "&{}{:?}", mutbl.prefix_str(), subpattern)
|
||||
}
|
||||
_ => write!(f, "_"),
|
||||
},
|
||||
Slice(slice) => {
|
||||
let mut subpatterns = self.fields.iter_patterns();
|
||||
write!(f, "[")?;
|
||||
match slice.kind {
|
||||
FixedLen(_) => {
|
||||
for p in subpatterns {
|
||||
write!(f, "{}{:?}", start_or_comma(), p)?;
|
||||
}
|
||||
}
|
||||
VarLen(prefix_len, _) => {
|
||||
for p in subpatterns.by_ref().take(prefix_len) {
|
||||
write!(f, "{}{:?}", start_or_comma(), p)?;
|
||||
}
|
||||
write!(f, "{}", start_or_comma())?;
|
||||
write!(f, "..")?;
|
||||
for p in subpatterns {
|
||||
write!(f, "{}{:?}", start_or_comma(), p)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
write!(f, "]")
|
||||
}
|
||||
PatKind::Array { prefix, suffix, .. } | PatKind::Slice { prefix, suffix, .. } => {
|
||||
// Number of subpatterns for the constructor
|
||||
let ctor_arity = self.len();
|
||||
|
||||
// Replace the prefix and the suffix with the given patterns, leaving wildcards in
|
||||
// the middle if there was a subslice pattern `..`.
|
||||
let prefix = prefix.iter().enumerate();
|
||||
let suffix =
|
||||
suffix.iter().enumerate().map(|(i, p)| (ctor_arity - suffix.len() + i, p));
|
||||
|
||||
for (i, pat) in prefix.chain(suffix) {
|
||||
self.fields[i] = pat
|
||||
}
|
||||
&FloatRange(lo, hi, end) => {
|
||||
write!(f, "{}", lo)?;
|
||||
write!(f, "{}", end)?;
|
||||
write!(f, "{}", hi)
|
||||
}
|
||||
PatKind::Constant { .. } => match pat.ty.kind() {
|
||||
ty::Ref(_, t, _) if t.is_str() => {
|
||||
assert_eq!(self.len(), 1);
|
||||
// We want a `&str` constant to behave like a `Deref` pattern, to be compatible
|
||||
// with other `Deref` patterns. This could have been done in `const_to_pat`,
|
||||
// but that causes issues with the rest of the matching code.
|
||||
// The outer constructor is `&`, and the inner one carries the str value.
|
||||
let mut new_pat = pat.clone();
|
||||
new_pat.ty = t; // `t` is `str`, not `&str`
|
||||
self.fields[0] = &*cx.pattern_arena.alloc(new_pat);
|
||||
IntRange(range) => write!(f, "{:?}", range), // Best-effort, will render e.g. `false` as `0..=0`
|
||||
Wildcard | Missing { .. } | NonExhaustive => write!(f, "_"),
|
||||
Or => {
|
||||
for pat in self.iter_fields() {
|
||||
write!(f, "{}{:?}", start_or_continue(" | "), pat)?;
|
||||
}
|
||||
_ => {}
|
||||
},
|
||||
_ => {}
|
||||
};
|
||||
self
|
||||
Ok(())
|
||||
}
|
||||
Str(value) => write!(f, "{}", value),
|
||||
Opaque => write!(f, "<constant pattern>"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -284,17 +284,14 @@ use self::ArmType::*;
use self::Usefulness::*;

use super::check_match::{joined_uncovered_patterns, pattern_not_covered_label};
use super::deconstruct_pat::{Constructor, Fields, SplitWildcard};
use super::{PatternFoldable, PatternFolder};
use super::deconstruct_pat::{Constructor, DeconstructedPat, Fields, SplitWildcard};

use rustc_data_structures::captures::Captures;
use rustc_data_structures::fx::FxHashMap;

use hir::def_id::DefId;
use hir::HirId;
use rustc_arena::TypedArena;
use rustc_hir as hir;
use rustc_middle::thir::{Pat, PatKind};
use rustc_hir::def_id::DefId;
use rustc_hir::HirId;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_session::lint::builtin::NON_EXHAUSTIVE_OMITTED_PATTERNS;
use rustc_span::Span;
@ -302,9 +299,8 @@ use rustc_span::Span;
use smallvec::{smallvec, SmallVec};
use std::fmt;
use std::iter::IntoIterator;
use std::lazy::OnceCell;

crate struct MatchCheckCtxt<'a, 'tcx> {
crate struct MatchCheckCtxt<'p, 'tcx> {
crate tcx: TyCtxt<'tcx>,
/// The module in which the match occurs. This is necessary for
/// checking inhabited-ness of types because whether a type is (visibly)
@ -313,7 +309,7 @@ crate struct MatchCheckCtxt<'a, 'tcx> {
/// outside its module and should not be matchable with an empty match statement.
crate module: DefId,
crate param_env: ty::ParamEnv<'tcx>,
crate pattern_arena: &'a TypedArena<Pat<'tcx>>,
crate pattern_arena: &'p TypedArena<DeconstructedPat<'p, 'tcx>>,
}

impl<'a, 'tcx> MatchCheckCtxt<'a, 'tcx> {
@ -356,64 +352,20 @@ impl<'a, 'p, 'tcx> fmt::Debug for PatCtxt<'a, 'p, 'tcx> {
}
}

crate fn expand_pattern<'tcx>(pat: Pat<'tcx>) -> Pat<'tcx> {
LiteralExpander.fold_pattern(&pat)
}

struct LiteralExpander;

impl<'tcx> PatternFolder<'tcx> for LiteralExpander {
fn fold_pattern(&mut self, pat: &Pat<'tcx>) -> Pat<'tcx> {
debug!("fold_pattern {:?} {:?} {:?}", pat, pat.ty.kind(), pat.kind);
match pat.kind.as_ref() {
PatKind::Binding { subpattern: Some(s), .. } => s.fold_with(self),
PatKind::AscribeUserType { subpattern: s, .. } => s.fold_with(self),
_ => pat.super_fold_with(self),
}
}
}

pub(super) fn is_wildcard(pat: &Pat<'_>) -> bool {
matches!(*pat.kind, PatKind::Binding { subpattern: None, .. } | PatKind::Wild)
}

fn is_or_pat(pat: &Pat<'_>) -> bool {
matches!(*pat.kind, PatKind::Or { .. })
}

/// Recursively expand this pattern into its subpatterns. Only useful for or-patterns.
fn expand_or_pat<'p, 'tcx>(pat: &'p Pat<'tcx>) -> Vec<&'p Pat<'tcx>> {
fn expand<'p, 'tcx>(pat: &'p Pat<'tcx>, vec: &mut Vec<&'p Pat<'tcx>>) {
if let PatKind::Or { pats } = pat.kind.as_ref() {
for pat in pats {
expand(pat, vec);
}
} else {
vec.push(pat)
}
}

let mut pats = Vec::new();
expand(pat, &mut pats);
pats
}

/// A row of a matrix. Rows of len 1 are very common, which is why `SmallVec[_; 2]`
/// works well.
#[derive(Clone)]
struct PatStack<'p, 'tcx> {
pats: SmallVec<[&'p Pat<'tcx>; 2]>,
/// Cache for the constructor of the head
head_ctor: OnceCell<Constructor<'tcx>>,
pats: SmallVec<[&'p DeconstructedPat<'p, 'tcx>; 2]>,
}

impl<'p, 'tcx> PatStack<'p, 'tcx> {
fn from_pattern(pat: &'p Pat<'tcx>) -> Self {
fn from_pattern(pat: &'p DeconstructedPat<'p, 'tcx>) -> Self {
Self::from_vec(smallvec![pat])
}

fn from_vec(vec: SmallVec<[&'p Pat<'tcx>; 2]>) -> Self {
PatStack { pats: vec, head_ctor: OnceCell::new() }
fn from_vec(vec: SmallVec<[&'p DeconstructedPat<'p, 'tcx>; 2]>) -> Self {
PatStack { pats: vec }
}

fn is_empty(&self) -> bool {
@ -424,30 +376,25 @@ impl<'p, 'tcx> PatStack<'p, 'tcx> {
self.pats.len()
}

fn head(&self) -> &'p Pat<'tcx> {
fn head(&self) -> &'p DeconstructedPat<'p, 'tcx> {
self.pats[0]
}

#[inline]
fn head_ctor<'a>(&'a self, cx: &MatchCheckCtxt<'p, 'tcx>) -> &'a Constructor<'tcx> {
self.head_ctor.get_or_init(|| Constructor::from_pat(cx, self.head()))
}

fn iter(&self) -> impl Iterator<Item = &Pat<'tcx>> {
fn iter(&self) -> impl Iterator<Item = &DeconstructedPat<'p, 'tcx>> {
self.pats.iter().copied()
}

// Recursively expand the first pattern into its subpatterns. Only useful if the pattern is an
// or-pattern. Panics if `self` is empty.
fn expand_or_pat<'a>(&'a self) -> impl Iterator<Item = PatStack<'p, 'tcx>> + Captures<'a> {
expand_or_pat(self.head()).into_iter().map(move |pat| {
self.head().iter_fields().map(move |pat| {
let mut new_patstack = PatStack::from_pattern(pat);
new_patstack.pats.extend_from_slice(&self.pats[1..]);
new_patstack
})
}

/// This computes `S(self.head_ctor(), self)`. See top of the file for explanations.
/// This computes `S(self.head().ctor(), self)`. See top of the file for explanations.
///
/// Structure patterns with a partial wild pattern (Foo { a: 42, .. }) have their missing
/// fields filled with wild patterns.
@ -456,45 +403,30 @@ impl<'p, 'tcx> PatStack<'p, 'tcx> {
fn pop_head_constructor(
&self,
cx: &MatchCheckCtxt<'p, 'tcx>,
ctor_wild_subpatterns: &Fields<'p, 'tcx>,
ctor: &Constructor<'tcx>,
) -> PatStack<'p, 'tcx> {
// We pop the head pattern and push the new fields extracted from the arguments of
// `self.head()`.
let mut new_fields: SmallVec<[_; 2]> = ctor_wild_subpatterns
.clone()
.extract_pattern_arguments(cx, self.head())
.iter_patterns()
.collect();
let mut new_fields: SmallVec<[_; 2]> =
self.head().specialize(cx, ctor).iter_patterns().collect();
new_fields.extend_from_slice(&self.pats[1..]);
PatStack::from_vec(new_fields)
}
}

impl<'p, 'tcx> Default for PatStack<'p, 'tcx> {
fn default() -> Self {
Self::from_vec(smallvec![])
}
}

impl<'p, 'tcx> PartialEq for PatStack<'p, 'tcx> {
fn eq(&self, other: &Self) -> bool {
self.pats == other.pats
}
}

/// Pretty-printing for matrix row.
impl<'p, 'tcx> fmt::Debug for PatStack<'p, 'tcx> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "+")?;
for pat in self.iter() {
write!(f, " {} +", pat)?;
write!(f, " {:?} +", pat)?;
}
Ok(())
}
}

/// A 2D matrix.
#[derive(Clone, PartialEq)]
#[derive(Clone)]
pub(super) struct Matrix<'p, 'tcx> {
patterns: Vec<PatStack<'p, 'tcx>>,
}
@ -512,7 +444,7 @@ impl<'p, 'tcx> Matrix<'p, 'tcx> {
/// Pushes a new row to the matrix. If the row starts with an or-pattern, this recursively
/// expands it.
fn push(&mut self, row: PatStack<'p, 'tcx>) {
if !row.is_empty() && is_or_pat(row.head()) {
if !row.is_empty() && row.head().is_or_pat() {
for row in row.expand_or_pat() {
self.patterns.push(row);
}
@ -522,37 +454,22 @@ impl<'p, 'tcx> Matrix<'p, 'tcx> {
}

/// Iterate over the first component of each row
fn heads<'a>(&'a self) -> impl Iterator<Item = &'a Pat<'tcx>> + Captures<'p> {
fn heads<'a>(
&'a self,
) -> impl Iterator<Item = &'p DeconstructedPat<'p, 'tcx>> + Clone + Captures<'a> {
self.patterns.iter().map(|r| r.head())
}

/// Iterate over the first constructor of each row.
pub(super) fn head_ctors<'a>(
&'a self,
cx: &'a MatchCheckCtxt<'p, 'tcx>,
) -> impl Iterator<Item = &'a Constructor<'tcx>> + Captures<'p> + Clone {
self.patterns.iter().map(move |r| r.head_ctor(cx))
}

/// Iterate over the first constructor and the corresponding span of each row.
pub(super) fn head_ctors_and_spans<'a>(
&'a self,
cx: &'a MatchCheckCtxt<'p, 'tcx>,
) -> impl Iterator<Item = (&'a Constructor<'tcx>, Span)> + Captures<'p> {
self.patterns.iter().map(move |r| (r.head_ctor(cx), r.head().span))
}

/// This computes `S(constructor, self)`. See top of the file for explanations.
fn specialize_constructor(
&self,
pcx: PatCtxt<'_, 'p, 'tcx>,
ctor: &Constructor<'tcx>,
ctor_wild_subpatterns: &Fields<'p, 'tcx>,
) -> Matrix<'p, 'tcx> {
let mut matrix = Matrix::empty();
for row in &self.patterns {
if ctor.is_covered_by(pcx, row.head_ctor(pcx.cx)) {
let new_row = row.pop_head_constructor(pcx.cx, ctor_wild_subpatterns);
if ctor.is_covered_by(pcx, row.head().ctor()) {
let new_row = row.pop_head_constructor(pcx.cx, ctor);
matrix.push(new_row);
}
}
@ -575,7 +492,7 @@ impl<'p, 'tcx> fmt::Debug for Matrix<'p, 'tcx> {

let Matrix { patterns: m, .. } = self;
let pretty_printed_matrix: Vec<Vec<String>> =
m.iter().map(|row| row.iter().map(|pat| format!("{}", pat)).collect()).collect();
m.iter().map(|row| row.iter().map(|pat| format!("{:?}", pat)).collect()).collect();

let column_count = m.iter().map(|row| row.len()).next().unwrap_or(0);
assert!(m.iter().all(|row| row.len() == column_count));
@ -791,7 +708,7 @@ impl SubPatSet {
}

/// When `self` refers to a patstack that was obtained from splitting an or-pattern, after
/// running `unspecialize` it will refer to the original patstack before splitting.
/// running `unsplit_or_pat` it will refer to the original patstack before splitting.
///
/// For example:
/// ```
@ -839,7 +756,7 @@ impl SubPatSet {
/// witnesses of non-exhaustiveness when there are any.
/// Which variant to use is dictated by `ArmType`.
#[derive(Clone, Debug)]
enum Usefulness<'tcx> {
enum Usefulness<'p, 'tcx> {
/// Carries a set of subpatterns that have been found to be reachable. If empty, this indicates
/// the whole pattern is unreachable. If not, this indicates that the pattern is reachable but
/// that some sub-patterns may be unreachable (due to or-patterns). In the absence of
@ -848,10 +765,10 @@ enum Usefulness<'tcx> {
NoWitnesses(SubPatSet),
/// Carries a list of witnesses of non-exhaustiveness. If empty, indicates that the whole
/// pattern is unreachable.
WithWitnesses(Vec<Witness<'tcx>>),
WithWitnesses(Vec<Witness<'p, 'tcx>>),
}

impl<'tcx> Usefulness<'tcx> {
impl<'p, 'tcx> Usefulness<'p, 'tcx> {
fn new_useful(preference: ArmType) -> Self {
match preference {
FakeExtraWildcard => WithWitnesses(vec![Witness(vec![])]),
@ -896,12 +813,11 @@ impl<'tcx> Usefulness<'tcx> {
/// After calculating usefulness after a specialization, call this to reconstruct a usefulness
/// that makes sense for the matrix pre-specialization. This new usefulness can then be merged
/// with the results of specializing with the other constructors.
fn apply_constructor<'p>(
fn apply_constructor(
self,
pcx: PatCtxt<'_, 'p, 'tcx>,
matrix: &Matrix<'p, 'tcx>, // used to compute missing ctors
ctor: &Constructor<'tcx>,
ctor_wild_subpatterns: &Fields<'p, 'tcx>,
) -> Self {
match self {
WithWitnesses(witnesses) if witnesses.is_empty() => WithWitnesses(witnesses),
@ -912,23 +828,18 @@ impl<'tcx> Usefulness<'tcx> {
let new_patterns = if pcx.is_non_exhaustive {
// Here we don't want the user to try to list all variants, we want them to add
// a wildcard, so we only suggest that.
vec![
Fields::wildcards(pcx.cx, pcx.ty, &Constructor::NonExhaustive)
.apply(pcx, &Constructor::NonExhaustive),
]
vec![DeconstructedPat::wildcard(pcx.ty)]
} else {
let mut split_wildcard = SplitWildcard::new(pcx);
split_wildcard.split(pcx, matrix.head_ctors(pcx.cx));
split_wildcard.split(pcx, matrix.heads().map(DeconstructedPat::ctor));
// Construct for each missing constructor a "wild" version of this
// constructor, that matches everything that can be built with
// it. For example, if `ctor` is a `Constructor::Variant` for
// `Option::Some`, we get the pattern `Some(_)`.
split_wildcard
.iter_missing(pcx)
.map(|missing_ctor| {
Fields::wildcards(pcx.cx, pcx.ty, missing_ctor)
.apply(pcx, missing_ctor)
})
.cloned()
.map(|missing_ctor| DeconstructedPat::wild_from_ctor(pcx, missing_ctor))
.collect()
};

@ -945,12 +856,12 @@ impl<'tcx> Usefulness<'tcx> {
} else {
witnesses
.into_iter()
.map(|witness| witness.apply_constructor(pcx, &ctor, ctor_wild_subpatterns))
.map(|witness| witness.apply_constructor(pcx, &ctor))
.collect()
};
WithWitnesses(new_witnesses)
}
NoWitnesses(subpats) => NoWitnesses(subpats.unspecialize(ctor_wild_subpatterns.len())),
NoWitnesses(subpats) => NoWitnesses(subpats.unspecialize(ctor.arity(pcx))),
}
}
}
@ -995,11 +906,11 @@ enum ArmType {
///
/// The final `Pair(Some(_), true)` is then the resulting witness.
#[derive(Clone, Debug)]
crate struct Witness<'tcx>(Vec<Pat<'tcx>>);
crate struct Witness<'p, 'tcx>(Vec<DeconstructedPat<'p, 'tcx>>);

impl<'tcx> Witness<'tcx> {
impl<'p, 'tcx> Witness<'p, 'tcx> {
/// Asserts that the witness contains a single pattern, and returns it.
fn single_pattern(self) -> Pat<'tcx> {
fn single_pattern(self) -> DeconstructedPat<'p, 'tcx> {
assert_eq!(self.0.len(), 1);
self.0.into_iter().next().unwrap()
}
@ -1017,17 +928,13 @@ impl<'tcx> Witness<'tcx> {
///
/// left_ty: struct X { a: (bool, &'static str), b: usize}
/// pats: [(false, "foo"), 42] => X { a: (false, "foo"), b: 42 }
fn apply_constructor<'p>(
mut self,
pcx: PatCtxt<'_, 'p, 'tcx>,
ctor: &Constructor<'tcx>,
ctor_wild_subpatterns: &Fields<'p, 'tcx>,
) -> Self {
fn apply_constructor(mut self, pcx: PatCtxt<'_, 'p, 'tcx>, ctor: &Constructor<'tcx>) -> Self {
let pat = {
let len = self.0.len();
let arity = ctor_wild_subpatterns.len();
let arity = ctor.arity(pcx);
let pats = self.0.drain((len - arity)..).rev();
ctor_wild_subpatterns.clone().replace_fields(pcx.cx, pats).apply(pcx, ctor)
let fields = Fields::from_iter(pcx.cx, pats);
DeconstructedPat::new(ctor.clone(), fields, pcx.ty)
};

self.0.push(pat);
@ -1045,9 +952,9 @@ fn lint_non_exhaustive_omitted_patterns<'p, 'tcx>(
scrut_ty: Ty<'tcx>,
sp: Span,
hir_id: HirId,
witnesses: Vec<Pat<'tcx>>,
witnesses: Vec<DeconstructedPat<'p, 'tcx>>,
) {
let joined_patterns = joined_uncovered_patterns(&witnesses);
let joined_patterns = joined_uncovered_patterns(cx, &witnesses);
cx.tcx.struct_span_lint_hir(NON_EXHAUSTIVE_OMITTED_PATTERNS, hir_id, sp, |build| {
let mut lint = build.build("some variants are not matched explicitly");
lint.span_label(sp, pattern_not_covered_label(&witnesses, &joined_patterns));
@ -1096,7 +1003,7 @@ fn is_useful<'p, 'tcx>(
hir_id: HirId,
is_under_guard: bool,
is_top_level: bool,
) -> Usefulness<'tcx> {
) -> Usefulness<'p, 'tcx> {
debug!("matrix,v={:?}{:?}", matrix, v);
let Matrix { patterns: rows, .. } = matrix;
@ -1118,16 +1025,16 @@ fn is_useful<'p, 'tcx>(
assert!(rows.iter().all(|r| r.len() == v.len()));

// FIXME(Nadrieril): Hack to work around type normalization issues (see #72476).
let ty = matrix.heads().next().map_or(v.head().ty, |r| r.ty);
let ty = matrix.heads().next().map_or(v.head().ty(), |r| r.ty());
let is_non_exhaustive = cx.is_foreign_non_exhaustive_enum(ty);
let pcx = PatCtxt { cx, ty, span: v.head().span, is_top_level, is_non_exhaustive };
let pcx = PatCtxt { cx, ty, span: v.head().span(), is_top_level, is_non_exhaustive };

// If the first pattern is an or-pattern, expand it.
let mut ret = Usefulness::new_not_useful(witness_preference);
if is_or_pat(v.head()) {
if v.head().is_or_pat() {
debug!("expanding or-pattern");
let spans: Vec<_> = v.head().iter_fields().map(|pat| pat.span()).collect();
let vs: Vec<_> = v.expand_or_pat().collect();
let spans: Vec<_> = vs.iter().map(|pat| pat.head().span).collect();
// We try each or-pattern branch in turn.
let mut matrix = matrix.clone();
for (i, v) in vs.into_iter().enumerate() {
@ -1143,18 +1050,18 @@ fn is_useful<'p, 'tcx>(
}
}
} else {
let v_ctor = v.head_ctor(cx);
let v_ctor = v.head().ctor();
if let Constructor::IntRange(ctor_range) = &v_ctor {
// Lint on likely incorrect range patterns (#63987)
ctor_range.lint_overlapping_range_endpoints(
pcx,
matrix.head_ctors_and_spans(cx),
matrix.heads(),
matrix.column_count().unwrap_or(0),
hir_id,
)
}
// We split the head constructor of `v`.
let split_ctors = v_ctor.split(pcx, matrix.head_ctors(cx));
let split_ctors = v_ctor.split(pcx, matrix.heads().map(DeconstructedPat::ctor));
let is_non_exhaustive_and_wild = is_non_exhaustive && v_ctor.is_wildcard();
// For each constructor, we compute whether there's a value that starts with it that would
// witness the usefulness of `v`.
@ -1162,14 +1069,11 @@ fn is_useful<'p, 'tcx>(
for ctor in split_ctors {
debug!("specialize({:?})", ctor);
// We cache the result of `Fields::wildcards` because it is used a lot.
let ctor_wild_subpatterns = Fields::wildcards(pcx.cx, pcx.ty, &ctor);
let spec_matrix =
start_matrix.specialize_constructor(pcx, &ctor, &ctor_wild_subpatterns);
let v = v.pop_head_constructor(cx, &ctor_wild_subpatterns);
let spec_matrix = start_matrix.specialize_constructor(pcx, &ctor);
let v = v.pop_head_constructor(cx, &ctor);
let usefulness =
is_useful(cx, &spec_matrix, &v, witness_preference, hir_id, is_under_guard, false);
let usefulness =
usefulness.apply_constructor(pcx, start_matrix, &ctor, &ctor_wild_subpatterns);
let usefulness = usefulness.apply_constructor(pcx, start_matrix, &ctor);

// When all the conditions are met we have a match with a `non_exhaustive` enum
// that has the potential to trigger the `non_exhaustive_omitted_patterns` lint.
@ -1186,19 +1090,18 @@ fn is_useful<'p, 'tcx>(
{
let patterns = {
let mut split_wildcard = SplitWildcard::new(pcx);
split_wildcard.split(pcx, matrix.head_ctors(pcx.cx));
split_wildcard.split(pcx, matrix.heads().map(DeconstructedPat::ctor));
// Construct for each missing constructor a "wild" version of this
// constructor, that matches everything that can be built with
// it. For example, if `ctor` is a `Constructor::Variant` for
// `Option::Some`, we get the pattern `Some(_)`.
split_wildcard
.iter_missing(pcx)
// Filter out the `Constructor::NonExhaustive` variant it's meaningless
// to our lint
// Filter out the `NonExhaustive` because we want to list only real
// variants.
.filter(|c| !c.is_non_exhaustive())
.map(|missing_ctor| {
Fields::wildcards(pcx.cx, pcx.ty, missing_ctor).apply(pcx, missing_ctor)
})
.cloned()
.map(|missing_ctor| DeconstructedPat::wild_from_ctor(pcx, missing_ctor))
.collect::<Vec<_>>()
};
@ -1217,7 +1120,7 @@ fn is_useful<'p, 'tcx>(
#[derive(Clone, Copy)]
crate struct MatchArm<'p, 'tcx> {
/// The pattern must have been lowered through `check_match::MatchVisitor::lower_pattern`.
crate pat: &'p Pat<'tcx>,
crate pat: &'p DeconstructedPat<'p, 'tcx>,
crate hir_id: HirId,
crate has_guard: bool,
}
@ -1239,7 +1142,7 @@ crate struct UsefulnessReport<'p, 'tcx> {
crate arm_usefulness: Vec<(MatchArm<'p, 'tcx>, Reachability)>,
/// If the match is exhaustive, this is empty. If not, this contains witnesses for the lack of
/// exhaustiveness.
crate non_exhaustiveness_witnesses: Vec<Pat<'tcx>>,
crate non_exhaustiveness_witnesses: Vec<DeconstructedPat<'p, 'tcx>>,
}

/// The entrypoint for the usefulness algorithm. Computes whether a match is exhaustive and which
@ -1274,7 +1177,7 @@ crate fn compute_match_usefulness<'p, 'tcx>(
})
.collect();

let wild_pattern = cx.pattern_arena.alloc(Pat::wildcard_from_ty(scrut_ty));
let wild_pattern = cx.pattern_arena.alloc(DeconstructedPat::wildcard(scrut_ty));
let v = PatStack::from_pattern(wild_pattern);
let usefulness = is_useful(cx, &matrix, &v, FakeExtraWildcard, scrut_hir_id, false, true);
let non_exhaustiveness_witnesses = match usefulness {
@ -7,8 +7,11 @@ LL | FOO => {},
error: unreachable pattern
--> $DIR/issue-78057.rs:14:9
|
LL | FOO => {},
| --- matches any value
LL |
LL | _ => {}
| ^
| ^ unreachable pattern
|
note: the lint level is defined here
--> $DIR/issue-78057.rs:1:9
@ -7,8 +7,11 @@ LL | FOO => {}
error: unreachable pattern
--> $DIR/consts-opaque.rs:32:9
|
LL | FOO => {}
| --- matches any value
LL |
LL | _ => {} // should not be emitting unreachable warning
| ^
| ^ unreachable pattern
|
note: the lint level is defined here
--> $DIR/consts-opaque.rs:6:9
@ -25,8 +28,11 @@ LL | FOO_REF => {}
error: unreachable pattern
--> $DIR/consts-opaque.rs:39:9
|
LL | FOO_REF => {}
| ------- matches any value
LL |
LL | Foo(_) => {} // should not be emitting unreachable warning
| ^^^^^^
| ^^^^^^ unreachable pattern

warning: to use a constant of type `Foo` in a pattern, `Foo` must be annotated with `#[derive(PartialEq, Eq)]`
--> $DIR/consts-opaque.rs:45:9
@ -70,15 +76,18 @@ LL | BAR => {}
error: unreachable pattern
--> $DIR/consts-opaque.rs:63:9
|
LL | BAR => {}
| --- matches any value
LL |
LL | Bar => {} // should not be emitting unreachable warning
| ^^^
| ^^^ unreachable pattern

error: unreachable pattern
--> $DIR/consts-opaque.rs:65:9
|
LL | Bar => {} // should not be emitting unreachable warning
LL | BAR => {}
| --- matches any value
LL |
...
LL | _ => {}
| ^ unreachable pattern

@ -97,14 +106,20 @@ LL | BAR => {} // should not be emitting unreachable warning
error: unreachable pattern
--> $DIR/consts-opaque.rs:72:9
|
LL | BAR => {}
| --- matches any value
LL |
LL | BAR => {} // should not be emitting unreachable warning
| ^^^
| ^^^ unreachable pattern

error: unreachable pattern
--> $DIR/consts-opaque.rs:75:9
|
LL | BAR => {}
| --- matches any value
...
LL | _ => {} // should not be emitting unreachable warning
| ^
| ^ unreachable pattern

error: to use a constant of type `Baz` in a pattern, `Baz` must be annotated with `#[derive(PartialEq, Eq)]`
--> $DIR/consts-opaque.rs:80:9
@ -115,14 +130,20 @@ LL | BAZ => {}
error: unreachable pattern
--> $DIR/consts-opaque.rs:82:9
|
LL | BAZ => {}
| --- matches any value
LL |
LL | Baz::Baz1 => {} // should not be emitting unreachable warning
| ^^^^^^^^^
| ^^^^^^^^^ unreachable pattern

error: unreachable pattern
--> $DIR/consts-opaque.rs:84:9
|
LL | BAZ => {}
| --- matches any value
...
LL | _ => {}
| ^
| ^ unreachable pattern

error: to use a constant of type `Baz` in a pattern, `Baz` must be annotated with `#[derive(PartialEq, Eq)]`
--> $DIR/consts-opaque.rs:90:9
@ -133,8 +154,11 @@ LL | BAZ => {}
error: unreachable pattern
--> $DIR/consts-opaque.rs:92:9
|
LL | BAZ => {}
| --- matches any value
LL |
LL | _ => {}
| ^
| ^ unreachable pattern

error: to use a constant of type `Baz` in a pattern, `Baz` must be annotated with `#[derive(PartialEq, Eq)]`
--> $DIR/consts-opaque.rs:97:9
@ -145,20 +169,28 @@ LL | BAZ => {}
error: unreachable pattern
--> $DIR/consts-opaque.rs:99:9
|
LL | BAZ => {}
| --- matches any value
LL |
LL | Baz::Baz2 => {} // should not be emitting unreachable warning
| ^^^^^^^^^
| ^^^^^^^^^ unreachable pattern

error: unreachable pattern
--> $DIR/consts-opaque.rs:101:9
|
LL | BAZ => {}
| --- matches any value
...
LL | _ => {} // should not be emitting unreachable warning
| ^
| ^ unreachable pattern

error: unreachable pattern
--> $DIR/consts-opaque.rs:127:9
|
LL | Wrap(_) => {}
| ------- matches any value
LL | WRAPQUUX => {} // detected unreachable because we do inspect the `Wrap` layer
| ^^^^^^^^
| ^^^^^^^^ unreachable pattern

error: unreachable pattern
--> $DIR/consts-opaque.rs:141:9
@ -133,8 +133,10 @@ LL | 5..15 => {},
error: unreachable pattern
--> $DIR/reachability.rs:83:9
|
LL | _ => {},
| - matches any value
LL | '\u{D7FF}'..='\u{E000}' => {},
| ^^^^^^^^^^^^^^^^^^^^^^^
| ^^^^^^^^^^^^^^^^^^^^^^^ unreachable pattern

error: unreachable pattern
--> $DIR/reachability.rs:104:9