Store field indices in DeconstructedPat to avoid virtual wildcards

Nadrieril 2024-02-29 23:34:57 +01:00
parent c1e68860d0
commit 6ae9fa31f0
4 changed files with 102 additions and 84 deletions

View File

@@ -917,7 +917,9 @@ fn report_arm_reachability<'p, 'tcx>(
fn pat_is_catchall(pat: &DeconstructedPat<'_, '_>) -> bool {
match pat.ctor() {
Constructor::Wildcard => true,
Constructor::Struct | Constructor::Ref => pat.iter_fields().all(|pat| pat_is_catchall(pat)),
Constructor::Struct | Constructor::Ref => {
pat.iter_fields().all(|ipat| pat_is_catchall(&ipat.pat))
}
_ => false,
}
}

View File

@@ -20,12 +20,18 @@ impl PatId {
}
}
/// A pattern with an index denoting which field it corresponds to.
pub struct IndexedPat<Cx: TypeCx> {
pub idx: usize,
pub pat: DeconstructedPat<Cx>,
}
/// Values and patterns can be represented as a constructor applied to some fields. This represents
/// a pattern in this form. A `DeconstructedPat` will almost always come from user input; the only
/// exceptions are some `Wildcard`s introduced during pattern lowering.
pub struct DeconstructedPat<Cx: TypeCx> {
ctor: Constructor<Cx>,
fields: Vec<DeconstructedPat<Cx>>,
fields: Vec<IndexedPat<Cx>>,
/// The number of fields in this pattern. E.g. if the pattern is `SomeStruct { field12: true, ..
/// }` this would be the total number of fields of the struct.
/// This is also the same as `self.ctor.arity(self.ty)`.
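As an illustration of the doc comments above, here is a minimal standalone sketch of the new sparse representation, using simplified stand-in types (plain structs and a string placeholder for subpatterns, not the crate's real `Constructor`/`TypeCx` machinery). A pattern like `SomeStruct { field12: true, .. }` on a hypothetical 20-field struct stores a single subpattern tagged with its field index, while `arity` remembers the total field count that previously had to be padded out with virtual wildcard patterns.

// Simplified stand-in types, for illustration only.
#[derive(Debug)]
struct IndexedPat {
    idx: usize,
    pat: &'static str, // placeholder for a real subpattern
}

#[derive(Debug)]
struct DeconstructedPat {
    // Only the fields actually written in the source pattern are stored.
    fields: Vec<IndexedPat>,
    // Total number of fields of the constructor, e.g. of the struct.
    arity: usize,
}

fn main() {
    // `SomeStruct { field12: true, .. }` on a hypothetical 20-field struct:
    // one stored subpattern at index 11, no wildcards for the other 19 fields.
    let pat = DeconstructedPat { fields: vec![IndexedPat { idx: 11, pat: "true" }], arity: 20 };
    assert_eq!(pat.fields.len(), 1);
    assert_eq!(pat.arity, 20);
    println!("{pat:?}");
}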
@@ -39,20 +45,9 @@ pub struct DeconstructedPat<Cx: TypeCx> {
}
impl<Cx: TypeCx> DeconstructedPat<Cx> {
pub fn wildcard(ty: Cx::Ty) -> Self {
DeconstructedPat {
ctor: Wildcard,
fields: Vec::new(),
arity: 0,
ty,
data: None,
uid: PatId::new(),
}
}
pub fn new(
ctor: Constructor<Cx>,
fields: Vec<DeconstructedPat<Cx>>,
fields: Vec<IndexedPat<Cx>>,
arity: usize,
ty: Cx::Ty,
data: Cx::PatData,
@@ -60,6 +55,10 @@ impl<Cx: TypeCx> DeconstructedPat<Cx> {
DeconstructedPat { ctor, fields, arity, ty, data: Some(data), uid: PatId::new() }
}
pub fn at_index(self, idx: usize) -> IndexedPat<Cx> {
IndexedPat { idx, pat: self }
}
pub(crate) fn is_or_pat(&self) -> bool {
matches!(self.ctor, Or)
}
@@ -75,8 +74,11 @@ impl<Cx: TypeCx> DeconstructedPat<Cx> {
pub fn data(&self) -> Option<&Cx::PatData> {
self.data.as_ref()
}
pub fn arity(&self) -> usize {
self.arity
}
pub fn iter_fields<'a>(&'a self) -> impl Iterator<Item = &'a DeconstructedPat<Cx>> {
pub fn iter_fields<'a>(&'a self) -> impl Iterator<Item = &'a IndexedPat<Cx>> {
self.fields.iter()
}
@@ -85,36 +87,40 @@ impl<Cx: TypeCx> DeconstructedPat<Cx> {
pub(crate) fn specialize<'a>(
&'a self,
other_ctor: &Constructor<Cx>,
ctor_arity: usize,
other_ctor_arity: usize,
) -> SmallVec<[PatOrWild<'a, Cx>; 2]> {
let wildcard_sub_tys = || (0..ctor_arity).map(|_| PatOrWild::Wild).collect();
match (&self.ctor, other_ctor) {
// Return a wildcard for each field of `other_ctor`.
(Wildcard, _) => wildcard_sub_tys(),
if matches!(other_ctor, PrivateUninhabited) {
// Skip this column.
(_, PrivateUninhabited) => smallvec![],
// The only non-trivial case: two slices of different arity. `other_slice` is
// guaranteed to have a larger arity, so we fill the middle part with enough
// wildcards to reach the length of the new, larger slice.
(
&Slice(self_slice @ Slice { kind: SliceKind::VarLen(prefix, suffix), .. }),
&Slice(other_slice),
) if self_slice.arity() != other_slice.arity() => {
// Start with a slice of wildcards of the appropriate length.
let mut fields: SmallVec<[_; 2]> = wildcard_sub_tys();
// Fill in the fields from both ends.
let new_arity = fields.len();
for i in 0..prefix {
fields[i] = PatOrWild::Pat(&self.fields[i]);
}
for i in 0..suffix {
fields[new_arity - 1 - i] =
PatOrWild::Pat(&self.fields[self.fields.len() - 1 - i]);
}
fields
}
_ => self.fields.iter().map(PatOrWild::Pat).collect(),
return smallvec![];
}
// Start with a slice of wildcards of the appropriate length.
let mut fields: SmallVec<[_; 2]> = (0..other_ctor_arity).map(|_| PatOrWild::Wild).collect();
// Fill `fields` with our fields. The arities are known to be compatible.
match self.ctor {
// The only non-trivial case: two slices of different arity. `other_ctor` is guaranteed
// to have a larger arity, so we adjust the indices of the patterns in the suffix so
// that they are correctly positioned in the larger slice.
Slice(Slice { kind: SliceKind::VarLen(prefix, _), .. })
if self.arity != other_ctor_arity =>
{
for ipat in &self.fields {
let new_idx = if ipat.idx < prefix {
ipat.idx
} else {
// Adjust the indices in the suffix.
ipat.idx + other_ctor_arity - self.arity
};
fields[new_idx] = PatOrWild::Pat(&ipat.pat);
}
}
_ => {
for ipat in &self.fields {
fields[ipat.idx] = PatOrWild::Pat(&ipat.pat);
}
}
}
fields
}
/// Walk top-down and call `it` in each place where a pattern occurs
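A worked example of the suffix-index adjustment in the variable-length slice arm of `specialize` above, as a self-contained sketch (the helper and its parameters are illustrative, not the crate's API): the pattern `[a, b, .., y, z]` has prefix 2, suffix 2 and arity 4; specialized against a slice constructor of arity 7, the prefix indices 0 and 1 stay put, while the suffix indices 2 and 3 shift to 2 + 7 - 4 = 5 and 3 + 7 - 4 = 6.

// Illustrative helper mirroring the index adjustment for variable-length slices.
fn adjust_index(idx: usize, prefix: usize, self_arity: usize, other_arity: usize) -> usize {
    if idx < prefix {
        idx // prefix subpatterns keep their position
    } else {
        idx + other_arity - self_arity // suffix subpatterns slide towards the end
    }
}

fn main() {
    // `[a, b, .., y, z]`: prefix 2, arity 4, specialized against a slice of arity 7.
    let adjusted: Vec<usize> = (0..4).map(|idx| adjust_index(idx, 2, 4, 7)).collect();
    assert_eq!(adjusted, vec![0, 1, 5, 6]);
}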
@@ -126,7 +132,7 @@ impl<Cx: TypeCx> DeconstructedPat<Cx> {
}
for p in self.iter_fields() {
p.walk(it)
p.pat.walk(it)
}
}
}
@@ -146,6 +152,11 @@ impl<Cx: TypeCx> fmt::Debug for DeconstructedPat<Cx> {
};
let mut start_or_comma = || start_or_continue(", ");
let mut fields: Vec<_> = (0..self.arity).map(|_| PatOrWild::Wild).collect();
for ipat in self.iter_fields() {
fields[ipat.idx] = PatOrWild::Pat(&ipat.pat);
}
match pat.ctor() {
Struct | Variant(_) | UnionField => {
Cx::write_variant_name(f, pat)?;
@@ -153,7 +164,7 @@ impl<Cx: TypeCx> fmt::Debug for DeconstructedPat<Cx> {
// get the names of the fields. Instead we just display everything as a tuple
// struct, which should be good enough.
write!(f, "(")?;
for p in pat.iter_fields() {
for p in fields {
write!(f, "{}", start_or_comma())?;
write!(f, "{p:?}")?;
}
@@ -163,25 +174,23 @@ impl<Cx: TypeCx> fmt::Debug for DeconstructedPat<Cx> {
// be careful to detect strings here. However a string literal pattern will never
// be reported as a non-exhaustiveness witness, so we can ignore this issue.
Ref => {
let subpattern = pat.iter_fields().next().unwrap();
write!(f, "&{:?}", subpattern)
write!(f, "&{:?}", &fields[0])
}
Slice(slice) => {
let mut subpatterns = pat.iter_fields();
write!(f, "[")?;
match slice.kind {
SliceKind::FixedLen(_) => {
for p in subpatterns {
for p in fields {
write!(f, "{}{:?}", start_or_comma(), p)?;
}
}
SliceKind::VarLen(prefix_len, _) => {
for p in subpatterns.by_ref().take(prefix_len) {
for p in &fields[..prefix_len] {
write!(f, "{}{:?}", start_or_comma(), p)?;
}
write!(f, "{}", start_or_comma())?;
write!(f, "..")?;
for p in subpatterns {
for p in &fields[prefix_len..] {
write!(f, "{}{:?}", start_or_comma(), p)?;
}
}
@@ -196,7 +205,7 @@ impl<Cx: TypeCx> fmt::Debug for DeconstructedPat<Cx> {
Str(value) => write!(f, "{value:?}"),
Opaque(..) => write!(f, "<constant pattern>"),
Or => {
for pat in pat.iter_fields() {
for pat in fields {
write!(f, "{}{:?}", start_or_continue(" | "), pat)?;
}
Ok(())
@@ -254,9 +263,10 @@ impl<'p, Cx: TypeCx> PatOrWild<'p, Cx> {
/// Expand this (possibly-nested) or-pattern into its alternatives.
pub(crate) fn flatten_or_pat(self) -> SmallVec<[Self; 1]> {
match self {
PatOrWild::Pat(pat) if pat.is_or_pat() => {
pat.iter_fields().flat_map(|p| PatOrWild::Pat(p).flatten_or_pat()).collect()
}
PatOrWild::Pat(pat) if pat.is_or_pat() => pat
.iter_fields()
.flat_map(|ipat| PatOrWild::Pat(&ipat.pat).flatten_or_pat())
.collect(),
_ => smallvec![self],
}
}
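For intuition, a tiny standalone sketch of the or-pattern flattening above, using a toy pattern type rather than the crate's real one: a nested `A | (B | C)` expands to the flat list of alternatives `[A, B, C]`.

// Toy pattern type used only to illustrate the recursion in `flatten_or_pat`.
enum Pat {
    Or(Vec<Pat>),
    Leaf(&'static str),
}

fn flatten_or(pat: &Pat) -> Vec<&'static str> {
    match pat {
        // Recursively expand nested or-patterns into their alternatives.
        Pat::Or(alts) => alts.iter().flat_map(flatten_or).collect(),
        Pat::Leaf(name) => vec![name],
    }
}

fn main() {
    let pat = Pat::Or(vec![Pat::Leaf("A"), Pat::Or(vec![Pat::Leaf("B"), Pat::Leaf("C")])]);
    assert_eq!(flatten_or(&pat), vec!["A", "B", "C"]);
}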

View File

@@ -446,7 +446,7 @@ impl<'p, 'tcx: 'p> RustcMatchCheckCtxt<'p, 'tcx> {
let ty = cx.reveal_opaque_ty(pat.ty);
let ctor;
let arity;
let mut fields: Vec<_>;
let fields: Vec<_>;
match &pat.kind {
PatKind::AscribeUserType { subpattern, .. }
| PatKind::InlineConstant { subpattern, .. } => return self.lower_pat(subpattern),
@@ -457,7 +457,7 @@ impl<'p, 'tcx: 'p> RustcMatchCheckCtxt<'p, 'tcx> {
arity = 0;
}
PatKind::Deref { subpattern } => {
fields = vec![self.lower_pat(subpattern)];
fields = vec![self.lower_pat(subpattern).at_index(0)];
arity = 1;
ctor = match ty.kind() {
// This is a box pattern.
@@ -471,16 +471,12 @@ impl<'p, 'tcx: 'p> RustcMatchCheckCtxt<'p, 'tcx> {
ty::Tuple(fs) => {
ctor = Struct;
arity = fs.len();
fields = fs
fields = subpatterns
.iter()
.map(|ty| cx.reveal_opaque_ty(ty))
.map(|ty| DeconstructedPat::wildcard(ty))
.map(|ipat| self.lower_pat(&ipat.pattern).at_index(ipat.field.index()))
.collect();
for pat in subpatterns {
fields[pat.field.index()] = self.lower_pat(&pat.pattern);
}
}
ty::Adt(adt, args) if adt.is_box() => {
ty::Adt(adt, _) if adt.is_box() => {
// The only legal patterns of type `Box` (outside `std`) are `_` and box
// patterns. If we're here we can assume this is a box pattern.
// FIXME(Nadrieril): A `Box` can in theory be matched either with `Box(_,
@@ -494,13 +490,12 @@ impl<'p, 'tcx: 'p> RustcMatchCheckCtxt<'p, 'tcx> {
// solution when we introduce generalized deref patterns. Also need to
// prevent mixing of those two options.
let pattern = subpatterns.into_iter().find(|pat| pat.field.index() == 0);
let pat = if let Some(pat) = pattern {
self.lower_pat(&pat.pattern)
if let Some(pat) = pattern {
fields = vec![self.lower_pat(&pat.pattern).at_index(0)];
} else {
DeconstructedPat::wildcard(self.reveal_opaque_ty(args.type_at(0)))
};
fields = vec![];
}
ctor = Struct;
fields = vec![pat];
arity = 1;
}
ty::Adt(adt, _) => {
@@ -513,13 +508,10 @@ impl<'p, 'tcx: 'p> RustcMatchCheckCtxt<'p, 'tcx> {
let variant =
&adt.variant(RustcMatchCheckCtxt::variant_index_for_adt(&ctor, *adt));
arity = variant.fields.len();
fields = cx
.variant_sub_tys(ty, variant)
.map(|(_, ty)| DeconstructedPat::wildcard(ty))
fields = subpatterns
.iter()
.map(|ipat| self.lower_pat(&ipat.pattern).at_index(ipat.field.index()))
.collect();
for pat in subpatterns {
fields[pat.field.index()] = self.lower_pat(&pat.pattern);
}
}
_ => bug!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, ty),
}
@@ -586,7 +578,7 @@ impl<'p, 'tcx: 'p> RustcMatchCheckCtxt<'p, 'tcx> {
let ty = self.reveal_opaque_ty(*t);
let subpattern = DeconstructedPat::new(Str(*value), Vec::new(), 0, ty, pat);
ctor = Ref;
fields = vec![subpattern];
fields = vec![subpattern.at_index(0)];
arity = 1;
}
// All constants that can be structurally matched have already been expanded
@@ -651,13 +643,24 @@ impl<'p, 'tcx: 'p> RustcMatchCheckCtxt<'p, 'tcx> {
SliceKind::FixedLen(prefix.len() + suffix.len())
};
ctor = Slice(Slice::new(array_len, kind));
fields = prefix.iter().chain(suffix.iter()).map(|p| self.lower_pat(&*p)).collect();
fields = prefix
.iter()
.chain(suffix.iter())
.map(|p| self.lower_pat(&*p))
.enumerate()
.map(|(i, p)| p.at_index(i))
.collect();
arity = kind.arity();
}
PatKind::Or { .. } => {
ctor = Or;
let pats = expand_or_pat(pat);
fields = pats.into_iter().map(|p| self.lower_pat(p)).collect();
fields = pats
.into_iter()
.map(|p| self.lower_pat(p))
.enumerate()
.map(|(i, p)| p.at_index(i))
.collect();
arity = fields.len();
}
PatKind::Never => {

View File

@@ -1006,15 +1006,17 @@ impl<'p, Cx: TypeCx> PatStack<'p, Cx> {
ctor_arity: usize,
ctor_is_relevant: bool,
) -> Result<PatStack<'p, Cx>, Cx::Error> {
// We pop the head pattern and push the new fields extracted from the arguments of
// `self.head()`.
let mut new_pats = self.head().specialize(ctor, ctor_arity);
if new_pats.len() != ctor_arity {
let head_pat = self.head();
if head_pat.as_pat().is_some_and(|pat| pat.arity() > ctor_arity) {
// Arity can be smaller in case of variable-length slices, but mustn't be larger.
return Err(cx.bug(format_args!(
"uncaught type error: pattern {:?} has inconsistent arity (expected arity {ctor_arity})",
self.head().as_pat().unwrap()
"uncaught type error: pattern {:?} has inconsistent arity (expected arity <= {ctor_arity})",
head_pat.as_pat().unwrap()
)));
}
// We pop the head pattern and push the new fields extracted from the arguments of
// `self.head()`.
let mut new_pats = head_pat.specialize(ctor, ctor_arity);
new_pats.extend_from_slice(&self.pats[1..]);
// `ctor` is relevant for this row if it is the actual constructor of this row, or if the
// row has a wildcard and `ctor` is relevant for wildcards.
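A small sketch of the arity invariant checked above (illustrative helper, not the crate's API): a variable-length slice pattern such as `[x, ..]` has arity 1 yet may be specialized against a slice constructor of arity 3, whereas a pattern arity larger than the constructor's indicates an uncaught type error.

// Illustrative check mirroring the invariant: pattern arity may be smaller than
// the constructor arity (variable-length slices) but never larger.
fn arity_is_consistent(pat_arity: usize, ctor_arity: usize) -> bool {
    pat_arity <= ctor_arity
}

fn main() {
    assert!(arity_is_consistent(1, 3)); // `[x, ..]` against a length-3 slice constructor
    assert!(!arity_is_consistent(3, 1)); // would hit the `cx.bug(...)` path above
}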
@@ -1706,7 +1708,8 @@ fn collect_pattern_usefulness<'p, Cx: TypeCx>(
) -> bool {
if useful_subpatterns.contains(&pat.uid) {
true
} else if pat.is_or_pat() && pat.iter_fields().any(|f| pat_is_useful(useful_subpatterns, f))
} else if pat.is_or_pat()
&& pat.iter_fields().any(|f| pat_is_useful(useful_subpatterns, &f.pat))
{
// We always expand or patterns in the matrix, so we will never see the actual
// or-pattern (the one with constructor `Or`) in the column. As such, it will not be