Rollup merge of #122644 - Nadrieril:complexity-tests, r=compiler-errors
pattern analysis: add a custom test harness

There are two features of the pattern analysis code that are hard to test: the newly-added pattern complexity limit, and the computation of arm intersections. This PR adds crate-specific tests for both, including an unmaintainable but pretty macro to help construct patterns.

r? @compiler-errors
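To give a concrete feel for the harness, here is a condensed sketch of how the new tests drive it, adapted from the exhaustiveness and intersection tests added below. `Ty`, `Cx`, `pats!` and the `compute_match_usefulness` wrapper all come from the new `tests/common/mod.rs`; the test name is illustrative only.

use common::*;
use rustc_pattern_analysis::{usefulness::PlaceValidity, MatchArm};

#[macro_use]
mod common;

#[test]
fn sketch_u8_ranges_are_exhaustive() {
    // Build two `u8` range patterns with the harness's `pats!` macro.
    let patterns = pats!(Ty::U8;
        ..10,
        10..
    );
    let arms: Vec<_> =
        patterns.iter().map(|pat| MatchArm { pat, has_guard: false, arm_data: () }).collect();
    // `None` means "no complexity limit"; passing `Some(n)` makes the analysis abort with
    // `Err(())` once it exceeds `n` steps, which is what the complexity tests below rely on.
    let report =
        compute_match_usefulness(arms.as_slice(), Ty::U8, PlaceValidity::ValidOnly, None).unwrap();
    // The two ranges cover all of `u8`, so no witness of non-exhaustiveness is reported.
    assert!(report.non_exhaustiveness_witnesses.is_empty());
}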
Commit 8d12621181
@@ -4440,6 +4440,8 @@ dependencies = [
  "rustc_target",
  "smallvec",
  "tracing",
+ "tracing-subscriber",
+ "tracing-tree",
 ]

 [[package]]
@@ -22,6 +22,10 @@ smallvec = { version = "1.8.1", features = ["union"] }
 tracing = "0.1"
 # tidy-alphabetical-end

+[dev-dependencies]
+tracing-subscriber = { version = "0.3.3", default-features = false, features = ["fmt", "env-filter", "ansi"] }
+tracing-tree = "0.2.0"
+
 [features]
 default = ["rustc"]
 rustc = [
@@ -819,6 +819,81 @@ impl<Cx: PatCx> Constructor<Cx> {
             }
         })
     }
+
+    pub(crate) fn fmt_fields(
+        &self,
+        f: &mut fmt::Formatter<'_>,
+        ty: &Cx::Ty,
+        mut fields: impl Iterator<Item = impl fmt::Debug>,
+    ) -> fmt::Result {
+        let mut first = true;
+        let mut start_or_continue = |s| {
+            if first {
+                first = false;
+                ""
+            } else {
+                s
+            }
+        };
+        let mut start_or_comma = || start_or_continue(", ");
+
+        match self {
+            Struct | Variant(_) | UnionField => {
+                Cx::write_variant_name(f, self, ty)?;
+                // Without `cx`, we can't know which field corresponds to which, so we can't
+                // get the names of the fields. Instead we just display everything as a tuple
+                // struct, which should be good enough.
+                write!(f, "(")?;
+                for p in fields {
+                    write!(f, "{}{:?}", start_or_comma(), p)?;
+                }
+                write!(f, ")")?;
+            }
+            // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should
+            // be careful to detect strings here. However a string literal pattern will never
+            // be reported as a non-exhaustiveness witness, so we can ignore this issue.
+            Ref => {
+                write!(f, "&{:?}", &fields.next().unwrap())?;
+            }
+            Slice(slice) => {
+                write!(f, "[")?;
+                match slice.kind {
+                    SliceKind::FixedLen(_) => {
+                        for p in fields {
+                            write!(f, "{}{:?}", start_or_comma(), p)?;
+                        }
+                    }
+                    SliceKind::VarLen(prefix_len, _) => {
+                        for p in fields.by_ref().take(prefix_len) {
+                            write!(f, "{}{:?}", start_or_comma(), p)?;
+                        }
+                        write!(f, "{}..", start_or_comma())?;
+                        for p in fields {
+                            write!(f, "{}{:?}", start_or_comma(), p)?;
+                        }
+                    }
+                }
+                write!(f, "]")?;
+            }
+            Bool(b) => write!(f, "{b}")?,
+            // Best-effort, will render signed ranges incorrectly
+            IntRange(range) => write!(f, "{range:?}")?,
+            F32Range(lo, hi, end) => write!(f, "{lo}{end}{hi}")?,
+            F64Range(lo, hi, end) => write!(f, "{lo}{end}{hi}")?,
+            Str(value) => write!(f, "{value:?}")?,
+            Opaque(..) => write!(f, "<constant pattern>")?,
+            Or => {
+                for pat in fields {
+                    write!(f, "{}{:?}", start_or_continue(" | "), pat)?;
+                }
+            }
+            Never => write!(f, "!")?,
+            Wildcard | Missing | NonExhaustive | Hidden | PrivateUninhabited => {
+                write!(f, "_ : {:?}", ty)?
+            }
+        }
+        Ok(())
+    }
 }

 #[derive(Debug, Clone, Copy)]
@@ -49,6 +49,12 @@ pub mod index {
         }
     }

+    impl<V> FromIterator<V> for IdxContainer<usize, V> {
+        fn from_iter<T: IntoIterator<Item = V>>(iter: T) -> Self {
+            Self(iter.into_iter().enumerate().collect())
+        }
+    }
+
     #[derive(Debug)]
     pub struct IdxSet<T>(pub rustc_hash::FxHashSet<T>);
     impl<T: Idx> IdxSet<T> {
@@ -120,7 +126,8 @@ pub trait PatCx: Sized + fmt::Debug {
     /// `DeconstructedPat`. Only invoked when `pat.ctor()` is `Struct | Variant(_) | UnionField`.
     fn write_variant_name(
         f: &mut fmt::Formatter<'_>,
-        pat: &crate::pat::DeconstructedPat<Self>,
+        ctor: &crate::constructor::Constructor<Self>,
+        ty: &Self::Ty,
     ) -> fmt::Result;

     /// Raise a bug.
@@ -138,81 +138,11 @@ impl<Cx: PatCx> DeconstructedPat<Cx> {
 /// This is best effort and not good enough for a `Display` impl.
 impl<Cx: PatCx> fmt::Debug for DeconstructedPat<Cx> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let pat = self;
-        let mut first = true;
-        let mut start_or_continue = |s| {
-            if first {
-                first = false;
-                ""
-            } else {
-                s
-            }
-        };
-        let mut start_or_comma = || start_or_continue(", ");
-
         let mut fields: Vec<_> = (0..self.arity).map(|_| PatOrWild::Wild).collect();
         for ipat in self.iter_fields() {
             fields[ipat.idx] = PatOrWild::Pat(&ipat.pat);
         }
-
-        match pat.ctor() {
-            Struct | Variant(_) | UnionField => {
-                Cx::write_variant_name(f, pat)?;
-                // Without `cx`, we can't know which field corresponds to which, so we can't
-                // get the names of the fields. Instead we just display everything as a tuple
-                // struct, which should be good enough.
-                write!(f, "(")?;
-                for p in fields {
-                    write!(f, "{}", start_or_comma())?;
-                    write!(f, "{p:?}")?;
-                }
-                write!(f, ")")
-            }
-            // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should
-            // be careful to detect strings here. However a string literal pattern will never
-            // be reported as a non-exhaustiveness witness, so we can ignore this issue.
-            Ref => {
-                write!(f, "&{:?}", &fields[0])
-            }
-            Slice(slice) => {
-                write!(f, "[")?;
-                match slice.kind {
-                    SliceKind::FixedLen(_) => {
-                        for p in fields {
-                            write!(f, "{}{:?}", start_or_comma(), p)?;
-                        }
-                    }
-                    SliceKind::VarLen(prefix_len, _) => {
-                        for p in &fields[..prefix_len] {
-                            write!(f, "{}{:?}", start_or_comma(), p)?;
-                        }
-                        write!(f, "{}", start_or_comma())?;
-                        write!(f, "..")?;
-                        for p in &fields[prefix_len..] {
-                            write!(f, "{}{:?}", start_or_comma(), p)?;
-                        }
-                    }
-                }
-                write!(f, "]")
-            }
-            Bool(b) => write!(f, "{b}"),
-            // Best-effort, will render signed ranges incorrectly
-            IntRange(range) => write!(f, "{range:?}"),
-            F32Range(lo, hi, end) => write!(f, "{lo}{end}{hi}"),
-            F64Range(lo, hi, end) => write!(f, "{lo}{end}{hi}"),
-            Str(value) => write!(f, "{value:?}"),
-            Opaque(..) => write!(f, "<constant pattern>"),
-            Or => {
-                for pat in fields {
-                    write!(f, "{}{:?}", start_or_continue(" | "), pat)?;
-                }
-                Ok(())
-            }
-            Never => write!(f, "!"),
-            Wildcard | Missing | NonExhaustive | Hidden | PrivateUninhabited => {
-                write!(f, "_ : {:?}", pat.ty())
-            }
-        }
+        self.ctor().fmt_fields(f, self.ty(), fields.into_iter())
     }
 }

@@ -295,7 +225,6 @@ impl<'p, Cx: PatCx> fmt::Debug for PatOrWild<'p, Cx> {

 /// Same idea as `DeconstructedPat`, except this is a fictitious pattern built up for diagnostics
 /// purposes. As such they don't use interning and can be cloned.
-#[derive(Debug)]
 pub struct WitnessPat<Cx: PatCx> {
     ctor: Constructor<Cx>,
     pub(crate) fields: Vec<WitnessPat<Cx>>,
@@ -353,3 +282,10 @@ impl<Cx: PatCx> WitnessPat<Cx> {
         self.fields.iter()
     }
 }
+
+/// This is best effort and not good enough for a `Display` impl.
+impl<Cx: PatCx> fmt::Debug for WitnessPat<Cx> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.ctor().fmt_fields(f, self.ty(), self.fields.iter())
+    }
+}
@@ -880,13 +880,14 @@ impl<'p, 'tcx: 'p> PatCx for RustcPatCtxt<'p, 'tcx> {

     fn write_variant_name(
         f: &mut fmt::Formatter<'_>,
-        pat: &crate::pat::DeconstructedPat<Self>,
+        ctor: &crate::constructor::Constructor<Self>,
+        ty: &Self::Ty,
     ) -> fmt::Result {
-        if let ty::Adt(adt, _) = pat.ty().kind() {
+        if let ty::Adt(adt, _) = ty.kind() {
             if adt.is_box() {
                 write!(f, "Box")?
             } else {
-                let variant = adt.variant(Self::variant_index_for_adt(pat.ctor(), *adt));
+                let variant = adt.variant(Self::variant_index_for_adt(ctor, *adt));
                 write!(f, "{}", variant.name)?;
             }
         }
@@ -1042,7 +1042,7 @@ struct MatrixRow<'p, Cx: PatCx> {
     is_under_guard: bool,
     /// When we specialize, we remember which row of the original matrix produced a given row of the
     /// specialized matrix. When we unspecialize, we use this to propagate usefulness back up the
-    /// callstack.
+    /// callstack. On creation, this stores the index of the original match arm.
     parent_row: usize,
     /// False when the matrix is just built. This is set to `true` by
     /// [`compute_exhaustiveness_and_usefulness`] if the arm is found to be useful.
@@ -1163,10 +1163,10 @@ impl<'p, Cx: PatCx> Matrix<'p, Cx> {
             place_info: smallvec![place_info],
             wildcard_row_is_relevant: true,
         };
-        for (row_id, arm) in arms.iter().enumerate() {
+        for (arm_id, arm) in arms.iter().enumerate() {
             let v = MatrixRow {
                 pats: PatStack::from_pattern(arm.pat),
-                parent_row: row_id, // dummy, we don't read it
+                parent_row: arm_id,
                 is_under_guard: arm.has_guard,
                 useful: false,
                 intersects: BitSet::new_empty(0), // Initialized in `Matrix::expand_and_push`.
@@ -1738,6 +1738,9 @@ pub struct UsefulnessReport<'p, Cx: PatCx> {
     /// If the match is exhaustive, this is empty. If not, this contains witnesses for the lack of
     /// exhaustiveness.
     pub non_exhaustiveness_witnesses: Vec<WitnessPat<Cx>>,
+    /// For each arm, a set of indices of arms above it that have non-empty intersection, i.e. there
+    /// is a value matched by both arms. This may miss real intersections.
+    pub arm_intersections: Vec<BitSet<usize>>,
 }

 /// Computes whether a match is exhaustive and which of its arms are useful.
@@ -1769,5 +1772,19 @@ pub fn compute_match_usefulness<'p, Cx: PatCx>(
         })
         .collect();

-    Ok(UsefulnessReport { arm_usefulness, non_exhaustiveness_witnesses })
+    let mut arm_intersections: Vec<_> =
+        arms.iter().enumerate().map(|(i, _)| BitSet::new_empty(i)).collect();
+    for row in matrix.rows() {
+        let arm_id = row.parent_row;
+        for intersection in row.intersects.iter() {
+            // Convert the matrix row ids into arm ids (they can differ because we expand or-patterns).
+            let arm_intersection = matrix.rows[intersection].parent_row;
+            // Note: self-intersection can happen with or-patterns.
+            if arm_intersection != arm_id {
+                arm_intersections[arm_id].insert(arm_intersection);
+            }
+        }
+    }
+
+    Ok(UsefulnessReport { arm_usefulness, non_exhaustiveness_witnesses, arm_intersections })
 }
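For downstream readers of the report, each `BitSet` in the new `arm_intersections` field can be flattened into a plain list of arm indices; the intersection test added below does exactly this. A minimal sketch (the helper name is mine, not part of this PR):

use rustc_pattern_analysis::{usefulness::UsefulnessReport, PatCx};

/// Flatten the per-arm bitsets into `Vec<usize>` index lists, as the intersection tests do.
/// Each entry only lists arms above the given arm, and the analysis may miss real intersections.
fn arm_intersections_as_indices<'p, Cx: PatCx>(report: &UsefulnessReport<'p, Cx>) -> Vec<Vec<usize>> {
    report.arm_intersections.iter().map(|bitset| bitset.iter().collect()).collect()
}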
compiler/rustc_pattern_analysis/tests/common/mod.rs (new file, 315 lines)
@@ -0,0 +1,315 @@
use rustc_pattern_analysis::{
    constructor::{
        Constructor, ConstructorSet, IntRange, MaybeInfiniteInt, RangeEnd, VariantVisibility,
    },
    usefulness::{PlaceValidity, UsefulnessReport},
    Captures, MatchArm, PatCx, PrivateUninhabitedField,
};

/// Sets up `tracing` for easier debugging. Tries to look like the `rustc` setup.
pub fn init_tracing() {
    use tracing_subscriber::layer::SubscriberExt;
    use tracing_subscriber::util::SubscriberInitExt;
    use tracing_subscriber::Layer;
    let _ = tracing_tree::HierarchicalLayer::default()
        .with_writer(std::io::stderr)
        .with_indent_lines(true)
        .with_ansi(true)
        .with_targets(true)
        .with_indent_amount(2)
        .with_subscriber(
            tracing_subscriber::Registry::default()
                .with(tracing_subscriber::EnvFilter::from_default_env()),
        )
        .try_init();
}

/// A simple set of types.
#[allow(dead_code)]
#[derive(Debug, Copy, Clone)]
pub enum Ty {
    /// Booleans
    Bool,
    /// 8-bit unsigned integers
    U8,
    /// Tuples.
    Tuple(&'static [Ty]),
    /// A struct with `arity` fields of type `ty`.
    BigStruct { arity: usize, ty: &'static Ty },
    /// An enum with `arity` variants of type `ty`.
    BigEnum { arity: usize, ty: &'static Ty },
}

/// The important logic.
impl Ty {
    pub fn sub_tys(&self, ctor: &Constructor<Cx>) -> Vec<Self> {
        use Constructor::*;
        match (ctor, *self) {
            (Struct, Ty::Tuple(tys)) => tys.iter().copied().collect(),
            (Struct, Ty::BigStruct { arity, ty }) => (0..arity).map(|_| *ty).collect(),
            (Variant(_), Ty::BigEnum { ty, .. }) => vec![*ty],
            (Bool(..) | IntRange(..) | NonExhaustive | Missing | Wildcard, _) => vec![],
            _ => panic!("Unexpected ctor {ctor:?} for type {self:?}"),
        }
    }

    pub fn ctor_set(&self) -> ConstructorSet<Cx> {
        match *self {
            Ty::Bool => ConstructorSet::Bool,
            Ty::U8 => ConstructorSet::Integers {
                range_1: IntRange::from_range(
                    MaybeInfiniteInt::new_finite_uint(0),
                    MaybeInfiniteInt::new_finite_uint(255),
                    RangeEnd::Included,
                ),
                range_2: None,
            },
            Ty::Tuple(..) | Ty::BigStruct { .. } => ConstructorSet::Struct { empty: false },
            Ty::BigEnum { arity, .. } => ConstructorSet::Variants {
                variants: (0..arity).map(|_| VariantVisibility::Visible).collect(),
                non_exhaustive: false,
            },
        }
    }

    pub fn write_variant_name(
        &self,
        f: &mut std::fmt::Formatter<'_>,
        ctor: &Constructor<Cx>,
    ) -> std::fmt::Result {
        match (*self, ctor) {
            (Ty::Tuple(..), _) => Ok(()),
            (Ty::BigStruct { .. }, _) => write!(f, "BigStruct"),
            (Ty::BigEnum { .. }, Constructor::Variant(i)) => write!(f, "BigEnum::Variant{i}"),
            _ => write!(f, "{:?}::{:?}", self, ctor),
        }
    }
}

/// Compute usefulness in our simple context (and set up tracing for easier debugging).
pub fn compute_match_usefulness<'p>(
    arms: &[MatchArm<'p, Cx>],
    ty: Ty,
    scrut_validity: PlaceValidity,
    complexity_limit: Option<usize>,
) -> Result<UsefulnessReport<'p, Cx>, ()> {
    init_tracing();
    rustc_pattern_analysis::usefulness::compute_match_usefulness(
        &Cx,
        arms,
        ty,
        scrut_validity,
        complexity_limit,
    )
}

#[derive(Debug)]
pub struct Cx;

/// The context for pattern analysis. Forwards anything interesting to `Ty` methods.
impl PatCx for Cx {
    type Ty = Ty;
    type Error = ();
    type VariantIdx = usize;
    type StrLit = ();
    type ArmData = ();
    type PatData = ();

    fn is_exhaustive_patterns_feature_on(&self) -> bool {
        false
    }

    fn is_min_exhaustive_patterns_feature_on(&self) -> bool {
        false
    }

    fn ctor_arity(&self, ctor: &Constructor<Self>, ty: &Self::Ty) -> usize {
        ty.sub_tys(ctor).len()
    }

    fn ctor_sub_tys<'a>(
        &'a self,
        ctor: &'a Constructor<Self>,
        ty: &'a Self::Ty,
    ) -> impl Iterator<Item = (Self::Ty, PrivateUninhabitedField)> + ExactSizeIterator + Captures<'a>
    {
        ty.sub_tys(ctor).into_iter().map(|ty| (ty, PrivateUninhabitedField(false)))
    }

    fn ctors_for_ty(&self, ty: &Self::Ty) -> Result<ConstructorSet<Self>, Self::Error> {
        Ok(ty.ctor_set())
    }

    fn write_variant_name(
        f: &mut std::fmt::Formatter<'_>,
        ctor: &Constructor<Self>,
        ty: &Self::Ty,
    ) -> std::fmt::Result {
        ty.write_variant_name(f, ctor)
    }

    fn bug(&self, fmt: std::fmt::Arguments<'_>) -> Self::Error {
        panic!("{}", fmt)
    }

    /// Abort when reaching the complexity limit. This is what we'll check in tests.
    fn complexity_exceeded(&self) -> Result<(), Self::Error> {
        Err(())
    }
}

/// Construct a single pattern; see `pats!()`.
#[allow(unused_macros)]
macro_rules! pat {
    ($($rest:tt)*) => {{
        let mut vec = pats!($($rest)*);
        vec.pop().unwrap()
    }};
}

/// A macro to construct patterns. Called like `pats!(type_expr; pattern, pattern, ..)` and returns
/// a `Vec<DeconstructedPat>`. A pattern can be nested and looks like `Constructor(pat, pat)` or
/// `Constructor { .i: pat, .j: pat }`, where `Constructor` is `Struct`, `Variant.i` (with index
/// `i`), as well as booleans and integer ranges.
///
/// The general structure of the macro is a tt-muncher with several stages identified with
/// `@something(args)`. The args are a key-value list (the keys ensure we don't mix the arguments
/// around) which is passed down and modified as needed. We then parse token-trees from
/// left-to-right. Non-trivial recursion happens when we parse the arguments to a pattern: we
/// recurse to parse the tokens inside `{..}`/`(..)`, and then we continue parsing anything that
/// follows.
macro_rules! pats {
    // Entrypoint
    // Parse `type; ..`
    ($ty:expr; $($rest:tt)*) => {{
        #[allow(unused_imports)]
        use rustc_pattern_analysis::{
            constructor::{Constructor, IntRange, MaybeInfiniteInt, RangeEnd},
            pat::DeconstructedPat,
        };
        let ty = $ty;
        // The heart of the macro is designed to push `IndexedPat`s into a `Vec`, so we work around
        // that.
        let sub_tys = ::std::iter::repeat(&ty);
        let mut vec = Vec::new();
        pats!(@ctor(vec:vec, sub_tys:sub_tys, idx:0) $($rest)*);
        vec.into_iter().map(|ipat| ipat.pat).collect::<Vec<_>>()
    }};

    // Parse `constructor ..`

    (@ctor($($args:tt)*) true $($rest:tt)*) => {{
        let ctor = Constructor::Bool(true);
        pats!(@pat($($args)*, ctor:ctor) $($rest)*)
    }};
    (@ctor($($args:tt)*) false $($rest:tt)*) => {{
        let ctor = Constructor::Bool(false);
        pats!(@pat($($args)*, ctor:ctor) $($rest)*)
    }};
    (@ctor($($args:tt)*) Struct $($rest:tt)*) => {{
        let ctor = Constructor::Struct;
        pats!(@pat($($args)*, ctor:ctor) $($rest)*)
    }};
    (@ctor($($args:tt)*) ( $($fields:tt)* ) $($rest:tt)*) => {{
        let ctor = Constructor::Struct; // tuples
        pats!(@pat($($args)*, ctor:ctor) ( $($fields)* ) $($rest)*)
    }};
    (@ctor($($args:tt)*) Variant.$variant:ident $($rest:tt)*) => {{
        let ctor = Constructor::Variant($variant);
        pats!(@pat($($args)*, ctor:ctor) $($rest)*)
    }};
    (@ctor($($args:tt)*) Variant.$variant:literal $($rest:tt)*) => {{
        let ctor = Constructor::Variant($variant);
        pats!(@pat($($args)*, ctor:ctor) $($rest)*)
    }};
    (@ctor($($args:tt)*) _ $($rest:tt)*) => {{
        let ctor = Constructor::Wildcard;
        pats!(@pat($($args)*, ctor:ctor) $($rest)*)
    }};

    // Integers and int ranges
    (@ctor($($args:tt)*) $($start:literal)?..$end:literal $($rest:tt)*) => {{
        let ctor = Constructor::IntRange(IntRange::from_range(
            pats!(@rangeboundary- $($start)?),
            pats!(@rangeboundary+ $end),
            RangeEnd::Excluded,
        ));
        pats!(@pat($($args)*, ctor:ctor) $($rest)*)
    }};
    (@ctor($($args:tt)*) $($start:literal)?.. $($rest:tt)*) => {{
        let ctor = Constructor::IntRange(IntRange::from_range(
            pats!(@rangeboundary- $($start)?),
            pats!(@rangeboundary+),
            RangeEnd::Excluded,
        ));
        pats!(@pat($($args)*, ctor:ctor) $($rest)*)
    }};
    (@ctor($($args:tt)*) $($start:literal)?..=$end:literal $($rest:tt)*) => {{
        let ctor = Constructor::IntRange(IntRange::from_range(
            pats!(@rangeboundary- $($start)?),
            pats!(@rangeboundary+ $end),
            RangeEnd::Included,
        ));
        pats!(@pat($($args)*, ctor:ctor) $($rest)*)
    }};
    (@ctor($($args:tt)*) $int:literal $($rest:tt)*) => {{
        let ctor = Constructor::IntRange(IntRange::from_range(
            pats!(@rangeboundary- $int),
            pats!(@rangeboundary+ $int),
            RangeEnd::Included,
        ));
        pats!(@pat($($args)*, ctor:ctor) $($rest)*)
    }};
    // Utility to manage range boundaries.
    (@rangeboundary $sign:tt $int:literal) => { MaybeInfiniteInt::new_finite_uint($int) };
    (@rangeboundary -) => { MaybeInfiniteInt::NegInfinity };
    (@rangeboundary +) => { MaybeInfiniteInt::PosInfinity };

    // Parse subfields: `(..)` or `{..}`

    // Constructor with no fields, e.g. `bool` or `Variant.1`.
    (@pat($($args:tt)*) $(,)?) => {
        pats!(@pat($($args)*) {})
    };
    (@pat($($args:tt)*) , $($rest:tt)*) => {
        pats!(@pat($($args)*) {}, $($rest)*)
    };
    // `(..)` and `{..}` are treated the same.
    (@pat($($args:tt)*) ( $($subpat:tt)* ) $($rest:tt)*) => {{
        pats!(@pat($($args)*) { $($subpat)* } $($rest)*)
    }};
    (@pat(vec:$vec:expr, sub_tys:$sub_tys:expr, idx:$idx:expr, ctor:$ctor:expr) { $($fields:tt)* } $($rest:tt)*) => {{
        let sub_tys = $sub_tys;
        let index = $idx;
        // Silly dance to work with both a vec and `iter::repeat()`.
        let ty = *(&sub_tys).clone().into_iter().nth(index).unwrap();
        let ctor = $ctor;
        let ctor_sub_tys = &ty.sub_tys(&ctor);
        #[allow(unused_mut)]
        let mut fields = Vec::new();
        // Parse subpatterns (note the leading comma).
        pats!(@fields(idx:0, vec:fields, sub_tys:ctor_sub_tys) ,$($fields)*);
        let arity = ctor_sub_tys.len();
        let pat = DeconstructedPat::new(ctor, fields, arity, ty, ()).at_index(index);
        $vec.push(pat);

        // Continue parsing further patterns.
        pats!(@fields(idx:index+1, vec:$vec, sub_tys:sub_tys) $($rest)*);
    }};

    // Parse fields one by one.

    // No fields left.
    (@fields($($args:tt)*) $(,)?) => {};
    // `.i: pat` sets the current index to `i`.
    (@fields(idx:$_idx:expr, $($args:tt)*) , .$idx:literal : $($rest:tt)*) => {{
        pats!(@ctor($($args)*, idx:$idx) $($rest)*);
    }};
    (@fields(idx:$_idx:expr, $($args:tt)*) , .$idx:ident : $($rest:tt)*) => {{
        pats!(@ctor($($args)*, idx:$idx) $($rest)*);
    }};
    // Field without an explicit index; we use the current index which gets incremented above.
    (@fields(idx:$idx:expr, $($args:tt)*) , $($rest:tt)*) => {{
        pats!(@ctor($($args)*, idx:$idx) $($rest)*);
    }};
}
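The macro's surface syntax is easiest to see from concrete invocations. The following sketch simply collects the forms used by the tests below, assuming the same file preamble as those tests (`use common::*;` and `#[macro_use] mod common;`); the function name is illustrative only.

#[allow(dead_code)]
fn pats_macro_tour() {
    // Integer ranges over `u8`, as in the intersection tests.
    let _ranges = pats!(Ty::U8;
        0..=100,
        100..,
    );
    // Tuple patterns are written positionally; `_` is a wildcard.
    let _tuples = pats!(Ty::Tuple(&[Ty::Bool; 2]);
        (true, _),
        (_, true),
    );
    // `.i: pat` selects field `i` of a struct; the remaining fields stay wildcards.
    let big_struct = Ty::BigStruct { arity: 4, ty: &Ty::Bool };
    for i in 0..4 {
        let _one = pat!(big_struct; Struct { .i: true });
    }
}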
compiler/rustc_pattern_analysis/tests/complexity.rs (new file, 109 lines)
@@ -0,0 +1,109 @@
//! Test the pattern complexity limit.
use common::*;
use rustc_pattern_analysis::{pat::DeconstructedPat, usefulness::PlaceValidity, MatchArm};

#[macro_use]
mod common;

/// Analyze a match made of these patterns. Ignore the report; we only care whether we exceeded the
/// limit or not.
fn check(patterns: &[DeconstructedPat<Cx>], complexity_limit: usize) -> Result<(), ()> {
    let ty = *patterns[0].ty();
    let arms: Vec<_> =
        patterns.iter().map(|pat| MatchArm { pat, has_guard: false, arm_data: () }).collect();
    compute_match_usefulness(arms.as_slice(), ty, PlaceValidity::ValidOnly, Some(complexity_limit))
        .map(|_report| ())
}

/// Asserts that analyzing this match takes exactly `complexity` steps.
#[track_caller]
fn assert_complexity(patterns: Vec<DeconstructedPat<Cx>>, complexity: usize) {
    assert!(check(&patterns, complexity).is_ok());
    assert!(check(&patterns, complexity - 1).is_err());
}

/// Construct a match like:
/// ```ignore(illustrative)
/// match ... {
///     BigStruct { field01: true, .. } => {}
///     BigStruct { field02: true, .. } => {}
///     BigStruct { field03: true, .. } => {}
///     BigStruct { field04: true, .. } => {}
///     ...
///     _ => {}
/// }
/// ```
fn diagonal_match(arity: usize) -> Vec<DeconstructedPat<Cx>> {
    let struct_ty = Ty::BigStruct { arity, ty: &Ty::Bool };
    let mut patterns = vec![];
    for i in 0..arity {
        patterns.push(pat!(struct_ty; Struct { .i: true }));
    }
    patterns.push(pat!(struct_ty; _));
    patterns
}

/// Construct a match like:
/// ```ignore(illustrative)
/// match ... {
///     BigStruct { field01: true, .. } => {}
///     BigStruct { field02: true, .. } => {}
///     BigStruct { field03: true, .. } => {}
///     BigStruct { field04: true, .. } => {}
///     ...
///     BigStruct { field01: false, .. } => {}
///     BigStruct { field02: false, .. } => {}
///     BigStruct { field03: false, .. } => {}
///     BigStruct { field04: false, .. } => {}
///     ...
///     _ => {}
/// }
/// ```
fn diagonal_exponential_match(arity: usize) -> Vec<DeconstructedPat<Cx>> {
    let struct_ty = Ty::BigStruct { arity, ty: &Ty::Bool };
    let mut patterns = vec![];
    for i in 0..arity {
        patterns.push(pat!(struct_ty; Struct { .i: true }));
    }
    for i in 0..arity {
        patterns.push(pat!(struct_ty; Struct { .i: false }));
    }
    patterns.push(pat!(struct_ty; _));
    patterns
}

#[test]
fn test_diagonal_struct_match() {
    // These cases are nicely linear: we check `arity` patterns with exactly one `true`, matching
    // in 2 branches each, and a final pattern with all `false`, matching only the `_` branch.
    assert_complexity(diagonal_match(20), 41);
    assert_complexity(diagonal_match(30), 61);
    // This case goes exponential.
    assert!(check(&diagonal_exponential_match(10), 10000).is_err());
}

/// Construct a match like:
/// ```ignore(illustrative)
/// match ... {
///     BigEnum::Variant1(_) => {}
///     BigEnum::Variant2(_) => {}
///     BigEnum::Variant3(_) => {}
///     ...
///     _ => {}
/// }
/// ```
fn big_enum(arity: usize) -> Vec<DeconstructedPat<Cx>> {
    let enum_ty = Ty::BigEnum { arity, ty: &Ty::Bool };
    let mut patterns = vec![];
    for i in 0..arity {
        patterns.push(pat!(enum_ty; Variant.i));
    }
    patterns.push(pat!(enum_ty; _));
    patterns
}

#[test]
fn test_big_enum() {
    // We try 2 branches per variant.
    assert_complexity(big_enum(20), 40);
}
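As a quick sanity check on the numbers asserted in `test_diagonal_struct_match`: following the comment in that test, each of the `arity` single-`true` patterns is charged to 2 branches and the final wildcard to 1, so the expected complexity is 2 * arity + 1, i.e. 41 for `diagonal_match(20)` and 61 for `diagonal_match(30)`, matching the assertions.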
compiler/rustc_pattern_analysis/tests/exhaustiveness.rs (new file, 77 lines)
@@ -0,0 +1,77 @@
//! Test exhaustiveness checking.
use common::*;
use rustc_pattern_analysis::{
    pat::{DeconstructedPat, WitnessPat},
    usefulness::PlaceValidity,
    MatchArm,
};

#[macro_use]
mod common;

/// Analyze a match made of these patterns.
fn check(patterns: Vec<DeconstructedPat<Cx>>) -> Vec<WitnessPat<Cx>> {
    let ty = *patterns[0].ty();
    let arms: Vec<_> =
        patterns.iter().map(|pat| MatchArm { pat, has_guard: false, arm_data: () }).collect();
    let report =
        compute_match_usefulness(arms.as_slice(), ty, PlaceValidity::ValidOnly, None).unwrap();
    report.non_exhaustiveness_witnesses
}

#[track_caller]
fn assert_exhaustive(patterns: Vec<DeconstructedPat<Cx>>) {
    let witnesses = check(patterns);
    if !witnesses.is_empty() {
        panic!("non-exhaustive match: missing {witnesses:?}");
    }
}

#[track_caller]
fn assert_non_exhaustive(patterns: Vec<DeconstructedPat<Cx>>) {
    let witnesses = check(patterns);
    assert!(!witnesses.is_empty())
}

#[test]
fn test_int_ranges() {
    let ty = Ty::U8;
    assert_exhaustive(pats!(ty;
        0..=255,
    ));
    assert_exhaustive(pats!(ty;
        0..,
    ));
    assert_non_exhaustive(pats!(ty;
        0..255,
    ));
    assert_exhaustive(pats!(ty;
        0..255,
        255,
    ));
    assert_exhaustive(pats!(ty;
        ..10,
        10..
    ));
}

#[test]
fn test_nested() {
    let ty = Ty::BigStruct { arity: 2, ty: &Ty::BigEnum { arity: 2, ty: &Ty::Bool } };
    assert_non_exhaustive(pats!(ty;
        Struct(Variant.0, _),
    ));
    assert_exhaustive(pats!(ty;
        Struct(Variant.0, _),
        Struct(Variant.1, _),
    ));
    assert_non_exhaustive(pats!(ty;
        Struct(Variant.0, _),
        Struct(_, Variant.0),
    ));
    assert_exhaustive(pats!(ty;
        Struct(Variant.0, _),
        Struct(_, Variant.0),
        Struct(Variant.1, Variant.1),
    ));
}
compiler/rustc_pattern_analysis/tests/intersection.rs (new file, 69 lines)
@@ -0,0 +1,69 @@
//! Test the computation of arm intersections.
use common::*;
use rustc_pattern_analysis::{pat::DeconstructedPat, usefulness::PlaceValidity, MatchArm};

#[macro_use]
mod common;

/// Analyze a match made of these patterns and return the computed arm intersections.
fn check(patterns: Vec<DeconstructedPat<Cx>>) -> Vec<Vec<usize>> {
    let ty = *patterns[0].ty();
    let arms: Vec<_> =
        patterns.iter().map(|pat| MatchArm { pat, has_guard: false, arm_data: () }).collect();
    let report =
        compute_match_usefulness(arms.as_slice(), ty, PlaceValidity::ValidOnly, None).unwrap();
    report.arm_intersections.into_iter().map(|bitset| bitset.iter().collect()).collect()
}

#[track_caller]
fn assert_intersects(patterns: Vec<DeconstructedPat<Cx>>, intersects: &[&[usize]]) {
    let computed_intersects = check(patterns);
    assert_eq!(computed_intersects, intersects);
}

#[test]
fn test_int_ranges() {
    let ty = Ty::U8;
    assert_intersects(
        pats!(ty;
            0..=100,
            100..,
        ),
        &[&[], &[0]],
    );
    assert_intersects(
        pats!(ty;
            0..=101,
            100..,
        ),
        &[&[], &[0]],
    );
    assert_intersects(
        pats!(ty;
            0..100,
            100..,
        ),
        &[&[], &[]],
    );
}

#[test]
fn test_nested() {
    let ty = Ty::Tuple(&[Ty::Bool; 2]);
    assert_intersects(
        pats!(ty;
            (true, true),
            (true, _),
            (_, true),
        ),
        &[&[], &[0], &[0, 1]],
    );
    // Here we shortcut because `(true, true)` is irrelevant, so we fail to detect the intersection.
    assert_intersects(
        pats!(ty;
            (true, _),
            (_, true),
        ),
        &[&[], &[]],
    );
}