// compiler/rustc_mir_dataflow/src/value_analysis.rs
//! This module provides a framework on top of the normal MIR dataflow framework to simplify the
//! implementation of analyses that track information about the values stored in certain places.
//! We are using the term "place" here to refer to a `mir::Place` (a place expression) instead of
//! an `interpret::Place` (a memory location).
//!
//! The default methods of [`ValueAnalysis`] (prefixed with `super_` instead of `handle_`)
//! provide some behavior that should be valid for all abstract domains that are based only on the
//! value stored in a certain place. On top of these default rules, an implementation should
//! override some of the `handle_` methods. For an example, see `ConstAnalysis`.
//!
//! An implementation must also provide a [`Map`]. Before the analysis begins, all places that
//! should be tracked during the analysis must be registered. During the analysis, no new places
//! can be registered. The [`State`] can be queried to retrieve the abstract value stored for a
//! certain place by passing the map.
//!
//! This framework is currently experimental. Originally, it supported shared references and enum
//! variants. However, it was discovered that both of these were unsound, and especially references
//! had subtle but serious issues. In the future, they could be added back in, but we should clarify
//! the rules for optimizations that rely on the aliasing model first.
//!
//! # Notes
//!
//! - The bottom state denotes uninitialized memory. Because we are only doing a sound approximation
//!   of the actual execution, we can also use this state for places where access would be UB.
//!
//! - The assignment logic in `State::insert_place_idx` assumes that the places are non-overlapping,
//!   or identical. Note that this refers to place expressions, not memory locations.
//!
//! - Currently, places that have their reference taken cannot be tracked. Although this would be
//!   possible, it has to rely on some aliasing model, which we are not ready to commit to yet.
//!   Because of that, we can assume that the only way to change the value behind a tracked place is
//!   by direct assignment.
use std::collections::VecDeque;
use std::fmt::{Debug, Formatter};
use std::ops::Range;

use rustc_data_structures::fx::{FxHashMap, StdEntry};
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_index::bit_set::BitSet;
use rustc_index::IndexVec;
use rustc_middle::bug;
use rustc_middle::mir::visit::{MutatingUseContext, PlaceContext, Visitor};
use rustc_middle::mir::*;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_target::abi::{FieldIdx, VariantIdx};
use tracing::debug;

use crate::lattice::{HasBottom, HasTop};
use crate::{
    fmt::DebugWithContext, Analysis, AnalysisDomain, JoinSemiLattice, SwitchIntEdgeEffects,
};
/// The core trait of this framework: a value analysis that tracks one abstract value per
/// registered place.
///
/// Implementors override the `handle_*` methods; the corresponding `super_*` methods provide the
/// default behavior that is valid for any abstract domain based only on stored values.
pub trait ValueAnalysis<'tcx> {
    /// For each place of interest, the analysis tracks a value of the given type.
    type Value: Clone + JoinSemiLattice + HasBottom + HasTop;

    const NAME: &'static str;

    /// The map of tracked places; must be fully built before the analysis starts.
    fn map(&self) -> &Map;

    fn handle_statement(&self, statement: &Statement<'tcx>, state: &mut State<Self::Value>) {
        self.super_statement(statement, state)
    }

    fn super_statement(&self, statement: &Statement<'tcx>, state: &mut State<Self::Value>) {
        match &statement.kind {
            StatementKind::Assign(box (place, rvalue)) => {
                self.handle_assign(*place, rvalue, state);
            }
            StatementKind::SetDiscriminant { box place, variant_index } => {
                self.handle_set_discriminant(*place, *variant_index, state);
            }
            StatementKind::Intrinsic(box intrinsic) => {
                self.handle_intrinsic(intrinsic, state);
            }
            StatementKind::StorageLive(local) | StatementKind::StorageDead(local) => {
                // StorageLive leaves the local in an uninitialized state.
                // StorageDead makes it UB to access the local afterwards.
                state.flood_with(Place::from(*local).as_ref(), self.map(), Self::Value::BOTTOM);
            }
            StatementKind::Deinit(box place) => {
                // Deinit makes the place uninitialized.
                state.flood_with(place.as_ref(), self.map(), Self::Value::BOTTOM);
            }
            StatementKind::Retag(..) => {
                // We don't track references.
            }
            StatementKind::ConstEvalCounter
            | StatementKind::Nop
            | StatementKind::FakeRead(..)
            | StatementKind::PlaceMention(..)
            | StatementKind::Coverage(..)
            | StatementKind::AscribeUserType(..) => (),
        }
    }

    fn handle_set_discriminant(
        &self,
        place: Place<'tcx>,
        variant_index: VariantIdx,
        state: &mut State<Self::Value>,
    ) {
        self.super_set_discriminant(place, variant_index, state)
    }

    /// Default: forget everything known about the discriminant of `place`.
    fn super_set_discriminant(
        &self,
        place: Place<'tcx>,
        _variant_index: VariantIdx,
        state: &mut State<Self::Value>,
    ) {
        state.flood_discr(place.as_ref(), self.map());
    }

    fn handle_intrinsic(
        &self,
        intrinsic: &NonDivergingIntrinsic<'tcx>,
        state: &mut State<Self::Value>,
    ) {
        self.super_intrinsic(intrinsic, state);
    }

    fn super_intrinsic(
        &self,
        intrinsic: &NonDivergingIntrinsic<'tcx>,
        _state: &mut State<Self::Value>,
    ) {
        match intrinsic {
            NonDivergingIntrinsic::Assume(..) => {
                // Could use this, but ignoring it is sound.
            }
            NonDivergingIntrinsic::CopyNonOverlapping(CopyNonOverlapping {
                dst: _,
                src: _,
                count: _,
            }) => {
                // This statement represents `*dst = *src`, `count` times.
            }
        }
    }

    fn handle_assign(
        &self,
        target: Place<'tcx>,
        rvalue: &Rvalue<'tcx>,
        state: &mut State<Self::Value>,
    ) {
        self.super_assign(target, rvalue, state)
    }

    /// Default: evaluate the r-value, then store the result into `target`.
    fn super_assign(
        &self,
        target: Place<'tcx>,
        rvalue: &Rvalue<'tcx>,
        state: &mut State<Self::Value>,
    ) {
        let result = self.handle_rvalue(rvalue, state);
        state.assign(target.as_ref(), result, self.map());
    }

    fn handle_rvalue(
        &self,
        rvalue: &Rvalue<'tcx>,
        state: &mut State<Self::Value>,
    ) -> ValueOrPlace<Self::Value> {
        self.super_rvalue(rvalue, state)
    }

    fn super_rvalue(
        &self,
        rvalue: &Rvalue<'tcx>,
        state: &mut State<Self::Value>,
    ) -> ValueOrPlace<Self::Value> {
        match rvalue {
            Rvalue::Use(operand) => self.handle_operand(operand, state),
            Rvalue::CopyForDeref(place) => self.handle_operand(&Operand::Copy(*place), state),
            Rvalue::Ref(..) | Rvalue::AddressOf(..) => {
                // We don't track such places.
                ValueOrPlace::TOP
            }
            Rvalue::Repeat(..)
            | Rvalue::ThreadLocalRef(..)
            | Rvalue::Len(..)
            | Rvalue::Cast(..)
            | Rvalue::BinaryOp(..)
            | Rvalue::NullaryOp(..)
            | Rvalue::UnaryOp(..)
            | Rvalue::Discriminant(..)
            | Rvalue::Aggregate(..)
            | Rvalue::ShallowInitBox(..) => {
                // No modification is possible through these r-values.
                ValueOrPlace::TOP
            }
        }
    }

    fn handle_operand(
        &self,
        operand: &Operand<'tcx>,
        state: &mut State<Self::Value>,
    ) -> ValueOrPlace<Self::Value> {
        self.super_operand(operand, state)
    }

    fn super_operand(
        &self,
        operand: &Operand<'tcx>,
        state: &mut State<Self::Value>,
    ) -> ValueOrPlace<Self::Value> {
        match operand {
            Operand::Constant(box constant) => {
                ValueOrPlace::Value(self.handle_constant(constant, state))
            }
            Operand::Copy(place) | Operand::Move(place) => {
                // On move, we would ideally flood the place with bottom. But with the current
                // framework this is not possible (similar to `InterpCx::eval_operand`).
                self.map()
                    .find(place.as_ref())
                    .map(ValueOrPlace::Place)
                    .unwrap_or(ValueOrPlace::TOP)
            }
        }
    }

    fn handle_constant(
        &self,
        constant: &ConstOperand<'tcx>,
        state: &mut State<Self::Value>,
    ) -> Self::Value {
        self.super_constant(constant, state)
    }

    /// Default: constants are not interpreted, so their abstract value is unknown.
    fn super_constant(
        &self,
        _constant: &ConstOperand<'tcx>,
        _state: &mut State<Self::Value>,
    ) -> Self::Value {
        Self::Value::TOP
    }

    /// The effect of a successful function call return should not be
    /// applied here, see [`Analysis::apply_terminator_effect`].
    fn handle_terminator<'mir>(
        &self,
        terminator: &'mir Terminator<'tcx>,
        state: &mut State<Self::Value>,
    ) -> TerminatorEdges<'mir, 'tcx> {
        self.super_terminator(terminator, state)
    }

    fn super_terminator<'mir>(
        &self,
        terminator: &'mir Terminator<'tcx>,
        state: &mut State<Self::Value>,
    ) -> TerminatorEdges<'mir, 'tcx> {
        match &terminator.kind {
            TerminatorKind::Call { .. } | TerminatorKind::InlineAsm { .. } => {
                // Effect is applied by `handle_call_return`.
            }
            TerminatorKind::Drop { place, .. } => {
                // A dropped place is uninitialized afterwards.
                state.flood_with(place.as_ref(), self.map(), Self::Value::BOTTOM);
            }
            TerminatorKind::Yield { .. } => {
                // They would have an effect, but are not allowed in this phase.
                bug!("encountered disallowed terminator");
            }
            TerminatorKind::SwitchInt { discr, targets } => {
                return self.handle_switch_int(discr, targets, state);
            }
            TerminatorKind::TailCall { .. } => {
                // FIXME(explicit_tail_calls): determine if we need to do something here (probably not)
            }
            TerminatorKind::Goto { .. }
            | TerminatorKind::UnwindResume
            | TerminatorKind::UnwindTerminate(_)
            | TerminatorKind::Return
            | TerminatorKind::Unreachable
            | TerminatorKind::Assert { .. }
            | TerminatorKind::CoroutineDrop
            | TerminatorKind::FalseEdge { .. }
            | TerminatorKind::FalseUnwind { .. } => {
                // These terminators have no effect on the analysis.
            }
        }
        terminator.edges()
    }

    fn handle_call_return(
        &self,
        return_places: CallReturnPlaces<'_, 'tcx>,
        state: &mut State<Self::Value>,
    ) {
        self.super_call_return(return_places, state)
    }

    /// Default: the call could have written anything into the return places.
    fn super_call_return(
        &self,
        return_places: CallReturnPlaces<'_, 'tcx>,
        state: &mut State<Self::Value>,
    ) {
        return_places.for_each(|place| {
            state.flood(place.as_ref(), self.map());
        })
    }

    fn handle_switch_int<'mir>(
        &self,
        discr: &'mir Operand<'tcx>,
        targets: &'mir SwitchTargets,
        state: &mut State<Self::Value>,
    ) -> TerminatorEdges<'mir, 'tcx> {
        self.super_switch_int(discr, targets, state)
    }

    fn super_switch_int<'mir>(
        &self,
        discr: &'mir Operand<'tcx>,
        targets: &'mir SwitchTargets,
        _state: &mut State<Self::Value>,
    ) -> TerminatorEdges<'mir, 'tcx> {
        TerminatorEdges::SwitchInt { discr, targets }
    }

    /// Wraps `self` so it can be run by the generic dataflow framework.
    fn wrap(self) -> ValueAnalysisWrapper<Self>
    where
        Self: Sized,
    {
        ValueAnalysisWrapper(self)
    }
}
/// Adapter that lets a [`ValueAnalysis`] be used with the generic dataflow framework by
/// implementing [`AnalysisDomain`] and [`Analysis`] for it.
pub struct ValueAnalysisWrapper<T>(pub T);
impl<'tcx, T: ValueAnalysis<'tcx>> AnalysisDomain<'tcx> for ValueAnalysisWrapper<T> {
    type Domain = State<T::Value>;

    const NAME: &'static str = T::NAME;

    fn bottom_value(&self, _body: &Body<'tcx>) -> Self::Domain {
        // The bottom of the overall lattice is the unreachable state, not "all places ⊥".
        State::Unreachable
    }

    fn initialize_start_block(&self, body: &Body<'tcx>, state: &mut Self::Domain) {
        // The initial state maps all tracked places of argument projections to ⊤ and the rest to ⊥.
        assert!(matches!(state, State::Unreachable));
        *state = State::new_reachable();
        for arg in body.args_iter() {
            state.flood(PlaceRef { local: arg, projection: &[] }, self.0.map());
        }
    }
}
impl<'tcx, T> Analysis<'tcx> for ValueAnalysisWrapper<T>
where
    T: ValueAnalysis<'tcx>,
{
    fn apply_statement_effect(
        &mut self,
        state: &mut Self::Domain,
        statement: &Statement<'tcx>,
        _location: Location,
    ) {
        // All operations on unreachable states are ignored.
        if state.is_reachable() {
            self.0.handle_statement(statement, state);
        }
    }

    fn apply_terminator_effect<'mir>(
        &mut self,
        state: &mut Self::Domain,
        terminator: &'mir Terminator<'tcx>,
        _location: Location,
    ) -> TerminatorEdges<'mir, 'tcx> {
        if state.is_reachable() {
            self.0.handle_terminator(terminator, state)
        } else {
            // From an unreachable state, no successor is considered reachable.
            TerminatorEdges::None
        }
    }

    fn apply_call_return_effect(
        &mut self,
        state: &mut Self::Domain,
        _block: BasicBlock,
        return_places: CallReturnPlaces<'_, 'tcx>,
    ) {
        if state.is_reachable() {
            self.0.handle_call_return(return_places, state)
        }
    }

    fn apply_switch_int_edge_effects(
        &mut self,
        _block: BasicBlock,
        _discr: &Operand<'tcx>,
        _apply_edge_effects: &mut impl SwitchIntEdgeEffects<Self::Domain>,
    ) {
        // Edge-specific effects are handled via `handle_switch_int` instead.
    }
}
rustc_index::newtype_index!(
    /// This index uniquely identifies a place.
    ///
    /// Not every place has a `PlaceIndex`, and not every `PlaceIndex` corresponds to a tracked
    /// place. However, every tracked place and all places along its projection have a `PlaceIndex`.
    pub struct PlaceIndex {}
);
rustc_index::newtype_index!(
    /// This index uniquely identifies a tracked place and therefore a slot in [`State`].
    ///
    /// It is an implementation detail of this module.
    struct ValueIndex {}
);
/// See [`State`].
#[derive(PartialEq, Eq, Debug)]
pub struct StateData<V> {
    // The shared `⊥` value, returned for every index absent from `map`.
    bottom: V,
    /// This map only contains values that are not `⊥`.
    map: FxHashMap<ValueIndex, V>,
}
2024-06-26 21:57:59 +00:00
impl<V: HasBottom> StateData<V> {
fn new() -> StateData<V> {
StateData { bottom: V::BOTTOM, map: FxHashMap::default() }
}
fn get(&self, idx: ValueIndex) -> &V {
self.map.get(&idx).unwrap_or(&self.bottom)
}
fn insert(&mut self, idx: ValueIndex, elem: V) {
if elem.is_bottom() {
self.map.remove(&idx);
} else {
self.map.insert(idx, elem);
}
2024-06-26 19:13:38 +00:00
}
2022-09-02 12:41:27 +00:00
}
impl<V: Clone> Clone for StateData<V> {
    fn clone(&self) -> Self {
        StateData { bottom: self.bottom.clone(), map: self.map.clone() }
    }

    /// Clones only `map`, reusing its allocation when possible.
    ///
    /// `bottom` is deliberately left untouched: it is only ever written in `StateData::new`
    /// (always to `V::BOTTOM`), so both sides already agree on it.
    fn clone_from(&mut self, source: &Self) {
        self.map.clone_from(&source.map)
    }
}
impl<V: JoinSemiLattice + Clone + HasBottom> JoinSemiLattice for StateData<V> {
    fn join(&mut self, other: &Self) -> bool {
        // Only entries present in `other` need to be visited: an entry present in `self` but
        // absent from `other` joins with the implicit `⊥`, which leaves it unchanged.
        let mut changed = false;
        #[allow(rustc::potential_query_instability)]
        for (i, v) in other.map.iter() {
            match self.map.entry(*i) {
                // Absent in `self` means implicitly `⊥`; joining `⊥` with `v` yields `v`, and
                // `map` never stores `⊥` (see the field invariant), so this always changes `self`.
                StdEntry::Vacant(e) => {
                    e.insert(v.clone());
                    changed = true
                }
                StdEntry::Occupied(e) => changed |= e.into_mut().join(v),
            }
        }
        changed
    }
}
/// The dataflow state for an instance of [`ValueAnalysis`].
///
/// Every instance specifies a lattice that represents the possible values of a single tracked
/// place. If we call this lattice `V` and set of tracked places `P`, then a [`State`] is an
/// element of `{unreachable} ∪ (P -> V)`. This again forms a lattice, where the bottom element is
/// `unreachable` and the top element is the mapping `p ↦ ⊤`. Note that the mapping `p ↦ ⊥` is not
/// the bottom element (because joining an unreachable and any other reachable state yields a
/// reachable state). All operations on unreachable states are ignored.
///
/// Flooding means assigning a value (by default `⊤`) to all tracked projections of a given place.
#[derive(PartialEq, Eq, Debug)]
pub enum State<V> {
    Unreachable,
    Reachable(StateData<V>),
}
impl<V: Clone> Clone for State<V> {
fn clone(&self) -> Self {
2024-06-26 19:13:38 +00:00
match self {
Self::Reachable(x) => Self::Reachable(x.clone()),
Self::Unreachable => Self::Unreachable,
}
}
fn clone_from(&mut self, source: &Self) {
2024-06-26 19:13:38 +00:00
match (&mut *self, source) {
(Self::Reachable(x), Self::Reachable(y)) => {
x.clone_from(&y);
}
_ => *self = source.clone(),
}
}
}
impl<V: Clone + HasBottom> State<V> {
    pub fn new_reachable() -> State<V> {
        State::Reachable(StateData::new())
    }

    /// Returns whether every explicitly stored value is `⊥`.
    ///
    /// Unreachable states report `false`, as they are below any reachable state.
    pub fn all_bottom(&self) -> bool {
        match self {
            State::Unreachable => false,
            State::Reachable(ref values) =>
            {
                #[allow(rustc::potential_query_instability)]
                values.map.values().all(V::is_bottom)
            }
        }
    }

    fn is_reachable(&self) -> bool {
        matches!(self, State::Reachable(_))
    }

    /// Assign `value` to all places that are contained in `place` or may alias one.
    pub fn flood_with(&mut self, place: PlaceRef<'_>, map: &Map, value: V) {
        self.flood_with_tail_elem(place, None, map, value)
    }

    /// Assign `TOP` to all places that are contained in `place` or may alias one.
    pub fn flood(&mut self, place: PlaceRef<'_>, map: &Map)
    where
        V: HasTop,
    {
        self.flood_with(place, map, V::TOP)
    }

    /// Assign `value` to the discriminant of `place` and all places that may alias it.
    fn flood_discr_with(&mut self, place: PlaceRef<'_>, map: &Map, value: V) {
        self.flood_with_tail_elem(place, Some(TrackElem::Discriminant), map, value)
    }

    /// Assign `TOP` to the discriminant of `place` and all places that may alias it.
    pub fn flood_discr(&mut self, place: PlaceRef<'_>, map: &Map)
    where
        V: HasTop,
    {
        self.flood_discr_with(place, map, V::TOP)
    }

    /// This method is the most general version of the `flood_*` method.
    ///
    /// Assign `value` on the given place and all places that may alias it. In particular, when
    /// the given place has a variant downcast, we invoke the function on all the other variants.
    ///
    /// `tail_elem` allows to support discriminants that are not a place in MIR, but that we track
    /// as such.
    pub fn flood_with_tail_elem(
        &mut self,
        place: PlaceRef<'_>,
        tail_elem: Option<TrackElem>,
        map: &Map,
        value: V,
    ) {
        // Unreachable states are never modified.
        let State::Reachable(values) = self else { return };
        map.for_each_aliasing_place(place, tail_elem, &mut |vi| values.insert(vi, value.clone()));
    }

    /// Low-level method that assigns to a place.
    /// This does nothing if the place is not tracked.
    ///
    /// The target place must have been flooded before calling this method.
    fn insert_idx(&mut self, target: PlaceIndex, result: ValueOrPlace<V>, map: &Map) {
        match result {
            ValueOrPlace::Value(value) => self.insert_value_idx(target, value, map),
            ValueOrPlace::Place(source) => self.insert_place_idx(target, source, map),
        }
    }

    /// Low-level method that assigns a value to a place.
    /// This does nothing if the place is not tracked.
    ///
    /// The target place must have been flooded before calling this method.
    pub fn insert_value_idx(&mut self, target: PlaceIndex, value: V, map: &Map) {
        let State::Reachable(values) = self else { return };
        if let Some(value_index) = map.places[target].value_index {
            values.insert(value_index, value)
        }
    }

    /// Copies `source` to `target`, including all tracked places beneath.
    ///
    /// If `target` contains a place that is not contained in `source`, it will be overwritten with
    /// Top. Also, because this will copy all entries one after another, it may only be used for
    /// places that are non-overlapping or identical.
    ///
    /// The target place must have been flooded before calling this method.
    pub fn insert_place_idx(&mut self, target: PlaceIndex, source: PlaceIndex, map: &Map) {
        let State::Reachable(values) = self else { return };

        // If both places are tracked, we copy the value to the target.
        // If the target is tracked, but the source is not, we do nothing, as invalidation has
        // already been performed.
        if let Some(target_value) = map.places[target].value_index {
            if let Some(source_value) = map.places[source].value_index {
                values.insert(target_value, values.get(source_value).clone());
            }
        }
        for target_child in map.children(target) {
            // Try to find corresponding child and recurse. Reasoning is similar as above.
            let projection = map.places[target_child].proj_elem.unwrap();
            if let Some(source_child) = map.projections.get(&(source, projection)) {
                self.insert_place_idx(target_child, *source_child, map);
            }
        }
    }

    /// Helper method to interpret `target = result`.
    pub fn assign(&mut self, target: PlaceRef<'_>, result: ValueOrPlace<V>, map: &Map)
    where
        V: HasTop,
    {
        // Flood first so that untracked projections of `target` are invalidated.
        self.flood(target, map);
        if let Some(target) = map.find(target) {
            self.insert_idx(target, result, map);
        }
    }

    /// Helper method for assignments to a discriminant.
    pub fn assign_discr(&mut self, target: PlaceRef<'_>, result: ValueOrPlace<V>, map: &Map)
    where
        V: HasTop,
    {
        self.flood_discr(target, map);
        if let Some(target) = map.find_discr(target) {
            self.insert_idx(target, result, map);
        }
    }

    /// Retrieve the value stored for a place, or `None` if it is not tracked.
    pub fn try_get(&self, place: PlaceRef<'_>, map: &Map) -> Option<V> {
        let place = map.find(place)?;
        self.try_get_idx(place, map)
    }

    /// Retrieve the discriminant stored for a place, or `None` if it is not tracked.
    pub fn try_get_discr(&self, place: PlaceRef<'_>, map: &Map) -> Option<V> {
        let place = map.find_discr(place)?;
        self.try_get_idx(place, map)
    }

    /// Retrieve the slice length stored for a place, or `None` if it is not tracked.
    pub fn try_get_len(&self, place: PlaceRef<'_>, map: &Map) -> Option<V> {
        let place = map.find_len(place)?;
        self.try_get_idx(place, map)
    }

    /// Retrieve the value stored for a place index, or `None` if it is not tracked.
    pub fn try_get_idx(&self, place: PlaceIndex, map: &Map) -> Option<V> {
        match self {
            State::Reachable(values) => {
                map.places[place].value_index.map(|v| values.get(v).clone())
            }
            State::Unreachable => None,
        }
    }

    /// Retrieve the value stored for a place, or ⊤ if it is not tracked.
    ///
    /// This method returns ⊥ if the place is tracked and the state is unreachable.
    pub fn get(&self, place: PlaceRef<'_>, map: &Map) -> V
    where
        V: HasBottom + HasTop,
    {
        match self {
            State::Reachable(_) => self.try_get(place, map).unwrap_or(V::TOP),
            // Because this is unreachable, we can return any value we want.
            State::Unreachable => V::BOTTOM,
        }
    }

    /// Retrieve the discriminant stored for a place, or ⊤ if it is not tracked.
    ///
    /// This method returns ⊥ if the current state is unreachable.
    pub fn get_discr(&self, place: PlaceRef<'_>, map: &Map) -> V
    where
        V: HasBottom + HasTop,
    {
        match self {
            State::Reachable(_) => self.try_get_discr(place, map).unwrap_or(V::TOP),
            // Because this is unreachable, we can return any value we want.
            State::Unreachable => V::BOTTOM,
        }
    }

    /// Retrieve the slice length stored for a place, or ⊤ if it is not tracked.
    ///
    /// This method returns ⊥ if the current state is unreachable.
    pub fn get_len(&self, place: PlaceRef<'_>, map: &Map) -> V
    where
        V: HasBottom + HasTop,
    {
        match self {
            State::Reachable(_) => self.try_get_len(place, map).unwrap_or(V::TOP),
            // Because this is unreachable, we can return any value we want.
            State::Unreachable => V::BOTTOM,
        }
    }

    /// Retrieve the value stored for a place index, or ⊤ if it is not tracked.
    ///
    /// This method returns ⊥ if the current state is unreachable.
    pub fn get_idx(&self, place: PlaceIndex, map: &Map) -> V
    where
        V: HasBottom + HasTop,
    {
        match self {
            State::Reachable(values) => {
                map.places[place].value_index.map(|v| values.get(v).clone()).unwrap_or(V::TOP)
            }
            State::Unreachable => {
                // Because this is unreachable, we can return any value we want.
                V::BOTTOM
            }
        }
    }
}
2024-06-26 21:57:59 +00:00
impl<V: JoinSemiLattice + Clone + HasBottom> JoinSemiLattice for State<V> {
fn join(&mut self, other: &Self) -> bool {
2024-06-26 19:13:38 +00:00
match (&mut *self, other) {
(_, State::Unreachable) => false,
(State::Unreachable, _) => {
2022-09-02 12:41:27 +00:00
*self = other.clone();
true
}
2024-06-26 19:13:38 +00:00
(State::Reachable(this), State::Reachable(ref other)) => this.join(other),
2022-09-02 12:41:27 +00:00
}
}
}
/// Partial mapping from [`Place`] to [`PlaceIndex`], where some places also have a [`ValueIndex`].
///
/// This data structure essentially maintains a tree of places and their projections. Some
/// additional bookkeeping is done, to speed up traversal over this tree:
/// - For iteration, every [`PlaceInfo`] contains an intrusive linked list of its children.
/// - To directly get the child for a specific projection, there is a `projections` map.
#[derive(Debug)]
pub struct Map {
    // Root node (if any) for each local.
    locals: IndexVec<Local, Option<PlaceIndex>>,
    // Child lookup: (parent place, projection element) -> child place.
    projections: FxHashMap<(PlaceIndex, TrackElem), PlaceIndex>,
    places: IndexVec<PlaceIndex, PlaceInfo>,
    // Number of allocated `ValueIndex`es, i.e. tracked places.
    value_count: usize,
    // The Range corresponds to a slice into `inner_values_buffer`.
    inner_values: IndexVec<PlaceIndex, Range<usize>>,
    inner_values_buffer: Vec<ValueIndex>,
}
impl Map {
/// Returns a map that only tracks places whose type has scalar layout.
///
/// This is currently the only way to create a [`Map`]. The way in which the tracked places are
/// chosen is an implementation detail and may not be relied upon (other than that their type
/// are scalars).
pub fn new<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, value_limit: Option<usize>) -> Self {
    let mut map = Self {
        locals: IndexVec::new(),
        projections: FxHashMap::default(),
        places: IndexVec::new(),
        value_count: 0,
        inner_values: IndexVec::new(),
        inner_values_buffer: Vec::new(),
    };
    // Locals whose address is taken (among others) cannot be tracked; see `excluded_locals`.
    let exclude = excluded_locals(body);
    map.register(tcx, body, exclude, value_limit);
    debug!("registered {} places ({} nodes in total)", map.value_count, map.places.len());
    map
}
/// Register all non-excluded places that have scalar layout.
///
/// Builds the place tree breadth-first via a worklist, then precomputes the `inner_values`
/// caches and trims places that track no values at all.
fn register<'tcx>(
    &mut self,
    tcx: TyCtxt<'tcx>,
    body: &Body<'tcx>,
    exclude: BitSet<Local>,
    value_limit: Option<usize>,
) {
    let mut worklist = VecDeque::with_capacity(value_limit.unwrap_or(body.local_decls.len()));
    let param_env = tcx.param_env_reveal_all_normalized(body.source.def_id());

    // Start by constructing the places for each bare local.
    self.locals = IndexVec::from_elem(None, &body.local_decls);
    for (local, decl) in body.local_decls.iter_enumerated() {
        if exclude.contains(local) {
            continue;
        }

        // Create a place for the local.
        debug_assert!(self.locals[local].is_none());
        let place = self.places.push(PlaceInfo::new(None));
        self.locals[local] = Some(place);

        // And push the eventual children places to the worklist.
        self.register_children(tcx, param_env, place, decl.ty, &mut worklist);
    }

    // `place.elem1.elem2` with type `ty`.
    // `elem1` is either `Some(Variant(i))` or `None`.
    while let Some((mut place, elem1, elem2, ty)) = worklist.pop_front() {
        // The user requires a bound on the number of created values.
        if let Some(value_limit) = value_limit
            && self.value_count >= value_limit
        {
            break;
        }

        // Create a place for this projection.
        for elem in [elem1, Some(elem2)].into_iter().flatten() {
            place = *self.projections.entry((place, elem)).or_insert_with(|| {
                // Prepend new child to the linked list.
                let next = self.places.push(PlaceInfo::new(Some(elem)));
                self.places[next].next_sibling = self.places[place].first_child;
                self.places[place].first_child = Some(next);
                next
            });
        }

        // And push the eventual children places to the worklist.
        self.register_children(tcx, param_env, place, ty, &mut worklist);
    }

    // Pre-compute the tree of ValueIndex nested in each PlaceIndex.
    // `inner_values_buffer[inner_values[place]]` is the set of all the values
    // reachable by projecting `place`.
    self.inner_values_buffer = Vec::with_capacity(self.value_count);
    self.inner_values = IndexVec::from_elem(0..0, &self.places);
    for local in body.local_decls.indices() {
        if let Some(place) = self.locals[local] {
            self.cache_preorder_invoke(place);
        }
    }

    // Trim useless places: locals and projections that reach no tracked value.
    for opt_place in self.locals.iter_mut() {
        if let Some(place) = *opt_place
            && self.inner_values[place].is_empty()
        {
            *opt_place = None;
        }
    }
    #[allow(rustc::potential_query_instability)]
    self.projections.retain(|_, child| !self.inner_values[*child].is_empty());
}
/// Potentially register the (local, projection) place and its fields, recursively.
///
/// Invariant: The projection must only contain trackable elements.
fn register_children<'tcx>(
    &mut self,
    tcx: TyCtxt<'tcx>,
    param_env: ty::ParamEnv<'tcx>,
    place: PlaceIndex,
    ty: Ty<'tcx>,
    worklist: &mut VecDeque<(PlaceIndex, Option<TrackElem>, TrackElem, Ty<'tcx>)>,
) {
    // Allocate a value slot if it doesn't have one, and the user requested one.
    assert!(self.places[place].value_index.is_none());
    if tcx.layout_of(param_env.and(ty)).is_ok_and(|layout| layout.abi.is_scalar()) {
        self.places[place].value_index = Some(self.value_count.into());
        self.value_count += 1;
    }

    // For enums, directly create the `Discriminant`, as that's their main use.
    if ty.is_enum() {
        // Prepend new child to the linked list.
        let discr = self.places.push(PlaceInfo::new(Some(TrackElem::Discriminant)));
        self.places[discr].next_sibling = self.places[place].first_child;
        self.places[place].first_child = Some(discr);
        let old = self.projections.insert((place, TrackElem::Discriminant), discr);
        assert!(old.is_none());

        // Allocate a value slot since it doesn't have one.
        assert!(self.places[discr].value_index.is_none());
        self.places[discr].value_index = Some(self.value_count.into());
        self.value_count += 1;
    }

    if let ty::Ref(_, ref_ty, _) | ty::RawPtr(ref_ty, _) = ty.kind()
        && let ty::Slice(..) = ref_ty.kind()
        // The user may have written a predicate like `[T]: Sized` in their where clauses,
        // which makes slices scalars.
        && self.places[place].value_index.is_none()
    {
        // Track the length of a pointed-to slice via a synthetic `DerefLen` element.
        // Prepend new child to the linked list.
        let len = self.places.push(PlaceInfo::new(Some(TrackElem::DerefLen)));
        self.places[len].next_sibling = self.places[place].first_child;
        self.places[place].first_child = Some(len);
        let old = self.projections.insert((place, TrackElem::DerefLen), len);
        assert!(old.is_none());

        // Allocate a value slot since it doesn't have one.
        assert!(self.places[len].value_index.is_none());
        self.places[len].value_index = Some(self.value_count.into());
        self.value_count += 1;
    }

    // Recurse with all fields of this place.
    iter_fields(ty, tcx, param_env, |variant, field, ty| {
        worklist.push_back((
            place,
            variant.map(TrackElem::Variant),
            TrackElem::Field(field),
            ty,
        ))
    });
}
/// Precompute the list of values inside `root` and store it inside
/// as a slice within `inner_values_buffer`.
fn cache_preorder_invoke(&mut self, root: PlaceIndex) {
    // Values are appended in preorder: `root`'s own value first, then each subtree's,
    // so every subtree occupies a contiguous range of `inner_values_buffer`.
    let start = self.inner_values_buffer.len();
    if let Some(vi) = self.places[root].value_index {
        self.inner_values_buffer.push(vi);
    }
    // We manually iterate instead of using `children` as we need to mutate `self`.
    let mut next_child = self.places[root].first_child;
    while let Some(child) = next_child {
        // Recursion depth follows the place tree depth; grow the stack if needed.
        ensure_sufficient_stack(|| self.cache_preorder_invoke(child));
        next_child = self.places[child].next_sibling;
    }
    let end = self.inner_values_buffer.len();
    // Remember which slice of the buffer belongs to `root`.
    self.inner_values[root] = start..end;
}
/// Applies a single projection element, yielding the corresponding child.
///
/// Returns `None` if the resulting place is not tracked by the map.
pub fn apply(&self, place: PlaceIndex, elem: TrackElem) -> Option<PlaceIndex> {
    self.projections.get(&(place, elem)).copied()
}
2022-11-10 18:12:10 +00:00
/// Locates the given place, if it exists in the tree.
2023-11-22 03:01:52 +00:00
fn find_extra(
2023-01-29 14:20:45 +00:00
&self,
place: PlaceRef<'_>,
extra: impl IntoIterator<Item = TrackElem>,
) -> Option<PlaceIndex> {
2023-04-22 13:06:28 +00:00
let mut index = *self.locals[place.local].as_ref()?;
for &elem in place.projection {
index = self.apply(index, elem.try_into().ok()?)?;
}
2023-01-29 14:20:45 +00:00
for elem in extra {
index = self.apply(index, elem)?;
}
Some(index)
}
/// Locates the given place, if it exists in the tree.
2023-01-29 14:20:45 +00:00
pub fn find(&self, place: PlaceRef<'_>) -> Option<PlaceIndex> {
self.find_extra(place, [])
}
/// Locates the given place and applies `Discriminant`, if it exists in the tree.
pub fn find_discr(&self, place: PlaceRef<'_>) -> Option<PlaceIndex> {
2023-01-29 14:20:45 +00:00
self.find_extra(place, [TrackElem::Discriminant])
}
2023-04-23 16:15:22 +00:00
/// Locates the given place and applies `DerefLen`, if it exists in the tree.
pub fn find_len(&self, place: PlaceRef<'_>) -> Option<PlaceIndex> {
self.find_extra(place, [TrackElem::DerefLen])
}
/// Iterate over all direct children.
///
/// Walks the intrusive `first_child`/`next_sibling` linked list of `parent`.
fn children(&self, parent: PlaceIndex) -> impl Iterator<Item = PlaceIndex> + '_ {
    Children::new(self, parent)
}
/// Invoke a function on the given place and all places that may alias it.
///
/// In particular, when the given place has a variant downcast, we invoke the function on all
/// the other variants.
///
/// `tail_elem` allows to support discriminants that are not a place in MIR, but that we track
/// as such.
fn for_each_aliasing_place(
    &self,
    place: PlaceRef<'_>,
    tail_elem: Option<TrackElem>,
    f: &mut impl FnMut(ValueIndex),
) {
    if place.is_indirect_first_projection() {
        // We do not track indirect places.
        return;
    }
    let Some(mut index) = self.locals[place.local] else {
        // The local is not tracked at all, so it does not alias anything.
        return;
    };
    // Walk the MIR projection, then the optional synthetic `tail_elem`.
    let elems = place.projection.iter().map(|&elem| elem.try_into()).chain(tail_elem.map(Ok));
    for elem in elems {
        // A field aliases the parent place.
        if let Some(vi) = self.places[index].value_index {
            f(vi);
        }

        // Stop at the first element we cannot track.
        let Ok(elem) = elem else { return };
        let sub = self.apply(index, elem);
        if let TrackElem::Variant(..) | TrackElem::Discriminant = elem {
            // Enum variant fields and enum discriminants alias each another.
            self.for_each_variant_sibling(index, sub, f);
        }
        if let Some(sub) = sub {
            index = sub
        } else {
            // The child is not tracked, so there is nothing more to visit.
            return;
        }
    }
    // Invoke `f` on every value stored inside the final resolved place.
    self.for_each_value_inside(index, f);
}
/// Invoke the given function on all the descendants of the given place, except one branch.
///
/// `preserved_child` is the branch to skip (typically the variant being assigned to);
/// pass `None` to visit every variant/discriminant child.
fn for_each_variant_sibling(
    &self,
    parent: PlaceIndex,
    preserved_child: Option<PlaceIndex>,
    f: &mut impl FnMut(ValueIndex),
) {
    for sibling in self.children(parent) {
        let elem = self.places[sibling].proj_elem;
        // Only invalidate variants and discriminant. Fields (for coroutines) are not
        // invalidated by assignment to a variant.
        if let Some(TrackElem::Variant(..) | TrackElem::Discriminant) = elem
            // Only invalidate the other variants, the current one is fine.
            && Some(sibling) != preserved_child
        {
            self.for_each_value_inside(sibling, f);
        }
    }
}
2023-04-23 18:02:05 +00:00
/// Invoke a function on each value in the given place and all descendants.
fn for_each_value_inside(&self, root: PlaceIndex, f: &mut impl FnMut(ValueIndex)) {
let range = self.inner_values[root].clone();
let values = &self.inner_values_buffer[range];
for &v in values {
f(v)
}
}
/// Invoke `f` on `root` and each tracked descendant that carries a value, together with
/// the result of successively applying `project` to `value` along the projection path.
/// A subtree is skipped as soon as `project` returns `None` for its projection element.
pub fn for_each_projection_value<O>(
    &self,
    root: PlaceIndex,
    value: O,
    project: &mut impl FnMut(TrackElem, &O) -> Option<O>,
    f: &mut impl FnMut(PlaceIndex, &O),
) {
    // Fast path if there is nothing to do.
    if self.inner_values[root].is_empty() {
        return;
    }

    // Only call `f` for places that actually have a value slot.
    if self.places[root].value_index.is_some() {
        f(root, &value)
    }

    for child in self.children(root) {
        // Only the tree root has no `proj_elem`, and children are never the root.
        let elem = self.places[child].proj_elem.unwrap();
        if let Some(value) = project(elem, &value) {
            self.for_each_projection_value(child, value, project, f);
        }
    }
}
}
/// This is the information tracked for every [`PlaceIndex`] and is stored by [`Map`].
///
/// Together, `first_child` and `next_sibling` form an intrusive linked list, which is used to
/// model a tree structure (a replacement for a member like `children: Vec<PlaceIndex>`).
#[derive(Debug)]
struct PlaceInfo {
    /// We store a [`ValueIndex`] if and only if the place is tracked by the analysis.
    value_index: Option<ValueIndex>,

    /// The projection used to go from parent to this node (only None for root).
    proj_elem: Option<TrackElem>,

    /// The left-most child.
    first_child: Option<PlaceIndex>,

    /// Index of the sibling to the right of this node.
    next_sibling: Option<PlaceIndex>,
}

impl PlaceInfo {
    /// Creates a fresh node with no children, no sibling, and no value slot.
    fn new(proj_elem: Option<TrackElem>) -> Self {
        Self { next_sibling: None, first_child: None, proj_elem, value_index: None }
    }
}
/// Iterator over the direct children of a place, following the intrusive
/// `first_child`/`next_sibling` linked list stored in `PlaceInfo`.
struct Children<'a> {
    map: &'a Map,
    next: Option<PlaceIndex>,
}

impl<'a> Children<'a> {
    fn new(map: &'a Map, parent: PlaceIndex) -> Self {
        Self { map, next: map.places[parent].first_child }
    }
}

impl<'a> Iterator for Children<'a> {
    type Item = PlaceIndex;

    fn next(&mut self) -> Option<Self::Item> {
        // Yield the current child and advance to its right sibling.
        let child = self.next?;
        self.next = self.map.places[child].next_sibling;
        Some(child)
    }
}
/// Used as the result of an operand or r-value.
#[derive(Debug)]
pub enum ValueOrPlace<V> {
    /// An abstract value.
    Value(V),
    /// A tracked place holding the result.
    Place(PlaceIndex),
}

impl<V: HasTop> ValueOrPlace<V> {
    /// The unconstrained result: the top element of the value lattice.
    pub const TOP: Self = ValueOrPlace::Value(V::TOP);
}
/// The set of projection elements that can be used by a tracked place.
///
/// MIR projections are restricted to field accesses and variant downcasts (see the
/// `TryFrom<ProjectionElem>` impl below); `Discriminant` and `DerefLen` are synthetic
/// elements created by the map itself.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum TrackElem {
    Field(FieldIdx),
    Variant(VariantIdx),
    Discriminant,
    /// Length of a slice behind a reference or raw pointer.
    DerefLen,
}
impl<V, T> TryFrom<ProjectionElem<V, T>> for TrackElem {
    type Error = ();

    /// Converts a MIR projection element into a trackable one.
    /// Only field accesses and variant downcasts are supported.
    fn try_from(value: ProjectionElem<V, T>) -> Result<Self, Self::Error> {
        match value {
            ProjectionElem::Field(field, _) => Ok(Self::Field(field)),
            ProjectionElem::Downcast(_, variant) => Ok(Self::Variant(variant)),
            _ => Err(()),
        }
    }
}
/// Invokes `f` on all direct fields of `ty`.
///
/// For each field, `f` receives the variant index (`None` for tuples and structs), the
/// field index, and the field's type (normalized when possible, otherwise with regions
/// erased).
pub fn iter_fields<'tcx>(
    ty: Ty<'tcx>,
    tcx: TyCtxt<'tcx>,
    param_env: ty::ParamEnv<'tcx>,
    mut f: impl FnMut(Option<VariantIdx>, FieldIdx, Ty<'tcx>),
) {
    match ty.kind() {
        ty::Tuple(list) => {
            for (field, ty) in list.iter().enumerate() {
                f(None, field.into(), ty);
            }
        }
        ty::Adt(def, args) => {
            // Union fields overlap, so we do not visit them at all.
            if def.is_union() {
                return;
            }
            for (v_index, v_def) in def.variants().iter_enumerated() {
                // Struct fields are reported without a variant index.
                let variant = if def.is_struct() { None } else { Some(v_index) };
                for (f_index, f_def) in v_def.fields.iter().enumerate() {
                    let field_ty = f_def.ty(tcx, args);
                    // Best-effort normalization: fall back to plain region erasure on failure.
                    let field_ty = tcx
                        .try_normalize_erasing_regions(param_env, field_ty)
                        .unwrap_or_else(|_| tcx.erase_regions(field_ty));
                    f(variant, f_index.into(), field_ty);
                }
            }
        }
        // Closures, coroutines and coroutine-closures: recurse into the upvars tuple.
        ty::Closure(_, args) => {
            iter_fields(args.as_closure().tupled_upvars_ty(), tcx, param_env, f);
        }
        ty::Coroutine(_, args) => {
            iter_fields(args.as_coroutine().tupled_upvars_ty(), tcx, param_env, f);
        }
        ty::CoroutineClosure(_, args) => {
            iter_fields(args.as_coroutine_closure().tupled_upvars_ty(), tcx, param_env, f);
        }
        // Other types have no fields to report.
        _ => (),
    }
}
/// Returns all locals with projections that have their reference or address taken.
pub fn excluded_locals(body: &Body<'_>) -> BitSet<Local> {
    /// Accumulates the locals whose address escapes.
    struct Collector {
        result: BitSet<Local>,
    }

    impl<'tcx> Visitor<'tcx> for Collector {
        fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, _location: Location) {
            // A pointer to a place could be used to access other places with the same local,
            // hence we have to exclude the local completely.
            let escapes = context.is_borrow()
                || context.is_address_of()
                || context.is_drop()
                || context == PlaceContext::MutatingUse(MutatingUseContext::AsmOutput);
            if escapes && !place.is_indirect() {
                self.result.insert(place.local);
            }
        }
    }

    let mut collector = Collector { result: BitSet::new_empty(body.local_decls.len()) };
    collector.visit_body(body);
    collector.result
}
/// This is used to visualize the dataflow analysis.
impl<'tcx, T> DebugWithContext<ValueAnalysisWrapper<T>> for State<T::Value>
where
    T: ValueAnalysis<'tcx>,
    T::Value: Debug,
{
    /// Prints the full state, or the literal `unreachable` for unreachable states.
    fn fmt_with(&self, ctxt: &ValueAnalysisWrapper<T>, f: &mut Formatter<'_>) -> std::fmt::Result {
        match self {
            State::Reachable(values) => debug_with_context(values, None, ctxt.0.map(), f),
            State::Unreachable => write!(f, "unreachable"),
        }
    }

    /// Prints only the values that differ between `old` and `self`.
    fn fmt_diff_with(
        &self,
        old: &Self,
        ctxt: &ValueAnalysisWrapper<T>,
        f: &mut Formatter<'_>,
    ) -> std::fmt::Result {
        match (self, old) {
            (State::Reachable(this), State::Reachable(old)) => {
                debug_with_context(this, Some(old), ctxt.0.map(), f)
            }
            // Reachability changes are not rendered.
            _ => Ok(()), // Consider printing something here.
        }
    }
}
/// Recursively prints the value of `place` (labelled `place_str`) and of all its tracked
/// descendants. When `old` is given, only changed values are printed, in `-`/`+` diff form.
fn debug_with_context_rec<V: Debug + Eq + HasBottom>(
    place: PlaceIndex,
    place_str: &str,
    new: &StateData<V>,
    old: Option<&StateData<V>>,
    map: &Map,
    f: &mut Formatter<'_>,
) -> std::fmt::Result {
    if let Some(value) = map.places[place].value_index {
        match old {
            None => writeln!(f, "{}: {:?}", place_str, new.get(value))?,
            Some(old) => {
                if new.get(value) != old.get(value) {
                    // NOTE(review): the `\u{001f}` prefix appears to be a marker consumed by
                    // the downstream diff renderer — confirm against the dataflow dot output.
                    writeln!(f, "\u{001f}-{}: {:?}", place_str, old.get(value))?;
                    writeln!(f, "\u{001f}+{}: {:?}", place_str, new.get(value))?;
                }
            }
        }
    }

    // Render each child with a human-readable projection syntax.
    for child in map.children(place) {
        let info_elem = map.places[child].proj_elem.unwrap();
        let child_place_str = match info_elem {
            TrackElem::Discriminant => {
                format!("discriminant({place_str})")
            }
            TrackElem::Variant(idx) => {
                format!("({place_str} as {idx:?})")
            }
            TrackElem::Field(field) => {
                // Parenthesize a dereference so the field access reads unambiguously.
                if place_str.starts_with('*') {
                    format!("({}).{}", place_str, field.index())
                } else {
                    format!("{}.{}", place_str, field.index())
                }
            }
            TrackElem::DerefLen => {
                format!("Len(*{})", place_str)
            }
        };
        debug_with_context_rec(child, &child_place_str, new, old, map, f)?;
    }
    Ok(())
}
2024-06-26 21:57:59 +00:00
fn debug_with_context<V: Debug + Eq + HasBottom>(
2024-06-26 19:13:38 +00:00
new: &StateData<V>,
old: Option<&StateData<V>>,
map: &Map,
f: &mut Formatter<'_>,
) -> std::fmt::Result {
for (local, place) in map.locals.iter_enumerated() {
if let Some(place) = place {
debug_with_context_rec(*place, &format!("{local:?}"), new, old, map, f)?;
}
}
Ok(())
}