//! Global value numbering.
//!
//! MIR may contain repeated and/or redundant computations. The objective of this pass is to detect
//! such redundancies and re-use the already-computed result when possible.
//!
//! In a first pass, we compute a symbolic representation of values that are assigned to SSA
//! locals. This symbolic representation is defined by the `Value` enum. Each produced instance of
//! `Value` is interned as a `VnIndex`, which allows us to cheaply compute identical values.
//!
//! From those assignments, we construct a mapping `VnIndex -> Vec<(Local, Location)>` of available
//! values, the locals in which they are stored, and the assignment location.
//!
//! In a second pass, we traverse all (non-SSA) assignments `x = rvalue` and operands. For each
//! one, we compute the `VnIndex` of the rvalue. If this `VnIndex` is associated to a constant, we
//! replace the rvalue/operand by that constant. Otherwise, if there is an SSA local `y`
//! associated to this `VnIndex`, and if its definition location strictly dominates the assignment
//! to `x`, we replace the assignment by `x = y`.
//!
//! Opportunistically, this pass also simplifies some `Rvalue`s based on the accumulated knowledge.
//!
//! # Operational semantics
//!
//! Operationally, this pass attempts to prove bitwise equality between locals. Given this MIR:
//! ```ignore (MIR)
//! _a = some value // has VnIndex i
//! // some MIR
//! _b = some other value // also has VnIndex i
//! ```
//!
//! We consider it to be replaceable by:
//! ```ignore (MIR)
//! _a = some value // has VnIndex i
//! // some MIR
//! _c = some other value // also has VnIndex i
//! assume(_a bitwise equal to _c) // follows from having the same VnIndex
//! _b = _a // follows from the `assume`
//! ```
//!
//! Which is simplifiable to:
//! ```ignore (MIR)
//! _a = some value // has VnIndex i
//! // some MIR
//! _b = _a
//! ```
//!
//! # Handling of references
//!
//! We handle references by assigning a different "provenance" index to each Ref/AddressOf rvalue.
//! This ensures that we do not spuriously merge borrows that should not be merged. Meanwhile, we
//! consider all the derefs of an immutable reference to a freeze type to give the same value:
//! ```ignore (MIR)
//! _a = *_b // _b is &Freeze
//! _c = *_b // replaced by _c = _a
//! ```

use rustc_const_eval::interpret::{ImmTy, InterpCx, MemPlaceMeta, OpTy, Projectable, Scalar};
use rustc_data_structures::fx::{FxHashMap, FxIndexSet};
use rustc_data_structures::graph::dominators::Dominators;
use rustc_index::bit_set::BitSet;
use rustc_index::IndexVec;
use rustc_macros::newtype_index;
use rustc_middle::mir::interpret::GlobalAlloc;
use rustc_middle::mir::visit::*;
use rustc_middle::mir::*;
use rustc_middle::ty::layout::LayoutOf;
use rustc_middle::ty::{self, Ty, TyCtxt, TypeAndMut};
use rustc_span::DUMMY_SP;
use rustc_target::abi::{self, Abi, Size, VariantIdx, FIRST_VARIANT};

use crate::dataflow_const_prop::DummyMachine;
use crate::ssa::{AssignedValue, SsaLocals};
use crate::MirPass;
use either::Either;

pub struct GVN;

impl<'tcx> MirPass<'tcx> for GVN {
    fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
        sess.mir_opt_level() >= 4
    }

    #[instrument(level = "trace", skip(self, tcx, body))]
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        debug!(def_id = ?body.source.def_id());
        propagate_ssa(tcx, body);
    }
}

fn propagate_ssa<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
    let param_env = tcx.param_env_reveal_all_normalized(body.source.def_id());
    let ssa = SsaLocals::new(body);
    // Clone dominators as we need them while mutating the body.
    let dominators = body.basic_blocks.dominators().clone();

    let mut state = VnState::new(tcx, param_env, &ssa, &dominators, &body.local_decls);
    ssa.for_each_assignment_mut(
        body.basic_blocks.as_mut_preserves_cfg(),
        |local, value, location| {
            let value = match value {
                // We do not know anything of this assigned value.
                AssignedValue::Arg | AssignedValue::Terminator(_) => None,
                // Try to get some insight.
                AssignedValue::Rvalue(rvalue) => {
                    let value = state.simplify_rvalue(rvalue, location);
                    // FIXME(#112651) `rvalue` may have a subtype to `local`. We can only mark
                    // `local` as reusable if we have an exact type match.
                    if state.local_decls[local].ty != rvalue.ty(state.local_decls, tcx) {
                        return;
                    }
                    value
                }
            };
            // `next_opaque` is `Some`, so `new_opaque` must return `Some`.
            let value = value.or_else(|| state.new_opaque()).unwrap();
            state.assign(local, value);
        },
    );

    // Stop creating opaques during replacement as it is useless.
    state.next_opaque = None;

    let reverse_postorder = body.basic_blocks.reverse_postorder().to_vec();
    for bb in reverse_postorder {
        let data = &mut body.basic_blocks.as_mut_preserves_cfg()[bb];
        state.visit_basic_block_data(bb, data);
    }

    // For each local that is reused (`y` above), we remove its storage statements to avoid any
    // difficulty. Those locals are SSA, so should be easy to optimize by LLVM without storage
    // statements.
    StorageRemover { tcx, reused_locals: state.reused_locals }.visit_body_preserves_cfg(body);
}

newtype_index! {
    struct VnIndex {}
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
enum AddressKind {
    Ref(BorrowKind),
    Address(Mutability),
}

#[derive(Debug, PartialEq, Eq, Hash)]
enum Value<'tcx> {
    // Root values.
    /// Used to represent values we know nothing about.
    /// The `usize` is a counter incremented by `new_opaque`.
    Opaque(usize),
    /// Evaluated or unevaluated constant value.
    Constant(Const<'tcx>),
    /// An aggregate value, either tuple/closure/struct/enum.
    /// This does not contain unions, as we cannot reason with the value.
    Aggregate(Ty<'tcx>, VariantIdx, Vec<VnIndex>),
    /// This corresponds to a `[value; count]` expression.
    Repeat(VnIndex, ty::Const<'tcx>),
    /// The address of a place.
    Address {
        place: Place<'tcx>,
        kind: AddressKind,
        /// Give each borrow and pointer a different provenance, so we don't merge them.
        provenance: usize,
    },

    // Extractions.
    /// This is the *value* obtained by projecting another value.
    Projection(VnIndex, ProjectionElem<VnIndex, Ty<'tcx>>),
    /// Discriminant of the given value.
    Discriminant(VnIndex),
    /// Length of an array or slice.
    Len(VnIndex),

    // Operations.
    NullaryOp(NullOp<'tcx>, Ty<'tcx>),
    UnaryOp(UnOp, VnIndex),
    BinaryOp(BinOp, VnIndex, VnIndex),
    CheckedBinaryOp(BinOp, VnIndex, VnIndex),
    Cast {
        kind: CastKind,
        value: VnIndex,
        from: Ty<'tcx>,
        to: Ty<'tcx>,
    },
}
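
// Illustrative sketch (not used by the pass itself) of how MIR maps to `Value`s: reading the
// place `(*_1).0` through an immutable `&Freeze` reference is numbered as
// `Projection(Projection(<value of _1>, Deref), Field(0, <field ty>))`, whereas every `&_1` or
// `&raw const _1` rvalue gets a distinct `Address { .. }` thanks to its fresh `provenance`.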

struct VnState<'body, 'tcx> {
    tcx: TyCtxt<'tcx>,
    ecx: InterpCx<'tcx, 'tcx, DummyMachine>,
    param_env: ty::ParamEnv<'tcx>,
    local_decls: &'body LocalDecls<'tcx>,
    /// Value stored in each local.
    locals: IndexVec<Local, Option<VnIndex>>,
    /// First local to be assigned that value.
    rev_locals: FxHashMap<VnIndex, Vec<Local>>,
    values: FxIndexSet<Value<'tcx>>,
    /// Values evaluated as constants if possible.
    evaluated: IndexVec<VnIndex, Option<OpTy<'tcx>>>,
    /// Counter to generate different values.
    /// This is an option to stop creating opaques during replacement.
    next_opaque: Option<usize>,
    ssa: &'body SsaLocals,
    dominators: &'body Dominators<BasicBlock>,
    reused_locals: BitSet<Local>,
}

impl<'body, 'tcx> VnState<'body, 'tcx> {
    fn new(
        tcx: TyCtxt<'tcx>,
        param_env: ty::ParamEnv<'tcx>,
        ssa: &'body SsaLocals,
        dominators: &'body Dominators<BasicBlock>,
        local_decls: &'body LocalDecls<'tcx>,
    ) -> Self {
        VnState {
            tcx,
            ecx: InterpCx::new(tcx, DUMMY_SP, param_env, DummyMachine),
            param_env,
            local_decls,
            locals: IndexVec::from_elem(None, local_decls),
            rev_locals: FxHashMap::default(),
            values: FxIndexSet::default(),
            evaluated: IndexVec::new(),
            next_opaque: Some(0),
            ssa,
            dominators,
            reused_locals: BitSet::new_empty(local_decls.len()),
        }
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn insert(&mut self, value: Value<'tcx>) -> VnIndex {
        let (index, new) = self.values.insert_full(value);
        let index = VnIndex::from_usize(index);
        if new {
            let evaluated = self.eval_to_const(index);
            let _index = self.evaluated.push(evaluated);
            debug_assert_eq!(index, _index);
        }
        index
    }

    /// Create a new `Value` for which we have no information at all, except that it is distinct
    /// from all the others.
    #[instrument(level = "trace", skip(self), ret)]
    fn new_opaque(&mut self) -> Option<VnIndex> {
        let next_opaque = self.next_opaque.as_mut()?;
        let value = Value::Opaque(*next_opaque);
        *next_opaque += 1;
        Some(self.insert(value))
    }

    /// Create a new `Value::Address` distinct from all the others.
    #[instrument(level = "trace", skip(self), ret)]
    fn new_pointer(&mut self, place: Place<'tcx>, kind: AddressKind) -> Option<VnIndex> {
        let next_opaque = self.next_opaque.as_mut()?;
        let value = Value::Address { place, kind, provenance: *next_opaque };
        *next_opaque += 1;
        Some(self.insert(value))
    }

    fn get(&self, index: VnIndex) -> &Value<'tcx> {
        self.values.get_index(index.as_usize()).unwrap()
    }

    /// Record that `local` is assigned `value`. `local` must be SSA.
    #[instrument(level = "trace", skip(self))]
    fn assign(&mut self, local: Local, value: VnIndex) {
        self.locals[local] = Some(value);

        // Only register the value if its type is `Sized`, as we will emit copies of it.
        let is_sized = !self.tcx.features().unsized_locals
            || self.local_decls[local].ty.is_sized(self.tcx, self.param_env);
        if is_sized {
            self.rev_locals.entry(value).or_default().push(local);
        }
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn eval_to_const(&mut self, value: VnIndex) -> Option<OpTy<'tcx>> {
        use Value::*;
        let op = match *self.get(value) {
            Opaque(_) => return None,
            // Do not bother evaluating repeat expressions. This would uselessly consume memory.
            Repeat(..) => return None,

            Constant(ref constant) => self.ecx.eval_mir_constant(constant, None, None).ok()?,
            Aggregate(ty, variant, ref fields) => {
                let fields = fields
                    .iter()
                    .map(|&f| self.evaluated[f].as_ref())
                    .collect::<Option<Vec<_>>>()?;
                let variant = if ty.is_enum() { Some(variant) } else { None };
                let ty = self.ecx.layout_of(ty).ok()?;
                let alloc_id = self
                    .ecx
                    .intern_with_temp_alloc(ty, |ecx, dest| {
                        let variant_dest = if let Some(variant) = variant {
                            ecx.project_downcast(dest, variant)?
                        } else {
                            dest.clone()
                        };
                        for (field_index, op) in fields.into_iter().enumerate() {
                            let field_dest = ecx.project_field(&variant_dest, field_index)?;
                            ecx.copy_op(op, &field_dest, /*allow_transmute*/ false)?;
                        }
                        ecx.write_discriminant(variant.unwrap_or(FIRST_VARIANT), dest)
                    })
                    .ok()?;
                let mplace =
                    self.ecx.raw_const_to_mplace(ConstAlloc { alloc_id, ty: ty.ty }).ok()?;
                mplace.into()
            }

            Projection(base, elem) => {
                let value = self.evaluated[base].as_ref()?;
                let elem = match elem {
                    ProjectionElem::Deref => ProjectionElem::Deref,
                    ProjectionElem::Downcast(name, read_variant) => {
                        ProjectionElem::Downcast(name, read_variant)
                    }
                    ProjectionElem::Field(f, ty) => ProjectionElem::Field(f, ty),
                    ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                        ProjectionElem::ConstantIndex { offset, min_length, from_end }
                    }
                    ProjectionElem::Subslice { from, to, from_end } => {
                        ProjectionElem::Subslice { from, to, from_end }
                    }
                    ProjectionElem::OpaqueCast(ty) => ProjectionElem::OpaqueCast(ty),
                    ProjectionElem::Subtype(ty) => ProjectionElem::Subtype(ty),
                    // This should have been replaced by a `ConstantIndex` earlier.
                    ProjectionElem::Index(_) => return None,
                };
                self.ecx.project(value, elem).ok()?
            }
            Address { place, kind, provenance: _ } => {
                if !place.is_indirect_first_projection() {
                    return None;
                }
                let local = self.locals[place.local]?;
                let pointer = self.evaluated[local].as_ref()?;
                let mut mplace = self.ecx.deref_pointer(pointer).ok()?;
                for proj in place.projection.iter().skip(1) {
                    // We have no call stack to associate a local with a value, so we cannot
                    // interpret indexing.
                    if matches!(proj, ProjectionElem::Index(_)) {
                        return None;
                    }
                    mplace = self.ecx.project(&mplace, proj).ok()?;
                }
                let pointer = mplace.to_ref(&self.ecx);
                let ty = match kind {
                    AddressKind::Ref(bk) => Ty::new_ref(
                        self.tcx,
                        self.tcx.lifetimes.re_erased,
                        ty::TypeAndMut { ty: mplace.layout.ty, mutbl: bk.to_mutbl_lossy() },
                    ),
                    AddressKind::Address(mutbl) => {
                        Ty::new_ptr(self.tcx, TypeAndMut { ty: mplace.layout.ty, mutbl })
                    }
                };
                let layout = self.ecx.layout_of(ty).ok()?;
                ImmTy::from_immediate(pointer, layout).into()
            }

            Discriminant(base) => {
                let base = self.evaluated[base].as_ref()?;
                let variant = self.ecx.read_discriminant(base).ok()?;
                let discr_value =
                    self.ecx.discriminant_for_variant(base.layout.ty, variant).ok()?;
                discr_value.into()
            }
            Len(slice) => {
                let slice = self.evaluated[slice].as_ref()?;
                let usize_layout = self.ecx.layout_of(self.tcx.types.usize).unwrap();
                let len = slice.len(&self.ecx).ok()?;
                let imm = ImmTy::try_from_uint(len, usize_layout)?;
                imm.into()
            }
            NullaryOp(null_op, ty) => {
                let layout = self.ecx.layout_of(ty).ok()?;
                if let NullOp::SizeOf | NullOp::AlignOf = null_op && layout.is_unsized() {
                    return None;
                }
                let val = match null_op {
                    NullOp::SizeOf => layout.size.bytes(),
                    NullOp::AlignOf => layout.align.abi.bytes(),
                    NullOp::OffsetOf(fields) => layout
                        .offset_of_subfield(&self.ecx, fields.iter().map(|f| f.index()))
                        .bytes(),
                };
                let usize_layout = self.ecx.layout_of(self.tcx.types.usize).unwrap();
                let imm = ImmTy::try_from_uint(val, usize_layout)?;
                imm.into()
            }
            UnaryOp(un_op, operand) => {
                let operand = self.evaluated[operand].as_ref()?;
                let operand = self.ecx.read_immediate(operand).ok()?;
                let (val, _) = self.ecx.overflowing_unary_op(un_op, &operand).ok()?;
                val.into()
            }
            BinaryOp(bin_op, lhs, rhs) => {
                let lhs = self.evaluated[lhs].as_ref()?;
                let lhs = self.ecx.read_immediate(lhs).ok()?;
                let rhs = self.evaluated[rhs].as_ref()?;
                let rhs = self.ecx.read_immediate(rhs).ok()?;
                let (val, _) = self.ecx.overflowing_binary_op(bin_op, &lhs, &rhs).ok()?;
                val.into()
            }
            CheckedBinaryOp(bin_op, lhs, rhs) => {
                let lhs = self.evaluated[lhs].as_ref()?;
                let lhs = self.ecx.read_immediate(lhs).ok()?;
                let rhs = self.evaluated[rhs].as_ref()?;
                let rhs = self.ecx.read_immediate(rhs).ok()?;
                let (val, overflowed) = self.ecx.overflowing_binary_op(bin_op, &lhs, &rhs).ok()?;
                let tuple = Ty::new_tup_from_iter(
                    self.tcx,
                    [val.layout.ty, self.tcx.types.bool].into_iter(),
                );
                let tuple = self.ecx.layout_of(tuple).ok()?;
                ImmTy::from_scalar_pair(val.to_scalar(), Scalar::from_bool(overflowed), tuple)
                    .into()
            }
            Cast { kind, value, from: _, to } => match kind {
                CastKind::IntToInt | CastKind::IntToFloat => {
                    let value = self.evaluated[value].as_ref()?;
                    let value = self.ecx.read_immediate(value).ok()?;
                    let to = self.ecx.layout_of(to).ok()?;
                    let res = self.ecx.int_to_int_or_float(&value, to).ok()?;
                    res.into()
                }
                CastKind::FloatToFloat | CastKind::FloatToInt => {
                    let value = self.evaluated[value].as_ref()?;
                    let value = self.ecx.read_immediate(value).ok()?;
                    let to = self.ecx.layout_of(to).ok()?;
                    let res = self.ecx.float_to_float_or_int(&value, to).ok()?;
                    res.into()
                }
                CastKind::Transmute => {
                    let value = self.evaluated[value].as_ref()?;
                    let to = self.ecx.layout_of(to).ok()?;
                    value.offset(Size::ZERO, to, &self.ecx).ok()?
                }
                _ => return None,
            },
        };
        Some(op)
    }
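
    // Illustrative sketch (hypothetical MIR) for `simplify_place_value` below: if an SSA local
    // `_5` already holds the same value as `_1.0` and its assignment dominates the current
    // location, then a read of `(_1.0).1` is redirected to `_5.1`: `place_ref` tracks `_5` plus
    // the remaining projection, while `value` accumulates the symbolic `Projection` chain.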
    /// Represent the *value* which would be read from `place`, and point `place` to a preexisting
    /// place with the same value (if that already exists).
    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_place_value(
        &mut self,
        place: &mut Place<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        // Invariant: `place` and `place_ref` point to the same value, even if they point to
        // different memory locations.
        let mut place_ref = place.as_ref();

        // Invariant: `value` holds the value up to the `index`th projection, excluded.
        let mut value = self.locals[place.local]?;
        for (index, proj) in place.projection.iter().enumerate() {
            if let Some(local) = self.try_as_local(value, location) {
                // Both `local` and `Place { local: place.local, projection: projection[..index] }`
                // hold the same value. Therefore, the following place holds the value in the
                // original `place`.
                place_ref = PlaceRef { local, projection: &place.projection[index..] };
            }

            let proj = match proj {
                ProjectionElem::Deref => {
                    let ty = Place::ty_from(
                        place.local,
                        &place.projection[..index],
                        self.local_decls,
                        self.tcx,
                    )
                    .ty;
                    if let Some(Mutability::Not) = ty.ref_mutability()
                        && let Some(pointee_ty) = ty.builtin_deref(true)
                        && pointee_ty.ty.is_freeze(self.tcx, self.param_env)
                    {
                        // An immutable borrow `_x` always points to the same value for the
                        // lifetime of the borrow, so we can merge all instances of `*_x`.
                        ProjectionElem::Deref
                    } else {
                        return None;
                    }
                }
                ProjectionElem::Field(f, ty) => ProjectionElem::Field(f, ty),
                ProjectionElem::Index(idx) => {
                    let idx = self.locals[idx]?;
                    ProjectionElem::Index(idx)
                }
                ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                    ProjectionElem::ConstantIndex { offset, min_length, from_end }
                }
                ProjectionElem::Subslice { from, to, from_end } => {
                    ProjectionElem::Subslice { from, to, from_end }
                }
                ProjectionElem::Downcast(name, index) => ProjectionElem::Downcast(name, index),
                ProjectionElem::OpaqueCast(ty) => ProjectionElem::OpaqueCast(ty),
                ProjectionElem::Subtype(ty) => ProjectionElem::Subtype(ty),
            };
            value = self.insert(Value::Projection(value, proj));
        }

        if let Some(local) = self.try_as_local(value, location)
            && local != place.local
        // in case we had no projection to begin with.
        {
            *place = local.into();
            self.reused_locals.insert(local);
        } else if place_ref.local != place.local
            || place_ref.projection.len() < place.projection.len()
        {
            // By the invariant on `place_ref`.
            *place = place_ref.project_deeper(&[], self.tcx);
            self.reused_locals.insert(place_ref.local);
        }

        Some(value)
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_operand(
        &mut self,
        operand: &mut Operand<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        match *operand {
            Operand::Constant(ref constant) => Some(self.insert(Value::Constant(constant.const_))),
            Operand::Copy(ref mut place) | Operand::Move(ref mut place) => {
                let value = self.simplify_place_value(place, location)?;
                if let Some(const_) = self.try_as_constant(value) {
                    *operand = Operand::Constant(Box::new(const_));
                }
                Some(value)
            }
        }
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_rvalue(
        &mut self,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let value = match *rvalue {
            // Forward values.
            Rvalue::Use(ref mut operand) => return self.simplify_operand(operand, location),
            Rvalue::CopyForDeref(place) => {
                let mut operand = Operand::Copy(place);
                let val = self.simplify_operand(&mut operand, location);
                *rvalue = Rvalue::Use(operand);
                return val;
            }

            // Roots.
            Rvalue::Repeat(ref mut op, amount) => {
                let op = self.simplify_operand(op, location)?;
                Value::Repeat(op, amount)
            }
            Rvalue::NullaryOp(op, ty) => Value::NullaryOp(op, ty),
            Rvalue::Aggregate(box ref kind, ref mut fields) => {
                let variant_index = match *kind {
                    AggregateKind::Array(..)
                    | AggregateKind::Tuple
                    | AggregateKind::Closure(..)
                    | AggregateKind::Coroutine(..) => FIRST_VARIANT,
                    AggregateKind::Adt(_, variant_index, _, _, None) => variant_index,
                    // Do not track unions.
                    AggregateKind::Adt(_, _, _, _, Some(_)) => return None,
                };
                let fields: Option<Vec<_>> = fields
                    .iter_mut()
                    .map(|op| self.simplify_operand(op, location).or_else(|| self.new_opaque()))
                    .collect();
                let ty = rvalue.ty(self.local_decls, self.tcx);
                Value::Aggregate(ty, variant_index, fields?)
            }
            Rvalue::Ref(_, borrow_kind, place) => {
                return self.new_pointer(place, AddressKind::Ref(borrow_kind));
            }
            Rvalue::AddressOf(mutbl, place) => {
                return self.new_pointer(place, AddressKind::Address(mutbl));
            }

            // Operations.
            Rvalue::Len(ref mut place) => {
                let place = self.simplify_place_value(place, location)?;
                Value::Len(place)
            }
            Rvalue::Cast(kind, ref mut value, to) => {
                let from = value.ty(self.local_decls, self.tcx);
                let value = self.simplify_operand(value, location)?;
                Value::Cast { kind, value, from, to }
            }
            Rvalue::BinaryOp(op, box (ref mut lhs, ref mut rhs)) => {
                let lhs = self.simplify_operand(lhs, location);
                let rhs = self.simplify_operand(rhs, location);
                Value::BinaryOp(op, lhs?, rhs?)
            }
            Rvalue::CheckedBinaryOp(op, box (ref mut lhs, ref mut rhs)) => {
                let lhs = self.simplify_operand(lhs, location);
                let rhs = self.simplify_operand(rhs, location);
                Value::CheckedBinaryOp(op, lhs?, rhs?)
            }
            Rvalue::UnaryOp(op, ref mut arg) => {
                let arg = self.simplify_operand(arg, location)?;
                Value::UnaryOp(op, arg)
            }
            Rvalue::Discriminant(ref mut place) => {
                let place = self.simplify_place_value(place, location)?;
                Value::Discriminant(place)
            }

            // Unsupported values.
            Rvalue::ThreadLocalRef(..) | Rvalue::ShallowInitBox(..) => return None,
        };
        debug!(?value);
        Some(self.insert(value))
    }
}

fn op_to_prop_const<'tcx>(
    ecx: &mut InterpCx<'_, 'tcx, DummyMachine>,
    op: &OpTy<'tcx>,
) -> Option<ConstValue<'tcx>> {
    // Do not attempt to propagate unsized locals.
    if op.layout.is_unsized() {
        return None;
    }

    // This constant is a ZST, just return an empty value.
    if op.layout.is_zst() {
        return Some(ConstValue::ZeroSized);
    }

    // Do not synthesize too large constants. Codegen will just memcpy them, which we'd like to avoid.
    if !matches!(op.layout.abi, Abi::Scalar(..) | Abi::ScalarPair(..)) {
        return None;
    }

    // If this constant has scalar ABI, return it as a `ConstValue::Scalar`.
    if let Abi::Scalar(abi::Scalar::Initialized { .. }) = op.layout.abi
        && let Ok(scalar) = ecx.read_scalar(op)
    {
        return Some(ConstValue::Scalar(scalar));
    }

    // If this constant is a projection of another, we can return it directly.
    if let Either::Left(mplace) = op.as_mplace_or_imm()
        && let MemPlaceMeta::None = mplace.meta()
    {
        let pointer = mplace.ptr().into_pointer_or_addr().ok()?;
        let (alloc_id, offset) = pointer.into_parts();
        return if matches!(ecx.tcx.global_alloc(alloc_id), GlobalAlloc::Memory(_)) {
            Some(ConstValue::Indirect { alloc_id, offset })
        } else {
            None
        }
    }

    // Everything failed: create a new allocation to hold the data.
    let alloc_id =
        ecx.intern_with_temp_alloc(op.layout, |ecx, dest| ecx.copy_op(op, dest, false)).ok()?;
    Some(ConstValue::Indirect { alloc_id, offset: Size::ZERO })
}

impl<'tcx> VnState<'_, 'tcx> {
    /// If `index` is a `Value::Constant`, return the `Constant` to be put in the MIR.
    fn try_as_constant(&mut self, index: VnIndex) -> Option<ConstOperand<'tcx>> {
        // This was already constant in MIR, do not change it.
        if let Value::Constant(const_) = *self.get(index) {
            // Some constants may contain pointers. We need to preserve the provenance of these
            // pointers, but not all constants guarantee this:
            // - valtrees purposefully do not;
            // - ConstValue::Slice does not either.
            let const_ok = match const_ {
                Const::Ty(c) => match c.kind() {
                    ty::ConstKind::Value(valtree) => match valtree {
                        // This is just an integer, keep it.
                        ty::ValTree::Leaf(_) => true,
                        ty::ValTree::Branch(_) => false,
                    },
                    ty::ConstKind::Param(..)
                    | ty::ConstKind::Unevaluated(..)
                    | ty::ConstKind::Expr(..) => true,
                    // Should not appear in runtime MIR.
                    ty::ConstKind::Infer(..)
                    | ty::ConstKind::Bound(..)
                    | ty::ConstKind::Placeholder(..)
                    | ty::ConstKind::Error(..) => bug!(),
                },
                Const::Unevaluated(..) => true,
                // If the same slice appears twice in the MIR, we cannot guarantee that we will
                // give the same `AllocId` to the data.
                Const::Val(ConstValue::Slice { .. }, _) => false,
                Const::Val(
                    ConstValue::ZeroSized | ConstValue::Scalar(_) | ConstValue::Indirect { .. },
                    _,
                ) => true,
            };
            if const_ok {
                return Some(ConstOperand { span: rustc_span::DUMMY_SP, user_ty: None, const_ });
            }
        }

        let op = self.evaluated[index].as_ref()?;
        if op.layout.is_unsized() {
            // Do not attempt to propagate unsized locals.
            return None;
        }

        let value = op_to_prop_const(&mut self.ecx, op)?;

        // Check that we do not leak a pointer.
        // Those pointers may lose part of their identity in codegen.
        if value.has_provenance(self.tcx, op.layout.size) {
            return None;
        }

        let const_ = Const::Val(value, op.layout.ty);
        Some(ConstOperand { span: rustc_span::DUMMY_SP, user_ty: None, const_ })
    }

    /// If there is a local which is assigned `index`, and its assignment strictly dominates `loc`,
    /// return it.
    fn try_as_local(&mut self, index: VnIndex, loc: Location) -> Option<Local> {
        let other = self.rev_locals.get(&index)?;
        other
            .iter()
            .copied()
            .find(|&other| self.ssa.assignment_dominates(self.dominators, other, loc))
    }
}

impl<'tcx> MutVisitor<'tcx> for VnState<'_, 'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
        self.simplify_operand(operand, location);
    }

    fn visit_statement(&mut self, stmt: &mut Statement<'tcx>, location: Location) {
        if let StatementKind::Assign(box (_, ref mut rvalue)) = stmt.kind
            // Do not try to simplify a constant, it's already in canonical shape.
            && !matches!(rvalue, Rvalue::Use(Operand::Constant(_)))
        {
            if let Some(value) = self.simplify_rvalue(rvalue, location) {
                if let Some(const_) = self.try_as_constant(value) {
                    *rvalue = Rvalue::Use(Operand::Constant(Box::new(const_)));
                } else if let Some(local) = self.try_as_local(value, location)
                    && *rvalue != Rvalue::Use(Operand::Move(local.into()))
                {
                    *rvalue = Rvalue::Use(Operand::Copy(local.into()));
                    self.reused_locals.insert(local);
                }
            }
        } else {
            self.super_statement(stmt, location);
        }
    }
}

struct StorageRemover<'tcx> {
    tcx: TyCtxt<'tcx>,
    reused_locals: BitSet<Local>,
}

impl<'tcx> MutVisitor<'tcx> for StorageRemover<'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, _: Location) {
        if let Operand::Move(place) = *operand
            && let Some(local) = place.as_local()
            && self.reused_locals.contains(local)
        {
            *operand = Operand::Copy(place);
        }
    }

    fn visit_statement(&mut self, stmt: &mut Statement<'tcx>, loc: Location) {
        match stmt.kind {
            // When removing storage statements, we need to remove both (#107511).
            StatementKind::StorageLive(l) | StatementKind::StorageDead(l)
                if self.reused_locals.contains(l) =>
            {
                stmt.make_nop()
            }
            _ => self.super_statement(stmt, loc),
        }
    }
}