//! This module specifies the type based interner for constants.
//!
//! After a const evaluation has computed a value, before we destroy the const evaluator's session
//! memory, we need to extract all memory allocations to the global memory pool so they stay around.
//!
//! In principle, this is not very complicated: we recursively walk the final value, follow all the
//! pointers, and move all reachable allocations to the global `tcx` memory. The only complication
//! is picking the right mutability for the allocations in a `static` initializer: we want to make
//! as many allocations as possible immutable so LLVM can put them into read-only memory. At the
//! same time, we need to make memory that could be mutated by the program mutable to avoid
//! incorrect compilations. To achieve this, we do a type-based traversal of the final value,
//! tracking mutable and shared references and `UnsafeCell` to determine the current mutability.
//! (In principle, we could skip this type-based part for `const` and promoteds, as they need to be
//! always immutable. At least for `const` however we use this opportunity to reject any `const`
//! that contains allocations whose mutability we cannot identify.)
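//!
//! For example, consider this illustrative pair of statics:
//!
//! ```rust
//! use std::sync::atomic::AtomicU32;
//!
//! static A: &[i32] = &[1, 2, 3];
//! static B: AtomicU32 = AtomicU32::new(0);
//! ```
//!
//! The allocation holding `[1, 2, 3]` behind `A` can be interned immutable, while `B`'s
//! allocation contains an `UnsafeCell` and therefore has to stay mutable.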
use super::validity::RefTracking;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_errors::ErrorGuaranteed;
use rustc_hir as hir;
use rustc_middle::mir::interpret::InterpResult;
use rustc_middle::ty::{self, layout::TyAndLayout, Ty};

use rustc_ast::Mutability;

use super::{
    AllocId, Allocation, ConstAllocation, InterpCx, MPlaceTy, Machine, MemoryKind, PlaceTy,
    ValueVisitor,
};
use crate::const_eval;

pub trait CompileTimeMachine<'mir, 'tcx, T> = Machine<
    'mir,
    'tcx,
    MemoryKind = T,
    Provenance = AllocId,
    ExtraFnVal = !,
    FrameExtra = (),
    AllocExtra = (),
    MemoryMap = FxHashMap<AllocId, (MemoryKind<T>, Allocation)>,
>;

struct InternVisitor<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>> {
    /// The ecx from which we intern.
    ecx: &'rt mut InterpCx<'mir, 'tcx, M>,
    /// Previously encountered safe references.
    ref_tracking: &'rt mut RefTracking<(MPlaceTy<'tcx>, InternMode)>,
    /// A list of all encountered allocations. After type-based interning, we traverse this list to
    /// also intern allocations that are only referenced by a raw pointer or inside a union.
    leftover_allocations: &'rt mut FxHashSet<AllocId>,
    /// The root kind of the value that we're looking at. This field is never mutated for a
    /// particular allocation. It is primarily used to make as many allocations as possible
    /// read-only so LLVM can place them in const memory.
    mode: InternMode,
    /// This field stores whether we are *currently* inside an `UnsafeCell`. This can affect
    /// the intern mode of references we encounter.
    inside_unsafe_cell: bool,
}

#[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
enum InternMode {
    /// A static and its current mutability. Below shared references inside a `static mut`,
    /// this is *immutable*, and below mutable references inside an `UnsafeCell`, this
    /// is *mutable*.
    Static(hir::Mutability),
    /// A `const`.
    Const,
}

/// Signalling data structure to ensure we don't recurse
/// into the memory of other constants or statics
struct IsStaticOrFn;

/// Intern an allocation without looking at its children.
/// `mode` is the mode of the environment where we found this pointer.
/// `mutability` is the mutability of the place to be interned; even if that says
/// `immutable` things might become mutable if `ty` is not frozen.
/// `ty` can be `None` if there is no potential interior mutability
/// to account for (e.g. for vtables).
fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>>(
    ecx: &'rt mut InterpCx<'mir, 'tcx, M>,
    leftover_allocations: &'rt mut FxHashSet<AllocId>,
    alloc_id: AllocId,
    mode: InternMode,
    ty: Option<Ty<'tcx>>,
) -> Option<IsStaticOrFn> {
    trace!("intern_shallow {:?} with {:?}", alloc_id, mode);
    // remove allocation
    let tcx = ecx.tcx;
    let Some((kind, mut alloc)) = ecx.memory.alloc_map.remove(&alloc_id) else {
        // Pointer not found in local memory map. It is either a pointer to the global
        // map, or dangling.
        // If the pointer is dangling (neither in local nor global memory), we leave it
        // to validation to error -- it has the much better error messages, pointing out where
        // in the value the dangling reference lies.
        // The `delay_span_bug` ensures that we don't forget such a check in validation.
        if tcx.try_get_global_alloc(alloc_id).is_none() {
            tcx.sess.delay_span_bug(ecx.tcx.span, "tried to intern dangling pointer");
        }
        // treat dangling pointers like other statics
        // just to stop trying to recurse into them
        return Some(IsStaticOrFn);
    };
    // This match is just a canary for future changes to `MemoryKind`, which most likely need
    // changes in this function.
    match kind {
        MemoryKind::Stack
        | MemoryKind::Machine(const_eval::MemoryKind::Heap)
        | MemoryKind::CallerLocation => {}
    }

    // Set allocation mutability as appropriate. This is used by LLVM to put things into
    // read-only memory, and also by Miri when evaluating other globals that
    // access this one.
    if let InternMode::Static(mutability) = mode {
        // For this, we need to take into account `UnsafeCell`. When `ty` is `None`, we assume
        // no interior mutability.
        let frozen = ty.map_or(true, |ty| ty.is_freeze(ecx.tcx, ecx.param_env));
        // For statics, allocation mutability is the combination of place mutability and
        // type mutability.
        // The entire allocation needs to be mutable if it contains an `UnsafeCell` anywhere.
        let immutable = mutability == Mutability::Not && frozen;
        if immutable {
            alloc.mutability = Mutability::Not;
        } else {
            // Just making sure we are not "upgrading" an immutable allocation to mutable.
            assert_eq!(alloc.mutability, Mutability::Mut);
        }
    } else {
        // No matter what, *constants are never mutable*. Mutating them is UB.
        // See const_eval::machine::MemoryExtra::can_access_statics for why
        // immutability is so important.

        // Validation will ensure that there is no `UnsafeCell` on an immutable allocation.
        alloc.mutability = Mutability::Not;
    };
    // link the alloc id to the actual allocation
    leftover_allocations.extend(alloc.provenance().iter().map(|&(_, alloc_id)| alloc_id));
    let alloc = tcx.intern_const_alloc(alloc);
    tcx.set_alloc_id_memory(alloc_id, alloc);
    None
}

impl<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>>
    InternVisitor<'rt, 'mir, 'tcx, M>
{
    fn intern_shallow(
        &mut self,
        alloc_id: AllocId,
        mode: InternMode,
        ty: Option<Ty<'tcx>>,
    ) -> Option<IsStaticOrFn> {
        intern_shallow(self.ecx, self.leftover_allocations, alloc_id, mode, ty)
    }
}

impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>>
    ValueVisitor<'mir, 'tcx, M> for InternVisitor<'rt, 'mir, 'tcx, M>
{
    type V = MPlaceTy<'tcx>;

    #[inline(always)]
    fn ecx(&self) -> &InterpCx<'mir, 'tcx, M> {
        &self.ecx
    }

    fn visit_aggregate(
        &mut self,
        mplace: &MPlaceTy<'tcx>,
        fields: impl Iterator<Item = InterpResult<'tcx, Self::V>>,
    ) -> InterpResult<'tcx> {
        // We want to walk the aggregate to look for references to intern. While doing that we
        // also need to take special care of interior mutability.
        //
        // As an optimization, however, if the allocation does not contain any references, we don't
        // need to do the walk. It can be costly for big arrays, for example (see issue #93215).
        let is_walk_needed = |mplace: &MPlaceTy<'tcx>| -> InterpResult<'tcx, bool> {
            // ZSTs cannot contain pointers, we can avoid the interning walk.
            if mplace.layout.is_zst() {
                return Ok(false);
            }

            // Now, check whether this allocation could contain references.
            //
            // Note, this check may sometimes not be cheap, so we only do it when the walk we'd like
            // to avoid could be expensive: on the potentially larger types, arrays and slices,
            // rather than on all aggregates unconditionally.
            if matches!(mplace.layout.ty.kind(), ty::Array(..) | ty::Slice(..)) {
                let Some((size, align)) = self.ecx.size_and_align_of_mplace(&mplace)? else {
                    // We do the walk if we can't determine the size of the mplace: we may be
                    // dealing with extern types here in the future.
                    return Ok(true);
                };

                // If there is no provenance in this allocation, it does not contain references
                // that point to another allocation, and we can avoid the interning walk.
                if let Some(alloc) = self.ecx.get_ptr_alloc(mplace.ptr, size, align)? {
                    if !alloc.has_provenance() {
                        return Ok(false);
                    }
                } else {
                    // We're encountering a ZST here, and can avoid the walk as well.
                    return Ok(false);
                }
            }

            // In the general case, we do the walk.
            Ok(true)
        };

        // If this allocation contains no references to intern, we avoid the potentially costly
        // walk.
        //
        // We can do this before the checks for interior mutability below, because only references
        // are relevant in that situation, and we're checking if there are any here.
        if !is_walk_needed(mplace)? {
            return Ok(());
        }

        if let Some(def) = mplace.layout.ty.ty_adt_def() {
            if def.is_unsafe_cell() {
                // We are crossing over an `UnsafeCell`, we can mutate again. This means that
                // references we encounter inside here are interned as pointing to mutable
                // allocations.
                // Remember the `old` value to handle nested `UnsafeCell`.
                let old = std::mem::replace(&mut self.inside_unsafe_cell, true);
                let walked = self.walk_aggregate(mplace, fields);
                self.inside_unsafe_cell = old;
                return walked;
            }
        }

        self.walk_aggregate(mplace, fields)
    }

    fn visit_value(&mut self, mplace: &MPlaceTy<'tcx>) -> InterpResult<'tcx> {
        // Handle Reference types, as these are the only types with provenance supported by const eval.
        // Raw pointers (and boxes) are handled by the `leftover_allocations` logic.
        let tcx = self.ecx.tcx;
        let ty = mplace.layout.ty;
        if let ty::Ref(_, referenced_ty, ref_mutability) = *ty.kind() {
            let value = self.ecx.read_immediate(&mplace.into())?;
            let mplace = self.ecx.ref_to_mplace(&value)?;
            assert_eq!(mplace.layout.ty, referenced_ty);
            // Handle trait object vtables.
            if let ty::Dynamic(..) =
                tcx.struct_tail_erasing_lifetimes(referenced_ty, self.ecx.param_env).kind()
            {
                let ptr = mplace.meta.unwrap_meta().to_pointer(&tcx)?;
                if let Some(alloc_id) = ptr.provenance {
                    // Explicitly choose const mode here, since vtables are immutable, even
                    // if the reference of the fat pointer is mutable.
                    self.intern_shallow(alloc_id, InternMode::Const, None);
                } else {
                    // Validation will error (with a better message) on an invalid vtable pointer.
                    // Let validation show the error message, but make sure it *does* error.
                    tcx.sess
                        .delay_span_bug(tcx.span, "vtable pointers cannot be integer pointers");
                }
            }
            // Check if we have encountered this pointer+layout combination before.
            // Only recurse for allocation-backed pointers.
            if let Some(alloc_id) = mplace.ptr.provenance {
                // Compute the mode with which we intern this. Our goal here is to make as many
                // statics as we can immutable so they can be placed in read-only memory by LLVM.
                let ref_mode = match self.mode {
                    InternMode::Static(mutbl) => {
                        // In statics, merge outer mutability with reference mutability and
                        // take into account whether we are in an `UnsafeCell`.

                        // The only way a mutable reference actually works as a mutable reference is
                        // by being in a `static mut` directly or behind another mutable reference.
                        // If there's an immutable reference or we are inside a `static`, then our
                        // mutable reference is equivalent to an immutable one. As an example:
                        // `&&mut Foo` is semantically equivalent to `&&Foo`
                        match ref_mutability {
                            _ if self.inside_unsafe_cell => {
                                // Inside an `UnsafeCell` is like inside a `static mut`, the "outer"
                                // mutability does not matter.
                                InternMode::Static(ref_mutability)
                            }
                            Mutability::Not => {
                                // A shared reference, things become immutable.
                                // We do *not* consider `freeze` here: `intern_shallow` considers
                                // `freeze` for the actual mutability of this allocation; the intern
                                // mode for references contained in this allocation is tracked more
                                // precisely when traversing the referenced data (by tracking
                                // `UnsafeCell`). This makes sure that `&(&i32, &Cell<i32>)` still
                                // has the left inner reference interned into a read-only
                                // allocation.
                                InternMode::Static(Mutability::Not)
                            }
                            Mutability::Mut => {
                                // Mutable reference.
                                InternMode::Static(mutbl)
                            }
                        }
                    }
                    InternMode::Const => {
                        // Ignore `UnsafeCell`, everything is immutable. Validity does some sanity
                        // checking for mutable references that we encounter -- they must all be
                        // ZST.
                        InternMode::Const
                    }
                };
                match self.intern_shallow(alloc_id, ref_mode, Some(referenced_ty)) {
                    // No need to recurse, these are interned already and statics may have
                    // cycles, so we don't want to recurse there
                    Some(IsStaticOrFn) => {}
                    // intern everything referenced by this value. The mutability is taken from the
                    // reference. It is checked above that mutable references only happen in
                    // `static mut`
                    None => self.ref_tracking.track((mplace, ref_mode), || ()),
                }
            }
            Ok(())
        } else {
            // Not a reference -- proceed recursively.
            self.walk_value(mplace)
        }
    }
}

#[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
pub enum InternKind {
    /// The `mutability` of the static, ignoring the type which may have interior mutability.
    Static(hir::Mutability),
    Constant,
    Promoted,
}

/// Intern `ret` and everything it references.
///
/// This *cannot raise an interpreter error*. Doing so is left to validation, which
/// tracks where in the value we are and thus can show much better error messages.
#[instrument(level = "debug", skip(ecx))]
pub fn intern_const_alloc_recursive<
    'mir,
    'tcx: 'mir,
    M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>,
>(
    ecx: &mut InterpCx<'mir, 'tcx, M>,
    intern_kind: InternKind,
    ret: &MPlaceTy<'tcx>,
) -> Result<(), ErrorGuaranteed> {
    let tcx = ecx.tcx;
    let base_intern_mode = match intern_kind {
        InternKind::Static(mutbl) => InternMode::Static(mutbl),
        // `Constant` includes array lengths.
        InternKind::Constant | InternKind::Promoted => InternMode::Const,
    };

    // Type based interning.
    // `ref_tracking` tracks typed references we have already interned and still need to crawl for
    // more typed information inside them.
    // `leftover_allocations` collects *all* allocations we see, because some might not
    // be available in a typed way. They get interned at the end.
    let mut ref_tracking = RefTracking::empty();
    let leftover_allocations = &mut FxHashSet::default();

    // start with the outermost allocation
    intern_shallow(
        ecx,
        leftover_allocations,
        // The outermost allocation must exist, because we allocated it with
        // `Memory::allocate`.
        ret.ptr.provenance.unwrap(),
        base_intern_mode,
        Some(ret.layout.ty),
    );

    ref_tracking.track((*ret, base_intern_mode), || ());

    while let Some(((mplace, mode), _)) = ref_tracking.todo.pop() {
        let res = InternVisitor {
            ref_tracking: &mut ref_tracking,
            ecx,
            mode,
            leftover_allocations,
            inside_unsafe_cell: false,
        }
        .visit_value(&mplace);
        // We deliberately *ignore* interpreter errors here. When there is a problem, the remaining
        // references are "leftover"-interned, and later validation will show a proper error
        // and point at the right part of the value causing the problem.
        match res {
            Ok(()) => {}
            Err(error) => {
                ecx.tcx.sess.delay_span_bug(
                    ecx.tcx.span,
                    &format!(
                        "error during interning should later cause validation failure: {}",
                        error
                    ),
                );
            }
        }
    }

    // Intern the rest of the allocations as mutable. These might be inside unions, padding, raw
    // pointers, ... So we can't intern them according to their type rules

    let mut todo: Vec<_> = leftover_allocations.iter().cloned().collect();
    debug!(?todo);
    debug!("dead_alloc_map: {:#?}", ecx.memory.dead_alloc_map);
    while let Some(alloc_id) = todo.pop() {
        if let Some((_, mut alloc)) = ecx.memory.alloc_map.remove(&alloc_id) {
            // We can't call the `intern_shallow` method here, as its logic is tailored to safe
            // references and a `leftover_allocations` set (where we only have a todo-list here).
            // So we hand-roll the interning logic here again.
            match intern_kind {
                // Statics may point to mutable allocations.
                // Even for immutable statics it would be ok to have mutable allocations behind
                // raw pointers, e.g. for `static FOO: *const AtomicUsize = &AtomicUsize::new(42)`.
                InternKind::Static(_) => {}
                // Raw pointers in promoteds may only point to immutable things so we mark
                // everything as immutable.
                // It is UB to mutate through a raw pointer obtained via an immutable reference:
                // Since all references and pointers inside a promoted must by their very definition
                // be created from an immutable reference (and promotion also excludes interior
                // mutability), mutating through them would be UB.
                // There's no way we can check whether the user is using raw pointers correctly,
                // so all we can do is mark this as immutable here.
                InternKind::Promoted => {
                    // See const_eval::machine::MemoryExtra::can_access_statics for why
                    // immutability is so important.
                    alloc.mutability = Mutability::Not;
                }
                InternKind::Constant => {
                    // If it's a constant, we should not have any "leftovers" as everything
                    // is tracked by const-checking.
                    // FIXME: downgrade this to a warning? It rejects some legitimate consts,
                    // such as `const CONST_RAW: *const Vec<i32> = &Vec::new() as *const _;`.
                    ecx.tcx
                        .sess
                        .span_err(ecx.tcx.span, "untyped pointers are not allowed in constant");
                    // For better errors later, mark the allocation as immutable.
                    alloc.mutability = Mutability::Not;
                }
            }
            let alloc = tcx.intern_const_alloc(alloc);
            tcx.set_alloc_id_memory(alloc_id, alloc);
            for &(_, alloc_id) in alloc.inner().provenance().iter() {
                if leftover_allocations.insert(alloc_id) {
                    todo.push(alloc_id);
                }
            }
        } else if ecx.memory.dead_alloc_map.contains_key(&alloc_id) {
            // Codegen does not like dangling pointers, and generally `tcx` assumes that
            // all allocations referenced anywhere actually exist. So, make sure we error here.
            let reported = ecx
                .tcx
                .sess
                .span_err(ecx.tcx.span, "encountered dangling pointer in final constant");
            return Err(reported);
        } else if ecx.tcx.try_get_global_alloc(alloc_id).is_none() {
            // We have hit an `AllocId` that is neither in local nor global memory and isn't
            // marked as dangling by local memory. That should be impossible.
            span_bug!(ecx.tcx.span, "encountered unknown alloc id {:?}", alloc_id);
        }
    }
    Ok(())
}

impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>>
    InterpCx<'mir, 'tcx, M>
{
    /// A helper function that allocates memory for the layout given and gives you access to mutate
    /// it. Once your own mutation code is done, the backing `Allocation` is removed from the
/// current `Memory` and returned.
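    ///
    /// A rough usage sketch (hypothetical caller, assuming a simple scalar value is being built):
    ///
    /// ```ignore
    /// let alloc = ecx.intern_with_temp_alloc(layout, |ecx, dest| {
    ///     ecx.write_scalar(Scalar::from_u32(42), dest)
    /// })?;
    /// ```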
    pub fn intern_with_temp_alloc(
        &mut self,
        layout: TyAndLayout<'tcx>,
        f: impl FnOnce(
            &mut InterpCx<'mir, 'tcx, M>,
            &PlaceTy<'tcx, M::Provenance>,
        ) -> InterpResult<'tcx, ()>,
    ) -> InterpResult<'tcx, ConstAllocation<'tcx>> {
        let dest = self.allocate(layout, MemoryKind::Stack)?;
        f(self, &dest.into())?;
        let mut alloc = self.memory.alloc_map.remove(&dest.ptr.provenance.unwrap()).unwrap().1;
        alloc.mutability = Mutability::Not;
        Ok(self.tcx.intern_const_alloc(alloc))
    }
}