//! This module specifies the type-based interner for constants.
//!
//! After a const evaluation has computed a value, before we destroy the const evaluator's session
//! memory, we need to extract all memory allocations to the global memory pool so they stay around.
//!
//! In principle, this is not very complicated: we recursively walk the final value, follow all the
//! pointers, and move all reachable allocations to the global `tcx` memory. The only complication
//! is picking the right mutability: the outermost allocation generally has a clear mutability, but
//! what about the other allocations it points to that have also been created with this value? We
//! don't want to do guesswork here. The rules are: `static`, `const`, and promoted can only create
//! immutable allocations that way. `static mut` can be initialized with expressions like `&mut 42`,
//! so all inner allocations are marked mutable. Some of them could potentially be made immutable,
//! but that would require relying on type information, and given how many ways Rust has to lie
//! about type information, we want to avoid doing that.
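//!
//! A concrete illustration of these rules (a hedged sketch; the exact set of allocations depends
//! on how the initializer is lowered):
//!
//! ```ignore (illustrative)
//! static A: &i32 = &42; // the inner allocation holding `42` is interned immutably
//! static mut B: &mut [i32] = &mut [42]; // the inner allocation is interned mutably
//! ```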

use hir::def::DefKind;
use rustc_ast::Mutability;
use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
use rustc_errors::ErrorGuaranteed;
use rustc_hir as hir;
use rustc_middle::mir::interpret::{ConstAllocation, CtfeProvenance, InterpResult};
use rustc_middle::query::TyCtxtAt;
use rustc_middle::ty::layout::TyAndLayout;
use rustc_span::def_id::LocalDefId;
use rustc_span::sym;

use super::{AllocId, Allocation, InterpCx, MPlaceTy, Machine, MemoryKind, PlaceTy};
use crate::const_eval;
use crate::errors::{DanglingPtrInFinal, MutablePtrInFinal};

pub trait CompileTimeMachine<'mir, 'tcx: 'mir, T> = Machine<
        'mir,
        'tcx,
        MemoryKind = T,
        Provenance = CtfeProvenance,
        ExtraFnVal = !,
        FrameExtra = (),
        AllocExtra = (),
        MemoryMap = FxIndexMap<AllocId, (MemoryKind<T>, Allocation)>,
    > + HasStaticRootDefId;

pub trait HasStaticRootDefId {
    /// Returns the `DefId` of the static item that is currently being evaluated.
    /// Used for interning to be able to handle nested allocations.
    fn static_def_id(&self) -> Option<LocalDefId>;
}

impl HasStaticRootDefId for const_eval::CompileTimeInterpreter<'_, '_> {
    fn static_def_id(&self) -> Option<LocalDefId> {
        Some(self.static_root_ids?.1)
    }
}

/// Intern an allocation. Returns `Err` if the allocation does not exist in the local memory.
///
/// `mutability` can be used to force immutable interning: if it is `Mutability::Not`, the
/// allocation is interned immutably; if it is `Mutability::Mut`, then the allocation *must be*
/// already mutable (as a sanity check).
///
/// Returns an iterator over the provenance of all pointers stored in this allocation.
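///
/// A sketch of the call pattern used by the recursive interner below (hedged; `todo` is just a
/// local worklist of provenance to visit):
/// ```ignore (illustrative)
/// // Intern `alloc_id` immutably and queue everything it points to:
/// todo.extend(intern_shallow(ecx, alloc_id, Mutability::Not).unwrap());
/// ```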
fn intern_shallow<'rt, 'mir, 'tcx, T, M: CompileTimeMachine<'mir, 'tcx, T>>(
    ecx: &'rt mut InterpCx<'mir, 'tcx, M>,
    alloc_id: AllocId,
    mutability: Mutability,
) -> Result<impl Iterator<Item = CtfeProvenance> + 'tcx, ()> {
    trace!("intern_shallow {:?}", alloc_id);
    // Remove the allocation from the local memory.
    // FIXME(#120456) - is `swap_remove` correct?
    let Some((_kind, mut alloc)) = ecx.memory.alloc_map.swap_remove(&alloc_id) else {
        return Err(());
    };
    // Set allocation mutability as appropriate. This is used by LLVM to put things into
    // read-only memory, and also by Miri when evaluating other globals that
    // access this one.
    match mutability {
        Mutability::Not => {
            alloc.mutability = Mutability::Not;
        }
        Mutability::Mut => {
            // This must be already mutable, we won't "un-freeze" allocations ever.
            assert_eq!(alloc.mutability, Mutability::Mut);
        }
    }
    // Link the alloc id to the actual allocation.
    let alloc = ecx.tcx.mk_const_alloc(alloc);
    if let Some(static_id) = ecx.machine.static_def_id() {
        intern_as_new_static(ecx.tcx, static_id, alloc_id, alloc);
    } else {
        ecx.tcx.set_alloc_id_memory(alloc_id, alloc);
    }
    Ok(alloc.0.0.provenance().ptrs().iter().map(|&(_, prov)| prov))
}

/// Creates a new `DefId` and feeds all the right queries to make this `DefId`
/// appear as if it were a user-written `static` (though it has no HIR).
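///
/// This is how nested allocations get an owner: for example, in `static FOO: &i32 = &42;`, the
/// allocation holding `42` has no item of its own, so it is interned as an anonymous nested
/// static belonging to `FOO` (a hedged illustration; the exact lowering is an implementation
/// detail).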
fn intern_as_new_static<'tcx>(
    tcx: TyCtxtAt<'tcx>,
    static_id: LocalDefId,
    alloc_id: AllocId,
    alloc: ConstAllocation<'tcx>,
) {
    let feed = tcx.create_def(
        static_id,
        sym::nested,
        DefKind::Static { mutability: alloc.0.mutability, nested: true },
    );
    tcx.set_nested_alloc_id_static(alloc_id, feed.def_id());
    feed.codegen_fn_attrs(tcx.codegen_fn_attrs(static_id).clone());
    feed.eval_static_initializer(Ok(alloc));
    feed.generics_of(tcx.generics_of(static_id).clone());
    feed.def_ident_span(tcx.def_ident_span(static_id));
    feed.explicit_predicates_of(tcx.explicit_predicates_of(static_id));
}

/// How a constant value should be interned.
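///
/// An illustrative (not exhaustive) mapping: `static X: i32 = 5;` interns with
/// `Static(Mutability::Not)`, a `const` item with `Constant`, and a promoted such as the `&42`
/// in `let r: &'static i32 = &42;` with `Promoted`.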
#[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
pub enum InternKind {
    /// The `mutability` of the static, ignoring the type which may have interior mutability.
    Static(hir::Mutability),
    /// A `const` item
    Constant,
    Promoted,
}

/// Intern `ret` and everything it references.
///
/// This *cannot raise an interpreter error*. Doing so is left to validation, which
/// tracks where in the value we are and thus can show much better error messages.
///
/// For `InternKind::Static` the root allocation will not be interned, but must be handled by the caller.
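///
/// A hedged sketch of the intended call shape (the real call sites live in the const-eval
/// driver code):
/// ```ignore (illustrative)
/// let ret: MPlaceTy<'_> = /* the final value computed by the evaluator */;
/// intern_const_alloc_recursive(&mut ecx, InternKind::Constant, &ret)?;
/// ```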
#[instrument(level = "debug", skip(ecx))]
pub fn intern_const_alloc_recursive<
    'mir,
    'tcx: 'mir,
    M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>,
>(
    ecx: &mut InterpCx<'mir, 'tcx, M>,
    intern_kind: InternKind,
    ret: &MPlaceTy<'tcx>,
) -> Result<(), ErrorGuaranteed> {
    // We are interning recursively, distinguishing between the "root" allocation that we start
    // in and all the other allocations that we encounter recursively.
    let (base_mutability, inner_mutability, is_static) = match intern_kind {
        InternKind::Constant | InternKind::Promoted => {
            // Completely immutable. Interning anything mutably here can only lead to unsoundness,
            // since all consts are conceptually independent values but share the same underlying
            // memory.
            (Mutability::Not, Mutability::Not, false)
        }
        InternKind::Static(Mutability::Not) => {
            (
                // Outermost allocation is mutable if `!Freeze`.
                if ret.layout.ty.is_freeze(*ecx.tcx, ecx.param_env) {
                    Mutability::Not
                } else {
                    Mutability::Mut
                },
                // Inner allocations are never mutable. They can only arise via the "tail
                // expression" / "outer scope" rule, and we treat them consistently with `const`.
                Mutability::Not,
                true,
            )
        }
        InternKind::Static(Mutability::Mut) => {
            // Just make everything mutable. We accept code like
            // `static mut X = &mut [42]`, so even inner allocations need to be mutable.
            (Mutability::Mut, Mutability::Mut, true)
        }
    };
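    // A concrete reading of the table above (illustrative): `const C: &i32 = &1;` interns both
    // the outer and the inner allocation immutably, while `static mut S: &mut [i32] = &mut [42];`
    // interns both mutably.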

    // Intern the base allocation, and initialize the todo list for recursive interning.
    let base_alloc_id = ret.ptr().provenance.unwrap().alloc_id();
    trace!(?base_alloc_id, ?base_mutability);
    // The base allocation is interned separately since it requires a different mutability;
    // this gives us the initial set of nested allocations, which will then all be processed
    // recursively in the loop below.
    let mut todo: Vec<_> = if is_static {
        // Do not steal the root allocation, we need it later for `take_static_root_alloc`.
        // But still change its mutability to match the requested one.
        let alloc = ecx.memory.alloc_map.get_mut(&base_alloc_id).unwrap();
        alloc.1.mutability = base_mutability;
        alloc.1.provenance().ptrs().iter().map(|&(_, prov)| prov).collect()
    } else {
        intern_shallow(ecx, base_alloc_id, base_mutability).unwrap().collect()
    };
    // We need to distinguish "has just been interned" from "was already in `tcx`",
    // so we track this in a separate set.
    let mut just_interned: FxHashSet<_> = std::iter::once(base_alloc_id).collect();
    // Whether we encountered a bad mutable pointer.
    // We want to first report "dangling" and then "mutable", so we need to delay reporting these
    // errors.
    let mut found_bad_mutable_pointer = false;

    // Keep interning as long as there are things to intern.
    // We show errors if there are dangling pointers, or mutable pointers in immutable contexts
    // (i.e., everything except for `static mut`). When these errors affect references, it is
    // unfortunate that we show these errors here and not during validation, since validation can
    // show much nicer errors. However, we do need these checks to be run on all pointers, including
    // raw pointers, so we cannot rely on validation to catch them -- and since interning runs
    // before validation, and interning doesn't know the type of anything, this means we can't show
    // better errors. Maybe we should consider doing validation before interning in the future.
2024-01-24 11:46:57 +00:00
|
|
|
while let Some(prov) = todo.pop() {
|
2023-10-12 11:27:43 +00:00
|
|
|
trace!(?prov);
|
2024-01-24 11:46:57 +00:00
|
|
|
let alloc_id = prov.alloc_id();
|
2023-10-12 11:27:43 +00:00
|
|
|
|
|
|
|
if base_alloc_id == alloc_id && is_static {
|
|
|
|
// This is a pointer to the static itself. It's ok for a static to refer to itself,
|
|
|
|
// even mutably. Whether that mutable pointer is legal at all is checked in validation.
|
|
|
|
// See tests/ui/statics/recursive_interior_mut.rs for how such a situation can occur.
|
|
|
|
// We also already collected all the nested allocations, so there's no need to do that again.
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
|
2024-02-05 21:21:40 +00:00
|
|
|
// Crucially, we check this *before* checking whether the `alloc_id`
|
|
|
|
// has already been interned. The point of this check is to ensure that when
|
|
|
|
// there are multiple pointers to the same allocation, they are *all* immutable.
|
|
|
|
// Therefore it would be bad if we only checked the first pointer to any given
|
|
|
|
// allocation.
|
|
|
|
// (It is likely not possible to actually have multiple pointers to the same allocation,
|
|
|
|
// so alternatively we could also check that and ICE if there are multiple such pointers.)
|
2024-01-24 11:46:57 +00:00
|
|
|
if intern_kind != InternKind::Promoted
|
|
|
|
&& inner_mutability == Mutability::Not
|
|
|
|
&& !prov.immutable()
|
|
|
|
{
|
|
|
|
if ecx.tcx.try_get_global_alloc(alloc_id).is_some()
|
|
|
|
&& !just_interned.contains(&alloc_id)
|
|
|
|
{
|
|
|
|
// This is a pointer to some memory from another constant. We encounter mutable
|
|
|
|
// pointers to such memory since we do not always track immutability through
|
|
|
|
// these "global" pointers. Allowing them is harmless; the point of these checks
|
|
|
|
// during interning is to justify why we intern the *new* allocations immutably,
|
|
|
|
// so we can completely ignore existing allocations. We also don't need to add
|
|
|
|
// this to the todo list, since after all it is already interned.
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
// Found a mutable pointer inside a const where inner allocations should be
|
|
|
|
// immutable. We exclude promoteds from this, since things like `&mut []` and
|
|
|
|
// `&None::<Cell<i32>>` lead to promotion that can produce mutable pointers. We rely
|
|
|
|
// on the promotion analysis not screwing up to ensure that it is sound to intern
|
|
|
|
// promoteds as immutable.
|
2023-10-12 11:27:43 +00:00
|
|
|
trace!("found bad mutable pointer");
|
2024-01-24 11:46:57 +00:00
|
|
|
found_bad_mutable_pointer = true;
|
|
|
|
}
|
2023-12-16 15:24:25 +00:00
|
|
|
if ecx.tcx.try_get_global_alloc(alloc_id).is_some() {
|
|
|
|
// Already interned.
|
|
|
|
debug_assert!(!ecx.memory.alloc_map.contains_key(&alloc_id));
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
just_interned.insert(alloc_id);
|
2024-01-24 11:46:57 +00:00
|
|
|
// We always intern with `inner_mutability`, and furthermore we ensured above that if
|
|
|
|
// that is "immutable", then there are *no* mutable pointers anywhere in the newly
|
|
|
|
// interned memory -- justifying that we can indeed intern immutably. However this also
|
|
|
|
// means we can *not* easily intern immutably here if `prov.immutable()` is true and
|
|
|
|
// `inner_mutability` is `Mut`: there might be other pointers to that allocation, and
|
|
|
|
// we'd have to somehow check that they are *all* immutable before deciding that this
|
|
|
|
// allocation can be made immutable. In the future we could consider analyzing all
|
|
|
|
// pointers before deciding which allocations can be made immutable; but for now we are
|
|
|
|
// okay with losing some potential for immutability here. This can anyway only affect
|
|
|
|
// `static mut`.
|
|
|
|
todo.extend(intern_shallow(ecx, alloc_id, inner_mutability).map_err(|()| {
|
2024-01-24 11:32:38 +00:00
|
|
|
ecx.tcx.dcx().emit_err(DanglingPtrInFinal { span: ecx.tcx.span, kind: intern_kind })
|
2024-01-24 11:46:57 +00:00
|
|
|
})?);
|
2019-02-10 13:59:13 +00:00
|
|
|
}
|
2023-12-16 15:24:25 +00:00
|
|
|
if found_bad_mutable_pointer {
|
|
|
|
return Err(ecx
|
|
|
|
.tcx
|
|
|
|
.dcx()
|
|
|
|
.emit_err(MutablePtrInFinal { span: ecx.tcx.span, kind: intern_kind }));
|
|
|
|
}
|
|
|
|
|
2020-11-04 16:53:43 +00:00
|
|
|
Ok(())
|
2019-02-10 13:59:13 +00:00
|
|
|
}

/// Intern the allocation `alloc_id`, which is assumed to reference no other allocations.
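///
/// A hedged sketch of the expected call shape (the callers are MIR const-propagation passes):
/// ```ignore (illustrative)
/// // `alloc_id` refers to a local allocation that stores no pointers:
/// intern_const_alloc_for_constprop(&mut ecx, alloc_id)?;
/// ```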
#[instrument(level = "debug", skip(ecx))]
pub fn intern_const_alloc_for_constprop<
    'mir,
    'tcx: 'mir,
    T,
    M: CompileTimeMachine<'mir, 'tcx, T>,
>(
    ecx: &mut InterpCx<'mir, 'tcx, M>,
    alloc_id: AllocId,
) -> InterpResult<'tcx, ()> {
    if ecx.tcx.try_get_global_alloc(alloc_id).is_some() {
        // The constant is already in global memory. Do nothing.
        return Ok(());
    }
    // Move allocation to `tcx`.
    for _ in intern_shallow(ecx, alloc_id, Mutability::Not).map_err(|()| err_ub!(DeadLocal))? {
        // We are not doing recursive interning, so we don't currently support provenance.
        // (If this panic ever triggers, we should just implement a proper recursive interning
        // loop -- or just call `intern_const_alloc_recursive`.)
        panic!("`intern_const_alloc_for_constprop` called on allocation with nested provenance")
    }
    Ok(())
}

impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>>
    InterpCx<'mir, 'tcx, M>
{
    /// A helper function that allocates memory for the layout given and gives you access to mutate
    /// it. Once your own mutation code is done, the backing `Allocation` is removed from the
    /// current `Memory` and interned as read-only into the global memory.
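    ///
    /// A hedged usage sketch, assuming a scalar-sized `layout` (`write_scalar` is the store
    /// helper on `InterpCx`):
    /// ```ignore (illustrative)
    /// let alloc_id = ecx.intern_with_temp_alloc(layout, |ecx, dest| {
    ///     ecx.write_scalar(Scalar::from_u32(0), dest)
    /// })?;
    /// ```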
    pub fn intern_with_temp_alloc(
        &mut self,
        layout: TyAndLayout<'tcx>,
        f: impl FnOnce(
            &mut InterpCx<'mir, 'tcx, M>,
            &PlaceTy<'tcx, M::Provenance>,
        ) -> InterpResult<'tcx, ()>,
    ) -> InterpResult<'tcx, AllocId> {
        // `allocate` picks a fresh AllocId that we will associate with its data below.
        let dest = self.allocate(layout, MemoryKind::Stack)?;
        f(self, &dest.clone().into())?;
        let alloc_id = dest.ptr().provenance.unwrap().alloc_id(); // this was just allocated, it must have provenance
        for prov in intern_shallow(self, alloc_id, Mutability::Not).unwrap() {
            // We are not doing recursive interning, so we don't currently support provenance.
            // (If this panic ever triggers, we should just implement a proper recursive interning
            // loop -- or just call `intern_const_alloc_recursive`.)
            if self.tcx.try_get_global_alloc(prov.alloc_id()).is_none() {
                panic!("`intern_with_temp_alloc` with nested allocations");
            }
        }
        Ok(alloc_id)
    }
}