Auto merge of #86475 - crlf0710:miri_vtable_refactor, r=bjorn3

Change vtable memory representation to use tcx allocated allocations.

This fixes https://github.com/rust-lang/rust/issues/86324. However, I suspect there's more to change before it can land.

r? `@bjorn3`
cc `@rust-lang/miri`
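
For reference, here is roughly what a consumer looks like after this change. This is a minimal sketch rather than code from the diff; the helper `vtable_bytes` is made up for illustration, but `tcx.vtable_allocation` and `tcx.global_alloc(..).unwrap_memory()` are the calls the PR actually uses:

```rust
use rustc_middle::mir::interpret::Allocation;
use rustc_middle::ty::{self, Ty, TyCtxt};

/// Hypothetical helper: fetch the tcx-interned, cached vtable bytes for `ty`
/// as seen through `trait_ref`. Backends and the interpreter now all read
/// from this single allocation instead of each building their own vtable.
fn vtable_bytes<'tcx>(
    tcx: TyCtxt<'tcx>,
    ty: Ty<'tcx>,
    trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
) -> &'tcx Allocation {
    // Builds the vtable on first use and caches the resulting `AllocId`
    // in the new `tcx.vtables_cache`.
    let alloc_id = tcx.vtable_allocation(ty, trait_ref);
    // The vtable is ordinary immutable memory, so it can be handed to
    // codegen or the interpreter like any other const allocation.
    tcx.global_alloc(alloc_id).unwrap_memory()
}
```

In the hunks below, cg_clif turns that allocation into a data object via `pointer_for_allocation`, cg_ssa emits it with `const_data_from_alloc` plus `static_addr_of`, and the interpreter maps it with `global_base_pointer`.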
This commit is contained in:
bors 2021-06-29 15:52:21 +00:00
commit e98897e5dc
21 changed files with 151 additions and 250 deletions

View File

@@ -233,7 +233,7 @@ pub(crate) struct FunctionCx<'m, 'clif, 'tcx: 'm> {
pub(crate) module: &'m mut dyn Module,
pub(crate) tcx: TyCtxt<'tcx>,
pub(crate) pointer_type: Type, // Cached from module
pub(crate) vtables: FxHashMap<(Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>), DataId>,
pub(crate) vtables: FxHashMap<(Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>), Pointer>,
pub(crate) constants_cx: ConstantCx,
pub(crate) instance: Instance<'tcx>,

View File

@@ -249,7 +249,7 @@ pub(crate) fn codegen_const_value<'tcx>(
}
}
fn pointer_for_allocation<'tcx>(
pub(crate) fn pointer_for_allocation<'tcx>(
fx: &mut FunctionCx<'_, '_, 'tcx>,
alloc: &'tcx Allocation,
) -> crate::pointer::Pointer {

View File

@@ -98,7 +98,7 @@ mod prelude {
pub(crate) use cranelift_codegen::isa::{self, CallConv};
pub(crate) use cranelift_codegen::Context;
pub(crate) use cranelift_frontend::{FunctionBuilder, FunctionBuilderContext, Variable};
pub(crate) use cranelift_module::{self, DataContext, DataId, FuncId, Linkage, Module};
pub(crate) use cranelift_module::{self, DataContext, FuncId, Linkage, Module};
pub(crate) use crate::abi::*;
pub(crate) use crate::base::{codegen_operand, codegen_place};

View File

@@ -31,9 +31,7 @@ pub(crate) fn unsized_info<'tcx>(
// change to the vtable.
old_info.expect("unsized_info: missing old info for trait upcast")
}
(_, &ty::Dynamic(ref data, ..)) => {
crate::vtable::get_vtable(fx, fx.layout_of(source), data.principal())
}
(_, &ty::Dynamic(ref data, ..)) => crate::vtable::get_vtable(fx, source, data.principal()),
_ => bug!("unsized_info: invalid unsizing {:?} -> {:?}", source, target),
}
}

View File

@@ -4,7 +4,7 @@
// FIXME dedup this logic between miri, cg_llvm and cg_clif
use crate::prelude::*;
use ty::VtblEntry;
use super::constant::pointer_for_allocation;
fn vtable_memflags() -> MemFlags {
let mut flags = MemFlags::trusted(); // A vtable access is always aligned and will never trap.
@@ -66,105 +66,19 @@ pub(crate) fn get_ptr_and_method_ref<'tcx>(
pub(crate) fn get_vtable<'tcx>(
fx: &mut FunctionCx<'_, '_, 'tcx>,
layout: TyAndLayout<'tcx>,
ty: Ty<'tcx>,
trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
) -> Value {
let data_id = if let Some(data_id) = fx.vtables.get(&(layout.ty, trait_ref)) {
*data_id
let vtable_ptr = if let Some(vtable_ptr) = fx.vtables.get(&(ty, trait_ref)) {
*vtable_ptr
} else {
let data_id = build_vtable(fx, layout, trait_ref);
fx.vtables.insert((layout.ty, trait_ref), data_id);
data_id
let vtable_alloc_id = fx.tcx.vtable_allocation(ty, trait_ref);
let vtable_allocation = fx.tcx.global_alloc(vtable_alloc_id).unwrap_memory();
let vtable_ptr = pointer_for_allocation(fx, vtable_allocation);
fx.vtables.insert((ty, trait_ref), vtable_ptr);
vtable_ptr
};
let local_data_id = fx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
fx.bcx.ins().global_value(fx.pointer_type, local_data_id)
}
fn build_vtable<'tcx>(
fx: &mut FunctionCx<'_, '_, 'tcx>,
layout: TyAndLayout<'tcx>,
trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
) -> DataId {
let tcx = fx.tcx;
let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes() as usize;
let drop_in_place_fn = import_function(
tcx,
fx.module,
Instance::resolve_drop_in_place(tcx, layout.ty).polymorphize(fx.tcx),
);
let vtable_entries = if let Some(trait_ref) = trait_ref {
tcx.vtable_entries(trait_ref.with_self_ty(tcx, layout.ty))
} else {
ty::COMMON_VTABLE_ENTRIES
};
let mut data_ctx = DataContext::new();
let mut data = ::std::iter::repeat(0u8)
.take(vtable_entries.len() * usize_size)
.collect::<Vec<u8>>()
.into_boxed_slice();
for (idx, entry) in vtable_entries.iter().enumerate() {
match entry {
VtblEntry::MetadataSize => {
write_usize(fx.tcx, &mut data, idx, layout.size.bytes());
}
VtblEntry::MetadataAlign => {
write_usize(fx.tcx, &mut data, idx, layout.align.abi.bytes());
}
VtblEntry::MetadataDropInPlace | VtblEntry::Vacant | VtblEntry::Method(_, _) => {}
}
}
data_ctx.define(data);
for (idx, entry) in vtable_entries.iter().enumerate() {
match entry {
VtblEntry::MetadataDropInPlace => {
let func_ref = fx.module.declare_func_in_data(drop_in_place_fn, &mut data_ctx);
data_ctx.write_function_addr((idx * usize_size) as u32, func_ref);
}
VtblEntry::Method(def_id, substs) => {
let func_id = import_function(
tcx,
fx.module,
Instance::resolve_for_vtable(tcx, ParamEnv::reveal_all(), *def_id, substs)
.unwrap()
.polymorphize(fx.tcx),
);
let func_ref = fx.module.declare_func_in_data(func_id, &mut data_ctx);
data_ctx.write_function_addr((idx * usize_size) as u32, func_ref);
}
VtblEntry::MetadataSize | VtblEntry::MetadataAlign | VtblEntry::Vacant => {}
}
}
data_ctx.set_align(fx.tcx.data_layout.pointer_align.pref.bytes());
let data_id = fx.module.declare_anonymous_data(false, false).unwrap();
fx.module.define_data(data_id, &data_ctx).unwrap();
data_id
}
fn write_usize(tcx: TyCtxt<'_>, buf: &mut [u8], idx: usize, num: u64) {
let pointer_size =
tcx.layout_of(ParamEnv::reveal_all().and(tcx.types.usize)).unwrap().size.bytes() as usize;
let target = &mut buf[idx * pointer_size..(idx + 1) * pointer_size];
match tcx.data_layout.endian {
rustc_target::abi::Endian::Little => match pointer_size {
4 => target.copy_from_slice(&(num as u32).to_le_bytes()),
8 => target.copy_from_slice(&(num as u64).to_le_bytes()),
_ => todo!("pointer size {} is not yet supported", pointer_size),
},
rustc_target::abi::Endian::Big => match pointer_size {
4 => target.copy_from_slice(&(num as u32).to_be_bytes()),
8 => target.copy_from_slice(&(num as u64).to_be_bytes()),
_ => todo!("pointer size {} is not yet supported", pointer_size),
},
}
vtable_ptr.get_addr(fx)
}

View File

@@ -282,6 +282,10 @@ impl ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> {
}
}
fn const_data_from_alloc(&self, alloc: &Allocation) -> Self::Value {
const_alloc_to_llvm(self, alloc)
}
fn from_const_alloc(
&self,
layout: TyAndLayout<'tcx>,

View File

@@ -1,6 +1,6 @@
use crate::traits::*;
use rustc_middle::ty::{self, Instance, Ty, VtblEntry, COMMON_VTABLE_ENTRIES};
use rustc_middle::ty::{self, Ty};
use rustc_target::abi::call::FnAbi;
#[derive(Copy, Clone, Debug)]
@@ -70,48 +70,13 @@ pub fn get_vtable<'tcx, Cx: CodegenMethods<'tcx>>(
return val;
}
// Not in the cache; build it.
let nullptr = cx.const_null(cx.type_i8p_ext(cx.data_layout().instruction_address_space));
let vtable_entries = if let Some(trait_ref) = trait_ref {
tcx.vtable_entries(trait_ref.with_self_ty(tcx, ty))
} else {
COMMON_VTABLE_ENTRIES
};
let layout = cx.layout_of(ty);
// /////////////////////////////////////////////////////////////////////////////////////////////
// If you touch this code, be sure to also make the corresponding changes to
// `get_vtable` in `rust_mir/interpret/traits.rs`.
// /////////////////////////////////////////////////////////////////////////////////////////////
let components: Vec<_> = vtable_entries
.iter()
.map(|entry| match entry {
VtblEntry::MetadataDropInPlace => {
cx.get_fn_addr(Instance::resolve_drop_in_place(cx.tcx(), ty))
}
VtblEntry::MetadataSize => cx.const_usize(layout.size.bytes()),
VtblEntry::MetadataAlign => cx.const_usize(layout.align.abi.bytes()),
VtblEntry::Vacant => nullptr,
VtblEntry::Method(def_id, substs) => cx.get_fn_addr(
ty::Instance::resolve_for_vtable(
cx.tcx(),
ty::ParamEnv::reveal_all(),
*def_id,
substs,
)
.unwrap()
.polymorphize(cx.tcx()),
),
})
.collect();
let vtable_const = cx.const_struct(&components, false);
let vtable_alloc_id = tcx.vtable_allocation(ty, trait_ref);
let vtable_allocation = tcx.global_alloc(vtable_alloc_id).unwrap_memory();
let vtable_const = cx.const_data_from_alloc(vtable_allocation);
let align = cx.data_layout().pointer_align.abi;
let vtable = cx.static_addr_of(vtable_const, align, Some("vtable"));
cx.create_vtable_metadata(ty, vtable);
cx.vtables().borrow_mut().insert((ty, trait_ref), vtable);
vtable
}

View File

@@ -26,6 +26,8 @@ pub trait ConstMethods<'tcx>: BackendTypes {
fn const_to_opt_uint(&self, v: Self::Value) -> Option<u64>;
fn const_to_opt_u128(&self, v: Self::Value, sign_ext: bool) -> Option<u128>;
fn const_data_from_alloc(&self, alloc: &Allocation) -> Self::Value;
fn scalar_to_backend(&self, cv: Scalar, layout: &abi::Scalar, llty: Self::Type) -> Self::Value;
fn from_const_alloc(
&self,

View File

@@ -11,7 +11,7 @@ use crate::middle;
use crate::middle::cstore::{CrateStoreDyn, EncodedMetadata};
use crate::middle::resolve_lifetime::{self, LifetimeScopeForPath, ObjectLifetimeDefault};
use crate::middle::stability;
use crate::mir::interpret::{self, Allocation, ConstValue, Scalar};
use crate::mir::interpret::{self, AllocId, Allocation, ConstValue, Scalar};
use crate::mir::{Body, Field, Local, Place, PlaceElem, ProjectionKind, Promoted};
use crate::thir::Thir;
use crate::traits;
@@ -1044,6 +1044,9 @@ pub struct GlobalCtxt<'tcx> {
output_filenames: Arc<OutputFilenames>,
pub main_def: Option<MainDefinition>,
pub(super) vtables_cache:
Lock<FxHashMap<(Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>), AllocId>>,
}
impl<'tcx> TyCtxt<'tcx> {
@@ -1201,6 +1204,7 @@ impl<'tcx> TyCtxt<'tcx> {
alloc_map: Lock::new(interpret::AllocMap::new()),
output_filenames: Arc::new(output_filenames),
main_def: resolutions.main_def,
vtables_cache: Default::default(),
}
}

View File

@@ -18,6 +18,7 @@ pub use adt::*;
pub use assoc::*;
pub use closure::*;
pub use generics::*;
pub use vtable::*;
use crate::hir::exports::ExportMap;
use crate::ich::StableHashingContext;
@@ -94,6 +95,7 @@ pub mod relate;
pub mod subst;
pub mod trait_def;
pub mod util;
pub mod vtable;
pub mod walk;
mod adt;
@@ -2009,19 +2011,3 @@ impl<'tcx> fmt::Debug for SymbolName<'tcx> {
fmt::Display::fmt(&self.name, fmt)
}
}
#[derive(Clone, Copy, Debug, PartialEq, HashStable)]
pub enum VtblEntry<'tcx> {
MetadataDropInPlace,
MetadataSize,
MetadataAlign,
Vacant,
Method(DefId, SubstsRef<'tcx>),
}
pub const COMMON_VTABLE_ENTRIES: &[VtblEntry<'_>] =
&[VtblEntry::MetadataDropInPlace, VtblEntry::MetadataSize, VtblEntry::MetadataAlign];
pub const COMMON_VTABLE_ENTRIES_DROPINPLACE: usize = 0;
pub const COMMON_VTABLE_ENTRIES_SIZE: usize = 1;
pub const COMMON_VTABLE_ENTRIES_ALIGN: usize = 2;

View File

@@ -0,0 +1,106 @@
use std::convert::TryFrom;
use crate::mir::interpret::{alloc_range, AllocId, Allocation, Pointer, Scalar};
use crate::ty::fold::TypeFoldable;
use crate::ty::{self, DefId, SubstsRef, Ty, TyCtxt};
use rustc_ast::Mutability;
#[derive(Clone, Copy, Debug, PartialEq, HashStable)]
pub enum VtblEntry<'tcx> {
MetadataDropInPlace,
MetadataSize,
MetadataAlign,
Vacant,
Method(DefId, SubstsRef<'tcx>),
}
pub const COMMON_VTABLE_ENTRIES: &[VtblEntry<'_>] =
&[VtblEntry::MetadataDropInPlace, VtblEntry::MetadataSize, VtblEntry::MetadataAlign];
pub const COMMON_VTABLE_ENTRIES_DROPINPLACE: usize = 0;
pub const COMMON_VTABLE_ENTRIES_SIZE: usize = 1;
pub const COMMON_VTABLE_ENTRIES_ALIGN: usize = 2;
impl<'tcx> TyCtxt<'tcx> {
/// Retrieves an allocation that represents the contents of a vtable.
/// There's a cache within `TyCtxt` so it will be deduplicated.
pub fn vtable_allocation(
self,
ty: Ty<'tcx>,
poly_trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
) -> AllocId {
let tcx = self;
let vtables_cache = tcx.vtables_cache.lock();
if let Some(alloc_id) = vtables_cache.get(&(ty, poly_trait_ref)).cloned() {
return alloc_id;
}
drop(vtables_cache);
// See https://github.com/rust-lang/rust/pull/86475#discussion_r655162674
assert!(
!ty.needs_subst() && !poly_trait_ref.map_or(false, |trait_ref| trait_ref.needs_subst())
);
let param_env = ty::ParamEnv::reveal_all();
let vtable_entries = if let Some(poly_trait_ref) = poly_trait_ref {
let trait_ref = poly_trait_ref.with_self_ty(tcx, ty);
let trait_ref = tcx.erase_regions(trait_ref);
tcx.vtable_entries(trait_ref)
} else {
COMMON_VTABLE_ENTRIES
};
let layout =
tcx.layout_of(param_env.and(ty)).expect("failed to build vtable representation");
assert!(!layout.is_unsized(), "can't create a vtable for an unsized type");
let size = layout.size.bytes();
let align = layout.align.abi.bytes();
let ptr_size = tcx.data_layout.pointer_size;
let ptr_align = tcx.data_layout.pointer_align.abi;
let vtable_size = ptr_size * u64::try_from(vtable_entries.len()).unwrap();
let mut vtable = Allocation::uninit(vtable_size, ptr_align);
// No need to do any alignment checks on the memory accesses below, because we know the
// allocation is correctly aligned as we created it above. Also we're only offsetting by
// multiples of `ptr_align`, which means that it will stay aligned to `ptr_align`.
for (idx, entry) in vtable_entries.iter().enumerate() {
let idx: u64 = u64::try_from(idx).unwrap();
let scalar = match entry {
VtblEntry::MetadataDropInPlace => {
let instance = ty::Instance::resolve_drop_in_place(tcx, ty);
let fn_alloc_id = tcx.create_fn_alloc(instance);
let fn_ptr = Pointer::from(fn_alloc_id);
fn_ptr.into()
}
VtblEntry::MetadataSize => Scalar::from_uint(size, ptr_size).into(),
VtblEntry::MetadataAlign => Scalar::from_uint(align, ptr_size).into(),
VtblEntry::Vacant => continue,
VtblEntry::Method(def_id, substs) => {
// See https://github.com/rust-lang/rust/pull/86475#discussion_r655162674
assert!(!substs.needs_subst());
// Prepare the fn ptr we write into the vtable.
let instance =
ty::Instance::resolve_for_vtable(tcx, param_env, *def_id, substs)
.expect("resolution failed during building vtable representation")
.polymorphize(tcx);
let fn_alloc_id = tcx.create_fn_alloc(instance);
let fn_ptr = Pointer::from(fn_alloc_id);
fn_ptr.into()
}
};
vtable
.write_scalar(&tcx, alloc_range(ptr_size * idx, ptr_size), scalar)
.expect("failed to build vtable representation");
}
vtable.mutability = Mutability::Not;
let alloc_id = tcx.create_memory_alloc(tcx.intern_const_alloc(vtable));
let mut vtables_cache = self.vtables_cache.lock();
vtables_cache.insert((ty, poly_trait_ref), alloc_id);
alloc_id
}
}

View File

@@ -2,7 +2,6 @@ use std::cell::Cell;
use std::fmt;
use std::mem;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_hir::{self as hir, def_id::DefId, definitions::DefPathData};
use rustc_index::vec::IndexVec;
@@ -40,10 +39,6 @@ pub struct InterpCx<'mir, 'tcx, M: Machine<'mir, 'tcx>> {
/// The virtual memory system.
pub memory: Memory<'mir, 'tcx, M>,
/// A cache for deduplicating vtables
pub(super) vtables:
FxHashMap<(Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>), Pointer<M::PointerTag>>,
}
// The Phantomdata exists to prevent this type from being `Send`. If it were sent across a thread
@@ -393,7 +388,6 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
tcx: tcx.at(root_span),
param_env,
memory: Memory::new(tcx, memory_extra),
vtables: FxHashMap::default(),
}
}

View File

@@ -107,7 +107,6 @@ fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval:
match kind {
MemoryKind::Stack
| MemoryKind::Machine(const_eval::MemoryKind::Heap)
| MemoryKind::Vtable
| MemoryKind::CallerLocation => {}
}
// Set allocation mutability as appropriate. This is used by LLVM to put things into

View File

@@ -27,8 +27,6 @@ use crate::util::pretty;
pub enum MemoryKind<T> {
/// Stack memory. Error if deallocated except during a stack pop.
Stack,
/// Memory backing vtables. Error if ever deallocated.
Vtable,
/// Memory allocated by `caller_location` intrinsic. Error if ever deallocated.
CallerLocation,
/// Additional memory kinds a machine wishes to distinguish from the builtin ones.
@@ -40,7 +38,6 @@ impl<T: MayLeak> MayLeak for MemoryKind<T> {
fn may_leak(self) -> bool {
match self {
MemoryKind::Stack => false,
MemoryKind::Vtable => true,
MemoryKind::CallerLocation => true,
MemoryKind::Machine(k) => k.may_leak(),
}
@@ -51,7 +48,6 @@ impl<T: fmt::Display> fmt::Display for MemoryKind<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
MemoryKind::Stack => write!(f, "stack variable"),
MemoryKind::Vtable => write!(f, "vtable"),
MemoryKind::CallerLocation => write!(f, "caller location"),
MemoryKind::Machine(m) => write!(f, "{}", m),
}

View File

@@ -459,7 +459,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
};
// Find and consult vtable
let vtable = receiver_place.vtable();
let drop_fn = self.get_vtable_slot(vtable, u64::try_from(idx).unwrap())?;
let fn_val = self.get_vtable_slot(vtable, u64::try_from(idx).unwrap())?;
// `*mut receiver_place.layout.ty` is almost the layout that we
// want for args[0]: We have to project to field 0 because we want
@@ -472,7 +472,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
OpTy::from(ImmTy::from_immediate(receiver_place.ptr.into(), this_receiver_ptr));
trace!("Patched self operand to {:#?}", args[0]);
// recurse with concrete function
self.eval_fn_call(drop_fn, caller_abi, &args, ret, unwind)
self.eval_fn_call(fn_val, caller_abi, &args, ret, unwind)
}
}
}

View File

@@ -2,13 +2,13 @@ use std::convert::TryFrom;
use rustc_middle::mir::interpret::{InterpResult, Pointer, PointerArithmetic, Scalar};
use rustc_middle::ty::{
self, Instance, Ty, VtblEntry, COMMON_VTABLE_ENTRIES, COMMON_VTABLE_ENTRIES_ALIGN,
self, Ty, COMMON_VTABLE_ENTRIES, COMMON_VTABLE_ENTRIES_ALIGN,
COMMON_VTABLE_ENTRIES_DROPINPLACE, COMMON_VTABLE_ENTRIES_SIZE,
};
use rustc_target::abi::{Align, LayoutOf, Size};
use rustc_target::abi::{Align, Size};
use super::util::ensure_monomorphic_enough;
use super::{FnVal, InterpCx, Machine, MemoryKind};
use super::{FnVal, InterpCx, Machine};
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
/// Creates a dynamic vtable for the given type and vtable origin. This is used only for
@@ -30,78 +30,11 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
ensure_monomorphic_enough(*self.tcx, ty)?;
ensure_monomorphic_enough(*self.tcx, poly_trait_ref)?;
if let Some(&vtable) = self.vtables.get(&(ty, poly_trait_ref)) {
// This means we guarantee that there are no duplicate vtables, we will
// always use the same vtable for the same (Type, Trait) combination.
// That's not what happens in rustc, but emulating per-crate deduplication
// does not sound like it actually makes anything any better.
return Ok(vtable);
}
let vtable_allocation = self.tcx.vtable_allocation(ty, poly_trait_ref);
let vtable_entries = if let Some(poly_trait_ref) = poly_trait_ref {
let trait_ref = poly_trait_ref.with_self_ty(*self.tcx, ty);
let trait_ref = self.tcx.erase_regions(trait_ref);
let vtable_ptr = self.memory.global_base_pointer(Pointer::from(vtable_allocation))?;
self.tcx.vtable_entries(trait_ref)
} else {
COMMON_VTABLE_ENTRIES
};
let layout = self.layout_of(ty)?;
assert!(!layout.is_unsized(), "can't create a vtable for an unsized type");
let size = layout.size.bytes();
let align = layout.align.abi.bytes();
let tcx = *self.tcx;
let ptr_size = self.pointer_size();
let ptr_align = tcx.data_layout.pointer_align.abi;
// /////////////////////////////////////////////////////////////////////////////////////////
// If you touch this code, be sure to also make the corresponding changes to
// `get_vtable` in `rust_codegen_llvm/meth.rs`.
// /////////////////////////////////////////////////////////////////////////////////////////
let vtable_size = ptr_size * u64::try_from(vtable_entries.len()).unwrap();
let vtable = self.memory.allocate(vtable_size, ptr_align, MemoryKind::Vtable);
let drop = Instance::resolve_drop_in_place(tcx, ty);
let drop = self.memory.create_fn_alloc(FnVal::Instance(drop));
// No need to do any alignment checks on the memory accesses below, because we know the
// allocation is correctly aligned as we created it above. Also we're only offsetting by
// multiples of `ptr_align`, which means that it will stay aligned to `ptr_align`.
let scalars = vtable_entries
.iter()
.map(|entry| -> InterpResult<'tcx, _> {
match entry {
VtblEntry::MetadataDropInPlace => Ok(Some(drop.into())),
VtblEntry::MetadataSize => Ok(Some(Scalar::from_uint(size, ptr_size).into())),
VtblEntry::MetadataAlign => Ok(Some(Scalar::from_uint(align, ptr_size).into())),
VtblEntry::Vacant => Ok(None),
VtblEntry::Method(def_id, substs) => {
// Prepare the fn ptr we write into the vtable.
let instance =
ty::Instance::resolve_for_vtable(tcx, self.param_env, *def_id, substs)
.ok_or_else(|| err_inval!(TooGeneric))?;
let fn_ptr = self.memory.create_fn_alloc(FnVal::Instance(instance));
Ok(Some(fn_ptr.into()))
}
}
})
.collect::<Result<Vec<_>, _>>()?;
let mut vtable_alloc =
self.memory.get_mut(vtable.into(), vtable_size, ptr_align)?.expect("not a ZST");
for (idx, scalar) in scalars.into_iter().enumerate() {
if let Some(scalar) = scalar {
let idx: u64 = u64::try_from(idx).unwrap();
vtable_alloc.write_ptr_sized(ptr_size * idx, scalar)?;
}
}
M::after_static_mem_initialized(self, vtable, vtable_size)?;
self.memory.mark_immutable(vtable.alloc_id)?;
assert!(self.vtables.insert((ty, poly_trait_ref), vtable).is_none());
Ok(vtable)
Ok(vtable_ptr)
}
/// Resolves the function at the specified slot in the provided

View File

@@ -10,7 +10,7 @@ LL | | };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 4) {
╾─alloc2──╼ ╾─alloc3──╼ │ ╾──╼╾──╼
╾─alloc2──╼ ╾─alloc5──╼ │ ╾──╼╾──╼
}
error: aborting due to previous error

View File

@@ -10,7 +10,7 @@ LL | | };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 16, align: 8) {
╾───────alloc2────────╼ ╾───────alloc3────────╼ │ ╾──────╼╾──────╼
╾───────alloc2────────╼ ╾───────alloc5────────╼ │ ╾──────╼╾──────╼
}
error: aborting due to previous error

View File

@@ -6,7 +6,7 @@ LL | const G: Fat = unsafe { Transmute { t: FOO }.u };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 16, align: 8) {
╾───────alloc2────────╼ ╾───────alloc3────────╼ │ ╾──────╼╾──────╼
╾───────alloc2────────╼ ╾───────alloc5────────╼ │ ╾──────╼╾──────╼
}
error: aborting due to previous error

View File

@@ -19,7 +19,7 @@ LL | const SNEAKY: &dyn Sync = &Synced { x: UnsafeCell::new(42) };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 4) {
╾─alloc6──╼ ╾─alloc7──╼ │ ╾──╼╾──╼
╾─alloc6──╼ ╾─alloc8──╼ │ ╾──╼╾──╼
}
error[E0080]: it is undefined behavior to use this value

View File

@@ -19,7 +19,7 @@ LL | const SNEAKY: &dyn Sync = &Synced { x: UnsafeCell::new(42) };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 16, align: 8) {
╾───────alloc6────────╼ ╾───────alloc7────────╼ │ ╾──────╼╾──────╼
╾───────alloc6────────╼ ╾───────alloc8────────╼ │ ╾──────╼╾──────╼
}
error[E0080]: it is undefined behavior to use this value