Update other codegens to use tcx managed vtable allocations.
parent 654e3345e5
commit d3ff497bec
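Every consumer of vtables now follows the same pattern: ask the `TyCtxt` for an interned vtable allocation and lower it with whatever constant machinery the backend already has. A minimal sketch of that shared pattern (illustrative only; `render_const_allocation` and `BackendValue` are placeholders — in the hunks below cg_clif uses `pointer_for_allocation`, cg_ssa/cg_llvm use `const_data_from_alloc`, and the interpreter maps the `AllocId` directly through `global_base_pointer`):

    // Sketch of the pattern introduced by this commit; the two tcx calls are the ones
    // used in the hunks below, everything else is a placeholder for per-backend lowering.
    fn vtable_for<'tcx>(
        tcx: TyCtxt<'tcx>,
        ty: Ty<'tcx>,
        trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
    ) -> BackendValue {
        // Build (or fetch from the tcx-level cache) the vtable as a const allocation.
        let alloc_id = tcx.vtable_allocation(ty, trait_ref);
        // Look up the interned bytes behind that AllocId.
        let allocation = tcx.global_alloc(alloc_id).unwrap_memory();
        // Turn the allocation into the backend's notion of a constant/global.
        render_const_allocation(allocation)
    }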
@@ -233,7 +233,7 @@ pub(crate) struct FunctionCx<'m, 'clif, 'tcx: 'm> {
     pub(crate) module: &'m mut dyn Module,
     pub(crate) tcx: TyCtxt<'tcx>,
     pub(crate) pointer_type: Type, // Cached from module
-    pub(crate) vtables: FxHashMap<(Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>), DataId>,
+    pub(crate) vtables: FxHashMap<(Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>), Pointer>,
     pub(crate) constants_cx: ConstantCx,

     pub(crate) instance: Instance<'tcx>,
@@ -249,7 +249,7 @@ pub(crate) fn codegen_const_value<'tcx>(
     }
 }

-fn pointer_for_allocation<'tcx>(
+pub(crate) fn pointer_for_allocation<'tcx>(
     fx: &mut FunctionCx<'_, '_, 'tcx>,
     alloc: &'tcx Allocation,
 ) -> crate::pointer::Pointer {
@@ -98,7 +98,7 @@ mod prelude {
     pub(crate) use cranelift_codegen::isa::{self, CallConv};
     pub(crate) use cranelift_codegen::Context;
     pub(crate) use cranelift_frontend::{FunctionBuilder, FunctionBuilderContext, Variable};
-    pub(crate) use cranelift_module::{self, DataContext, DataId, FuncId, Linkage, Module};
+    pub(crate) use cranelift_module::{self, DataContext, FuncId, Linkage, Module};

     pub(crate) use crate::abi::*;
     pub(crate) use crate::base::{codegen_operand, codegen_place};
@@ -31,9 +31,7 @@ pub(crate) fn unsized_info<'tcx>(
             // change to the vtable.
             old_info.expect("unsized_info: missing old info for trait upcast")
         }
-        (_, &ty::Dynamic(ref data, ..)) => {
-            crate::vtable::get_vtable(fx, fx.layout_of(source), data.principal())
-        }
+        (_, &ty::Dynamic(ref data, ..)) => crate::vtable::get_vtable(fx, source, data.principal()),
         _ => bug!("unsized_info: invalid unsizing {:?} -> {:?}", source, target),
     }
 }
@@ -4,7 +4,7 @@
 // FIXME dedup this logic between miri, cg_llvm and cg_clif

 use crate::prelude::*;
-use ty::VtblEntry;
+use super::constant::pointer_for_allocation;

 fn vtable_memflags() -> MemFlags {
     let mut flags = MemFlags::trusted(); // A vtable access is always aligned and will never trap.
@@ -66,105 +66,19 @@ pub(crate) fn get_ptr_and_method_ref<'tcx>(

 pub(crate) fn get_vtable<'tcx>(
     fx: &mut FunctionCx<'_, '_, 'tcx>,
-    layout: TyAndLayout<'tcx>,
+    ty: Ty<'tcx>,
     trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
 ) -> Value {
-    let data_id = if let Some(data_id) = fx.vtables.get(&(layout.ty, trait_ref)) {
-        *data_id
+    let vtable_ptr = if let Some(vtable_ptr) = fx.vtables.get(&(ty, trait_ref)) {
+        *vtable_ptr
     } else {
-        let data_id = build_vtable(fx, layout, trait_ref);
-        fx.vtables.insert((layout.ty, trait_ref), data_id);
-        data_id
+        let vtable_alloc_id = fx.tcx.vtable_allocation(ty, trait_ref);
+        let vtable_allocation = fx.tcx.global_alloc(vtable_alloc_id).unwrap_memory();
+        let vtable_ptr = pointer_for_allocation(fx, vtable_allocation);
+
+        fx.vtables.insert((ty, trait_ref), vtable_ptr);
+        vtable_ptr
     };

-    let local_data_id = fx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
-    fx.bcx.ins().global_value(fx.pointer_type, local_data_id)
-}
-
-fn build_vtable<'tcx>(
-    fx: &mut FunctionCx<'_, '_, 'tcx>,
-    layout: TyAndLayout<'tcx>,
-    trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
-) -> DataId {
-    let tcx = fx.tcx;
-    let usize_size = fx.layout_of(fx.tcx.types.usize).size.bytes() as usize;
-
-    let drop_in_place_fn = import_function(
-        tcx,
-        fx.module,
-        Instance::resolve_drop_in_place(tcx, layout.ty).polymorphize(fx.tcx),
-    );
-
-    let vtable_entries = if let Some(trait_ref) = trait_ref {
-        tcx.vtable_entries(trait_ref.with_self_ty(tcx, layout.ty))
-    } else {
-        ty::COMMON_VTABLE_ENTRIES
-    };
-
-    let mut data_ctx = DataContext::new();
-    let mut data = ::std::iter::repeat(0u8)
-        .take(vtable_entries.len() * usize_size)
-        .collect::<Vec<u8>>()
-        .into_boxed_slice();
-
-    for (idx, entry) in vtable_entries.iter().enumerate() {
-        match entry {
-            VtblEntry::MetadataSize => {
-                write_usize(fx.tcx, &mut data, idx, layout.size.bytes());
-            }
-            VtblEntry::MetadataAlign => {
-                write_usize(fx.tcx, &mut data, idx, layout.align.abi.bytes());
-            }
-            VtblEntry::MetadataDropInPlace | VtblEntry::Vacant | VtblEntry::Method(_, _) => {}
-        }
-    }
-    data_ctx.define(data);
-
-    for (idx, entry) in vtable_entries.iter().enumerate() {
-        match entry {
-            VtblEntry::MetadataDropInPlace => {
-                let func_ref = fx.module.declare_func_in_data(drop_in_place_fn, &mut data_ctx);
-                data_ctx.write_function_addr((idx * usize_size) as u32, func_ref);
-            }
-            VtblEntry::Method(def_id, substs) => {
-                let func_id = import_function(
-                    tcx,
-                    fx.module,
-                    Instance::resolve_for_vtable(tcx, ParamEnv::reveal_all(), *def_id, substs)
-                        .unwrap()
-                        .polymorphize(fx.tcx),
-                );
-                let func_ref = fx.module.declare_func_in_data(func_id, &mut data_ctx);
-                data_ctx.write_function_addr((idx * usize_size) as u32, func_ref);
-            }
-            VtblEntry::MetadataSize | VtblEntry::MetadataAlign | VtblEntry::Vacant => {}
-        }
-    }
-
-    data_ctx.set_align(fx.tcx.data_layout.pointer_align.pref.bytes());
-
-    let data_id = fx.module.declare_anonymous_data(false, false).unwrap();
-
-    fx.module.define_data(data_id, &data_ctx).unwrap();
-
-    data_id
-}
-
-fn write_usize(tcx: TyCtxt<'_>, buf: &mut [u8], idx: usize, num: u64) {
-    let pointer_size =
-        tcx.layout_of(ParamEnv::reveal_all().and(tcx.types.usize)).unwrap().size.bytes() as usize;
-    let target = &mut buf[idx * pointer_size..(idx + 1) * pointer_size];
-
-    match tcx.data_layout.endian {
-        rustc_target::abi::Endian::Little => match pointer_size {
-            4 => target.copy_from_slice(&(num as u32).to_le_bytes()),
-            8 => target.copy_from_slice(&(num as u64).to_le_bytes()),
-            _ => todo!("pointer size {} is not yet supported", pointer_size),
-        },
-        rustc_target::abi::Endian::Big => match pointer_size {
-            4 => target.copy_from_slice(&(num as u32).to_be_bytes()),
-            8 => target.copy_from_slice(&(num as u64).to_be_bytes()),
-            _ => todo!("pointer size {} is not yet supported", pointer_size),
-        },
-    }
-}
+    vtable_ptr.get_addr(fx)
+}
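With the hand-rolled `build_vtable`/`write_usize` machinery gone, the cranelift backend treats a vtable like any other constant allocation. The whole path, condensed from the new `get_vtable` body above (a sketch of the flow, not additional code in the commit):

    // `ty` and `trait_ref` identify the vtable, `fx` is the cranelift FunctionCx.
    let vtable_alloc_id = fx.tcx.vtable_allocation(ty, trait_ref);
    let alloc = fx.tcx.global_alloc(vtable_alloc_id).unwrap_memory();
    let vtable_ptr = pointer_for_allocation(fx, alloc); // same helper used for other constants
    let vtable_value: Value = vtable_ptr.get_addr(fx);  // materialize as an SSA value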
@@ -282,6 +282,10 @@ impl ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> {
         }
     }

+    fn const_data_from_alloc(&self, alloc: &Allocation) -> Self::Value {
+        const_alloc_to_llvm(self, alloc)
+    }
+
     fn from_const_alloc(
         &self,
         layout: TyAndLayout<'tcx>,
@@ -1,6 +1,6 @@
 use crate::traits::*;

-use rustc_middle::ty::{self, Instance, Ty, VtblEntry, COMMON_VTABLE_ENTRIES};
+use rustc_middle::ty::{self, Ty};
 use rustc_target::abi::call::FnAbi;

 #[derive(Copy, Clone, Debug)]
@@ -70,48 +70,13 @@ pub fn get_vtable<'tcx, Cx: CodegenMethods<'tcx>>(
         return val;
     }

-    // Not in the cache; build it.
-    let nullptr = cx.const_null(cx.type_i8p_ext(cx.data_layout().instruction_address_space));
-
-    let vtable_entries = if let Some(trait_ref) = trait_ref {
-        tcx.vtable_entries(trait_ref.with_self_ty(tcx, ty))
-    } else {
-        COMMON_VTABLE_ENTRIES
-    };
-
-    let layout = cx.layout_of(ty);
-    // /////////////////////////////////////////////////////////////////////////////////////////////
-    // If you touch this code, be sure to also make the corresponding changes to
-    // `get_vtable` in `rust_mir/interpret/traits.rs`.
-    // /////////////////////////////////////////////////////////////////////////////////////////////
-    let components: Vec<_> = vtable_entries
-        .iter()
-        .map(|entry| match entry {
-            VtblEntry::MetadataDropInPlace => {
-                cx.get_fn_addr(Instance::resolve_drop_in_place(cx.tcx(), ty))
-            }
-            VtblEntry::MetadataSize => cx.const_usize(layout.size.bytes()),
-            VtblEntry::MetadataAlign => cx.const_usize(layout.align.abi.bytes()),
-            VtblEntry::Vacant => nullptr,
-            VtblEntry::Method(def_id, substs) => cx.get_fn_addr(
-                ty::Instance::resolve_for_vtable(
-                    cx.tcx(),
-                    ty::ParamEnv::reveal_all(),
-                    *def_id,
-                    substs,
-                )
-                .unwrap()
-                .polymorphize(cx.tcx()),
-            ),
-        })
-        .collect();
-
-    let vtable_const = cx.const_struct(&components, false);
+    let vtable_alloc_id = tcx.vtable_allocation(ty, trait_ref);
+    let vtable_allocation = tcx.global_alloc(vtable_alloc_id).unwrap_memory();
+    let vtable_const = cx.const_data_from_alloc(vtable_allocation);
     let align = cx.data_layout().pointer_align.abi;
     let vtable = cx.static_addr_of(vtable_const, align, Some("vtable"));

     cx.create_vtable_metadata(ty, vtable);

     cx.vtables().borrow_mut().insert((ty, trait_ref), vtable);
     vtable
 }
@@ -26,6 +26,8 @@ pub trait ConstMethods<'tcx>: BackendTypes {
     fn const_to_opt_uint(&self, v: Self::Value) -> Option<u64>;
     fn const_to_opt_u128(&self, v: Self::Value, sign_ext: bool) -> Option<u128>;

+    fn const_data_from_alloc(&self, alloc: &Allocation) -> Self::Value;
+
     fn scalar_to_backend(&self, cv: Scalar, layout: &abi::Scalar, llty: Self::Type) -> Self::Value;
     fn from_const_alloc(
         &self,
@@ -11,7 +11,7 @@ use crate::middle;
 use crate::middle::cstore::{CrateStoreDyn, EncodedMetadata};
 use crate::middle::resolve_lifetime::{self, LifetimeScopeForPath, ObjectLifetimeDefault};
 use crate::middle::stability;
-use crate::mir::interpret::{self, Allocation, ConstValue, Scalar};
+use crate::mir::interpret::{self, AllocId, Allocation, ConstValue, Scalar};
 use crate::mir::{Body, Field, Local, Place, PlaceElem, ProjectionKind, Promoted};
 use crate::thir::Thir;
 use crate::traits;
@@ -1045,6 +1045,9 @@ pub struct GlobalCtxt<'tcx> {
     output_filenames: Arc<OutputFilenames>,

     pub main_def: Option<MainDefinition>,
+
+    pub(super) vtables_cache:
+        Lock<FxHashMap<(Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>), AllocId>>,
 }

 impl<'tcx> TyCtxt<'tcx> {
@@ -1202,6 +1205,7 @@ impl<'tcx> TyCtxt<'tcx> {
             alloc_map: Lock::new(interpret::AllocMap::new()),
             output_filenames: Arc::new(output_filenames),
             main_def: resolutions.main_def,
+            vtables_cache: Default::default(),
         }
     }
@@ -18,6 +18,7 @@ pub use adt::*;
 pub use assoc::*;
 pub use closure::*;
 pub use generics::*;
+pub use vtable::*;

 use crate::hir::exports::ExportMap;
 use crate::ich::StableHashingContext;
@@ -94,6 +95,7 @@ pub mod relate;
 pub mod subst;
 pub mod trait_def;
 pub mod util;
+pub mod vtable;
 pub mod walk;

 mod adt;
@@ -2009,19 +2011,3 @@ impl<'tcx> fmt::Debug for SymbolName<'tcx> {
         fmt::Display::fmt(&self.name, fmt)
     }
 }
-
-#[derive(Clone, Copy, Debug, PartialEq, HashStable)]
-pub enum VtblEntry<'tcx> {
-    MetadataDropInPlace,
-    MetadataSize,
-    MetadataAlign,
-    Vacant,
-    Method(DefId, SubstsRef<'tcx>),
-}
-
-pub const COMMON_VTABLE_ENTRIES: &[VtblEntry<'_>] =
-    &[VtblEntry::MetadataDropInPlace, VtblEntry::MetadataSize, VtblEntry::MetadataAlign];
-
-pub const COMMON_VTABLE_ENTRIES_DROPINPLACE: usize = 0;
-pub const COMMON_VTABLE_ENTRIES_SIZE: usize = 1;
-pub const COMMON_VTABLE_ENTRIES_ALIGN: usize = 2;
compiler/rustc_middle/src/ty/vtable.rs (new file, 106 lines)
@@ -0,0 +1,106 @@
+use std::convert::TryFrom;
+
+use crate::mir::interpret::{alloc_range, AllocId, Allocation, Pointer, Scalar};
+use crate::ty::fold::TypeFoldable;
+use crate::ty::{self, DefId, SubstsRef, Ty, TyCtxt};
+use rustc_ast::Mutability;
+
+#[derive(Clone, Copy, Debug, PartialEq, HashStable)]
+pub enum VtblEntry<'tcx> {
+    MetadataDropInPlace,
+    MetadataSize,
+    MetadataAlign,
+    Vacant,
+    Method(DefId, SubstsRef<'tcx>),
+}
+
+pub const COMMON_VTABLE_ENTRIES: &[VtblEntry<'_>] =
+    &[VtblEntry::MetadataDropInPlace, VtblEntry::MetadataSize, VtblEntry::MetadataAlign];
+
+pub const COMMON_VTABLE_ENTRIES_DROPINPLACE: usize = 0;
+pub const COMMON_VTABLE_ENTRIES_SIZE: usize = 1;
+pub const COMMON_VTABLE_ENTRIES_ALIGN: usize = 2;
+
+impl<'tcx> TyCtxt<'tcx> {
+    /// Retrieves an allocation that represents the contents of a vtable.
+    /// There's a cache within `TyCtxt` so it will be deduplicated.
+    pub fn vtable_allocation(
+        self,
+        ty: Ty<'tcx>,
+        poly_trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
+    ) -> AllocId {
+        let tcx = self;
+        let vtables_cache = tcx.vtables_cache.lock();
+        if let Some(alloc_id) = vtables_cache.get(&(ty, poly_trait_ref)).cloned() {
+            return alloc_id;
+        }
+        drop(vtables_cache);
+
+        // See https://github.com/rust-lang/rust/pull/86475#discussion_r655162674
+        assert!(
+            !ty.needs_subst() && !poly_trait_ref.map_or(false, |trait_ref| trait_ref.needs_subst())
+        );
+        let param_env = ty::ParamEnv::reveal_all();
+        let vtable_entries = if let Some(poly_trait_ref) = poly_trait_ref {
+            let trait_ref = poly_trait_ref.with_self_ty(tcx, ty);
+            let trait_ref = tcx.erase_regions(trait_ref);
+
+            tcx.vtable_entries(trait_ref)
+        } else {
+            COMMON_VTABLE_ENTRIES
+        };
+
+        let layout =
+            tcx.layout_of(param_env.and(ty)).expect("failed to build vtable representation");
+        assert!(!layout.is_unsized(), "can't create a vtable for an unsized type");
+        let size = layout.size.bytes();
+        let align = layout.align.abi.bytes();
+
+        let ptr_size = tcx.data_layout.pointer_size;
+        let ptr_align = tcx.data_layout.pointer_align.abi;
+
+        let vtable_size = ptr_size * u64::try_from(vtable_entries.len()).unwrap();
+        let mut vtable = Allocation::uninit(vtable_size, ptr_align);
+
+        // No need to do any alignment checks on the memory accesses below, because we know the
+        // allocation is correctly aligned as we created it above. Also we're only offsetting by
+        // multiples of `ptr_align`, which means that it will stay aligned to `ptr_align`.
+
+        for (idx, entry) in vtable_entries.iter().enumerate() {
+            let idx: u64 = u64::try_from(idx).unwrap();
+            let scalar = match entry {
+                VtblEntry::MetadataDropInPlace => {
+                    let instance = ty::Instance::resolve_drop_in_place(tcx, ty);
+                    let fn_alloc_id = tcx.create_fn_alloc(instance);
+                    let fn_ptr = Pointer::from(fn_alloc_id);
+                    fn_ptr.into()
+                }
+                VtblEntry::MetadataSize => Scalar::from_uint(size, ptr_size).into(),
+                VtblEntry::MetadataAlign => Scalar::from_uint(align, ptr_size).into(),
+                VtblEntry::Vacant => continue,
+                VtblEntry::Method(def_id, substs) => {
+                    // See https://github.com/rust-lang/rust/pull/86475#discussion_r655162674
+                    assert!(!substs.needs_subst());
+
+                    // Prepare the fn ptr we write into the vtable.
+                    let instance =
+                        ty::Instance::resolve_for_vtable(tcx, param_env, *def_id, substs)
+                            .expect("resolution failed during building vtable representation")
+                            .polymorphize(tcx);
+                    let fn_alloc_id = tcx.create_fn_alloc(instance);
+                    let fn_ptr = Pointer::from(fn_alloc_id);
+                    fn_ptr.into()
+                }
+            };
+            vtable
+                .write_scalar(&tcx, alloc_range(ptr_size * idx, ptr_size), scalar)
+                .expect("failed to build vtable representation");
+        }
+
+        vtable.mutability = Mutability::Not;
+        let alloc_id = tcx.create_memory_alloc(tcx.intern_const_alloc(vtable));
+        let mut vtables_cache = self.vtables_cache.lock();
+        vtables_cache.insert((ty, poly_trait_ref), alloc_id);
+        alloc_id
+    }
+}
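The allocation built above is simply an array of pointer-sized slots, in the order given by the `vtable_entries` query (which begins with the three `COMMON_VTABLE_ENTRIES` and then lists the trait's own entries). As a worked illustration, with a hypothetical trait and 8-byte pointers (not part of the commit):

    // Illustration only: a hypothetical `dyn Draw` with methods `draw` and `area`
    // on a target with ptr_size = 8 would get entries at these offsets:
    //   offset  0: fn pointer to drop_in_place::<T>   (MetadataDropInPlace)
    //   offset  8: size_of::<T>()                     (MetadataSize)
    //   offset 16: align_of::<T>()                    (MetadataAlign)
    //   offset 24: fn pointer to <T as Draw>::draw    (Method)
    //   offset 32: fn pointer to <T as Draw>::area    (Method)
    // Entry i is written with write_scalar at alloc_range(ptr_size * i, ptr_size);
    // Vacant slots are skipped and stay uninitialized.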
@@ -1,29 +1,14 @@
 use std::convert::TryFrom;

-use rustc_middle::mir::interpret::{
-    AllocError, InterpError, InterpResult, Pointer, PointerArithmetic, Scalar,
-    UndefinedBehaviorInfo, UnsupportedOpInfo,
-};
+use rustc_middle::mir::interpret::{InterpResult, Pointer, PointerArithmetic, Scalar};
 use rustc_middle::ty::{
-    self, Instance, Ty, VtblEntry, COMMON_VTABLE_ENTRIES, COMMON_VTABLE_ENTRIES_ALIGN,
+    self, Ty, COMMON_VTABLE_ENTRIES, COMMON_VTABLE_ENTRIES_ALIGN,
     COMMON_VTABLE_ENTRIES_DROPINPLACE, COMMON_VTABLE_ENTRIES_SIZE,
 };
-use rustc_target::abi::{Align, LayoutOf, Size};
+use rustc_target::abi::{Align, Size};

-use super::alloc_range;
 use super::util::ensure_monomorphic_enough;
-use super::{Allocation, FnVal, InterpCx, Machine};
-
-fn vtable_alloc_error_to_interp_error<'tcx>(error: AllocError) -> InterpError<'tcx> {
-    match error {
-        AllocError::ReadPointerAsBytes => {
-            InterpError::Unsupported(UnsupportedOpInfo::ReadPointerAsBytes)
-        }
-        AllocError::InvalidUninitBytes(_info) => {
-            InterpError::UndefinedBehavior(UndefinedBehaviorInfo::InvalidUninitBytes(None))
-        }
-    }
-}
+use super::{FnVal, InterpCx, Machine};

 impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
     /// Creates a dynamic vtable for the given type and vtable origin. This is used only for
@@ -45,79 +30,9 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         ensure_monomorphic_enough(*self.tcx, ty)?;
         ensure_monomorphic_enough(*self.tcx, poly_trait_ref)?;

-        if let Some(&vtable) = self.vtables.get(&(ty, poly_trait_ref)) {
-            // This means we guarantee that there are no duplicate vtables, we will
-            // always use the same vtable for the same (Type, Trait) combination.
-            // That's not what happens in rustc, but emulating per-crate deduplication
-            // does not sound like it actually makes anything any better.
-            return Ok(vtable);
-        }
+        let vtable_allocation = self.tcx.vtable_allocation(ty, poly_trait_ref);

-        let vtable_entries = if let Some(poly_trait_ref) = poly_trait_ref {
-            let trait_ref = poly_trait_ref.with_self_ty(*self.tcx, ty);
-            let trait_ref = self.tcx.erase_regions(trait_ref);
-
-            self.tcx.vtable_entries(trait_ref)
-        } else {
-            COMMON_VTABLE_ENTRIES
-        };
-
-        let layout = self.layout_of(ty)?;
-        assert!(!layout.is_unsized(), "can't create a vtable for an unsized type");
-        let size = layout.size.bytes();
-        let align = layout.align.abi.bytes();
-
-        let tcx = *self.tcx;
-        let ptr_size = self.pointer_size();
-        let ptr_align = tcx.data_layout.pointer_align.abi;
-        // /////////////////////////////////////////////////////////////////////////////////////////
-        // If you touch this code, be sure to also make the corresponding changes to
-        // `get_vtable` in `rust_codegen_llvm/meth.rs`.
-        // /////////////////////////////////////////////////////////////////////////////////////////
-        let vtable_size = ptr_size * u64::try_from(vtable_entries.len()).unwrap();
-        let mut vtable = Allocation::uninit(vtable_size, ptr_align);
-
-        // No need to do any alignment checks on the memory accesses below, because we know the
-        // allocation is correctly aligned as we created it above. Also we're only offsetting by
-        // multiples of `ptr_align`, which means that it will stay aligned to `ptr_align`.
-        let scalars = vtable_entries
-            .iter()
-            .map(|entry| -> InterpResult<'tcx, _> {
-                match entry {
-                    VtblEntry::MetadataDropInPlace => {
-                        let instance = Instance::resolve_drop_in_place(tcx, ty);
-                        let fn_alloc_id = tcx.create_fn_alloc(instance);
-                        let fn_ptr = Pointer::from(fn_alloc_id);
-                        Ok(Some(fn_ptr.into()))
-                    }
-                    VtblEntry::MetadataSize => Ok(Some(Scalar::from_uint(size, ptr_size).into())),
-                    VtblEntry::MetadataAlign => Ok(Some(Scalar::from_uint(align, ptr_size).into())),
-                    VtblEntry::Vacant => Ok(None),
-                    VtblEntry::Method(def_id, substs) => {
-                        // Prepare the fn ptr we write into the vtable.
-                        let instance =
-                            Instance::resolve_for_vtable(tcx, self.param_env, *def_id, substs)
-                                .ok_or_else(|| err_inval!(TooGeneric))?;
-                        let fn_alloc_id = tcx.create_fn_alloc(instance);
-                        let fn_ptr = Pointer::from(fn_alloc_id);
-                        Ok(Some(fn_ptr.into()))
-                    }
-                }
-            })
-            .collect::<Result<Vec<_>, _>>()?;
-        for (idx, scalar) in scalars.into_iter().enumerate() {
-            if let Some(scalar) = scalar {
-                let idx: u64 = u64::try_from(idx).unwrap();
-                vtable
-                    .write_scalar(self, alloc_range(ptr_size * idx, ptr_size), scalar)
-                    .map_err(vtable_alloc_error_to_interp_error)?;
-            }
-        }
-
-        let vtable_id = tcx.create_memory_alloc(tcx.intern_const_alloc(vtable));
-        let vtable_ptr = self.memory.global_base_pointer(Pointer::from(vtable_id))?;
-
-        assert!(self.vtables.insert((ty, poly_trait_ref), vtable_ptr).is_none());
+        let vtable_ptr = self.memory.global_base_pointer(Pointer::from(vtable_allocation))?;

         Ok(vtable_ptr)
     }
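For const eval and miri, the per-`InterpCx` vtable cache and allocation building disappear entirely; the interpreter just maps the tcx-provided `AllocId` into its own memory, relying on the `TyCtxt`-level cache for deduplication. Condensed from the hunk above (a sketch of the flow, not additional code):

    // The AllocId from the TyCtxt cache becomes a machine pointer in interpreter memory.
    let vtable_allocation = self.tcx.vtable_allocation(ty, poly_trait_ref);
    let vtable_ptr = self.memory.global_base_pointer(Pointer::from(vtable_allocation))?;
    Ok(vtable_ptr)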