Rollup merge of #132801 - RalfJung:alloc-mutability, r=oli-obk
interpret: get_alloc_info: also return mutability

This will be needed for https://github.com/rust-lang/miri/pull/3971.

This then turned into a larger refactor: we introduce a new type for the `get_alloc_info` return data, and we move some code to methods on `GlobalAlloc` to avoid duplicating it between the validity check and `get_alloc_info`.
commit 61f51931b2
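In outline, the API change looks like this (a hedged sketch of a generic call site, not a line from the diff; `ecx` and `alloc_id` stand for whatever interpreter context and allocation ID the caller already has):

    // Before: `get_alloc_info` returned a bare tuple, so every caller had to
    // destructure all three fields, including the ones it ignored.
    let (_size, align, _kind) = ecx.get_alloc_info(alloc_id);

    // After: it returns the new `AllocInfo` struct; callers name only the
    // fields they need, and the added `mutbl` field costs existing callers nothing.
    let info = ecx.get_alloc_info(alloc_id);
    let (align, mutbl) = (info.align, info.mutbl);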
@@ -472,8 +472,9 @@ fn report_validation_error<'tcx>(
     backtrace.print_backtrace();

     let bytes = ecx.print_alloc_bytes_for_diagnostics(alloc_id);
-    let (size, align, _) = ecx.get_alloc_info(alloc_id);
-    let raw_bytes = errors::RawBytesNote { size: size.bytes(), align: align.bytes(), bytes };
+    let info = ecx.get_alloc_info(alloc_id);
+    let raw_bytes =
+        errors::RawBytesNote { size: info.size.bytes(), align: info.align.bytes(), bytes };

     crate::const_eval::report(
         *ecx.tcx,
@@ -14,10 +14,9 @@ use std::{fmt, mem, ptr};
 use rustc_abi::{Align, HasDataLayout, Size};
 use rustc_ast::Mutability;
 use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
-use rustc_hir::def::DefKind;
 use rustc_middle::bug;
 use rustc_middle::mir::display_allocation;
-use rustc_middle::ty::{self, Instance, ParamEnv, Ty, TyCtxt};
+use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
 use tracing::{debug, instrument, trace};

 use super::{
@@ -72,6 +71,21 @@ pub enum AllocKind {
     Dead,
 }

+/// Metadata about an `AllocId`.
+#[derive(Copy, Clone, PartialEq, Debug)]
+pub struct AllocInfo {
+    pub size: Size,
+    pub align: Align,
+    pub kind: AllocKind,
+    pub mutbl: Mutability,
+}
+
+impl AllocInfo {
+    fn new(size: Size, align: Align, kind: AllocKind, mutbl: Mutability) -> Self {
+        Self { size, align, kind, mutbl }
+    }
+}
+
 /// The value of a function pointer.
 #[derive(Debug, Copy, Clone)]
 pub enum FnVal<'tcx, Other> {
@@ -524,17 +538,22 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
         match self.ptr_try_get_alloc_id(ptr, 0) {
             Err(addr) => is_offset_misaligned(addr, align),
             Ok((alloc_id, offset, _prov)) => {
-                let (_size, alloc_align, kind) = self.get_alloc_info(alloc_id);
-                if let Some(misalign) =
-                    M::alignment_check(self, alloc_id, alloc_align, kind, offset, align)
-                {
+                let alloc_info = self.get_alloc_info(alloc_id);
+                if let Some(misalign) = M::alignment_check(
+                    self,
+                    alloc_id,
+                    alloc_info.align,
+                    alloc_info.kind,
+                    offset,
+                    align,
+                ) {
                     Some(misalign)
                 } else if M::Provenance::OFFSET_IS_ADDR {
                     is_offset_misaligned(ptr.addr().bytes(), align)
                 } else {
                     // Check allocation alignment and offset alignment.
-                    if alloc_align.bytes() < align.bytes() {
-                        Some(Misalignment { has: alloc_align, required: align })
+                    if alloc_info.align.bytes() < align.bytes() {
+                        Some(Misalignment { has: alloc_info.align, required: align })
                     } else {
                         is_offset_misaligned(offset.bytes(), align)
                     }
@@ -818,82 +837,45 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {

     /// Obtain the size and alignment of an allocation, even if that allocation has
     /// been deallocated.
-    pub fn get_alloc_info(&self, id: AllocId) -> (Size, Align, AllocKind) {
+    pub fn get_alloc_info(&self, id: AllocId) -> AllocInfo {
         // # Regular allocations
         // Don't use `self.get_raw` here as that will
         // a) cause cycles in case `id` refers to a static
         // b) duplicate a global's allocation in miri
         if let Some((_, alloc)) = self.memory.alloc_map.get(id) {
-            return (alloc.size(), alloc.align, AllocKind::LiveData);
+            return AllocInfo::new(
+                alloc.size(),
+                alloc.align,
+                AllocKind::LiveData,
+                alloc.mutability,
+            );
         }

         // # Function pointers
         // (both global from `alloc_map` and local from `extra_fn_ptr_map`)
         if self.get_fn_alloc(id).is_some() {
-            return (Size::ZERO, Align::ONE, AllocKind::Function);
+            return AllocInfo::new(Size::ZERO, Align::ONE, AllocKind::Function, Mutability::Not);
         }

-        // # Statics
-        // Can't do this in the match argument, we may get cycle errors since the lock would
-        // be held throughout the match.
-        match self.tcx.try_get_global_alloc(id) {
-            Some(GlobalAlloc::Static(def_id)) => {
-                // Thread-local statics do not have a constant address. They *must* be accessed via
-                // `ThreadLocalRef`; we can never have a pointer to them as a regular constant value.
-                assert!(!self.tcx.is_thread_local_static(def_id));
-
-                let DefKind::Static { nested, .. } = self.tcx.def_kind(def_id) else {
-                    bug!("GlobalAlloc::Static is not a static")
-                };
-
-                let (size, align) = if nested {
-                    // Nested anonymous statics are untyped, so let's get their
-                    // size and alignment from the allocation itself. This always
-                    // succeeds, as the query is fed at DefId creation time, so no
-                    // evaluation actually occurs.
-                    let alloc = self.tcx.eval_static_initializer(def_id).unwrap();
-                    (alloc.0.size(), alloc.0.align)
-                } else {
-                    // Use size and align of the type for everything else. We need
-                    // to do that to
-                    // * avoid cycle errors in case of self-referential statics,
-                    // * be able to get information on extern statics.
-                    let ty = self
-                        .tcx
-                        .type_of(def_id)
-                        .no_bound_vars()
-                        .expect("statics should not have generic parameters");
-                    let layout = self.tcx.layout_of(ParamEnv::empty().and(ty)).unwrap();
-                    assert!(layout.is_sized());
-                    (layout.size, layout.align.abi)
-                };
-                (size, align, AllocKind::LiveData)
-            }
-            Some(GlobalAlloc::Memory(alloc)) => {
-                // Need to duplicate the logic here, because the global allocations have
-                // different associated types than the interpreter-local ones.
-                let alloc = alloc.inner();
-                (alloc.size(), alloc.align, AllocKind::LiveData)
-            }
-            Some(GlobalAlloc::Function { .. }) => {
-                bug!("We already checked function pointers above")
-            }
-            Some(GlobalAlloc::VTable(..)) => {
-                // No data to be accessed here. But vtables are pointer-aligned.
-                return (Size::ZERO, self.tcx.data_layout.pointer_align.abi, AllocKind::VTable);
-            }
-            // The rest must be dead.
-            None => {
-                // Deallocated pointers are allowed, we should be able to find
-                // them in the map.
-                let (size, align) = *self
-                    .memory
-                    .dead_alloc_map
-                    .get(&id)
-                    .expect("deallocated pointers should all be recorded in `dead_alloc_map`");
-                (size, align, AllocKind::Dead)
-            }
-        }
+        // # Global allocations
+        if let Some(global_alloc) = self.tcx.try_get_global_alloc(id) {
+            let (size, align) = global_alloc.size_and_align(*self.tcx, self.param_env);
+            let mutbl = global_alloc.mutability(*self.tcx, self.param_env);
+            let kind = match global_alloc {
+                GlobalAlloc::Static { .. } | GlobalAlloc::Memory { .. } => AllocKind::LiveData,
+                GlobalAlloc::Function { .. } => bug!("We already checked function pointers above"),
+                GlobalAlloc::VTable { .. } => AllocKind::VTable,
+            };
+            return AllocInfo::new(size, align, kind, mutbl);
+        }
+
+        // # Dead pointers
+        let (size, align) = *self
+            .memory
+            .dead_alloc_map
+            .get(&id)
+            .expect("deallocated pointers should all be recorded in `dead_alloc_map`");
+        AllocInfo::new(size, align, AllocKind::Dead, Mutability::Not)
     }

     /// Obtain the size and alignment of a *live* allocation.
@@ -902,11 +884,11 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
         id: AllocId,
         msg: CheckInAllocMsg,
     ) -> InterpResult<'tcx, (Size, Align)> {
-        let (size, align, kind) = self.get_alloc_info(id);
-        if matches!(kind, AllocKind::Dead) {
+        let info = self.get_alloc_info(id);
+        if matches!(info.kind, AllocKind::Dead) {
             throw_ub!(PointerUseAfterFree(id, msg))
         }
-        interp_ok((size, align))
+        interp_ok((info.size, info.align))
     }

     fn get_fn_alloc(&self, id: AllocId) -> Option<FnVal<'tcx, M::ExtraFnVal>> {
@@ -1458,7 +1440,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
         let ptr = scalar.to_pointer(self)?;
         match self.ptr_try_get_alloc_id(ptr, 0) {
             Ok((alloc_id, offset, _)) => {
-                let (size, _align, _kind) = self.get_alloc_info(alloc_id);
+                let size = self.get_alloc_info(alloc_id).size;
                 // If the pointer is out-of-bounds, it may be null.
                 // Note that one-past-the-end (offset == size) is still inbounds, and never null.
                 offset > size
@@ -31,7 +31,7 @@ pub use self::intern::{
 };
 pub(crate) use self::intrinsics::eval_nullary_intrinsic;
 pub use self::machine::{AllocMap, Machine, MayLeak, ReturnAction, compile_time_machine};
-pub use self::memory::{AllocKind, AllocRef, AllocRefMut, FnVal, Memory, MemoryKind};
+pub use self::memory::{AllocInfo, AllocKind, AllocRef, AllocRefMut, FnVal, Memory, MemoryKind};
 use self::operand::Operand;
 pub use self::operand::{ImmTy, Immediate, OpTy};
 pub use self::place::{MPlaceTy, MemPlaceMeta, PlaceTy, Writeable};
@@ -31,8 +31,8 @@ use tracing::trace;

 use super::machine::AllocMap;
 use super::{
-    AllocId, AllocKind, CheckInAllocMsg, GlobalAlloc, ImmTy, Immediate, InterpCx, InterpResult,
-    MPlaceTy, Machine, MemPlaceMeta, PlaceTy, Pointer, Projectable, Scalar, ValueVisitor, err_ub,
+    AllocId, CheckInAllocMsg, GlobalAlloc, ImmTy, Immediate, InterpCx, InterpResult, MPlaceTy,
+    Machine, MemPlaceMeta, PlaceTy, Pointer, Projectable, Scalar, ValueVisitor, err_ub,
     format_interp_error,
 };

@@ -557,9 +557,20 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
                 if let Ok((alloc_id, _offset, _prov)) =
                     self.ecx.ptr_try_get_alloc_id(place.ptr(), 0)
                 {
-                    if let Some(GlobalAlloc::Static(did)) =
-                        self.ecx.tcx.try_get_global_alloc(alloc_id)
-                    {
+                    // Everything should be already interned.
+                    let Some(global_alloc) = self.ecx.tcx.try_get_global_alloc(alloc_id) else {
+                        assert!(self.ecx.memory.alloc_map.get(alloc_id).is_none());
+                        // We can't have *any* references to non-existing allocations in const-eval
+                        // as the rest of rustc isn't happy with them... so we throw an error, even
+                        // though for zero-sized references this isn't really UB.
+                        // A potential future alternative would be to resurrect this as a zero-sized allocation
+                        // (which codegen will then compile to an aligned dummy pointer anyway).
+                        throw_validation_failure!(self.path, DanglingPtrUseAfterFree { ptr_kind });
+                    };
+                    let (size, _align) =
+                        global_alloc.size_and_align(*self.ecx.tcx, self.ecx.param_env);
+
+                    if let GlobalAlloc::Static(did) = global_alloc {
                         let DefKind::Static { nested, .. } = self.ecx.tcx.def_kind(did) else {
                             bug!()
                         };
@@ -593,17 +604,6 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
                     }
                 }

-                // Dangling and Mutability check.
-                let (size, _align, alloc_kind) = self.ecx.get_alloc_info(alloc_id);
-                if alloc_kind == AllocKind::Dead {
-                    // This can happen for zero-sized references. We can't have *any* references to
-                    // non-existing allocations in const-eval though, interning rejects them all as
-                    // the rest of rustc isn't happy with them... so we throw an error, even though
-                    // this isn't really UB.
-                    // A potential future alternative would be to resurrect this as a zero-sized allocation
-                    // (which codegen will then compile to an aligned dummy pointer anyway).
-                    throw_validation_failure!(self.path, DanglingPtrUseAfterFree { ptr_kind });
-                }
                 // If this allocation has size zero, there is no actual mutability here.
                 if size != Size::ZERO {
                     // Determine whether this pointer expects to be pointing to something mutable.
@@ -618,7 +618,8 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
                         }
                     };
                     // Determine what it actually points to.
-                    let alloc_actual_mutbl = mutability(self.ecx, alloc_id);
+                    let alloc_actual_mutbl =
+                        global_alloc.mutability(*self.ecx.tcx, self.ecx.param_env);
                     // Mutable pointer to immutable memory is no good.
                     if ptr_expected_mutbl == Mutability::Mut
                         && alloc_actual_mutbl == Mutability::Not
@@ -842,9 +843,16 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
     }

     fn in_mutable_memory(&self, val: &PlaceTy<'tcx, M::Provenance>) -> bool {
+        debug_assert!(self.ctfe_mode.is_some());
         if let Some(mplace) = val.as_mplace_or_local().left() {
             if let Some(alloc_id) = mplace.ptr().provenance.and_then(|p| p.get_alloc_id()) {
-                mutability(self.ecx, alloc_id).is_mut()
+                let tcx = *self.ecx.tcx;
+                // Everything must be already interned.
+                let mutbl = tcx.global_alloc(alloc_id).mutability(tcx, self.ecx.param_env);
+                if let Some((_, alloc)) = self.ecx.memory.alloc_map.get(alloc_id) {
+                    assert_eq!(alloc.mutability, mutbl);
+                }
+                mutbl.is_mut()
             } else {
                 // No memory at all.
                 false
@@ -1016,53 +1024,6 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
     }
 }

-/// Returns whether the allocation is mutable, and whether it's actually a static.
-/// For "root" statics we look at the type to account for interior
-/// mutability; for nested statics we have no type and directly use the annotated mutability.
-fn mutability<'tcx>(ecx: &InterpCx<'tcx, impl Machine<'tcx>>, alloc_id: AllocId) -> Mutability {
-    // Let's see what kind of memory this points to.
-    // We're not using `try_global_alloc` since dangling pointers have already been handled.
-    match ecx.tcx.global_alloc(alloc_id) {
-        GlobalAlloc::Static(did) => {
-            let DefKind::Static { safety: _, mutability, nested } = ecx.tcx.def_kind(did) else {
-                bug!()
-            };
-            if nested {
-                assert!(
-                    ecx.memory.alloc_map.get(alloc_id).is_none(),
-                    "allocations of nested statics are already interned: {alloc_id:?}, {did:?}"
-                );
-                // Nested statics in a `static` are never interior mutable,
-                // so just use the declared mutability.
-                mutability
-            } else {
-                let mutability = match mutability {
-                    Mutability::Not
-                        if !ecx
-                            .tcx
-                            .type_of(did)
-                            .no_bound_vars()
-                            .expect("statics should not have generic parameters")
-                            .is_freeze(*ecx.tcx, ty::ParamEnv::reveal_all()) =>
-                    {
-                        Mutability::Mut
-                    }
-                    _ => mutability,
-                };
-                if let Some((_, alloc)) = ecx.memory.alloc_map.get(alloc_id) {
-                    assert_eq!(alloc.mutability, mutability);
-                }
-                mutability
-            }
-        }
-        GlobalAlloc::Memory(alloc) => alloc.inner().mutability,
-        GlobalAlloc::Function { .. } | GlobalAlloc::VTable(..) => {
-            // These are immutable, we better don't allow mutable pointers here.
-            Mutability::Not
-        }
-    }
-}
-
 impl<'rt, 'tcx, M: Machine<'tcx>> ValueVisitor<'tcx, M> for ValidityVisitor<'rt, 'tcx, M> {
     type V = PlaceTy<'tcx, M::Provenance>;

@@ -12,11 +12,12 @@ use std::io::{Read, Write};
 use std::num::NonZero;
 use std::{fmt, io};

-use rustc_abi::{AddressSpace, Endian, HasDataLayout};
-use rustc_ast::LitKind;
+use rustc_abi::{AddressSpace, Align, Endian, HasDataLayout, Size};
+use rustc_ast::{LitKind, Mutability};
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::sync::Lock;
 use rustc_errors::ErrorGuaranteed;
+use rustc_hir::def::DefKind;
 use rustc_hir::def_id::{DefId, LocalDefId};
 use rustc_macros::{HashStable, TyDecodable, TyEncodable, TypeFoldable, TypeVisitable};
 use rustc_middle::ty::print::with_no_trimmed_paths;
@@ -45,7 +46,7 @@ pub use self::pointer::{CtfeProvenance, Pointer, PointerArithmetic, Provenance};
 pub use self::value::Scalar;
 use crate::mir;
 use crate::ty::codec::{TyDecoder, TyEncoder};
-use crate::ty::{self, Instance, Ty, TyCtxt};
+use crate::ty::{self, Instance, ParamEnv, Ty, TyCtxt};

 /// Uniquely identifies one of the following:
 /// - A constant
@@ -310,6 +311,85 @@ impl<'tcx> GlobalAlloc<'tcx> {
             }
         }
     }
+
+    pub fn mutability(&self, tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>) -> Mutability {
+        // Let's see what kind of memory we are.
+        match self {
+            GlobalAlloc::Static(did) => {
+                let DefKind::Static { safety: _, mutability, nested } = tcx.def_kind(did) else {
+                    bug!()
+                };
+                if nested {
+                    // Nested statics in a `static` are never interior mutable,
+                    // so just use the declared mutability.
+                    if cfg!(debug_assertions) {
+                        let alloc = tcx.eval_static_initializer(did).unwrap();
+                        assert_eq!(alloc.0.mutability, mutability);
+                    }
+                    mutability
+                } else {
+                    let mutability = match mutability {
+                        Mutability::Not
+                            if !tcx
+                                .type_of(did)
+                                .no_bound_vars()
+                                .expect("statics should not have generic parameters")
+                                .is_freeze(tcx, param_env) =>
+                        {
+                            Mutability::Mut
+                        }
+                        _ => mutability,
+                    };
+                    mutability
+                }
+            }
+            GlobalAlloc::Memory(alloc) => alloc.inner().mutability,
+            GlobalAlloc::Function { .. } | GlobalAlloc::VTable(..) => {
+                // These are immutable.
+                Mutability::Not
+            }
+        }
+    }
+
+    pub fn size_and_align(&self, tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>) -> (Size, Align) {
+        match self {
+            GlobalAlloc::Static(def_id) => {
+                let DefKind::Static { nested, .. } = tcx.def_kind(def_id) else {
+                    bug!("GlobalAlloc::Static is not a static")
+                };
+
+                if nested {
+                    // Nested anonymous statics are untyped, so let's get their
+                    // size and alignment from the allocation itself. This always
+                    // succeeds, as the query is fed at DefId creation time, so no
+                    // evaluation actually occurs.
+                    let alloc = tcx.eval_static_initializer(def_id).unwrap();
+                    (alloc.0.size(), alloc.0.align)
+                } else {
+                    // Use size and align of the type for everything else. We need
+                    // to do that to
+                    // * avoid cycle errors in case of self-referential statics,
+                    // * be able to get information on extern statics.
+                    let ty = tcx
+                        .type_of(def_id)
+                        .no_bound_vars()
+                        .expect("statics should not have generic parameters");
+                    let layout = tcx.layout_of(param_env.and(ty)).unwrap();
+                    assert!(layout.is_sized());
+                    (layout.size, layout.align.abi)
+                }
+            }
+            GlobalAlloc::Memory(alloc) => {
+                let alloc = alloc.inner();
+                (alloc.size(), alloc.align)
+            }
+            GlobalAlloc::Function { .. } => (Size::ZERO, Align::ONE),
+            GlobalAlloc::VTable(..) => {
+                // No data to be accessed here. But vtables are pointer-aligned.
+                return (Size::ZERO, tcx.data_layout.pointer_align.abi);
+            }
+        }
+    }
 }

 pub const CTFE_ALLOC_SALT: usize = 0;
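As a usage note (a sketch, not code from this commit): with these two helpers, any consumer holding a `TyCtxt` and a `ParamEnv` can query a global allocation without repeating the Static/Memory/Function/VTable case analysis; `tcx`, `param_env`, and `alloc_id` below are assumed to be in scope:

    // Hypothetical call site, mirroring how the validity checker and
    // `get_alloc_info` use the new `GlobalAlloc` methods above.
    let global_alloc = tcx.global_alloc(alloc_id);
    let (size, align) = global_alloc.size_and_align(tcx, param_env);
    let mutbl = global_alloc.mutability(tcx, param_env);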
@@ -134,7 +134,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
                 // entered for addresses that are not the base address, so even zero-sized
                 // allocations will get recognized at their base address -- but all other
                 // allocations will *not* be recognized at their "end" address.
-                let size = ecx.get_alloc_info(alloc_id).0;
+                let size = ecx.get_alloc_info(alloc_id).size;
                 if offset < size.bytes() { Some(alloc_id) } else { None }
             }
         }?;
@@ -157,25 +157,25 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
     ) -> InterpResult<'tcx, u64> {
         let ecx = self.eval_context_ref();
         let mut rng = ecx.machine.rng.borrow_mut();
-        let (size, align, kind) = ecx.get_alloc_info(alloc_id);
+        let info = ecx.get_alloc_info(alloc_id);
         // This is either called immediately after allocation (and then cached), or when
         // adjusting `tcx` pointers (which never get freed). So assert that we are looking
         // at a live allocation. This also ensures that we never re-assign an address to an
         // allocation that previously had an address, but then was freed and the address
         // information was removed.
-        assert!(!matches!(kind, AllocKind::Dead));
+        assert!(!matches!(info.kind, AllocKind::Dead));

         // This allocation does not have a base address yet, pick or reuse one.
         if ecx.machine.native_lib.is_some() {
             // In native lib mode, we use the "real" address of the bytes for this allocation.
             // This ensures the interpreted program and native code have the same view of memory.
-            let base_ptr = match kind {
+            let base_ptr = match info.kind {
                 AllocKind::LiveData => {
                     if ecx.tcx.try_get_global_alloc(alloc_id).is_some() {
                         // For new global allocations, we always pre-allocate the memory to be able use the machine address directly.
-                        let prepared_bytes = MiriAllocBytes::zeroed(size, align)
+                        let prepared_bytes = MiriAllocBytes::zeroed(info.size, info.align)
                             .unwrap_or_else(|| {
-                                panic!("Miri ran out of memory: cannot create allocation of {size:?} bytes")
+                                panic!("Miri ran out of memory: cannot create allocation of {size:?} bytes", size = info.size)
                             });
                         let ptr = prepared_bytes.as_ptr();
                         // Store prepared allocation space to be picked up for use later.
|
|||||||
}
|
}
|
||||||
// We are not in native lib mode, so we control the addresses ourselves.
|
// We are not in native lib mode, so we control the addresses ourselves.
|
||||||
if let Some((reuse_addr, clock)) =
|
if let Some((reuse_addr, clock)) =
|
||||||
global_state.reuse.take_addr(&mut *rng, size, align, memory_kind, ecx.active_thread())
|
global_state.reuse.take_addr(&mut *rng, info.size, info.align, memory_kind, ecx.active_thread())
|
||||||
{
|
{
|
||||||
if let Some(clock) = clock {
|
if let Some(clock) = clock {
|
||||||
ecx.acquire_clock(&clock);
|
ecx.acquire_clock(&clock);
|
||||||
@@ -220,14 +220,14 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
                 .next_base_addr
                 .checked_add(slack)
                 .ok_or_else(|| err_exhaust!(AddressSpaceFull))?;
-            let base_addr = align_addr(base_addr, align.bytes());
+            let base_addr = align_addr(base_addr, info.align.bytes());

             // Remember next base address. If this allocation is zero-sized, leave a gap of at
             // least 1 to avoid two allocations having the same base address. (The logic in
             // `alloc_id_from_addr` assumes unique addresses, and different function/vtable pointers
             // need to be distinguishable!)
             global_state.next_base_addr = base_addr
-                .checked_add(max(size.bytes(), 1))
+                .checked_add(max(info.size.bytes(), 1))
                 .ok_or_else(|| err_exhaust!(AddressSpaceFull))?;
             // Even if `Size` didn't overflow, we might still have filled up the address space.
             if global_state.next_base_addr > ecx.target_usize_max() {
@@ -363,7 +363,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
             // If it does exist, then we have the guarantee that the
             // pointer is readable, and the implicit read access inserted
             // will never cause UB on the pointer itself.
-            let (_, _, kind) = this.get_alloc_info(*alloc_id);
+            let kind = this.get_alloc_info(*alloc_id).kind;
             if matches!(kind, AllocKind::LiveData) {
                 let alloc_extra = this.get_alloc_extra(*alloc_id)?; // can still fail for `extern static`
                 let alloc_borrow_tracker = &alloc_extra.borrow_tracker.as_ref().unwrap();
@@ -626,7 +626,7 @@ trait EvalContextPrivExt<'tcx, 'ecx>: crate::MiriInterpCxExt<'tcx> {
                 return interp_ok(())
             };

-            let (_size, _align, alloc_kind) = this.get_alloc_info(alloc_id);
+            let alloc_kind = this.get_alloc_info(alloc_id).kind;
             match alloc_kind {
                 AllocKind::LiveData => {
                     // This should have alloc_extra data, but `get_alloc_extra` can still fail
@@ -1017,7 +1017,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
         // Function pointers and dead objects don't have an alloc_extra so we ignore them.
         // This is okay because accessing them is UB anyway, no need for any Stacked Borrows checks.
         // NOT using `get_alloc_extra_mut` since this might be a read-only allocation!
-        let (_size, _align, kind) = this.get_alloc_info(alloc_id);
+        let kind = this.get_alloc_info(alloc_id).kind;
         match kind {
             AllocKind::LiveData => {
                 // This should have alloc_extra data, but `get_alloc_extra` can still fail
@@ -274,7 +274,7 @@ trait EvalContextPrivExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
                 .insert(new_tag, protect);
         }

-        let alloc_kind = this.get_alloc_info(alloc_id).2;
+        let alloc_kind = this.get_alloc_info(alloc_id).kind;
         if !matches!(alloc_kind, AllocKind::LiveData) {
             assert_eq!(ptr_size, Size::ZERO); // we did the deref check above, size has to be 0 here
             // There's not actually any bytes here where accesses could even be tracked.
@@ -538,7 +538,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
         // Function pointers and dead objects don't have an alloc_extra so we ignore them.
        // This is okay because accessing them is UB anyway, no need for any Tree Borrows checks.
         // NOT using `get_alloc_extra_mut` since this might be a read-only allocation!
-        let (_size, _align, kind) = this.get_alloc_info(alloc_id);
+        let kind = this.get_alloc_info(alloc_id).kind;
         match kind {
             AllocKind::LiveData => {
                 // This should have alloc_extra data, but `get_alloc_extra` can still fail
@@ -1125,10 +1125,10 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> {
         let Provenance::Concrete { alloc_id, .. } = ptr.provenance else {
             panic!("extern_statics cannot contain wildcards")
         };
-        let (shim_size, shim_align, _kind) = ecx.get_alloc_info(alloc_id);
+        let info = ecx.get_alloc_info(alloc_id);
         let def_ty = ecx.tcx.type_of(def_id).instantiate_identity();
         let extern_decl_layout = ecx.tcx.layout_of(ty::ParamEnv::empty().and(def_ty)).unwrap();
-        if extern_decl_layout.size != shim_size || extern_decl_layout.align.abi != shim_align {
+        if extern_decl_layout.size != info.size || extern_decl_layout.align.abi != info.align {
             throw_unsup_format!(
                 "extern static `{link_name}` has been declared as `{krate}::{name}` \
                 with a size of {decl_size} bytes and alignment of {decl_align} bytes, \
@@ -1138,8 +1138,8 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> {
                 krate = ecx.tcx.crate_name(def_id.krate),
                 decl_size = extern_decl_layout.size.bytes(),
                 decl_align = extern_decl_layout.align.abi.bytes(),
-                shim_size = shim_size.bytes(),
-                shim_align = shim_align.bytes(),
+                shim_size = info.size.bytes(),
+                shim_align = info.align.bytes(),
             )
         }
         interp_ok(ptr)
|
@ -300,7 +300,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
|
|||||||
let id = this.read_scalar(id)?.to_u64()?;
|
let id = this.read_scalar(id)?.to_u64()?;
|
||||||
let show_unnamed = this.read_scalar(show_unnamed)?.to_bool()?;
|
let show_unnamed = this.read_scalar(show_unnamed)?.to_bool()?;
|
||||||
if let Some(id) = std::num::NonZero::new(id).map(AllocId)
|
if let Some(id) = std::num::NonZero::new(id).map(AllocId)
|
||||||
&& this.get_alloc_info(id).2 == AllocKind::LiveData
|
&& this.get_alloc_info(id).kind == AllocKind::LiveData
|
||||||
{
|
{
|
||||||
this.print_borrow_state(id, show_unnamed)?;
|
this.print_borrow_state(id, show_unnamed)?;
|
||||||
} else {
|
} else {
|
||||||
@ -409,7 +409,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
if let Ok((alloc_id, offset, ..)) = this.ptr_try_get_alloc_id(ptr, 0) {
|
if let Ok((alloc_id, offset, ..)) = this.ptr_try_get_alloc_id(ptr, 0) {
|
||||||
let (_size, alloc_align, _kind) = this.get_alloc_info(alloc_id);
|
let alloc_align = this.get_alloc_info(alloc_id).align;
|
||||||
// If the newly promised alignment is bigger than the native alignment of this
|
// If the newly promised alignment is bigger than the native alignment of this
|
||||||
// allocation, and bigger than the previously promised alignment, then set it.
|
// allocation, and bigger than the previously promised alignment, then set it.
|
||||||
if align > alloc_align
|
if align > alloc_align
|
||||||
|