Auto merge of #87123 - RalfJung:miri-provenance-overhaul, r=oli-obk

CTFE/Miri engine Pointer type overhaul

This fixes the long-standing problem that we are using `Scalar` as a type to represent pointers that might be integer values (since they point to a ZST). The main problem is that with int-to-ptr casts, there are multiple ways to represent the same pointer as a `Scalar` and it is unclear if "normalization" (i.e., the cast) already happened or not. This leads to ugly methods like `force_mplace_ptr` and `force_op_ptr`.
Another problem this solves is that in Miri, it would make a lot more sense to have the `Pointer::offset` field represent the full absolute address (instead of being relative to the `AllocId`). This means we can do ptr-to-int casts without access to any machine state, and it means that the overflow checks on pointer arithmetic are (finally!) accurate.
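
As a rough illustration of that second point (the names below are made up for the example, not the actual Miri machine types): once `offset` holds the absolute address, a ptr-to-int cast is just a field read that drops the provenance, and pointer arithmetic can be overflow-checked against the real address.

```rust
// Sketch only: a stand-in for Miri's tagged pointer, assuming `addr` is the
// full absolute address rather than an offset relative to an `AllocId`.
#[derive(Copy, Clone, Debug)]
struct SbTag(u64); // hypothetical stand-in for Miri's Stacked Borrows tag

#[derive(Copy, Clone, Debug)]
struct MiriPointer {
    addr: u64,                 // absolute machine address
    provenance: Option<SbTag>, // dropped on ptr-to-int casts
}

impl MiriPointer {
    /// Ptr-to-int cast: no machine state needed, just forget the provenance.
    fn cast_to_int(self) -> u64 {
        self.addr
    }

    /// Pointer arithmetic with an exact overflow check on the absolute address.
    fn checked_offset(self, bytes: u64) -> Option<MiriPointer> {
        let addr = self.addr.checked_add(bytes)?;
        Some(MiriPointer { addr, ..self })
    }
}

fn main() {
    let p = MiriPointer { addr: 0x1000, provenance: Some(SbTag(7)) };
    assert_eq!(p.cast_to_int(), 0x1000);
    assert!(p.checked_offset(u64::MAX).is_none()); // overflow is caught exactly
}
```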

To solve this, the `Pointer` type is made entirely parametric over the provenance, so that we can use `Pointer<AllocId>` inside `Scalar` but use `Pointer<Option<AllocId>>` when accessing memory (where `None` represents the case that we could not figure out an `AllocId`; in that case the `offset` is an absolute address). Moreover, the `Provenance` trait determines if a pointer with a given provenance can be cast to an integer by simply dropping the provenance.
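
For orientation, here is a condensed sketch of the resulting shape of these types (simplified: the real definitions in `rustc_middle::mir::interpret` use `Size` for offsets, wrap the `AllocId` in a `NonZeroU64`, and put more methods and derives on these items):

```rust
use std::fmt;

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
struct AllocId(u64); // simplified; the real type wraps a NonZeroU64

/// Decides whether `offset` is an absolute address (so ptr-to-int casts can
/// simply drop the provenance) or relative to the allocation.
trait Provenance: Copy + fmt::Debug {
    const OFFSET_IS_ADDR: bool;
    fn get_alloc_id(self) -> AllocId;
}

impl Provenance for AllocId {
    // Offsets are relative to the allocation, so ptr-to-int casts are impossible.
    const OFFSET_IS_ADDR: bool = false;
    fn get_alloc_id(self) -> AllocId {
        self
    }
}

/// Entirely parametric over the provenance type.
#[derive(Copy, Clone, Debug)]
struct Pointer<Tag = AllocId> {
    offset: u64, // the real field is a `Size`
    provenance: Tag,
}

// Inside `Scalar`, provenance is always known...
type ScalarPtr = Pointer<AllocId>;
// ...while memory accesses take a pointer whose provenance may be missing;
// in that case `offset` is an absolute address.
type AccessPtr = Pointer<Option<AllocId>>;

/// Generic over the provenance, like the engine itself.
fn describe<Tag: Provenance>(ptr: Pointer<Tag>) -> String {
    let kind = if Tag::OFFSET_IS_ADDR { "absolute" } else { "relative" };
    format!("{} offset {} into {:?}", kind, ptr.offset, ptr.provenance.get_alloc_id())
}

fn main() {
    let p: ScalarPtr = Pointer { offset: 8, provenance: AllocId(1) };
    println!("{}", describe(p)); // "relative offset 8 into AllocId(1)"
    let q: AccessPtr = Pointer { offset: p.offset, provenance: Some(p.provenance) };
    println!("{:?}", q);
}
```

Machines like Miri can then plug in their own provenance type; the `Machine` trait extends `Provenance` with operations that also have access to global state.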

I hope this can be read commit-by-commit, but the first commit does the bulk of the work. It introduces some FIXMEs that are resolved later.
Fixes https://github.com/rust-lang/miri/issues/841
Miri PR: https://github.com/rust-lang/miri/pull/1851
r? `@oli-obk`
Merged by bors 2021-07-17 15:26:27 +00:00, commit c78ebb7bdc
106 changed files with 1317 additions and 1407 deletions


@ -193,20 +193,21 @@ pub(crate) fn codegen_const_value<'tcx>(
place.to_cvalue(fx)
}
}
Scalar::Ptr(ptr) => {
let alloc_kind = fx.tcx.get_global_alloc(ptr.alloc_id);
Scalar::Ptr(ptr, _size) => {
let (alloc_id, offset) = ptr.into_parts(); // we know the `offset` is relative
let alloc_kind = fx.tcx.get_global_alloc(alloc_id);
let base_addr = match alloc_kind {
Some(GlobalAlloc::Memory(alloc)) => {
let data_id = data_id_for_alloc_id(
&mut fx.constants_cx,
fx.module,
ptr.alloc_id,
alloc_id,
alloc.mutability,
);
let local_data_id =
fx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
if fx.clif_comments.enabled() {
fx.add_comment(local_data_id, format!("{:?}", ptr.alloc_id));
fx.add_comment(local_data_id, format!("{:?}", alloc_id));
}
fx.bcx.ins().global_value(fx.pointer_type, local_data_id)
}
@ -226,10 +227,10 @@ pub(crate) fn codegen_const_value<'tcx>(
}
fx.bcx.ins().global_value(fx.pointer_type, local_data_id)
}
None => bug!("missing allocation {:?}", ptr.alloc_id),
None => bug!("missing allocation {:?}", alloc_id),
};
let val = if ptr.offset.bytes() != 0 {
fx.bcx.ins().iadd_imm(base_addr, i64::try_from(ptr.offset.bytes()).unwrap())
let val = if offset.bytes() != 0 {
fx.bcx.ins().iadd_imm(base_addr, i64::try_from(offset.bytes()).unwrap())
} else {
base_addr
};
@ -406,7 +407,7 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut dyn Module, cx: &mut Constant
let bytes = alloc.inspect_with_uninit_and_ptr_outside_interpreter(0..alloc.len()).to_vec();
data_ctx.define(bytes.into_boxed_slice());
for &(offset, (_tag, reloc)) in alloc.relocations().iter() {
for &(offset, alloc_id) in alloc.relocations().iter() {
let addend = {
let endianness = tcx.data_layout.endian;
let offset = offset.bytes() as usize;
@ -417,7 +418,7 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut dyn Module, cx: &mut Constant
read_target_uint(endianness, bytes).unwrap()
};
let reloc_target_alloc = tcx.get_global_alloc(reloc).unwrap();
let reloc_target_alloc = tcx.get_global_alloc(alloc_id).unwrap();
let data_id = match reloc_target_alloc {
GlobalAlloc::Function(instance) => {
assert_eq!(addend, 0);
@ -427,7 +428,7 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut dyn Module, cx: &mut Constant
continue;
}
GlobalAlloc::Memory(target_alloc) => {
data_id_for_alloc_id(cx, module, reloc, target_alloc.mutability)
data_id_for_alloc_id(cx, module, alloc_id, target_alloc.mutability)
}
GlobalAlloc::Static(def_id) => {
if tcx.codegen_fn_attrs(def_id).flags.contains(CodegenFnAttrFlags::THREAD_LOCAL)


@ -243,8 +243,9 @@ impl ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> {
self.const_bitcast(llval, llty)
}
}
Scalar::Ptr(ptr) => {
let (base_addr, base_addr_space) = match self.tcx.global_alloc(ptr.alloc_id) {
Scalar::Ptr(ptr, _size) => {
let (alloc_id, offset) = ptr.into_parts();
let (base_addr, base_addr_space) = match self.tcx.global_alloc(alloc_id) {
GlobalAlloc::Memory(alloc) => {
let init = const_alloc_to_llvm(self, alloc);
let value = match alloc.mutability {
@ -252,7 +253,7 @@ impl ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> {
_ => self.static_addr_of(init, alloc.align, None),
};
if !self.sess().fewer_names() {
llvm::set_value_name(value, format!("{:?}", ptr.alloc_id).as_bytes());
llvm::set_value_name(value, format!("{:?}", alloc_id).as_bytes());
}
(value, AddressSpace::DATA)
}
@ -269,7 +270,7 @@ impl ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> {
let llval = unsafe {
llvm::LLVMConstInBoundsGEP(
self.const_bitcast(base_addr, self.type_i8p_ext(base_addr_space)),
&self.const_usize(ptr.offset.bytes()),
&self.const_usize(offset.bytes()),
1,
)
};


@ -11,7 +11,7 @@ use rustc_codegen_ssa::traits::*;
use rustc_hir::def_id::DefId;
use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs};
use rustc_middle::mir::interpret::{
read_target_uint, Allocation, ErrorHandled, GlobalAlloc, Pointer,
read_target_uint, Allocation, ErrorHandled, GlobalAlloc, Pointer, Scalar as InterpScalar,
};
use rustc_middle::mir::mono::MonoItem;
use rustc_middle::ty::{self, Instance, Ty};
@ -25,7 +25,7 @@ pub fn const_alloc_to_llvm(cx: &CodegenCx<'ll, '_>, alloc: &Allocation) -> &'ll
let pointer_size = dl.pointer_size.bytes() as usize;
let mut next_offset = 0;
for &(offset, ((), alloc_id)) in alloc.relocations().iter() {
for &(offset, alloc_id) in alloc.relocations().iter() {
let offset = offset.bytes();
assert_eq!(offset as usize as u64, offset);
let offset = offset as usize;
@ -55,7 +55,10 @@ pub fn const_alloc_to_llvm(cx: &CodegenCx<'ll, '_>, alloc: &Allocation) -> &'ll
};
llvals.push(cx.scalar_to_backend(
Pointer::new(alloc_id, Size::from_bytes(ptr_offset)).into(),
InterpScalar::from_pointer(
Pointer::new(alloc_id, Size::from_bytes(ptr_offset)),
&cx.tcx,
),
&Scalar { value: Primitive::Pointer, valid_range: 0..=!0 },
cx.type_i8p_ext(address_space),
));


@ -90,10 +90,10 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
Abi::ScalarPair(ref a, _) => a,
_ => bug!("from_const: invalid ScalarPair layout: {:#?}", layout),
};
let a = Scalar::from(Pointer::new(
bx.tcx().create_memory_alloc(data),
Size::from_bytes(start),
));
let a = Scalar::from_pointer(
Pointer::new(bx.tcx().create_memory_alloc(data), Size::from_bytes(start)),
&bx.tcx(),
);
let a_llval = bx.scalar_to_backend(
a,
a_scalar,


@ -49,6 +49,7 @@
#![feature(iter_zip)]
#![feature(thread_local_const_init)]
#![feature(try_reserve)]
#![feature(nonzero_ops)]
#![recursion_limit = "512"]
#[macro_use]


@ -3,7 +3,7 @@
use std::borrow::Cow;
use std::convert::TryFrom;
use std::iter;
use std::ops::{Deref, DerefMut, Range};
use std::ops::{Deref, Range};
use std::ptr;
use rustc_ast::Mutability;
@ -25,7 +25,7 @@ use crate::ty;
/// module provides higher-level access.
#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)]
#[derive(HashStable)]
pub struct Allocation<Tag = (), Extra = ()> {
pub struct Allocation<Tag = AllocId, Extra = ()> {
/// The actual bytes of the allocation.
/// Note that the bytes of a pointer represent the offset of the pointer.
bytes: Vec<u8>,
@ -154,26 +154,32 @@ impl<Tag> Allocation<Tag> {
}
}
impl Allocation<()> {
/// Add Tag and Extra fields
pub fn with_tags_and_extra<T, E>(
impl Allocation {
/// Convert Tag and add Extra fields
pub fn convert_tag_add_extra<Tag, Extra>(
self,
mut tagger: impl FnMut(AllocId) -> T,
extra: E,
) -> Allocation<T, E> {
cx: &impl HasDataLayout,
extra: Extra,
mut tagger: impl FnMut(Pointer<AllocId>) -> Pointer<Tag>,
) -> Allocation<Tag, Extra> {
// Compute new pointer tags, which also adjusts the bytes.
let mut bytes = self.bytes;
let mut new_relocations = Vec::with_capacity(self.relocations.0.len());
let ptr_size = cx.data_layout().pointer_size.bytes_usize();
let endian = cx.data_layout().endian;
for &(offset, alloc_id) in self.relocations.iter() {
let idx = offset.bytes_usize();
let ptr_bytes = &mut bytes[idx..idx + ptr_size];
let bits = read_target_uint(endian, ptr_bytes).unwrap();
let (ptr_tag, ptr_offset) =
tagger(Pointer::new(alloc_id, Size::from_bytes(bits))).into_parts();
write_target_uint(endian, ptr_bytes, ptr_offset.bytes().into()).unwrap();
new_relocations.push((offset, ptr_tag));
}
// Create allocation.
Allocation {
bytes: self.bytes,
relocations: Relocations::from_presorted(
self.relocations
.iter()
// The allocations in the relocations (pointers stored *inside* this allocation)
// all get the base pointer tag.
.map(|&(offset, ((), alloc))| {
let tag = tagger(alloc);
(offset, (tag, alloc))
})
.collect(),
),
bytes,
relocations: Relocations::from_presorted(new_relocations),
init_mask: self.init_mask,
align: self.align,
mutability: self.mutability,
@ -279,6 +285,9 @@ impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
/// A raw pointer variant of `get_bytes_mut` that avoids invalidating existing aliases into this memory.
pub fn get_bytes_mut_ptr(&mut self, cx: &impl HasDataLayout, range: AllocRange) -> *mut [u8] {
self.mark_init(range, true);
// This also clears relocations that just overlap with the written range. So writing to some
// byte can de-initialize its neighbors! See
// <https://github.com/rust-lang/rust/issues/87184> for details.
self.clear_relocations(cx, range);
assert!(range.end().bytes_usize() <= self.bytes.len()); // need to do our own bounds-check
@ -321,7 +330,11 @@ impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
cx: &impl HasDataLayout,
range: AllocRange,
) -> AllocResult<ScalarMaybeUninit<Tag>> {
// `get_bytes_unchecked` tests relocation edges.
// `get_bytes_with_uninit_and_ptr` tests relocation edges.
// We deliberately error when loading data that partially has provenance, or partially
// initialized data (that's the check below), into a scalar. The LLVM semantics of this are
// unclear so we are conservative. See <https://github.com/rust-lang/rust/issues/69488> for
// further discussion.
let bytes = self.get_bytes_with_uninit_and_ptr(cx, range)?;
// Uninit check happens *after* we established that the alignment is correct.
// We must not return `Ok()` for unaligned pointers!
@ -339,9 +352,9 @@ impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
self.check_relocations(cx, range)?;
} else {
// Maybe a pointer.
if let Some(&(tag, alloc_id)) = self.relocations.get(&range.start) {
let ptr = Pointer::new_with_tag(alloc_id, Size::from_bytes(bits), tag);
return Ok(ScalarMaybeUninit::Scalar(ptr.into()));
if let Some(&prov) = self.relocations.get(&range.start) {
let ptr = Pointer::new(prov, Size::from_bytes(bits));
return Ok(ScalarMaybeUninit::from_pointer(ptr, cx));
}
}
// We don't. Just return the bits.
@ -371,9 +384,14 @@ impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
}
};
let bytes = match val.to_bits_or_ptr(range.size, cx) {
Err(val) => u128::from(val.offset.bytes()),
Ok(data) => data,
// `to_bits_or_ptr_internal` is the right method because we just want to store this data
// as-is into memory.
let (bytes, provenance) = match val.to_bits_or_ptr_internal(range.size) {
Err(val) => {
let (provenance, offset) = val.into_parts();
(u128::from(offset.bytes()), Some(provenance))
}
Ok(data) => (data, None),
};
let endian = cx.data_layout().endian;
@ -381,8 +399,8 @@ impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
write_target_uint(endian, dst, bytes).unwrap();
// See if we have to also write a relocation.
if let Scalar::Ptr(val) = val {
self.relocations.insert(range.start, (val.tag, val.alloc_id));
if let Some(provenance) = provenance {
self.relocations.0.insert(range.start, provenance);
}
Ok(())
@ -392,11 +410,7 @@ impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
/// Relocations.
impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
/// Returns all relocations overlapping with the given pointer-offset pair.
pub fn get_relocations(
&self,
cx: &impl HasDataLayout,
range: AllocRange,
) -> &[(Size, (Tag, AllocId))] {
pub fn get_relocations(&self, cx: &impl HasDataLayout, range: AllocRange) -> &[(Size, Tag)] {
// We have to go back `pointer_size - 1` bytes, as that one would still overlap with
// the beginning of this range.
let start = range.start.bytes().saturating_sub(cx.data_layout().pointer_size.bytes() - 1);
@ -446,7 +460,7 @@ impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
}
// Forget all the relocations.
self.relocations.remove_range(first..last);
self.relocations.0.remove_range(first..last);
}
/// Errors if there are relocations overlapping with the edges of the
@ -582,39 +596,33 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
}
}
/// Relocations.
/// "Relocations" stores the provenance information of pointers stored in memory.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)]
pub struct Relocations<Tag = (), Id = AllocId>(SortedMap<Size, (Tag, Id)>);
pub struct Relocations<Tag = AllocId>(SortedMap<Size, Tag>);
impl<Tag, Id> Relocations<Tag, Id> {
impl<Tag> Relocations<Tag> {
pub fn new() -> Self {
Relocations(SortedMap::new())
}
// The caller must guarantee that the given relocations are already sorted
// by address and contain no duplicates.
pub fn from_presorted(r: Vec<(Size, (Tag, Id))>) -> Self {
pub fn from_presorted(r: Vec<(Size, Tag)>) -> Self {
Relocations(SortedMap::from_presorted_elements(r))
}
}
impl<Tag> Deref for Relocations<Tag> {
type Target = SortedMap<Size, (Tag, AllocId)>;
type Target = SortedMap<Size, Tag>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<Tag> DerefMut for Relocations<Tag> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
/// A partial, owned list of relocations to transfer into another allocation.
pub struct AllocationRelocations<Tag> {
relative_relocations: Vec<(Size, (Tag, AllocId))>,
relative_relocations: Vec<(Size, Tag)>,
}
impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
@ -652,7 +660,7 @@ impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
/// The affected range, as defined in the parameters to `prepare_relocation_copy` is expected
/// to be clear of relocations.
pub fn mark_relocation_range(&mut self, relocations: AllocationRelocations<Tag>) {
self.relocations.insert_presorted(relocations.relative_relocations);
self.relocations.0.insert_presorted(relocations.relative_relocations);
}
}


@ -170,6 +170,8 @@ impl fmt::Display for InvalidProgramInfo<'_> {
/// Details of why a pointer had to be in-bounds.
#[derive(Debug, Copy, Clone, TyEncodable, TyDecodable, HashStable)]
pub enum CheckInAllocMsg {
/// We are dereferencing a pointer (i.e., creating a place).
DerefTest,
/// We are accessing memory.
MemoryAccessTest,
/// We are doing pointer arithmetic.
@ -179,13 +181,14 @@ pub enum CheckInAllocMsg {
}
impl fmt::Display for CheckInAllocMsg {
/// When this is printed as an error the context looks like this
/// "{msg}pointer must be in-bounds at offset..."
/// When this is printed as an error the context looks like this:
/// "{msg}0x01 is not a valid pointer".
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"{}",
match *self {
CheckInAllocMsg::DerefTest => "dereferencing pointer failed: ",
CheckInAllocMsg::MemoryAccessTest => "memory access failed: ",
CheckInAllocMsg::PointerArithmeticTest => "pointer arithmetic failed: ",
CheckInAllocMsg::InboundsTest => "",
@ -238,7 +241,9 @@ pub enum UndefinedBehaviorInfo<'tcx> {
PointerUseAfterFree(AllocId),
/// Used a pointer outside the bounds it is valid for.
PointerOutOfBounds {
ptr: Pointer,
alloc_id: AllocId,
offset: Size,
size: Size,
msg: CheckInAllocMsg,
allocation_size: Size,
},
@ -307,20 +312,30 @@ impl fmt::Display for UndefinedBehaviorInfo<'_> {
InvalidVtableAlignment(msg) => write!(f, "invalid vtable: alignment {}", msg),
UnterminatedCString(p) => write!(
f,
"reading a null-terminated string starting at {} with no null found before end of allocation",
"reading a null-terminated string starting at {:?} with no null found before end of allocation",
p,
),
PointerUseAfterFree(a) => {
write!(f, "pointer to {} was dereferenced after this allocation got freed", a)
}
PointerOutOfBounds { ptr, msg, allocation_size } => write!(
PointerOutOfBounds { alloc_id, offset, size: Size::ZERO, msg, allocation_size } => {
write!(
f,
"{}pointer must be in-bounds at offset {}, \
but is outside bounds of {} which has size {}",
"{}{} has size {}, so pointer at offset {} is out-of-bounds",
msg,
ptr.offset.bytes(),
ptr.alloc_id,
allocation_size.bytes()
alloc_id,
allocation_size.bytes(),
offset.bytes(),
)
}
PointerOutOfBounds { alloc_id, offset, size, msg, allocation_size } => write!(
f,
"{}{} has size {}, so pointer to {} bytes starting at offset {} is out-of-bounds",
msg,
alloc_id,
allocation_size.bytes(),
size.bytes(),
offset.bytes(),
),
DanglingIntPointer(0, CheckInAllocMsg::InboundsTest) => {
write!(f, "null pointer is not a valid pointer for this operation")
@ -348,13 +363,13 @@ impl fmt::Display for UndefinedBehaviorInfo<'_> {
}
InvalidTag(val) => write!(f, "enum value has invalid tag: {}", val),
InvalidFunctionPointer(p) => {
write!(f, "using {} as function pointer but it does not point to a function", p)
write!(f, "using {:?} as function pointer but it does not point to a function", p)
}
InvalidStr(err) => write!(f, "this string is not valid UTF-8: {}", err),
InvalidUninitBytes(Some((alloc, access))) => write!(
f,
"reading {} byte{} of memory starting at {}, \
but {} byte{} {} uninitialized starting at {}, \
"reading {} byte{} of memory starting at {:?}, \
but {} byte{} {} uninitialized starting at {:?}, \
and this operation requires initialized memory",
access.access_size.bytes(),
pluralize!(access.access_size.bytes()),
@ -392,8 +407,6 @@ pub enum UnsupportedOpInfo {
//
// The variants below are only reachable from CTFE/const prop, miri will never emit them.
//
/// Encountered raw bytes where we needed a pointer.
ReadBytesAsPointer,
/// Accessing thread local statics
ThreadLocalStatic(DefId),
/// Accessing an unsupported extern static.
@ -408,7 +421,6 @@ impl fmt::Display for UnsupportedOpInfo {
ReadExternStatic(did) => write!(f, "cannot read from extern static ({:?})", did),
NoMirFor(did) => write!(f, "no MIR body is available for {:?}", did),
ReadPointerAsBytes => write!(f, "unable to turn pointer into raw bytes",),
ReadBytesAsPointer => write!(f, "unable to turn bytes into a pointer"),
ThreadLocalStatic(did) => write!(f, "cannot access thread local static ({:?})", did),
}
}


@ -99,7 +99,7 @@ use std::convert::TryFrom;
use std::fmt;
use std::io;
use std::io::{Read, Write};
use std::num::NonZeroU32;
use std::num::{NonZeroU32, NonZeroU64};
use std::sync::atomic::{AtomicU32, Ordering};
use rustc_ast::LitKind;
@ -127,7 +127,7 @@ pub use self::value::{get_slice_bytes, ConstAlloc, ConstValue, Scalar, ScalarMay
pub use self::allocation::{alloc_range, AllocRange, Allocation, InitMask, Relocations};
pub use self::pointer::{Pointer, PointerArithmetic};
pub use self::pointer::{Pointer, PointerArithmetic, Provenance};
/// Uniquely identifies one of the following:
/// - A constant
@ -176,7 +176,7 @@ pub enum LitToConstError {
}
#[derive(Copy, Clone, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct AllocId(pub u64);
pub struct AllocId(pub NonZeroU64);
// We want the `Debug` output to be readable as it is used by `derive(Debug)` for
// all the Miri types.
@ -427,7 +427,11 @@ crate struct AllocMap<'tcx> {
impl<'tcx> AllocMap<'tcx> {
crate fn new() -> Self {
AllocMap { alloc_map: Default::default(), dedup: Default::default(), next_id: AllocId(0) }
AllocMap {
alloc_map: Default::default(),
dedup: Default::default(),
next_id: AllocId(NonZeroU64::new(1).unwrap()),
}
}
fn reserve(&mut self) -> AllocId {
let next = self.next_id;


@ -83,27 +83,38 @@ pub trait PointerArithmetic: HasDataLayout {
impl<T: HasDataLayout> PointerArithmetic for T {}
/// Represents a pointer in the Miri engine.
///
/// `Pointer` is generic over the `Tag` associated with each pointer,
/// which is used to do provenance tracking during execution.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, TyEncodable, TyDecodable, Hash)]
#[derive(HashStable)]
pub struct Pointer<Tag = ()> {
pub alloc_id: AllocId,
pub offset: Size,
pub tag: Tag,
/// This trait abstracts over the kind of provenance that is associated with a `Pointer`. It is
/// mostly opaque; the `Machine` trait extends it with some more operations that also have access to
/// some global state.
/// We don't actually care about this `Debug` bound (we use `Provenance::fmt` to format the entire
/// pointer), but `derive` adds some unnecessary bounds.
pub trait Provenance: Copy + fmt::Debug {
/// Says whether the `offset` field of `Pointer`s with this provenance is the actual physical address.
/// If `true`, ptr-to-int casts work by simply discarding the provenance.
/// If `false`, ptr-to-int casts are not supported. The offset *must* be relative in that case.
const OFFSET_IS_ADDR: bool;
/// Determines how a pointer should be printed.
fn fmt(ptr: &Pointer<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result
where
Self: Sized;
/// Provenance must always be able to identify the allocation this ptr points to.
/// (Identifying the offset in that allocation, however, is harder -- use `Memory::ptr_get_alloc` for that.)
fn get_alloc_id(self) -> AllocId;
}
static_assert_size!(Pointer, 16);
impl Provenance for AllocId {
// With the `AllocId` as provenance, the `offset` is interpreted *relative to the allocation*,
// so ptr-to-int casts are not possible (since we do not know the global physical offset).
const OFFSET_IS_ADDR: bool = false;
/// Print the address of a pointer (without the tag)
fn print_ptr_addr<Tag>(ptr: &Pointer<Tag>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fn fmt(ptr: &Pointer<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// Forward `alternate` flag to `alloc_id` printing.
if f.alternate() {
write!(f, "{:#?}", ptr.alloc_id)?;
write!(f, "{:#?}", ptr.provenance)?;
} else {
write!(f, "{:?}", ptr.alloc_id)?;
write!(f, "{:?}", ptr.provenance)?;
}
// Print offset only if it is non-zero.
if ptr.offset.bytes() > 0 {
@ -112,26 +123,37 @@ fn print_ptr_addr<Tag>(ptr: &Pointer<Tag>, f: &mut fmt::Formatter<'_>) -> fmt::R
Ok(())
}
// We want the `Debug` output to be readable as it is used by `derive(Debug)` for
// all the Miri types.
// We have to use `Debug` output for the tag, because `()` does not implement
// `Display` so we cannot specialize that.
impl<Tag: fmt::Debug> fmt::Debug for Pointer<Tag> {
default fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
print_ptr_addr(self, f)?;
write!(f, "[{:?}]", self.tag)
}
}
// Specialization for no tag
impl fmt::Debug for Pointer<()> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
print_ptr_addr(self, f)
fn get_alloc_id(self) -> AllocId {
self
}
}
impl<Tag: fmt::Debug> fmt::Display for Pointer<Tag> {
/// Represents a pointer in the Miri engine.
///
/// Pointers are "tagged" with provenance information; typically the `AllocId` they belong to.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, TyEncodable, TyDecodable, Hash)]
#[derive(HashStable)]
pub struct Pointer<Tag = AllocId> {
pub(super) offset: Size, // kept private to avoid accidental misinterpretation (meaning depends on `Tag` type)
pub provenance: Tag,
}
static_assert_size!(Pointer, 16);
// We want the `Debug` output to be readable as it is used by `derive(Debug)` for
// all the Miri types.
impl<Tag: Provenance> fmt::Debug for Pointer<Tag> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(self, f)
Provenance::fmt(self, f)
}
}
impl<Tag: Provenance> fmt::Debug for Pointer<Option<Tag>> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.provenance {
Some(tag) => Provenance::fmt(&Pointer::new(tag, self.offset), f),
None => write!(f, "0x{:x}", self.offset.bytes()),
}
}
}
@ -143,37 +165,61 @@ impl From<AllocId> for Pointer {
}
}
impl Pointer<()> {
impl<Tag> From<Pointer<Tag>> for Pointer<Option<Tag>> {
#[inline(always)]
pub fn new(alloc_id: AllocId, offset: Size) -> Self {
Pointer { alloc_id, offset, tag: () }
fn from(ptr: Pointer<Tag>) -> Self {
let (tag, offset) = ptr.into_parts();
Pointer::new(Some(tag), offset)
}
}
impl<Tag> Pointer<Option<Tag>> {
pub fn into_pointer_or_addr(self) -> Result<Pointer<Tag>, Size> {
match self.provenance {
Some(tag) => Ok(Pointer::new(tag, self.offset)),
None => Err(self.offset),
}
}
}
impl<Tag> Pointer<Option<Tag>> {
#[inline(always)]
pub fn with_tag<Tag>(self, tag: Tag) -> Pointer<Tag> {
Pointer::new_with_tag(self.alloc_id, self.offset, tag)
pub fn null() -> Self {
Pointer { provenance: None, offset: Size::ZERO }
}
}
impl<'tcx, Tag> Pointer<Tag> {
#[inline(always)]
pub fn new_with_tag(alloc_id: AllocId, offset: Size, tag: Tag) -> Self {
Pointer { alloc_id, offset, tag }
pub fn new(provenance: Tag, offset: Size) -> Self {
Pointer { provenance, offset }
}
/// Obtain the constituents of this pointer. Note that the meaning of the offset depends on the type `Tag`!
/// This function must only be used in the implementation of `Machine::ptr_get_alloc`,
/// and when a `Pointer` is taken apart to be stored efficiently in an `Allocation`.
#[inline(always)]
pub fn into_parts(self) -> (Tag, Size) {
(self.provenance, self.offset)
}
pub fn map_provenance(self, f: impl FnOnce(Tag) -> Tag) -> Self {
Pointer { provenance: f(self.provenance), ..self }
}
#[inline]
pub fn offset(self, i: Size, cx: &impl HasDataLayout) -> InterpResult<'tcx, Self> {
Ok(Pointer::new_with_tag(
self.alloc_id,
Size::from_bytes(cx.data_layout().offset(self.offset.bytes(), i.bytes())?),
self.tag,
))
Ok(Pointer {
offset: Size::from_bytes(cx.data_layout().offset(self.offset.bytes(), i.bytes())?),
..self
})
}
#[inline]
pub fn overflowing_offset(self, i: Size, cx: &impl HasDataLayout) -> (Self, bool) {
let (res, over) = cx.data_layout().overflowing_offset(self.offset.bytes(), i.bytes());
(Pointer::new_with_tag(self.alloc_id, Size::from_bytes(res), self.tag), over)
let ptr = Pointer { offset: Size::from_bytes(res), ..self };
(ptr, over)
}
#[inline(always)]
@ -183,26 +229,21 @@ impl<'tcx, Tag> Pointer<Tag> {
#[inline]
pub fn signed_offset(self, i: i64, cx: &impl HasDataLayout) -> InterpResult<'tcx, Self> {
Ok(Pointer::new_with_tag(
self.alloc_id,
Size::from_bytes(cx.data_layout().signed_offset(self.offset.bytes(), i)?),
self.tag,
))
Ok(Pointer {
offset: Size::from_bytes(cx.data_layout().signed_offset(self.offset.bytes(), i)?),
..self
})
}
#[inline]
pub fn overflowing_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> (Self, bool) {
let (res, over) = cx.data_layout().overflowing_signed_offset(self.offset.bytes(), i);
(Pointer::new_with_tag(self.alloc_id, Size::from_bytes(res), self.tag), over)
let ptr = Pointer { offset: Size::from_bytes(res), ..self };
(ptr, over)
}
#[inline(always)]
pub fn wrapping_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> Self {
self.overflowing_signed_offset(i, cx).0
}
#[inline(always)]
pub fn erase_tag(self) -> Pointer {
Pointer { alloc_id: self.alloc_id, offset: self.offset, tag: () }
}
}


@ -6,11 +6,13 @@ use rustc_apfloat::{
Float,
};
use rustc_macros::HashStable;
use rustc_target::abi::{HasDataLayout, Size, TargetDataLayout};
use rustc_target::abi::{HasDataLayout, Size};
use crate::ty::{Lift, ParamEnv, ScalarInt, Ty, TyCtxt};
use super::{AllocId, AllocRange, Allocation, InterpResult, Pointer, PointerArithmetic};
use super::{
AllocId, AllocRange, Allocation, InterpResult, Pointer, PointerArithmetic, Provenance,
};
/// Represents the result of const evaluation via the `eval_to_allocation` query.
#[derive(Copy, Clone, HashStable, TyEncodable, TyDecodable, Debug, Hash, Eq, PartialEq)]
@ -47,12 +49,6 @@ pub enum ConstValue<'tcx> {
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
static_assert_size!(ConstValue<'_>, 32);
impl From<Scalar> for ConstValue<'tcx> {
fn from(s: Scalar) -> Self {
Self::Scalar(s)
}
}
impl<'a, 'tcx> Lift<'tcx> for ConstValue<'a> {
type Lifted = ConstValue<'tcx>;
fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option<ConstValue<'tcx>> {
@ -70,7 +66,7 @@ impl<'a, 'tcx> Lift<'tcx> for ConstValue<'a> {
impl<'tcx> ConstValue<'tcx> {
#[inline]
pub fn try_to_scalar(&self) -> Option<Scalar> {
pub fn try_to_scalar(&self) -> Option<Scalar<AllocId>> {
match *self {
ConstValue::ByRef { .. } | ConstValue::Slice { .. } => None,
ConstValue::Scalar(val) => Some(val),
@ -120,16 +116,23 @@ impl<'tcx> ConstValue<'tcx> {
/// `memory::Allocation`. It is in many ways like a small chunk of a `Allocation`, up to 16 bytes in
/// size. Like a range of bytes in an `Allocation`, a `Scalar` can either represent the raw bytes
/// of a simple value or a pointer into another `Allocation`
///
/// These variants would be private if there was a convenient way to achieve that in Rust.
/// Do *not* match on a `Scalar`! Use the various `to_*` methods instead.
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, TyEncodable, TyDecodable, Hash)]
#[derive(HashStable)]
pub enum Scalar<Tag = ()> {
pub enum Scalar<Tag = AllocId> {
/// The raw bytes of a simple value.
Int(ScalarInt),
/// A pointer into an `Allocation`. An `Allocation` in the `memory` module has a list of
/// relocations, but a `Scalar` is only large enough to contain one, so we just represent the
/// relocation and its associated offset together as a `Pointer` here.
Ptr(Pointer<Tag>),
///
/// We also store the size of the pointer, such that a `Scalar` always knows how big it is.
/// The size is always the pointer size of the current target, but this is not information
/// that we always have readily available.
Ptr(Pointer<Tag>, u8),
}
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
@ -137,20 +140,20 @@ static_assert_size!(Scalar, 24);
// We want the `Debug` output to be readable as it is used by `derive(Debug)` for
// all the Miri types.
impl<Tag: fmt::Debug> fmt::Debug for Scalar<Tag> {
impl<Tag: Provenance> fmt::Debug for Scalar<Tag> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Scalar::Ptr(ptr) => write!(f, "{:?}", ptr),
Scalar::Ptr(ptr, _size) => write!(f, "{:?}", ptr),
Scalar::Int(int) => write!(f, "{:?}", int),
}
}
}
impl<Tag: fmt::Debug> fmt::Display for Scalar<Tag> {
impl<Tag: Provenance> fmt::Display for Scalar<Tag> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Scalar::Ptr(ptr) => write!(f, "pointer to {}", ptr),
Scalar::Int { .. } => fmt::Debug::fmt(self, f),
Scalar::Ptr(ptr, _size) => write!(f, "pointer to {:?}", ptr),
Scalar::Int(int) => write!(f, "{:?}", int),
}
}
}
@ -169,83 +172,34 @@ impl<Tag> From<Double> for Scalar<Tag> {
}
}
impl Scalar<()> {
/// Tag this scalar with `new_tag` if it is a pointer, leave it unchanged otherwise.
///
/// Used by `MemPlace::replace_tag`.
#[inline]
pub fn with_tag<Tag>(self, new_tag: Tag) -> Scalar<Tag> {
match self {
Scalar::Ptr(ptr) => Scalar::Ptr(ptr.with_tag(new_tag)),
Scalar::Int(int) => Scalar::Int(int),
}
impl<Tag> From<ScalarInt> for Scalar<Tag> {
#[inline(always)]
fn from(ptr: ScalarInt) -> Self {
Scalar::Int(ptr)
}
}
impl<'tcx, Tag> Scalar<Tag> {
impl<Tag> Scalar<Tag> {
pub const ZST: Self = Scalar::Int(ScalarInt::ZST);
/// Erase the tag from the scalar, if any.
///
/// Used by error reporting code to avoid having the error type depend on `Tag`.
#[inline]
pub fn erase_tag(self) -> Scalar {
match self {
Scalar::Ptr(ptr) => Scalar::Ptr(ptr.erase_tag()),
Scalar::Int(int) => Scalar::Int(int),
#[inline(always)]
pub fn from_pointer(ptr: Pointer<Tag>, cx: &impl HasDataLayout) -> Self {
Scalar::Ptr(ptr, u8::try_from(cx.pointer_size().bytes()).unwrap())
}
/// Create a Scalar from a pointer with an `Option<_>` tag (where `None` represents a plain integer).
pub fn from_maybe_pointer(ptr: Pointer<Option<Tag>>, cx: &impl HasDataLayout) -> Self {
match ptr.into_parts() {
(Some(tag), offset) => Scalar::from_pointer(Pointer::new(tag, offset), cx),
(None, offset) => {
Scalar::Int(ScalarInt::try_from_uint(offset.bytes(), cx.pointer_size()).unwrap())
}
}
}
#[inline]
pub fn null_ptr(cx: &impl HasDataLayout) -> Self {
Scalar::Int(ScalarInt::null(cx.data_layout().pointer_size))
}
#[inline(always)]
fn ptr_op(
self,
dl: &TargetDataLayout,
f_int: impl FnOnce(u64) -> InterpResult<'tcx, u64>,
f_ptr: impl FnOnce(Pointer<Tag>) -> InterpResult<'tcx, Pointer<Tag>>,
) -> InterpResult<'tcx, Self> {
match self {
Scalar::Int(int) => Ok(Scalar::Int(int.ptr_sized_op(dl, f_int)?)),
Scalar::Ptr(ptr) => Ok(Scalar::Ptr(f_ptr(ptr)?)),
}
}
#[inline]
pub fn ptr_offset(self, i: Size, cx: &impl HasDataLayout) -> InterpResult<'tcx, Self> {
let dl = cx.data_layout();
self.ptr_op(dl, |int| dl.offset(int, i.bytes()), |ptr| ptr.offset(i, dl))
}
#[inline]
pub fn ptr_wrapping_offset(self, i: Size, cx: &impl HasDataLayout) -> Self {
let dl = cx.data_layout();
self.ptr_op(
dl,
|int| Ok(dl.overflowing_offset(int, i.bytes()).0),
|ptr| Ok(ptr.wrapping_offset(i, dl)),
)
.unwrap()
}
#[inline]
pub fn ptr_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> InterpResult<'tcx, Self> {
let dl = cx.data_layout();
self.ptr_op(dl, |int| dl.signed_offset(int, i), |ptr| ptr.signed_offset(i, dl))
}
#[inline]
pub fn ptr_wrapping_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> Self {
let dl = cx.data_layout();
self.ptr_op(
dl,
|int| Ok(dl.overflowing_signed_offset(int, i).0),
|ptr| Ok(ptr.wrapping_signed_offset(i, dl)),
)
.unwrap()
Scalar::Int(ScalarInt::null(cx.pointer_size()))
}
#[inline]
@ -332,74 +286,71 @@ impl<'tcx, Tag> Scalar<Tag> {
Scalar::Int(f.into())
}
/// This is very rarely the method you want! You should dispatch on the type
/// and use `force_bits`/`assert_bits`/`force_ptr`/`assert_ptr`.
/// This method only exists for the benefit of low-level memory operations
/// as well as the implementation of the `force_*` methods.
/// This is almost certainly not the method you want! You should dispatch on the type
/// and use `to_{u8,u16,...}`/`scalar_to_ptr` to perform ptr-to-int / int-to-ptr casts as needed.
///
/// This method only exists for the benefit of low-level operations that truly need to treat the
/// scalar in whatever form it is.
#[inline]
pub fn to_bits_or_ptr(
self,
target_size: Size,
cx: &impl HasDataLayout,
) -> Result<u128, Pointer<Tag>> {
pub fn to_bits_or_ptr_internal(self, target_size: Size) -> Result<u128, Pointer<Tag>> {
assert_ne!(target_size.bytes(), 0, "you should never look at the bits of a ZST");
match self {
Scalar::Int(int) => Ok(int.assert_bits(target_size)),
Scalar::Ptr(ptr) => {
assert_eq!(target_size, cx.data_layout().pointer_size);
Scalar::Ptr(ptr, sz) => {
assert_eq!(target_size.bytes(), u64::from(sz));
Err(ptr)
}
}
}
}
/// This method is intentionally private!
/// It is just a helper for other methods in this file.
impl<'tcx, Tag: Provenance> Scalar<Tag> {
/// Fundamental scalar-to-int (cast) operation. Many convenience wrappers exist below, that you
/// likely want to use instead.
///
/// Will perform ptr-to-int casts if needed and possible.
/// If that fails, we know the offset is relative, so we return an "erased" Scalar
/// (which is useful for error messages but not much else).
#[inline]
fn to_bits(self, target_size: Size) -> InterpResult<'tcx, u128> {
assert_ne!(target_size.bytes(), 0, "you should never look at the bits of a ZST");
pub fn try_to_int(self) -> Result<ScalarInt, Scalar<AllocId>> {
match self {
Scalar::Int(int) => int.to_bits(target_size).map_err(|size| {
Scalar::Int(int) => Ok(int),
Scalar::Ptr(ptr, sz) => {
if Tag::OFFSET_IS_ADDR {
Ok(ScalarInt::try_from_uint(ptr.offset.bytes(), Size::from_bytes(sz)).unwrap())
} else {
// We know `offset` is relative, since `OFFSET_IS_ADDR == false`.
let (tag, offset) = ptr.into_parts();
Err(Scalar::Ptr(Pointer::new(tag.get_alloc_id(), offset), sz))
}
}
}
}
#[inline(always)]
pub fn assert_int(self) -> ScalarInt {
self.try_to_int().unwrap()
}
/// This throws UB (instead of ICEing) on a size mismatch since size mismatches can arise in
/// Miri when someone declares a function that we shim (such as `malloc`) with a wrong type.
#[inline]
pub fn to_bits(self, target_size: Size) -> InterpResult<'tcx, u128> {
assert_ne!(target_size.bytes(), 0, "you should never look at the bits of a ZST");
self.try_to_int().map_err(|_| err_unsup!(ReadPointerAsBytes))?.to_bits(target_size).map_err(
|size| {
err_ub!(ScalarSizeMismatch {
target_size: target_size.bytes(),
data_size: size.bytes(),
})
.into()
}),
Scalar::Ptr(_) => throw_unsup!(ReadPointerAsBytes),
}
},
)
}
#[inline(always)]
pub fn assert_bits(self, target_size: Size) -> u128 {
self.to_bits(target_size).expect("expected Raw bits but got a Pointer")
}
#[inline]
pub fn assert_int(self) -> ScalarInt {
match self {
Scalar::Ptr(_) => bug!("expected an int but got an abstract pointer"),
Scalar::Int(int) => int,
}
}
#[inline]
pub fn assert_ptr(self) -> Pointer<Tag> {
match self {
Scalar::Ptr(p) => p,
Scalar::Int { .. } => bug!("expected a Pointer but got Raw bits"),
}
}
/// Do not call this method! Dispatch based on the type instead.
#[inline]
pub fn is_bits(self) -> bool {
matches!(self, Scalar::Int { .. })
}
/// Do not call this method! Dispatch based on the type instead.
#[inline]
pub fn is_ptr(self) -> bool {
matches!(self, Scalar::Ptr(_))
self.to_bits(target_size).unwrap()
}
pub fn to_bool(self) -> InterpResult<'tcx, bool> {
@ -507,22 +458,8 @@ impl<'tcx, Tag> Scalar<Tag> {
}
}
impl<Tag> From<Pointer<Tag>> for Scalar<Tag> {
#[inline(always)]
fn from(ptr: Pointer<Tag>) -> Self {
Scalar::Ptr(ptr)
}
}
impl<Tag> From<ScalarInt> for Scalar<Tag> {
#[inline(always)]
fn from(ptr: ScalarInt) -> Self {
Scalar::Int(ptr)
}
}
#[derive(Clone, Copy, Eq, PartialEq, TyEncodable, TyDecodable, HashStable, Hash)]
pub enum ScalarMaybeUninit<Tag = ()> {
pub enum ScalarMaybeUninit<Tag = AllocId> {
Scalar(Scalar<Tag>),
Uninit,
}
@ -537,16 +474,9 @@ impl<Tag> From<Scalar<Tag>> for ScalarMaybeUninit<Tag> {
}
}
impl<Tag> From<Pointer<Tag>> for ScalarMaybeUninit<Tag> {
#[inline(always)]
fn from(s: Pointer<Tag>) -> Self {
ScalarMaybeUninit::Scalar(s.into())
}
}
// We want the `Debug` output to be readable as it is used by `derive(Debug)` for
// all the Miri types.
impl<Tag: fmt::Debug> fmt::Debug for ScalarMaybeUninit<Tag> {
impl<Tag: Provenance> fmt::Debug for ScalarMaybeUninit<Tag> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ScalarMaybeUninit::Uninit => write!(f, "<uninitialized>"),
@ -555,7 +485,7 @@ impl<Tag: fmt::Debug> fmt::Debug for ScalarMaybeUninit<Tag> {
}
}
impl<Tag: fmt::Debug> fmt::Display for ScalarMaybeUninit<Tag> {
impl<Tag: Provenance> fmt::Display for ScalarMaybeUninit<Tag> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ScalarMaybeUninit::Uninit => write!(f, "uninitialized bytes"),
@ -564,16 +494,15 @@ impl<Tag: fmt::Debug> fmt::Display for ScalarMaybeUninit<Tag> {
}
}
impl<'tcx, Tag> ScalarMaybeUninit<Tag> {
/// Erase the tag from the scalar, if any.
///
/// Used by error reporting code to avoid having the error type depend on `Tag`.
impl<Tag> ScalarMaybeUninit<Tag> {
#[inline]
pub fn erase_tag(self) -> ScalarMaybeUninit {
match self {
ScalarMaybeUninit::Scalar(s) => ScalarMaybeUninit::Scalar(s.erase_tag()),
ScalarMaybeUninit::Uninit => ScalarMaybeUninit::Uninit,
pub fn from_pointer(ptr: Pointer<Tag>, cx: &impl HasDataLayout) -> Self {
ScalarMaybeUninit::Scalar(Scalar::from_pointer(ptr, cx))
}
#[inline]
pub fn from_maybe_pointer(ptr: Pointer<Option<Tag>>, cx: &impl HasDataLayout) -> Self {
ScalarMaybeUninit::Scalar(Scalar::from_maybe_pointer(ptr, cx))
}
#[inline]
@ -583,7 +512,9 @@ impl<'tcx, Tag> ScalarMaybeUninit<Tag> {
ScalarMaybeUninit::Uninit => throw_ub!(InvalidUninitBytes(None)),
}
}
}
impl<'tcx, Tag: Provenance> ScalarMaybeUninit<Tag> {
#[inline(always)]
pub fn to_bool(self) -> InterpResult<'tcx, bool> {
self.check_init()?.to_bool()


@ -3,7 +3,7 @@
//! [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/mir/index.html
use crate::mir::coverage::{CodeRegion, CoverageKind};
use crate::mir::interpret::{Allocation, GlobalAlloc, Scalar};
use crate::mir::interpret::{Allocation, ConstValue, GlobalAlloc, Scalar};
use crate::mir::visit::MirVisitable;
use crate::ty::adjustment::PointerCast;
use crate::ty::codec::{TyDecoder, TyEncoder};
@ -2095,7 +2095,7 @@ impl<'tcx> Operand<'tcx> {
Operand::Constant(box Constant {
span,
user_ty: None,
literal: ConstantKind::Val(val.into(), ty),
literal: ConstantKind::Val(ConstValue::Scalar(val), ty),
})
}
@ -2458,7 +2458,7 @@ pub enum ConstantKind<'tcx> {
impl Constant<'tcx> {
pub fn check_static_ptr(&self, tcx: TyCtxt<'_>) -> Option<DefId> {
match self.literal.const_for_ty()?.val.try_to_scalar() {
Some(Scalar::Ptr(ptr)) => match tcx.global_alloc(ptr.alloc_id) {
Some(Scalar::Ptr(ptr, _size)) => match tcx.global_alloc(ptr.provenance) {
GlobalAlloc::Static(def_id) => {
assert!(!tcx.is_thread_local_static(def_id));
Some(def_id)


@ -1,7 +1,7 @@
use rustc_apfloat::ieee::{Double, Single};
use rustc_apfloat::Float;
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
use rustc_target::abi::{Size, TargetDataLayout};
use rustc_target::abi::Size;
use std::convert::{TryFrom, TryInto};
use std::fmt;
@ -193,15 +193,6 @@ impl ScalarInt {
self.data == 0
}
pub(crate) fn ptr_sized_op<E>(
self,
dl: &TargetDataLayout,
f_int: impl FnOnce(u64) -> Result<u64, E>,
) -> Result<Self, E> {
assert_eq!(u64::from(self.size), dl.pointer_size.bytes());
Ok(Self::try_from_uint(f_int(u64::try_from(self.data).unwrap())?, self.size()).unwrap())
}
#[inline]
pub fn try_from_uint(i: impl Into<u128>, size: Size) -> Option<Self> {
let data = i.into();


@ -1,7 +1,6 @@
use std::convert::TryInto;
use crate::mir::interpret::ConstValue;
use crate::mir::interpret::Scalar;
use crate::mir::interpret::{AllocId, ConstValue, Scalar};
use crate::mir::Promoted;
use crate::ty::subst::{InternalSubsts, SubstsRef};
use crate::ty::ParamEnv;
@ -59,7 +58,7 @@ impl<'tcx> ConstKind<'tcx> {
}
#[inline]
pub fn try_to_scalar(self) -> Option<Scalar> {
pub fn try_to_scalar(self) -> Option<Scalar<AllocId>> {
self.try_to_value()?.try_to_scalar()
}


@ -1,5 +1,5 @@
use crate::middle::cstore::{ExternCrate, ExternCrateSource};
use crate::mir::interpret::{AllocRange, ConstValue, GlobalAlloc, Pointer, Scalar};
use crate::mir::interpret::{AllocRange, ConstValue, GlobalAlloc, Pointer, Provenance, Scalar};
use crate::ty::subst::{GenericArg, GenericArgKind, Subst};
use crate::ty::{self, ConstInt, DefIdTree, ParamConst, ScalarInt, Ty, TyCtxt, TypeFoldable};
use rustc_apfloat::ieee::{Double, Single};
@ -974,7 +974,7 @@ pub trait PrettyPrinter<'tcx>:
print_ty: bool,
) -> Result<Self::Const, Self::Error> {
match scalar {
Scalar::Ptr(ptr) => self.pretty_print_const_scalar_ptr(ptr, ty, print_ty),
Scalar::Ptr(ptr, _size) => self.pretty_print_const_scalar_ptr(ptr, ty, print_ty),
Scalar::Int(int) => self.pretty_print_const_scalar_int(int, ty, print_ty),
}
}
@ -987,6 +987,7 @@ pub trait PrettyPrinter<'tcx>:
) -> Result<Self::Const, Self::Error> {
define_scoped_cx!(self);
let (alloc_id, offset) = ptr.into_parts();
match ty.kind() {
// Byte strings (&[u8; N])
ty::Ref(
@ -1002,10 +1003,10 @@ pub trait PrettyPrinter<'tcx>:
..
},
_,
) => match self.tcx().get_global_alloc(ptr.alloc_id) {
) => match self.tcx().get_global_alloc(alloc_id) {
Some(GlobalAlloc::Memory(alloc)) => {
let len = int.assert_bits(self.tcx().data_layout.pointer_size);
let range = AllocRange { start: ptr.offset, size: Size::from_bytes(len) };
let range = AllocRange { start: offset, size: Size::from_bytes(len) };
if let Ok(byte_str) = alloc.get_bytes(&self.tcx(), range) {
p!(pretty_print_byte_str(byte_str))
} else {
@ -1020,7 +1021,7 @@ pub trait PrettyPrinter<'tcx>:
ty::FnPtr(_) => {
// FIXME: We should probably have a helper method to share code with the "Byte strings"
// printing above (which also has to handle pointers to all sorts of things).
match self.tcx().get_global_alloc(ptr.alloc_id) {
match self.tcx().get_global_alloc(alloc_id) {
Some(GlobalAlloc::Function(instance)) => {
self = self.typed_value(
|this| this.print_value_path(instance.def_id(), instance.substs),
@ -1068,8 +1069,8 @@ pub trait PrettyPrinter<'tcx>:
ty::Char if char::try_from(int).is_ok() => {
p!(write("{:?}", char::try_from(int).unwrap()))
}
// Raw pointers
ty::RawPtr(_) | ty::FnPtr(_) => {
// Pointer types
ty::Ref(..) | ty::RawPtr(_) | ty::FnPtr(_) => {
let data = int.assert_bits(self.tcx().data_layout.pointer_size);
self = self.typed_value(
|mut this| {
@ -1106,9 +1107,9 @@ pub trait PrettyPrinter<'tcx>:
/// This is overridden for MIR printing because we only want to hide alloc ids from users, not
/// from MIR where it is actually useful.
fn pretty_print_const_pointer(
fn pretty_print_const_pointer<Tag: Provenance>(
mut self,
_: Pointer,
_: Pointer<Tag>,
ty: Ty<'tcx>,
print_ty: bool,
) -> Result<Self::Const, Self::Error> {
@ -1679,9 +1680,9 @@ impl<F: fmt::Write> PrettyPrinter<'tcx> for FmtPrinter<'_, 'tcx, F> {
}
}
fn pretty_print_const_pointer(
fn pretty_print_const_pointer<Tag: Provenance>(
self,
p: Pointer,
p: Pointer<Tag>,
ty: Ty<'tcx>,
print_ty: bool,
) -> Result<Self::Const, Self::Error> {


@ -595,9 +595,12 @@ fn check_const_value_eq<R: TypeRelation<'tcx>>(
(ConstValue::Scalar(Scalar::Int(a_val)), ConstValue::Scalar(Scalar::Int(b_val))) => {
a_val == b_val
}
(ConstValue::Scalar(Scalar::Ptr(a_val)), ConstValue::Scalar(Scalar::Ptr(b_val))) => {
(
ConstValue::Scalar(Scalar::Ptr(a_val, _a_size)),
ConstValue::Scalar(Scalar::Ptr(b_val, _b_size)),
) => {
a_val == b_val
|| match (tcx.global_alloc(a_val.alloc_id), tcx.global_alloc(b_val.alloc_id)) {
|| match (tcx.global_alloc(a_val.provenance), tcx.global_alloc(b_val.provenance)) {
(GlobalAlloc::Function(a_instance), GlobalAlloc::Function(b_instance)) => {
a_instance == b_instance
}


@ -1,6 +1,6 @@
use std::convert::TryFrom;
use crate::mir::interpret::{alloc_range, AllocId, Allocation, Pointer, Scalar};
use crate::mir::interpret::{alloc_range, AllocId, Allocation, Pointer, Scalar, ScalarMaybeUninit};
use crate::ty::fold::TypeFoldable;
use crate::ty::{self, DefId, SubstsRef, Ty, TyCtxt};
use rustc_ast::Mutability;
@ -74,7 +74,7 @@ impl<'tcx> TyCtxt<'tcx> {
let instance = ty::Instance::resolve_drop_in_place(tcx, ty);
let fn_alloc_id = tcx.create_fn_alloc(instance);
let fn_ptr = Pointer::from(fn_alloc_id);
fn_ptr.into()
ScalarMaybeUninit::from_pointer(fn_ptr, &tcx)
}
VtblEntry::MetadataSize => Scalar::from_uint(size, ptr_size).into(),
VtblEntry::MetadataAlign => Scalar::from_uint(align, ptr_size).into(),
@ -90,7 +90,7 @@ impl<'tcx> TyCtxt<'tcx> {
.polymorphize(tcx);
let fn_alloc_id = tcx.create_fn_alloc(instance);
let fn_ptr = Pointer::from(fn_alloc_id);
fn_ptr.into()
ScalarMaybeUninit::from_pointer(fn_ptr, &tcx)
}
};
vtable


@ -16,7 +16,6 @@ use crate::interpret::{
#[derive(Clone, Debug)]
pub enum ConstEvalErrKind {
NeedsRfc(String),
PtrToIntCast,
ConstAccessesStatic,
ModifiedGlobal,
AssertFailure(AssertKind<ConstInt>),
@ -49,12 +48,6 @@ impl fmt::Display for ConstEvalErrKind {
NeedsRfc(ref msg) => {
write!(f, "\"{}\" needs an rfc before being allowed inside constants", msg)
}
PtrToIntCast => {
write!(
f,
"cannot cast pointer to integer because it was not created by cast from integer"
)
}
ConstAccessesStatic => write!(f, "constant accesses static"),
ModifiedGlobal => {
write!(f, "modifying a static's initial value from another static's initializer")


@ -136,19 +136,19 @@ pub(super) fn op_to_const<'tcx>(
// by-val is if we are in destructure_const, i.e., if this is (a field of) something that we
// "tried to make immediate" before. We wouldn't do that for non-slice scalar pairs or
// structs containing such.
op.try_as_mplace(ecx)
op.try_as_mplace()
};
let to_const_value = |mplace: &MPlaceTy<'_>| match mplace.ptr {
Scalar::Ptr(ptr) => {
let alloc = ecx.tcx.global_alloc(ptr.alloc_id).unwrap_memory();
ConstValue::ByRef { alloc, offset: ptr.offset }
// We know `offset` is relative to the allocation, so we can use `into_parts`.
let to_const_value = |mplace: &MPlaceTy<'_>| match mplace.ptr.into_parts() {
(Some(alloc_id), offset) => {
let alloc = ecx.tcx.global_alloc(alloc_id).unwrap_memory();
ConstValue::ByRef { alloc, offset }
}
Scalar::Int(int) => {
(None, offset) => {
assert!(mplace.layout.is_zst());
assert_eq!(
int.assert_bits(ecx.tcx.data_layout.pointer_size)
% u128::from(mplace.layout.align.abi.bytes()),
offset.bytes() % mplace.layout.align.abi.bytes(),
0,
"this MPlaceTy must come from a validated constant, thus we can assume the \
alignment is correct",
@ -162,14 +162,15 @@ pub(super) fn op_to_const<'tcx>(
Err(imm) => match *imm {
Immediate::Scalar(x) => match x {
ScalarMaybeUninit::Scalar(s) => ConstValue::Scalar(s),
ScalarMaybeUninit::Uninit => to_const_value(&op.assert_mem_place(ecx)),
ScalarMaybeUninit::Uninit => to_const_value(&op.assert_mem_place()),
},
Immediate::ScalarPair(a, b) => {
let (data, start) = match a.check_init().unwrap() {
Scalar::Ptr(ptr) => {
(ecx.tcx.global_alloc(ptr.alloc_id).unwrap_memory(), ptr.offset.bytes())
// We know `offset` is relative to the allocation, so we can use `into_parts`.
let (data, start) = match ecx.scalar_to_ptr(a.check_init().unwrap()).into_parts() {
(Some(alloc_id), offset) => {
(ecx.tcx.global_alloc(alloc_id).unwrap_memory(), offset.bytes())
}
Scalar::Int { .. } => (
(None, _offset) => (
ecx.tcx.intern_const_alloc(Allocation::from_bytes_byte_aligned_immutable(
b"" as &[u8],
)),
@ -369,6 +370,7 @@ pub fn eval_to_allocation_raw_provider<'tcx>(
inner = true;
}
};
let alloc_id = mplace.ptr.provenance.unwrap();
if let Err(error) = validation {
// Validation failed, report an error. This is always a hard error.
let err = ConstEvalErr::new(&ecx, error, None);
@ -381,9 +383,7 @@ pub fn eval_to_allocation_raw_provider<'tcx>(
"the raw bytes of the constant ({}",
display_allocation(
*ecx.tcx,
ecx.tcx
.global_alloc(mplace.ptr.assert_ptr().alloc_id)
.unwrap_memory()
ecx.tcx.global_alloc(alloc_id).unwrap_memory()
)
));
diag.emit();
@ -391,7 +391,7 @@ pub fn eval_to_allocation_raw_provider<'tcx>(
))
} else {
// Convert to raw constant
Ok(ConstAlloc { alloc_id: mplace.ptr.assert_ptr().alloc_id, ty: mplace.layout.ty })
Ok(ConstAlloc { alloc_id, ty: mplace.layout.ty })
}
}
}


@ -16,8 +16,8 @@ use rustc_target::abi::{Align, Size};
use rustc_target::spec::abi::Abi;
use crate::interpret::{
self, compile_time_machine, AllocId, Allocation, Frame, ImmTy, InterpCx, InterpResult, Memory,
OpTy, PlaceTy, Pointer, Scalar, StackPopUnwind,
self, compile_time_machine, AllocId, Allocation, Frame, ImmTy, InterpCx, InterpResult, OpTy,
PlaceTy, Scalar, StackPopUnwind,
};
use super::error::*;
@ -59,7 +59,7 @@ pub struct CompileTimeInterpreter<'mir, 'tcx> {
pub steps_remaining: usize,
/// The virtual call stack.
pub(crate) stack: Vec<Frame<'mir, 'tcx, (), ()>>,
pub(crate) stack: Vec<Frame<'mir, 'tcx, AllocId, ()>>,
}
#[derive(Copy, Clone, Debug)]
@ -168,11 +168,11 @@ impl<'mir, 'tcx: 'mir> CompileTimeEvalContext<'mir, 'tcx> {
// Comparisons between integers are always known.
(Scalar::Int { .. }, Scalar::Int { .. }) => a == b,
// Equality with integers can never be known for sure.
(Scalar::Int { .. }, Scalar::Ptr(_)) | (Scalar::Ptr(_), Scalar::Int { .. }) => false,
(Scalar::Int { .. }, Scalar::Ptr(..)) | (Scalar::Ptr(..), Scalar::Int { .. }) => false,
// FIXME: return `true` for when both sides are the same pointer, *except* that
// some things (like functions and vtables) do not have stable addresses
// so we need to be careful around them (see e.g. #73722).
(Scalar::Ptr(_), Scalar::Ptr(_)) => false,
(Scalar::Ptr(..), Scalar::Ptr(..)) => false,
}
}
@ -183,13 +183,13 @@ impl<'mir, 'tcx: 'mir> CompileTimeEvalContext<'mir, 'tcx> {
// Comparisons of abstract pointers with null pointers are known if the pointer
// is in bounds, because if they are in bounds, the pointer can't be null.
// Inequality with integers other than null can never be known for sure.
(Scalar::Int(int), Scalar::Ptr(ptr)) | (Scalar::Ptr(ptr), Scalar::Int(int)) => {
int.is_null() && !self.memory.ptr_may_be_null(ptr)
(Scalar::Int(int), Scalar::Ptr(ptr, _)) | (Scalar::Ptr(ptr, _), Scalar::Int(int)) => {
int.is_null() && !self.memory.ptr_may_be_null(ptr.into())
}
// FIXME: return `true` for at least some comparisons where we can reliably
// determine the result of runtime inequality tests at compile-time.
// Examples include comparison of addresses in different static items.
(Scalar::Ptr(_), Scalar::Ptr(_)) => false,
(Scalar::Ptr(..), Scalar::Ptr(..)) => false,
}
}
}
@ -312,7 +312,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
align,
interpret::MemoryKind::Machine(MemoryKind::Heap),
)?;
ecx.write_scalar(Scalar::Ptr(ptr), dest)?;
ecx.write_pointer(ptr, dest)?;
}
_ => {
return Err(ConstEvalErrKind::NeedsRfc(format!(
@ -356,10 +356,6 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
Err(ConstEvalErrKind::Abort(msg).into())
}
fn ptr_to_int(_mem: &Memory<'mir, 'tcx, Self>, _ptr: Pointer) -> InterpResult<'tcx, u64> {
Err(ConstEvalErrKind::PtrToIntCast.into())
}
fn binary_ptr_op(
_ecx: &InterpCx<'mir, 'tcx, Self>,
_bin_op: mir::BinOp,


@ -35,7 +35,7 @@ pub(crate) fn const_caller_location(
if intern_const_alloc_recursive(&mut ecx, InternKind::Constant, &loc_place).is_err() {
bug!("intern_const_alloc_recursive should not error in this case")
}
ConstValue::Scalar(loc_place.ptr)
ConstValue::Scalar(Scalar::from_pointer(loc_place.ptr.into_pointer_or_addr().unwrap(), &tcx))
}
/// Convert an evaluated constant to a type level constant
@ -179,9 +179,9 @@ pub(crate) fn deref_const<'tcx>(
let ecx = mk_eval_cx(tcx, DUMMY_SP, param_env, false);
let op = ecx.const_to_op(val, None).unwrap();
let mplace = ecx.deref_operand(&op).unwrap();
if let Scalar::Ptr(ptr) = mplace.ptr {
if let Some(alloc_id) = mplace.ptr.provenance {
assert_eq!(
tcx.get_global_alloc(ptr.alloc_id).unwrap().unwrap_memory().mutability,
tcx.get_global_alloc(alloc_id).unwrap().unwrap_memory().mutability,
Mutability::Not,
"deref_const cannot be used with mutable allocations as \
that could allow pattern matching to observe mutable statics",


@ -57,7 +57,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
.ok_or_else(|| err_inval!(TooGeneric))?;
let fn_ptr = self.memory.create_fn_alloc(FnVal::Instance(instance));
self.write_scalar(fn_ptr, dest)?;
self.write_pointer(fn_ptr, dest)?;
}
_ => span_bug!(self.cur_span(), "reify fn pointer on {:?}", src.layout.ty),
}
@ -88,7 +88,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
ty::ClosureKind::FnOnce,
);
let fn_ptr = self.memory.create_fn_alloc(FnVal::Instance(instance));
self.write_scalar(fn_ptr, dest)?;
self.write_pointer(fn_ptr, dest)?;
}
_ => span_bug!(self.cur_span(), "closure fn pointer on {:?}", src.layout.ty),
}
@ -175,7 +175,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// (a) cast a raw ptr to usize, or
// (b) cast from an integer-like (including bool, char, enums).
// In both cases we want the bits.
let bits = self.force_bits(src.to_scalar()?, src.layout.size)?;
let bits = src.to_scalar()?.to_bits(src.layout.size)?;
Ok(self.cast_from_scalar(bits, src.layout, cast_ty).into())
}
@ -280,7 +280,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// Initial cast from sized to dyn trait
let vtable = self.get_vtable(src_pointee_ty, data.principal())?;
let ptr = self.read_immediate(src)?.to_scalar()?;
let val = Immediate::new_dyn_trait(ptr, vtable);
let val = Immediate::new_dyn_trait(ptr, vtable, &*self.tcx);
self.write_immediate(val, dest)
}


@ -8,7 +8,6 @@ use rustc_index::vec::IndexVec;
use rustc_macros::HashStable;
use rustc_middle::ich::StableHashingContext;
use rustc_middle::mir;
use rustc_middle::mir::interpret::{GlobalId, InterpResult, Pointer, Scalar};
use rustc_middle::ty::layout::{self, TyAndLayout};
use rustc_middle::ty::{
self, query::TyCtxtAt, subst::SubstsRef, ParamEnv, Ty, TyCtxt, TypeFoldable,
@ -18,8 +17,9 @@ use rustc_span::{Pos, Span};
use rustc_target::abi::{Align, HasDataLayout, LayoutOf, Size, TargetDataLayout};
use super::{
Immediate, MPlaceTy, Machine, MemPlace, MemPlaceMeta, Memory, MemoryKind, Operand, Place,
PlaceTy, ScalarMaybeUninit, StackPopJump,
AllocId, GlobalId, Immediate, InterpResult, MPlaceTy, Machine, MemPlace, MemPlaceMeta, Memory,
MemoryKind, Operand, Place, PlaceTy, Pointer, Provenance, Scalar, ScalarMaybeUninit,
StackPopJump,
};
use crate::transform::validate::equal_up_to_regions;
use crate::util::storage::AlwaysLiveLocals;
@ -80,7 +80,7 @@ impl Drop for SpanGuard {
}
/// A stack frame.
pub struct Frame<'mir, 'tcx, Tag = (), Extra = ()> {
pub struct Frame<'mir, 'tcx, Tag: Provenance = AllocId, Extra = ()> {
////////////////////////////////////////////////////////////////////////////////
// Function and callsite information
////////////////////////////////////////////////////////////////////////////////
@ -161,7 +161,7 @@ pub enum StackPopCleanup {
/// State of a local variable including a memoized layout
#[derive(Clone, PartialEq, Eq, HashStable)]
pub struct LocalState<'tcx, Tag = ()> {
pub struct LocalState<'tcx, Tag: Provenance = AllocId> {
pub value: LocalValue<Tag>,
/// Don't modify if `Some`, this is only used to prevent computing the layout twice
#[stable_hasher(ignore)]
@ -169,8 +169,8 @@ pub struct LocalState<'tcx, Tag = ()> {
}
/// Current value of a local variable
#[derive(Copy, Clone, PartialEq, Eq, Debug, HashStable)] // Miri debug-prints these
pub enum LocalValue<Tag = ()> {
#[derive(Copy, Clone, PartialEq, Eq, HashStable, Debug)] // Miri debug-prints these
pub enum LocalValue<Tag: Provenance = AllocId> {
/// This local is not currently alive, and cannot be used at all.
Dead,
/// This local is alive but not yet initialized. It can be written to
@ -186,7 +186,7 @@ pub enum LocalValue<Tag = ()> {
Live(Operand<Tag>),
}
impl<'tcx, Tag: Copy + 'static> LocalState<'tcx, Tag> {
impl<'tcx, Tag: Provenance + 'static> LocalState<'tcx, Tag> {
/// Read the local's value or error if the local is not yet live or not live anymore.
///
/// Note: This may only be invoked from the `Machine::access_local` hook and not from
@ -220,7 +220,7 @@ impl<'tcx, Tag: Copy + 'static> LocalState<'tcx, Tag> {
}
}
impl<'mir, 'tcx, Tag> Frame<'mir, 'tcx, Tag> {
impl<'mir, 'tcx, Tag: Provenance> Frame<'mir, 'tcx, Tag> {
pub fn with_extra<Extra>(self, extra: Extra) -> Frame<'mir, 'tcx, Tag, Extra> {
Frame {
body: self.body,
@ -235,7 +235,7 @@ impl<'mir, 'tcx, Tag> Frame<'mir, 'tcx, Tag> {
}
}
impl<'mir, 'tcx, Tag, Extra> Frame<'mir, 'tcx, Tag, Extra> {
impl<'mir, 'tcx, Tag: Provenance, Extra> Frame<'mir, 'tcx, Tag, Extra> {
/// Get the current location within the Frame.
///
/// If this is `Err`, we are not currently executing any particular statement in
@ -406,20 +406,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
}
#[inline(always)]
pub fn force_ptr(
&self,
scalar: Scalar<M::PointerTag>,
) -> InterpResult<'tcx, Pointer<M::PointerTag>> {
self.memory.force_ptr(scalar)
}
#[inline(always)]
pub fn force_bits(
&self,
scalar: Scalar<M::PointerTag>,
size: Size,
) -> InterpResult<'tcx, u128> {
self.memory.force_bits(scalar, size)
pub fn scalar_to_ptr(&self, scalar: Scalar<M::PointerTag>) -> Pointer<Option<M::PointerTag>> {
self.memory.scalar_to_ptr(scalar)
}
/// Call this to turn untagged "global" pointers (obtained via `tcx`) into
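The `Frame`, `LocalState`, and `LocalValue` changes above replace the old `Tag = ()` default with `Tag: Provenance = AllocId`, and the `force_ptr`/`force_bits` pair collapses into the infallible `scalar_to_ptr`. A rough, self-contained model of that trait bound and default; this `Provenance` trait is a simplified stand-in (the `OFFSET_IS_ADDR` flag is an assumption about the real trait's shape, while `get_alloc_id` is the method the debug-printing code above uses):

```rust
use std::fmt::Debug;
use std::hash::Hash;

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
struct AllocId(u64);

/// Simplified stand-in for the provenance trait: a tag must know which
/// allocation it belongs to, and whether dropping it leaves a usable address.
trait Provenance: Copy + Debug + Eq + Hash {
    const OFFSET_IS_ADDR: bool;
    fn get_alloc_id(self) -> AllocId;
}

/// For compile-time machines the tag *is* the `AllocId`, and offsets stay
/// relative to the allocation, so the offset alone is not an address.
impl Provenance for AllocId {
    const OFFSET_IS_ADDR: bool = false;
    fn get_alloc_id(self) -> AllocId { self }
}

/// Interpreter state becomes generic over the tag, defaulting to `AllocId`,
/// mirroring `LocalValue<Tag: Provenance = AllocId>` above (the payload of
/// `Live` is reduced to the bare tag here).
#[derive(Debug)]
enum LocalValue<Tag: Provenance = AllocId> {
    Dead,
    Uninitialized,
    Live(Tag),
}

fn main() {
    let local: LocalValue = LocalValue::Live(AllocId(7));
    if let LocalValue::Live(tag) = local {
        println!("backed by {:?}", tag.get_alloc_id());
    }
    let _ = LocalValue::<AllocId>::Dead;
    let _ = LocalValue::<AllocId>::Uninitialized;
    assert!(!AllocId::OFFSET_IS_ADDR);
}
```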
@ -650,7 +638,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
Ok(Some((size, align)))
}
ty::Dynamic(..) => {
let vtable = metadata.unwrap_meta();
let vtable = self.scalar_to_ptr(metadata.unwrap_meta());
// Read size and align from vtable (already checks size).
Ok(Some(self.read_size_and_align_from_vtable(vtable)?))
}
@ -897,9 +885,12 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
fn deallocate_local(&mut self, local: LocalValue<M::PointerTag>) -> InterpResult<'tcx> {
if let LocalValue::Live(Operand::Indirect(MemPlace { ptr, .. })) = local {
// All locals have a backing allocation, even if the allocation is empty
// due to the local having ZST type.
let ptr = ptr.assert_ptr();
trace!("deallocating local: {:?}", self.memory.dump_alloc(ptr.alloc_id));
// due to the local having ZST type. Hence we can `unwrap`.
trace!(
"deallocating local {:?}: {:?}",
local,
self.memory.dump_alloc(ptr.provenance.unwrap().get_alloc_id())
);
self.memory.deallocate(ptr, None, MemoryKind::Stack)?;
};
Ok(())
@ -975,46 +966,45 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> std::fmt::Debug
match self.ecx.stack()[frame].locals[local].value {
LocalValue::Dead => write!(fmt, " is dead")?,
LocalValue::Uninitialized => write!(fmt, " is uninitialized")?,
LocalValue::Live(Operand::Indirect(mplace)) => match mplace.ptr {
Scalar::Ptr(ptr) => {
LocalValue::Live(Operand::Indirect(mplace)) => {
write!(
fmt,
" by align({}){} ref:",
" by align({}){} ref {:?}:",
mplace.align.bytes(),
match mplace.meta {
MemPlaceMeta::Meta(meta) => format!(" meta({:?})", meta),
MemPlaceMeta::Poison | MemPlaceMeta::None => String::new(),
}
)?;
allocs.push(ptr.alloc_id);
}
ptr => write!(fmt, " by integral ref: {:?}", ptr)?,
},
mplace.ptr,
)?;
allocs.extend(mplace.ptr.provenance.map(Provenance::get_alloc_id));
}
LocalValue::Live(Operand::Immediate(Immediate::Scalar(val))) => {
write!(fmt, " {:?}", val)?;
if let ScalarMaybeUninit::Scalar(Scalar::Ptr(ptr)) = val {
allocs.push(ptr.alloc_id);
if let ScalarMaybeUninit::Scalar(Scalar::Ptr(ptr, _size)) = val {
allocs.push(ptr.provenance.get_alloc_id());
}
}
LocalValue::Live(Operand::Immediate(Immediate::ScalarPair(val1, val2))) => {
write!(fmt, " ({:?}, {:?})", val1, val2)?;
if let ScalarMaybeUninit::Scalar(Scalar::Ptr(ptr)) = val1 {
allocs.push(ptr.alloc_id);
if let ScalarMaybeUninit::Scalar(Scalar::Ptr(ptr, _size)) = val1 {
allocs.push(ptr.provenance.get_alloc_id());
}
if let ScalarMaybeUninit::Scalar(Scalar::Ptr(ptr)) = val2 {
allocs.push(ptr.alloc_id);
if let ScalarMaybeUninit::Scalar(Scalar::Ptr(ptr, _size)) = val2 {
allocs.push(ptr.provenance.get_alloc_id());
}
}
}
write!(fmt, ": {:?}", self.ecx.memory.dump_allocs(allocs))
}
Place::Ptr(mplace) => match mplace.ptr {
Scalar::Ptr(ptr) => write!(
Place::Ptr(mplace) => match mplace.ptr.provenance.map(Provenance::get_alloc_id) {
Some(alloc_id) => write!(
fmt,
"by align({}) ref: {:?}",
"by align({}) ref {:?}: {:?}",
mplace.align.bytes(),
self.ecx.memory.dump_alloc(ptr.alloc_id)
mplace.ptr,
self.ecx.memory.dump_alloc(alloc_id)
),
ptr => write!(fmt, " integral by ref: {:?}", ptr),
},
@ -1022,7 +1012,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> std::fmt::Debug
}
}
impl<'ctx, 'mir, 'tcx, Tag, Extra> HashStable<StableHashingContext<'ctx>>
impl<'ctx, 'mir, 'tcx, Tag: Provenance, Extra> HashStable<StableHashingContext<'ctx>>
for Frame<'mir, 'tcx, Tag, Extra>
where
Extra: HashStable<StableHashingContext<'ctx>>,

View File

@ -20,18 +20,17 @@ use rustc_errors::ErrorReported;
use rustc_hir as hir;
use rustc_middle::mir::interpret::InterpResult;
use rustc_middle::ty::{self, layout::TyAndLayout, Ty};
use rustc_target::abi::Size;
use rustc_ast::Mutability;
use super::{AllocId, Allocation, InterpCx, MPlaceTy, Machine, MemoryKind, Scalar, ValueVisitor};
use super::{AllocId, Allocation, InterpCx, MPlaceTy, Machine, MemoryKind, PlaceTy, ValueVisitor};
use crate::const_eval;
pub trait CompileTimeMachine<'mir, 'tcx, T> = Machine<
'mir,
'tcx,
MemoryKind = T,
PointerTag = (),
PointerTag = AllocId,
ExtraFnVal = !,
FrameExtra = (),
AllocExtra = (),
@ -136,7 +135,7 @@ fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval:
};
// link the alloc id to the actual allocation
let alloc = tcx.intern_const_alloc(alloc);
leftover_allocations.extend(alloc.relocations().iter().map(|&(_, ((), reloc))| reloc));
leftover_allocations.extend(alloc.relocations().iter().map(|&(_, alloc_id)| alloc_id));
tcx.set_alloc_id_memory(alloc_id, alloc);
None
}
@ -203,10 +202,11 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
if let ty::Dynamic(..) =
tcx.struct_tail_erasing_lifetimes(referenced_ty, self.ecx.param_env).kind()
{
if let Scalar::Ptr(vtable) = mplace.meta.unwrap_meta() {
let ptr = self.ecx.scalar_to_ptr(mplace.meta.unwrap_meta());
if let Some(alloc_id) = ptr.provenance {
// Explicitly choose const mode here, since vtables are immutable, even
// if the reference of the fat pointer is mutable.
self.intern_shallow(vtable.alloc_id, InternMode::Const, None);
self.intern_shallow(alloc_id, InternMode::Const, None);
} else {
// Validation will error (with a better message) on an invalid vtable pointer.
// Let validation show the error message, but make sure it *does* error.
@ -216,7 +216,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
}
// Check if we have encountered this pointer+layout combination before.
// Only recurse for allocation-backed pointers.
if let Scalar::Ptr(ptr) = mplace.ptr {
if let Some(alloc_id) = mplace.ptr.provenance {
// Compute the mode with which we intern this. Our goal here is to make as many
// statics as we can immutable so they can be placed in read-only memory by LLVM.
let ref_mode = match self.mode {
@ -259,7 +259,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
InternMode::Const
}
};
match self.intern_shallow(ptr.alloc_id, ref_mode, Some(referenced_ty)) {
match self.intern_shallow(alloc_id, ref_mode, Some(referenced_ty)) {
// No need to recurse, these are interned already and statics may have
// cycles, so we don't want to recurse there
Some(IsStaticOrFn) => {}
@ -321,7 +321,7 @@ where
leftover_allocations,
// The outermost allocation must exist, because we allocated it with
// `Memory::allocate`.
ret.ptr.assert_ptr().alloc_id,
ret.ptr.provenance.unwrap(),
base_intern_mode,
Some(ret.layout.ty),
);
@ -395,9 +395,9 @@ where
}
let alloc = tcx.intern_const_alloc(alloc);
tcx.set_alloc_id_memory(alloc_id, alloc);
for &(_, ((), reloc)) in alloc.relocations().iter() {
if leftover_allocations.insert(reloc) {
todo.push(reloc);
for &(_, alloc_id) in alloc.relocations().iter() {
if leftover_allocations.insert(alloc_id) {
todo.push(alloc_id);
}
}
} else if ecx.memory.dead_alloc_map.contains_key(&alloc_id) {
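The interning loops above are a plain reachability walk: every `AllocId` found among an allocation's relocations goes into a seen-set and, if it was not there yet, onto a `todo` list. A stand-alone sketch of that worklist, with allocations reduced to a map from id to the ids their relocations reference:

```rust
use std::collections::{HashMap, HashSet};

type AllocId = u64;

fn reachable_allocs(relocations: &HashMap<AllocId, Vec<AllocId>>, root: AllocId) -> HashSet<AllocId> {
    let mut seen: HashSet<AllocId> = HashSet::new();
    let mut todo = vec![root];
    while let Some(id) = todo.pop() {
        // `insert` returns true only the first time an id is seen, which is
        // the same `leftover_allocations.insert(alloc_id)` pattern as above.
        if seen.insert(id) {
            if let Some(targets) = relocations.get(&id) {
                todo.extend(targets.iter().copied());
            }
        }
    }
    seen
}

fn main() {
    let mut graph = HashMap::new();
    graph.insert(1, vec![2, 3]);
    graph.insert(2, vec![3]);
    graph.insert(3, vec![1]); // cycles are fine: statics may reference each other
    assert_eq!(reachable_allocs(&graph, 1).len(), 3);
}
```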
@ -425,14 +425,12 @@ impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>>
layout: TyAndLayout<'tcx>,
f: impl FnOnce(
&mut InterpCx<'mir, 'tcx, M>,
&MPlaceTy<'tcx, M::PointerTag>,
&PlaceTy<'tcx, M::PointerTag>,
) -> InterpResult<'tcx, ()>,
) -> InterpResult<'tcx, &'tcx Allocation> {
let dest = self.allocate(layout, MemoryKind::Stack)?;
f(self, &dest)?;
let ptr = dest.ptr.assert_ptr();
assert_eq!(ptr.offset, Size::ZERO);
let mut alloc = self.memory.alloc_map.remove(&ptr.alloc_id).unwrap().1;
f(self, &dest.into())?;
let mut alloc = self.memory.alloc_map.remove(&dest.ptr.provenance.unwrap()).unwrap().1;
alloc.mutability = Mutability::Not;
Ok(self.tcx.intern_const_alloc(alloc))
}

View File

@ -18,6 +18,7 @@ use rustc_target::abi::{Abi, Align, LayoutOf as _, Primitive, Size};
use super::{
util::ensure_monomorphic_enough, CheckInAllocMsg, ImmTy, InterpCx, Machine, OpTy, PlaceTy,
Pointer,
};
mod caller_location;
@ -138,7 +139,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
sym::caller_location => {
let span = self.find_closest_untracked_caller_location();
let location = self.alloc_caller_location_for_span(span);
self.write_scalar(location.ptr, dest)?;
self.write_immediate(location.to_ref(self), dest)?;
}
sym::min_align_of_val | sym::size_of_val => {
@ -190,7 +191,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let ty = substs.type_at(0);
let layout_of = self.layout_of(ty)?;
let val = self.read_scalar(&args[0])?.check_init()?;
let bits = self.force_bits(val, layout_of.size)?;
let bits = val.to_bits(layout_of.size)?;
let kind = match layout_of.abi {
Abi::Scalar(ref scalar) => scalar.value,
_ => span_bug!(
@ -238,7 +239,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// term since the sign of the second term can be inferred from this and
// the fact that the operation has overflowed (if either is 0 no
// overflow can occur)
let first_term: u128 = self.force_bits(l.to_scalar()?, l.layout.size)?;
let first_term: u128 = l.to_scalar()?.to_bits(l.layout.size)?;
let first_term_positive = first_term & (1 << (num_bits - 1)) == 0;
if first_term_positive {
// Negative overflow not possible since the positive first term
@ -298,7 +299,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let (val, overflowed, _ty) = self.overflowing_binary_op(bin_op, &l, &r)?;
if overflowed {
let layout = self.layout_of(substs.type_at(0))?;
let r_val = self.force_bits(r.to_scalar()?, layout.size)?;
let r_val = r.to_scalar()?.to_bits(layout.size)?;
if let sym::unchecked_shl | sym::unchecked_shr = intrinsic_name {
throw_ub_format!("overflowing shift by {} in `{}`", r_val, intrinsic_name);
} else {
@ -312,9 +313,9 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// rotate_right: (X << ((BW - S) % BW)) | (X >> (S % BW))
let layout = self.layout_of(substs.type_at(0))?;
let val = self.read_scalar(&args[0])?.check_init()?;
let val_bits = self.force_bits(val, layout.size)?;
let val_bits = val.to_bits(layout.size)?;
let raw_shift = self.read_scalar(&args[1])?.check_init()?;
let raw_shift_bits = self.force_bits(raw_shift, layout.size)?;
let raw_shift_bits = raw_shift.to_bits(layout.size)?;
let width_bits = u128::from(layout.size.bits());
let shift_bits = raw_shift_bits % width_bits;
let inv_shift_bits = (width_bits - shift_bits) % width_bits;
@ -331,22 +332,22 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
self.copy_intrinsic(&args[0], &args[1], &args[2], /*nonoverlapping*/ false)?;
}
sym::offset => {
let ptr = self.read_scalar(&args[0])?.check_init()?;
let ptr = self.read_pointer(&args[0])?;
let offset_count = self.read_scalar(&args[1])?.to_machine_isize(self)?;
let pointee_ty = substs.type_at(0);
let offset_ptr = self.ptr_offset_inbounds(ptr, pointee_ty, offset_count)?;
self.write_scalar(offset_ptr, dest)?;
self.write_pointer(offset_ptr, dest)?;
}
sym::arith_offset => {
let ptr = self.read_scalar(&args[0])?.check_init()?;
let ptr = self.read_pointer(&args[0])?;
let offset_count = self.read_scalar(&args[1])?.to_machine_isize(self)?;
let pointee_ty = substs.type_at(0);
let pointee_size = i64::try_from(self.layout_of(pointee_ty)?.size.bytes()).unwrap();
let offset_bytes = offset_count.wrapping_mul(pointee_size);
let offset_ptr = ptr.ptr_wrapping_signed_offset(offset_bytes, self);
self.write_scalar(offset_ptr, dest)?;
let offset_ptr = ptr.wrapping_signed_offset(offset_bytes, self);
self.write_pointer(offset_ptr, dest)?;
}
sym::ptr_offset_from => {
let a = self.read_immediate(&args[0])?.to_scalar()?;
@ -361,9 +362,9 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
//
// Control flow is weird because we cannot early-return (to reach the
// `go_to_block` at the end).
let done = if a.is_bits() && b.is_bits() {
let a = a.to_machine_usize(self)?;
let b = b.to_machine_usize(self)?;
let done = if let (Ok(a), Ok(b)) = (a.try_to_int(), b.try_to_int()) {
let a = a.try_to_machine_usize(*self.tcx).unwrap();
let b = b.try_to_machine_usize(*self.tcx).unwrap();
if a == b && a != 0 {
self.write_scalar(Scalar::from_machine_isize(0, self), dest)?;
true
@ -376,9 +377,11 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
if !done {
// General case: we need two pointers.
let a = self.force_ptr(a)?;
let b = self.force_ptr(b)?;
if a.alloc_id != b.alloc_id {
let a = self.scalar_to_ptr(a);
let b = self.scalar_to_ptr(b);
let (a_alloc_id, a_offset, _) = self.memory.ptr_get_alloc(a)?;
let (b_alloc_id, b_offset, _) = self.memory.ptr_get_alloc(b)?;
if a_alloc_id != b_alloc_id {
throw_ub_format!(
"ptr_offset_from cannot compute offset of pointers into different \
allocations.",
@ -386,8 +389,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
}
let usize_layout = self.layout_of(self.tcx.types.usize)?;
let isize_layout = self.layout_of(self.tcx.types.isize)?;
let a_offset = ImmTy::from_uint(a.offset.bytes(), usize_layout);
let b_offset = ImmTy::from_uint(b.offset.bytes(), usize_layout);
let a_offset = ImmTy::from_uint(a_offset.bytes(), usize_layout);
let b_offset = ImmTy::from_uint(b_offset.bytes(), usize_layout);
let (val, _overflowed, _ty) =
self.overflowing_binary_op(BinOp::Sub, &a_offset, &b_offset)?;
let pointee_layout = self.layout_of(substs.type_at(0))?;
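`ptr_offset_from` now resolves both operands with `ptr_get_alloc` and insists on the same `AllocId` before subtracting the offsets. A simplified sketch of that rule on toy types, with the cross-allocation case reported as an error value instead of interpreter UB:

```rust
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct AllocId(u64);

#[derive(Copy, Clone, Debug)]
struct Pointer {
    alloc_id: AllocId,
    offset: u64, // bytes from the start of the allocation
}

/// Byte distance between two pointers, defined only within one allocation.
fn ptr_offset_from(a: Pointer, b: Pointer) -> Result<i64, String> {
    if a.alloc_id != b.alloc_id {
        return Err("cannot compute offset of pointers into different allocations".into());
    }
    // The real intrinsic then divides this by the pointee size (`exact_div`).
    Ok(a.offset as i64 - b.offset as i64)
}

fn main() {
    let base = Pointer { alloc_id: AllocId(1), offset: 8 };
    let later = Pointer { alloc_id: AllocId(1), offset: 24 };
    assert_eq!(ptr_offset_from(later, base), Ok(16));
    let other = Pointer { alloc_id: AllocId(2), offset: 0 };
    assert!(ptr_offset_from(later, other).is_err());
}
```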
@ -513,10 +516,10 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
/// 0, so offset-by-0 (and only 0) is okay -- except that null cannot be offset by _any_ value.
pub fn ptr_offset_inbounds(
&self,
ptr: Scalar<M::PointerTag>,
ptr: Pointer<Option<M::PointerTag>>,
pointee_ty: Ty<'tcx>,
offset_count: i64,
) -> InterpResult<'tcx, Scalar<M::PointerTag>> {
) -> InterpResult<'tcx, Pointer<Option<M::PointerTag>>> {
// We cannot overflow i64 as a type's size must be <= isize::MAX.
let pointee_size = i64::try_from(self.layout_of(pointee_ty)?.size.bytes()).unwrap();
// The computed offset, in bytes, cannot overflow an isize.
@ -524,7 +527,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
offset_count.checked_mul(pointee_size).ok_or(err_ub!(PointerArithOverflow))?;
// The offset being in bounds cannot rely on "wrapping around" the address space.
// So, first rule out overflows in the pointer arithmetic.
let offset_ptr = ptr.ptr_signed_offset(offset_bytes, self)?;
let offset_ptr = ptr.signed_offset(offset_bytes, self)?;
// ptr and offset_ptr must be in bounds of the same allocated object. This means all of the
// memory between these pointers must be accessible. Note that we do not require the
// pointers to be properly aligned (unlike a read/write operation).
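`ptr_offset_inbounds` spells out two separate overflow rules before the bounds check even starts: the byte count (`offset_count * pointee_size`) must not overflow, and adding it to the pointer must not wrap around the address space. A sketch of just that arithmetic on plain integers (the allocation-bounds check that follows in the interpreter is omitted):

```rust
use std::convert::TryFrom;

fn checked_ptr_offset(addr: u64, pointee_size: u64, offset_count: i64) -> Option<u64> {
    let pointee_size = i64::try_from(pointee_size).ok()?;
    // The computed offset, in bytes, must not overflow.
    let offset_bytes = offset_count.checked_mul(pointee_size)?;
    // Being in bounds must not rely on wrapping around the address space.
    if offset_bytes >= 0 {
        addr.checked_add(offset_bytes as u64)
    } else {
        addr.checked_sub(offset_bytes.unsigned_abs())
    }
}

fn main() {
    assert_eq!(checked_ptr_offset(0x1000, 4, 3), Some(0x100c));
    assert_eq!(checked_ptr_offset(0x1000, 4, -2), Some(0xff8));
    assert_eq!(checked_ptr_offset(8, 4, -4), None); // would wrap below zero
    assert_eq!(checked_ptr_offset(u64::MAX - 3, 4, 1), None); // would wrap above
}
```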
@ -558,8 +561,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
)
})?;
let src = self.read_scalar(&src)?.check_init()?;
let dst = self.read_scalar(&dst)?.check_init()?;
let src = self.read_pointer(&src)?;
let dst = self.read_pointer(&dst)?;
self.memory.copy(src, align, dst, align, size, nonoverlapping)
}
@ -572,8 +575,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let layout = self.layout_of(lhs.layout.ty.builtin_deref(true).unwrap().ty)?;
assert!(!layout.is_unsized());
let lhs = self.read_scalar(lhs)?.check_init()?;
let rhs = self.read_scalar(rhs)?.check_init()?;
let lhs = self.read_pointer(lhs)?;
let rhs = self.read_pointer(rhs)?;
let lhs_bytes = self.memory.read_bytes(lhs, layout.size)?;
let rhs_bytes = self.memory.read_bytes(rhs, layout.size)?;
Ok(Scalar::from_bool(lhs_bytes == rhs_bytes))

View File

@ -96,7 +96,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let location = self.allocate(loc_layout, MemoryKind::CallerLocation).unwrap();
// Initialize fields.
self.write_immediate(file.to_ref(), &self.mplace_field(&location, 0).unwrap().into())
self.write_immediate(file.to_ref(self), &self.mplace_field(&location, 0).unwrap().into())
.expect("writing to memory we just allocated cannot fail");
self.write_scalar(line, &self.mplace_field(&location, 1).unwrap().into())
.expect("writing to memory we just allocated cannot fail");

View File

@ -13,8 +13,8 @@ use rustc_target::abi::Size;
use rustc_target::spec::abi::Abi;
use super::{
AllocId, Allocation, CheckInAllocMsg, Frame, ImmTy, InterpCx, InterpResult, LocalValue,
MemPlace, Memory, MemoryKind, OpTy, Operand, PlaceTy, Pointer, Scalar, StackPopUnwind,
AllocId, AllocRange, Allocation, Frame, ImmTy, InterpCx, InterpResult, LocalValue, MemPlace,
Memory, MemoryKind, OpTy, Operand, PlaceTy, Pointer, Provenance, Scalar, StackPopUnwind,
};
/// Data returned by Machine::stack_pop,
@ -84,12 +84,8 @@ pub trait Machine<'mir, 'tcx>: Sized {
/// Additional memory kinds a machine wishes to distinguish from the builtin ones
type MemoryKind: Debug + std::fmt::Display + MayLeak + Eq + 'static;
/// Tag tracked alongside every pointer. This is used to implement "Stacked Borrows"
/// <https://www.ralfj.de/blog/2018/08/07/stacked-borrows.html>.
/// The `default()` is used for pointers to consts, statics, vtables and functions.
/// The `Debug` formatting is used for displaying pointers; we cannot use `Display`
/// as `()` does not implement that, but it should be "nice" output.
type PointerTag: Debug + Copy + Eq + Hash + 'static;
/// Pointers are "tagged" with provenance information; typically the `AllocId` they belong to.
type PointerTag: Provenance + Eq + Hash + 'static;
/// Machines can define extra (non-instance) things that represent values of function pointers.
/// For example, Miri uses this to return a function pointer from `dlsym`
@ -266,28 +262,39 @@ pub trait Machine<'mir, 'tcx>: Sized {
}
/// Return the `AllocId` for the given thread-local static in the current thread.
fn thread_local_static_alloc_id(
fn thread_local_static_base_pointer(
_ecx: &mut InterpCx<'mir, 'tcx, Self>,
def_id: DefId,
) -> InterpResult<'tcx, AllocId> {
) -> InterpResult<'tcx, Pointer<Self::PointerTag>> {
throw_unsup!(ThreadLocalStatic(def_id))
}
/// Return the `AllocId` backing the given `extern static`.
fn extern_static_alloc_id(
/// Return the root pointer for the given `extern static`.
fn extern_static_base_pointer(
mem: &Memory<'mir, 'tcx, Self>,
def_id: DefId,
) -> InterpResult<'tcx, AllocId> {
// Use the `AllocId` associated with the `DefId`. Any actual *access* will fail.
Ok(mem.tcx.create_static_alloc(def_id))
}
) -> InterpResult<'tcx, Pointer<Self::PointerTag>>;
/// Return the "base" tag for the given *global* allocation: the one that is used for direct
/// accesses to this static/const/fn allocation. If `id` is not a global allocation,
/// this will return an unusable tag (i.e., accesses will be UB)!
/// Return a "base" pointer for the given allocation: the one that is used for direct
/// accesses to this static/const/fn allocation, or the one returned from the heap allocator.
///
/// Called on the id returned by `thread_local_static_alloc_id` and `extern_static_alloc_id`, if needed.
fn tag_global_base_pointer(memory_extra: &Self::MemoryExtra, id: AllocId) -> Self::PointerTag;
/// Not called on `extern` or thread-local statics (those use the methods above).
fn tag_alloc_base_pointer(
mem: &Memory<'mir, 'tcx, Self>,
ptr: Pointer,
) -> Pointer<Self::PointerTag>;
/// "Int-to-pointer cast"
fn ptr_from_addr(
mem: &Memory<'mir, 'tcx, Self>,
addr: u64,
) -> Pointer<Option<Self::PointerTag>>;
/// Convert a pointer with provenance into an allocation-offset pair.
fn ptr_get_alloc(
mem: &Memory<'mir, 'tcx, Self>,
ptr: Pointer<Self::PointerTag>,
) -> (AllocId, Size);
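Taken together, `tag_alloc_base_pointer`, `ptr_from_addr`, and `ptr_get_alloc` let each machine pick its own pointer representation: how a fresh allocation's base pointer gets tagged, what an int-to-ptr cast produces, and how a tagged pointer resolves back to an allocation and offset. A toy machine that hands out absolute addresses might look roughly like this; the `ToyMachine` type, its address table, and the lookup are illustrative assumptions, not Miri's implementation:

```rust
use std::collections::HashMap;

type AllocId = u64;

/// Toy machine state: every allocation has an absolute base address and a size.
struct ToyMachine {
    allocs: HashMap<AllocId, (u64, u64)>, // alloc_id -> (base address, size)
}

/// A "tagged" pointer for this machine: provenance plus an absolute address.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct TaggedPtr {
    alloc_id: AllocId,
    addr: u64,
}

impl ToyMachine {
    /// `tag_alloc_base_pointer`: relative (alloc_id, offset 0) -> tagged pointer.
    fn tag_alloc_base_pointer(&self, alloc_id: AllocId) -> TaggedPtr {
        TaggedPtr { alloc_id, addr: self.allocs[&alloc_id].0 }
    }

    /// `ptr_from_addr`: an int-to-ptr cast; the provenance may be unknown.
    fn ptr_from_addr(&self, addr: u64) -> (Option<AllocId>, u64) {
        let prov = self
            .allocs
            .iter()
            .find(|&(_, &(base, size))| base <= addr && addr < base + size)
            .map(|(&id, _)| id);
        (prov, addr)
    }

    /// `ptr_get_alloc`: tagged pointer -> (allocation, relative offset).
    fn ptr_get_alloc(&self, ptr: TaggedPtr) -> (AllocId, u64) {
        (ptr.alloc_id, ptr.addr - self.allocs[&ptr.alloc_id].0)
    }
}

fn main() {
    let m = ToyMachine { allocs: HashMap::from([(1, (0x1000, 16))]) };
    let base = m.tag_alloc_base_pointer(1);
    assert_eq!(m.ptr_get_alloc(base), (1, 0));
    assert_eq!(m.ptr_from_addr(0x1008), (Some(1), 0x1008));
    assert_eq!(m.ptr_from_addr(0x2000), (None, 0x2000));
}
```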
/// Called to initialize the "extra" state of an allocation and make the pointers
/// it contains (in relocations) tagged. The way we construct allocations is
@ -302,16 +309,12 @@ pub trait Machine<'mir, 'tcx>: Sized {
/// allocation (because a copy had to be done to add tags or metadata), machine memory will
/// cache the result. (This relies on `AllocMap::get_or` being able to add the
/// owned allocation to the map even when the map is shared.)
///
/// Also return the "base" tag to use for this allocation: the one that is used for direct
/// accesses to this allocation. If `kind == STATIC_KIND`, this tag must be consistent
/// with `tag_global_base_pointer`.
fn init_allocation_extra<'b>(
memory_extra: &Self::MemoryExtra,
mem: &Memory<'mir, 'tcx, Self>,
id: AllocId,
alloc: Cow<'b, Allocation>,
kind: Option<MemoryKind<Self::MemoryKind>>,
) -> (Cow<'b, Allocation<Self::PointerTag, Self::AllocExtra>>, Self::PointerTag);
) -> Cow<'b, Allocation<Self::PointerTag, Self::AllocExtra>>;
/// Hook for performing extra checks on a memory read access.
///
@ -322,8 +325,8 @@ pub trait Machine<'mir, 'tcx>: Sized {
fn memory_read(
_memory_extra: &Self::MemoryExtra,
_alloc_extra: &Self::AllocExtra,
_ptr: Pointer<Self::PointerTag>,
_size: Size,
_tag: Self::PointerTag,
_range: AllocRange,
) -> InterpResult<'tcx> {
Ok(())
}
@ -333,8 +336,8 @@ pub trait Machine<'mir, 'tcx>: Sized {
fn memory_written(
_memory_extra: &mut Self::MemoryExtra,
_alloc_extra: &mut Self::AllocExtra,
_ptr: Pointer<Self::PointerTag>,
_size: Size,
_tag: Self::PointerTag,
_range: AllocRange,
) -> InterpResult<'tcx> {
Ok(())
}
@ -344,17 +347,8 @@ pub trait Machine<'mir, 'tcx>: Sized {
fn memory_deallocated(
_memory_extra: &mut Self::MemoryExtra,
_alloc_extra: &mut Self::AllocExtra,
_ptr: Pointer<Self::PointerTag>,
_size: Size,
) -> InterpResult<'tcx> {
Ok(())
}
/// Called after initializing static memory using the interpreter.
fn after_static_mem_initialized(
_ecx: &mut InterpCx<'mir, 'tcx, Self>,
_ptr: Pointer<Self::PointerTag>,
_size: Size,
_tag: Self::PointerTag,
_range: AllocRange,
) -> InterpResult<'tcx> {
Ok(())
}
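The read, write, and deallocation hooks now receive the pointer's tag plus an `AllocRange` (an offset and size within a single allocation) instead of a whole pointer and a bare size. A minimal sketch of a machine-side consumer of that shape, here just recording accesses; the `AccessLog` type and the `u64` tags are stand-ins:

```rust
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct AllocRange {
    start: u64,
    size: u64,
}

fn alloc_range(start: u64, size: u64) -> AllocRange {
    AllocRange { start, size }
}

#[derive(Default)]
struct AccessLog {
    reads: Vec<(u64, AllocRange)>,  // (tag, range)
    writes: Vec<(u64, AllocRange)>,
}

impl AccessLog {
    fn memory_read(&mut self, tag: u64, range: AllocRange) {
        self.reads.push((tag, range));
    }
    fn memory_written(&mut self, tag: u64, range: AllocRange) {
        self.writes.push((tag, range));
    }
}

fn main() {
    let mut log = AccessLog::default();
    // A 4-byte read at offset 8 and a 2-byte write at offset 0 of some allocation.
    log.memory_read(1, alloc_range(8, 4));
    log.memory_written(1, alloc_range(0, 2));
    assert_eq!(log.reads[0].1.size, 4);
    assert_eq!(log.writes[0].1.start, 0);
}
```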
@ -399,32 +393,12 @@ pub trait Machine<'mir, 'tcx>: Sized {
// By default, we do not support unwinding from panics
Ok(StackPopJump::Normal)
}
fn int_to_ptr(
_mem: &Memory<'mir, 'tcx, Self>,
int: u64,
) -> InterpResult<'tcx, Pointer<Self::PointerTag>> {
Err((if int == 0 {
// This is UB, seriously.
// (`DanglingIntPointer` with these exact arguments has special printing code.)
err_ub!(DanglingIntPointer(0, CheckInAllocMsg::InboundsTest))
} else {
// This is just something we cannot support during const-eval.
err_unsup!(ReadBytesAsPointer)
})
.into())
}
fn ptr_to_int(
_mem: &Memory<'mir, 'tcx, Self>,
_ptr: Pointer<Self::PointerTag>,
) -> InterpResult<'tcx, u64>;
}
// A lot of the flexibility above is just needed for `Miri`, but all "compile-time" machines
// (CTFE and ConstProp) use the same instance. Here, we share that code.
pub macro compile_time_machine(<$mir: lifetime, $tcx: lifetime>) {
type PointerTag = ();
type PointerTag = AllocId;
type ExtraFnVal = !;
type MemoryMap =
@ -466,20 +440,40 @@ pub macro compile_time_machine(<$mir: lifetime, $tcx: lifetime>) {
#[inline(always)]
fn init_allocation_extra<'b>(
_memory_extra: &Self::MemoryExtra,
_mem: &Memory<$mir, $tcx, Self>,
_id: AllocId,
alloc: Cow<'b, Allocation>,
_kind: Option<MemoryKind<Self::MemoryKind>>,
) -> (Cow<'b, Allocation<Self::PointerTag>>, Self::PointerTag) {
) -> Cow<'b, Allocation<Self::PointerTag>> {
// We do not use a tag so we can just cheaply forward the allocation
(alloc, ())
alloc
}
fn extern_static_base_pointer(
mem: &Memory<$mir, $tcx, Self>,
def_id: DefId,
) -> InterpResult<$tcx, Pointer> {
// Use the `AllocId` associated with the `DefId`. Any actual *access* will fail.
Ok(Pointer::new(mem.tcx.create_static_alloc(def_id), Size::ZERO))
}
#[inline(always)]
fn tag_global_base_pointer(
_memory_extra: &Self::MemoryExtra,
_id: AllocId,
) -> Self::PointerTag {
()
fn tag_alloc_base_pointer(
_mem: &Memory<$mir, $tcx, Self>,
ptr: Pointer<AllocId>,
) -> Pointer<AllocId> {
ptr
}
#[inline(always)]
fn ptr_from_addr(_mem: &Memory<$mir, $tcx, Self>, addr: u64) -> Pointer<Option<AllocId>> {
Pointer::new(None, Size::from_bytes(addr))
}
#[inline(always)]
fn ptr_get_alloc(_mem: &Memory<$mir, $tcx, Self>, ptr: Pointer<AllocId>) -> (AllocId, Size) {
// We know `offset` is relative to the allocation, so we can use `into_parts`.
let (alloc_id, offset) = ptr.into_parts();
(alloc_id, offset)
}
}
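For the compile-time machines the defaults stay deliberately trivial: the tag is the `AllocId` itself, offsets remain relative to the allocation (so `into_parts` is all `ptr_get_alloc` needs), and an int-to-ptr cast can only yield a pointer without provenance, which no later access will accept. A self-contained sketch of those defaults on simplified types:

```rust
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct AllocId(u64);

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct Pointer<Prov> {
    provenance: Prov,
    offset: u64,
}

impl<Prov> Pointer<Prov> {
    fn new(provenance: Prov, offset: u64) -> Self {
        Pointer { provenance, offset }
    }
    /// Only sensible when the caller knows what the offset is relative to.
    fn into_parts(self) -> (Prov, u64) {
        (self.provenance, self.offset)
    }
}

/// `ptr_get_alloc` for a compile-time machine: the offset is already relative,
/// so splitting the pointer is all there is to do.
fn ptr_get_alloc(ptr: Pointer<AllocId>) -> (AllocId, u64) {
    ptr.into_parts()
}

/// `ptr_from_addr` for a compile-time machine: a raw integer carries no
/// provenance, so the result cannot be used for an actual memory access.
fn ptr_from_addr(addr: u64) -> Pointer<Option<AllocId>> {
    Pointer::new(None, addr)
}

fn main() {
    assert_eq!(ptr_get_alloc(Pointer::new(AllocId(3), 16)), (AllocId(3), 16));
    assert_eq!(ptr_from_addr(0x40).provenance, None);
}
```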

View File

@ -9,7 +9,7 @@
use std::assert_matches::assert_matches;
use std::borrow::Cow;
use std::collections::VecDeque;
use std::convert::{TryFrom, TryInto};
use std::convert::TryFrom;
use std::fmt;
use std::ptr;
@ -20,7 +20,8 @@ use rustc_target::abi::{Align, HasDataLayout, Size, TargetDataLayout};
use super::{
alloc_range, AllocId, AllocMap, AllocRange, Allocation, CheckInAllocMsg, GlobalAlloc,
InterpResult, Machine, MayLeak, Pointer, PointerArithmetic, Scalar, ScalarMaybeUninit,
InterpResult, Machine, MayLeak, Pointer, PointerArithmetic, Provenance, Scalar,
ScalarMaybeUninit,
};
use crate::util::pretty;
@ -163,25 +164,22 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
#[inline]
pub fn global_base_pointer(
&self,
mut ptr: Pointer,
ptr: Pointer<AllocId>,
) -> InterpResult<'tcx, Pointer<M::PointerTag>> {
// We know `offset` is relative to the allocation, so we can use `into_parts`.
let (alloc_id, offset) = ptr.into_parts();
// We need to handle `extern static`.
let ptr = match self.tcx.get_global_alloc(ptr.alloc_id) {
match self.tcx.get_global_alloc(alloc_id) {
Some(GlobalAlloc::Static(def_id)) if self.tcx.is_thread_local_static(def_id) => {
bug!("global memory cannot point to thread-local static")
}
Some(GlobalAlloc::Static(def_id)) if self.tcx.is_foreign_item(def_id) => {
ptr.alloc_id = M::extern_static_alloc_id(self, def_id)?;
ptr
return M::extern_static_base_pointer(self, def_id);
}
_ => {
// No need to change the `AllocId`.
ptr
_ => {}
}
};
// And we need to get the tag.
let tag = M::tag_global_base_pointer(&self.extra, ptr.alloc_id);
Ok(ptr.with_tag(tag))
Ok(M::tag_alloc_base_pointer(self, Pointer::new(alloc_id, offset)))
}
pub fn create_fn_alloc(
@ -235,21 +233,21 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
M::GLOBAL_KIND.map(MemoryKind::Machine),
"dynamically allocating global memory"
);
// This is a new allocation, not a new global one, so no `global_base_ptr`.
let (alloc, tag) = M::init_allocation_extra(&self.extra, id, Cow::Owned(alloc), Some(kind));
let alloc = M::init_allocation_extra(self, id, Cow::Owned(alloc), Some(kind));
self.alloc_map.insert(id, (kind, alloc.into_owned()));
Pointer::from(id).with_tag(tag)
M::tag_alloc_base_pointer(self, Pointer::from(id))
}
pub fn reallocate(
&mut self,
ptr: Pointer<M::PointerTag>,
ptr: Pointer<Option<M::PointerTag>>,
old_size_and_align: Option<(Size, Align)>,
new_size: Size,
new_align: Align,
kind: MemoryKind<M::MemoryKind>,
) -> InterpResult<'tcx, Pointer<M::PointerTag>> {
if ptr.offset.bytes() != 0 {
let (alloc_id, offset, ptr) = self.ptr_get_alloc(ptr)?;
if offset.bytes() != 0 {
throw_ub_format!(
"reallocating {:?} which does not point to the beginning of an object",
ptr
@ -261,7 +259,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
let new_ptr = self.allocate(new_size, new_align, kind)?;
let old_size = match old_size_and_align {
Some((size, _align)) => size,
None => self.get_raw(ptr.alloc_id)?.size(),
None => self.get_raw(alloc_id)?.size(),
};
// This will also call the access hooks.
self.copy(
@ -272,50 +270,51 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
old_size.min(new_size),
/*nonoverlapping*/ true,
)?;
self.deallocate(ptr, old_size_and_align, kind)?;
self.deallocate(ptr.into(), old_size_and_align, kind)?;
Ok(new_ptr)
}
pub fn deallocate(
&mut self,
ptr: Pointer<M::PointerTag>,
ptr: Pointer<Option<M::PointerTag>>,
old_size_and_align: Option<(Size, Align)>,
kind: MemoryKind<M::MemoryKind>,
) -> InterpResult<'tcx> {
trace!("deallocating: {}", ptr.alloc_id);
let (alloc_id, offset, ptr) = self.ptr_get_alloc(ptr)?;
trace!("deallocating: {}", alloc_id);
if ptr.offset.bytes() != 0 {
if offset.bytes() != 0 {
throw_ub_format!(
"deallocating {:?} which does not point to the beginning of an object",
ptr
);
}
let (alloc_kind, mut alloc) = match self.alloc_map.remove(&ptr.alloc_id) {
let (alloc_kind, mut alloc) = match self.alloc_map.remove(&alloc_id) {
Some(alloc) => alloc,
None => {
// Deallocating global memory -- always an error
return Err(match self.tcx.get_global_alloc(ptr.alloc_id) {
return Err(match self.tcx.get_global_alloc(alloc_id) {
Some(GlobalAlloc::Function(..)) => {
err_ub_format!("deallocating {}, which is a function", ptr.alloc_id)
err_ub_format!("deallocating {}, which is a function", alloc_id)
}
Some(GlobalAlloc::Static(..) | GlobalAlloc::Memory(..)) => {
err_ub_format!("deallocating {}, which is static memory", ptr.alloc_id)
err_ub_format!("deallocating {}, which is static memory", alloc_id)
}
None => err_ub!(PointerUseAfterFree(ptr.alloc_id)),
None => err_ub!(PointerUseAfterFree(alloc_id)),
}
.into());
}
};
if alloc.mutability == Mutability::Not {
throw_ub_format!("deallocating immutable allocation {}", ptr.alloc_id);
throw_ub_format!("deallocating immutable allocation {}", alloc_id);
}
if alloc_kind != kind {
throw_ub_format!(
"deallocating {}, which is {} memory, using {} deallocation operation",
ptr.alloc_id,
alloc_id,
alloc_kind,
kind
);
@ -324,7 +323,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
if size != alloc.size() || align != alloc.align {
throw_ub_format!(
"incorrect layout on deallocation: {} has size {} and alignment {}, but gave size {} and alignment {}",
ptr.alloc_id,
alloc_id,
alloc.size().bytes(),
alloc.align.bytes(),
size.bytes(),
@ -335,10 +334,15 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
// Let the machine take some extra action
let size = alloc.size();
M::memory_deallocated(&mut self.extra, &mut alloc.extra, ptr, size)?;
M::memory_deallocated(
&mut self.extra,
&mut alloc.extra,
ptr.provenance,
alloc_range(Size::ZERO, size),
)?;
// Don't forget to remember size and align of this now-dead allocation
let old = self.dead_alloc_map.insert(ptr.alloc_id, (size, alloc.align));
let old = self.dead_alloc_map.insert(alloc_id, (size, alloc.align));
if old.is_some() {
bug!("Nothing can be deallocated twice");
}
@ -346,52 +350,69 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
Ok(())
}
/// Internal helper function for APIs that offer memory access based on `Scalar` pointers.
/// Internal helper function to determine the allocation and offset of a pointer (if any).
#[inline(always)]
pub(super) fn check_ptr_access(
fn get_ptr_access(
&self,
sptr: Scalar<M::PointerTag>,
ptr: Pointer<Option<M::PointerTag>>,
size: Size,
align: Align,
) -> InterpResult<'tcx, Option<Pointer<M::PointerTag>>> {
) -> InterpResult<'tcx, Option<(AllocId, Size, Pointer<M::PointerTag>)>> {
let align = M::enforce_alignment(&self.extra).then_some(align);
self.check_and_deref_ptr(sptr, size, align, CheckInAllocMsg::MemoryAccessTest, |ptr| {
self.check_and_deref_ptr(
ptr,
size,
align,
CheckInAllocMsg::MemoryAccessTest,
|alloc_id, offset, ptr| {
let (size, align) =
self.get_size_and_align(ptr.alloc_id, AllocCheck::Dereferenceable)?;
Ok((size, align, ptr))
})
self.get_size_and_align(alloc_id, AllocCheck::Dereferenceable)?;
Ok((size, align, (alloc_id, offset, ptr)))
},
)
}
/// Check if the given scalar is allowed to do a memory access of given `size` and `align`
/// Check if the given pointer points to live memory of given `size` and `align`
/// (ignoring `M::enforce_alignment`). The caller can control the error message for the
/// out-of-bounds case.
#[inline(always)]
pub fn check_ptr_access_align(
&self,
sptr: Scalar<M::PointerTag>,
ptr: Pointer<Option<M::PointerTag>>,
size: Size,
align: Align,
msg: CheckInAllocMsg,
) -> InterpResult<'tcx> {
self.check_and_deref_ptr(sptr, size, Some(align), msg, |ptr| {
let (size, align) =
self.get_size_and_align(ptr.alloc_id, AllocCheck::Dereferenceable)?;
self.check_and_deref_ptr(ptr, size, Some(align), msg, |alloc_id, _, _| {
let check = match msg {
CheckInAllocMsg::DerefTest | CheckInAllocMsg::MemoryAccessTest => {
AllocCheck::Dereferenceable
}
CheckInAllocMsg::PointerArithmeticTest | CheckInAllocMsg::InboundsTest => {
AllocCheck::Live
}
};
let (size, align) = self.get_size_and_align(alloc_id, check)?;
Ok((size, align, ()))
})?;
Ok(())
}
/// Low-level helper function to check if a ptr is in-bounds and potentially return a reference
/// to the allocation it points to. Supports both shared and mutable references, to the actual
/// to the allocation it points to. Supports both shared and mutable references, as the actual
/// checking is offloaded to a helper closure. `align` defines whether and which alignment check
/// is done. Returns `None` for size 0, and otherwise `Some` of what `alloc_size` returned.
fn check_and_deref_ptr<T>(
&self,
sptr: Scalar<M::PointerTag>,
ptr: Pointer<Option<M::PointerTag>>,
size: Size,
align: Option<Align>,
msg: CheckInAllocMsg,
alloc_size: impl FnOnce(Pointer<M::PointerTag>) -> InterpResult<'tcx, (Size, Align, T)>,
alloc_size: impl FnOnce(
AllocId,
Size,
Pointer<M::PointerTag>,
) -> InterpResult<'tcx, (Size, Align, T)>,
) -> InterpResult<'tcx, Option<T>> {
fn check_offset_align(offset: u64, align: Align) -> InterpResult<'static> {
if offset % align.bytes() == 0 {
@ -406,53 +427,50 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
}
}
// Normalize to a `Pointer` if we definitely need one.
let normalized = if size.bytes() == 0 {
// Can be an integer, just take what we got. We do NOT `force_bits` here;
// if this is already a `Pointer` we want to do the bounds checks!
sptr
// Extract from the pointer an `Option<AllocId>` and an offset, which is relative to the
// allocation or (if that is `None`) an absolute address.
let ptr_or_addr = if size.bytes() == 0 {
// Let's see what we can do, but don't throw errors if there's nothing there.
self.ptr_try_get_alloc(ptr)
} else {
// A "real" access, we must get a pointer to be able to check the bounds.
Scalar::from(self.force_ptr(sptr)?)
// A "real" access, we insist on getting an `AllocId`.
Ok(self.ptr_get_alloc(ptr)?)
};
Ok(match normalized.to_bits_or_ptr(self.pointer_size(), self) {
Ok(bits) => {
let bits = u64::try_from(bits).unwrap(); // it's ptr-sized
assert!(size.bytes() == 0);
Ok(match ptr_or_addr {
Err(addr) => {
// No memory is actually being accessed.
debug_assert!(size.bytes() == 0);
// Must be non-null.
if bits == 0 {
if addr == 0 {
throw_ub!(DanglingIntPointer(0, msg))
}
// Must be aligned.
if let Some(align) = align {
check_offset_align(bits, align)?;
check_offset_align(addr, align)?;
}
None
}
Err(ptr) => {
let (allocation_size, alloc_align, ret_val) = alloc_size(ptr)?;
Ok((alloc_id, offset, ptr)) => {
let (allocation_size, alloc_align, ret_val) = alloc_size(alloc_id, offset, ptr)?;
// Test bounds. This also ensures non-null.
// It is sufficient to check this for the end pointer. The addition
// checks for overflow.
let end_ptr = ptr.offset(size, self)?;
if end_ptr.offset > allocation_size {
// equal is okay!
throw_ub!(PointerOutOfBounds { ptr: end_ptr.erase_tag(), msg, allocation_size })
// It is sufficient to check this for the end pointer. Also check for overflow!
if offset.checked_add(size, &self.tcx).map_or(true, |end| end > allocation_size) {
throw_ub!(PointerOutOfBounds { alloc_id, offset, size, allocation_size, msg })
}
// Test align. Check this last; if both bounds and alignment are violated
// we want the error to be about the bounds.
if let Some(align) = align {
if M::force_int_for_alignment_check(&self.extra) {
let bits = self
.force_bits(ptr.into(), self.pointer_size())
let addr = Scalar::from_pointer(ptr, &self.tcx)
.to_machine_usize(&self.tcx)
.expect("ptr-to-int cast for align check should never fail");
check_offset_align(bits.try_into().unwrap(), align)?;
check_offset_align(addr, align)?;
} else {
// Check allocation alignment and offset alignment.
if alloc_align.bytes() < align.bytes() {
throw_ub!(AlignmentCheckFailed { has: alloc_align, required: align });
}
check_offset_align(ptr.offset.bytes(), align)?;
check_offset_align(offset.bytes(), align)?;
}
}
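`check_and_deref_ptr` now separates two cases: a pointer with no provenance (only reachable for size-0 accesses) must merely be non-null and aligned, while an allocation-backed pointer is checked against the allocation size with a checked add, and only then for alignment, so that a combined violation reports the bounds error. A compact sketch of that decision on plain integers:

```rust
fn check_access(
    resolved: Result<(u64, u64), u64>, // Ok((offset_in_alloc, alloc_size)) or Err(absolute address)
    size: u64,
    align: u64,
) -> Result<(), String> {
    let aligned = |x: u64| {
        if x % align == 0 { Ok(()) } else { Err(format!("bad alignment of {}", x)) }
    };
    match resolved {
        Err(addr) => {
            // No memory is actually touched, so only a zero-sized access lands here,
            // but it must still be non-null and aligned.
            assert_eq!(size, 0);
            if addr == 0 {
                return Err("dangling null pointer".into());
            }
            aligned(addr)
        }
        Ok((offset, alloc_size)) => {
            // Bounds first; the checked add rules out overflow, and one-past-the-end is fine.
            match offset.checked_add(size) {
                Some(end) if end <= alloc_size => {}
                _ => return Err("pointer out of bounds".into()),
            }
            // Alignment last, so a bounds error wins when both are violated.
            aligned(offset)
        }
    }
}

fn main() {
    assert!(check_access(Ok((8, 16)), 8, 4).is_ok()); // reaches exactly to the end
    assert!(check_access(Ok((12, 16)), 8, 4).is_err()); // out of bounds
    assert!(check_access(Ok((2, 16)), 4, 2).is_ok()); // aligned to 2
    assert!(check_access(Ok((2, 16)), 4, 4).is_err()); // misaligned
    assert!(check_access(Err(0), 0, 1).is_err()); // null
    assert!(check_access(Err(64), 0, 8).is_ok()); // aligned, non-null integer address
}
```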
@ -464,13 +482,18 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
}
/// Test if the pointer might be null.
pub fn ptr_may_be_null(&self, ptr: Pointer<M::PointerTag>) -> bool {
pub fn ptr_may_be_null(&self, ptr: Pointer<Option<M::PointerTag>>) -> bool {
match self.ptr_try_get_alloc(ptr) {
Ok((alloc_id, offset, _)) => {
let (size, _align) = self
.get_size_and_align(ptr.alloc_id, AllocCheck::MaybeDead)
.get_size_and_align(alloc_id, AllocCheck::MaybeDead)
.expect("alloc info with MaybeDead cannot fail");
// If the pointer is out-of-bounds, it may be null.
// Note that one-past-the-end (offset == size) is still inbounds, and never null.
ptr.offset > size
offset > size
}
Err(offset) => offset == 0,
}
}
}
@ -482,12 +505,11 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
/// this machine use the same pointer tag, so it is indirected through
/// `M::tag_allocation`.
fn get_global_alloc(
memory_extra: &M::MemoryExtra,
tcx: TyCtxt<'tcx>,
&self,
id: AllocId,
is_write: bool,
) -> InterpResult<'tcx, Cow<'tcx, Allocation<M::PointerTag, M::AllocExtra>>> {
let (alloc, def_id) = match tcx.get_global_alloc(id) {
let (alloc, def_id) = match self.tcx.get_global_alloc(id) {
Some(GlobalAlloc::Memory(mem)) => {
// Memory of a constant or promoted or anonymous memory referenced by a static.
(mem, None)
@ -495,8 +517,8 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
Some(GlobalAlloc::Function(..)) => throw_ub!(DerefFunctionPointer(id)),
None => throw_ub!(PointerUseAfterFree(id)),
Some(GlobalAlloc::Static(def_id)) => {
assert!(tcx.is_static(def_id));
assert!(!tcx.is_thread_local_static(def_id));
assert!(self.tcx.is_static(def_id));
assert!(!self.tcx.is_thread_local_static(def_id));
// Notice that every static has two `AllocId` that will resolve to the same
// thing here: one maps to `GlobalAlloc::Static`, this is the "lazy" ID,
// and the other one maps to `GlobalAlloc::Memory`, this is returned by
@ -507,24 +529,22 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
// The `GlobalAlloc::Memory` branch here is still reachable though; when a static
// contains a reference to memory that was created during its evaluation (i.e., not
// to another static), those inner references only exist in "resolved" form.
if tcx.is_foreign_item(def_id) {
if self.tcx.is_foreign_item(def_id) {
throw_unsup!(ReadExternStatic(def_id));
}
(tcx.eval_static_initializer(def_id)?, Some(def_id))
(self.tcx.eval_static_initializer(def_id)?, Some(def_id))
}
};
M::before_access_global(memory_extra, id, alloc, def_id, is_write)?;
M::before_access_global(&self.extra, id, alloc, def_id, is_write)?;
let alloc = Cow::Borrowed(alloc);
// We got tcx memory. Let the machine initialize its "extra" stuff.
let (alloc, tag) = M::init_allocation_extra(
memory_extra,
let alloc = M::init_allocation_extra(
self,
id, // always use the ID we got as input, not the "hidden" one.
alloc,
M::GLOBAL_KIND.map(MemoryKind::Machine),
);
// Sanity check that this is the same pointer we would have gotten via `global_base_pointer`.
debug_assert_eq!(tag, M::tag_global_base_pointer(memory_extra, id));
Ok(alloc)
}
@ -539,8 +559,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
// `get_global_alloc` that we can actually use directly without inserting anything anywhere.
// So the error type is `InterpResult<'tcx, &Allocation<M::PointerTag>>`.
let a = self.alloc_map.get_or(id, || {
let alloc = Self::get_global_alloc(&self.extra, self.tcx, id, /*is_write*/ false)
.map_err(Err)?;
let alloc = self.get_global_alloc(id, /*is_write*/ false).map_err(Err)?;
match alloc {
Cow::Borrowed(alloc) => {
// We got a ref, cheaply return that as an "error" so that the
@ -567,30 +586,30 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
/// "Safe" (bounds and align-checked) allocation access.
pub fn get<'a>(
&'a self,
sptr: Scalar<M::PointerTag>,
ptr: Pointer<Option<M::PointerTag>>,
size: Size,
align: Align,
) -> InterpResult<'tcx, Option<AllocRef<'a, 'tcx, M::PointerTag, M::AllocExtra>>> {
let align = M::enforce_alignment(&self.extra).then_some(align);
let ptr_and_alloc = self.check_and_deref_ptr(
sptr,
ptr,
size,
align,
CheckInAllocMsg::MemoryAccessTest,
|ptr| {
let alloc = self.get_raw(ptr.alloc_id)?;
Ok((alloc.size(), alloc.align, (ptr, alloc)))
|alloc_id, offset, ptr| {
let alloc = self.get_raw(alloc_id)?;
Ok((alloc.size(), alloc.align, (alloc_id, offset, ptr, alloc)))
},
)?;
if let Some((ptr, alloc)) = ptr_and_alloc {
M::memory_read(&self.extra, &alloc.extra, ptr, size)?;
let range = alloc_range(ptr.offset, size);
Ok(Some(AllocRef { alloc, range, tcx: self.tcx, alloc_id: ptr.alloc_id }))
if let Some((alloc_id, offset, ptr, alloc)) = ptr_and_alloc {
let range = alloc_range(offset, size);
M::memory_read(&self.extra, &alloc.extra, ptr.provenance, range)?;
Ok(Some(AllocRef { alloc, range, tcx: self.tcx, alloc_id }))
} else {
// Even in this branch we have to be sure that we actually access the allocation, in
// order to ensure that `static FOO: Type = FOO;` causes a cycle error instead of
// magically pulling *any* ZST value from the ether. However, the `get_raw` above is
// always called when `sptr` is truly a `Pointer`, so we are good.
// always called when `ptr` has an `AllocId`.
Ok(None)
}
}
@ -610,48 +629,44 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
id: AllocId,
) -> InterpResult<'tcx, (&mut Allocation<M::PointerTag, M::AllocExtra>, &mut M::MemoryExtra)>
{
let tcx = self.tcx;
let memory_extra = &mut self.extra;
let a = self.alloc_map.get_mut_or(id, || {
// Need to make a copy, even if `get_global_alloc` is able
// to give us a cheap reference.
let alloc = Self::get_global_alloc(memory_extra, tcx, id, /*is_write*/ true)?;
// We have "NLL problem case #3" here, which cannot be worked around without loss of
// efficiency even for the common case where the key is in the map.
// <https://rust-lang.github.io/rfcs/2094-nll.html#problem-case-3-conditional-control-flow-across-functions>
// (Cannot use `get_mut_or` since `get_global_alloc` needs `&self`.)
if self.alloc_map.get_mut(id).is_none() {
// Slow path.
// Allocation not found locally, go look global.
let alloc = self.get_global_alloc(id, /*is_write*/ true)?;
let kind = M::GLOBAL_KIND.expect(
"I got a global allocation that I have to copy but the machine does \
not expect that to happen",
);
Ok((MemoryKind::Machine(kind), alloc.into_owned()))
});
// Unpack the error type manually because type inference doesn't
// work otherwise (and we cannot help it because `impl Trait`)
match a {
Err(e) => Err(e),
Ok(a) => {
let a = &mut a.1;
if a.mutability == Mutability::Not {
self.alloc_map.insert(id, (MemoryKind::Machine(kind), alloc.into_owned()));
}
let (_kind, alloc) = self.alloc_map.get_mut(id).unwrap();
if alloc.mutability == Mutability::Not {
throw_ub!(WriteToReadOnly(id))
}
Ok((a, memory_extra))
}
}
Ok((alloc, &mut self.extra))
}
/// "Safe" (bounds and align-checked) allocation access.
pub fn get_mut<'a>(
&'a mut self,
sptr: Scalar<M::PointerTag>,
ptr: Pointer<Option<M::PointerTag>>,
size: Size,
align: Align,
) -> InterpResult<'tcx, Option<AllocRefMut<'a, 'tcx, M::PointerTag, M::AllocExtra>>> {
let ptr = self.check_ptr_access(sptr, size, align)?;
if let Some(ptr) = ptr {
let parts = self.get_ptr_access(ptr, size, align)?;
if let Some((alloc_id, offset, ptr)) = parts {
let tcx = self.tcx;
// FIXME: can we somehow avoid looking up the allocation twice here?
// We cannot call `get_raw_mut` inside `check_and_deref_ptr` as that would duplicate `&mut self`.
let (alloc, extra) = self.get_raw_mut(ptr.alloc_id)?;
M::memory_written(extra, &mut alloc.extra, ptr, size)?;
let range = alloc_range(ptr.offset, size);
Ok(Some(AllocRefMut { alloc, range, tcx, alloc_id: ptr.alloc_id }))
let (alloc, extra) = self.get_raw_mut(alloc_id)?;
let range = alloc_range(offset, size);
M::memory_written(extra, &mut alloc.extra, ptr.provenance, range)?;
Ok(Some(AllocRefMut { alloc, range, tcx, alloc_id }))
} else {
Ok(None)
}
@ -728,7 +743,6 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
}
fn get_fn_alloc(&self, id: AllocId) -> Option<FnVal<'tcx, M::ExtraFnVal>> {
trace!("reading fn ptr: {}", id);
if let Some(extra) = self.extra_fn_ptr_map.get(&id) {
Some(FnVal::Other(*extra))
} else {
@ -741,14 +755,15 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
pub fn get_fn(
&self,
ptr: Scalar<M::PointerTag>,
ptr: Pointer<Option<M::PointerTag>>,
) -> InterpResult<'tcx, FnVal<'tcx, M::ExtraFnVal>> {
let ptr = self.force_ptr(ptr)?; // We definitely need a pointer value.
if ptr.offset.bytes() != 0 {
throw_ub!(InvalidFunctionPointer(ptr.erase_tag()))
trace!("get_fn({:?})", ptr);
let (alloc_id, offset, _ptr) = self.ptr_get_alloc(ptr)?;
if offset.bytes() != 0 {
throw_ub!(InvalidFunctionPointer(Pointer::new(alloc_id, offset)))
}
self.get_fn_alloc(ptr.alloc_id)
.ok_or_else(|| err_ub!(InvalidFunctionPointer(ptr.erase_tag())).into())
self.get_fn_alloc(alloc_id)
.ok_or_else(|| err_ub!(InvalidFunctionPointer(Pointer::new(alloc_id, offset))).into())
}
pub fn mark_immutable(&mut self, id: AllocId) -> InterpResult<'tcx> {
@ -787,7 +802,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
if reachable.insert(id) {
// This is a new allocation, add its relocations to `todo`.
if let Some((_, alloc)) = self.alloc_map.get(id) {
todo.extend(alloc.relocations().values().map(|&(_, target_id)| target_id));
todo.extend(alloc.relocations().values().map(|tag| tag.get_alloc_id()));
}
}
}
@ -821,14 +836,14 @@ pub struct DumpAllocs<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> {
impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> std::fmt::Debug for DumpAllocs<'a, 'mir, 'tcx, M> {
fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
// Cannot be a closure because it is generic in `Tag`, `Extra`.
fn write_allocation_track_relocs<'tcx, Tag: Copy + fmt::Debug, Extra>(
fn write_allocation_track_relocs<'tcx, Tag: Provenance, Extra>(
fmt: &mut std::fmt::Formatter<'_>,
tcx: TyCtxt<'tcx>,
allocs_to_print: &mut VecDeque<AllocId>,
alloc: &Allocation<Tag, Extra>,
) -> std::fmt::Result {
for &(_, target_id) in alloc.relocations().values() {
allocs_to_print.push_back(target_id);
for alloc_id in alloc.relocations().values().map(|tag| tag.get_alloc_id()) {
allocs_to_print.push_back(alloc_id);
}
write!(fmt, "{}", pretty::display_allocation(tcx, alloc))
}
@ -931,8 +946,12 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
/// Reads the given number of bytes from memory. Returns them as a slice.
///
/// Performs appropriate bounds checks.
pub fn read_bytes(&self, sptr: Scalar<M::PointerTag>, size: Size) -> InterpResult<'tcx, &[u8]> {
let alloc_ref = match self.get(sptr, size, Align::ONE)? {
pub fn read_bytes(
&self,
ptr: Pointer<Option<M::PointerTag>>,
size: Size,
) -> InterpResult<'tcx, &[u8]> {
let alloc_ref = match self.get(ptr, size, Align::ONE)? {
Some(a) => a,
None => return Ok(&[]), // zero-sized access
};
@ -949,7 +968,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
/// Performs appropriate bounds checks.
pub fn write_bytes(
&mut self,
sptr: Scalar<M::PointerTag>,
ptr: Pointer<Option<M::PointerTag>>,
src: impl IntoIterator<Item = u8>,
) -> InterpResult<'tcx> {
let mut src = src.into_iter();
@ -958,7 +977,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
assert_eq!(lower, len, "can only write iterators with a precise length");
let size = Size::from_bytes(len);
let alloc_ref = match self.get_mut(sptr, size, Align::ONE)? {
let alloc_ref = match self.get_mut(ptr, size, Align::ONE)? {
Some(alloc_ref) => alloc_ref,
None => {
// zero-sized access
@ -985,9 +1004,9 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
pub fn copy(
&mut self,
src: Scalar<M::PointerTag>,
src: Pointer<Option<M::PointerTag>>,
src_align: Align,
dest: Scalar<M::PointerTag>,
dest: Pointer<Option<M::PointerTag>>,
dest_align: Align,
size: Size,
nonoverlapping: bool,
@ -997,9 +1016,9 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
pub fn copy_repeatedly(
&mut self,
src: Scalar<M::PointerTag>,
src: Pointer<Option<M::PointerTag>>,
src_align: Align,
dest: Scalar<M::PointerTag>,
dest: Pointer<Option<M::PointerTag>>,
dest_align: Align,
size: Size,
num_copies: u64,
@ -1007,22 +1026,23 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
) -> InterpResult<'tcx> {
let tcx = self.tcx;
// We need to do our own bounds-checks.
let src = self.check_ptr_access(src, size, src_align)?;
let dest = self.check_ptr_access(dest, size * num_copies, dest_align)?; // `Size` multiplication
let src_parts = self.get_ptr_access(src, size, src_align)?;
let dest_parts = self.get_ptr_access(dest, size * num_copies, dest_align)?; // `Size` multiplication
// FIXME: we look up both allocations twice here, once before for the `check_ptr_access`
// and once below to get the underlying `&[mut] Allocation`.
// Source alloc preparations and access hooks.
let src = match src {
let (src_alloc_id, src_offset, src) = match src_parts {
None => return Ok(()), // Zero-sized *source*, that means dst is also zero-sized and we have nothing to do.
Some(src_ptr) => src_ptr,
};
let src_alloc = self.get_raw(src.alloc_id)?;
M::memory_read(&self.extra, &src_alloc.extra, src, size)?;
let src_alloc = self.get_raw(src_alloc_id)?;
let src_range = alloc_range(src_offset, size);
M::memory_read(&self.extra, &src_alloc.extra, src.provenance, src_range)?;
// We need the `dest` ptr for the next operation, so we get it now.
// We already did the source checks and called the hooks so we are good to return early.
let dest = match dest {
let (dest_alloc_id, dest_offset, dest) = match dest_parts {
None => return Ok(()), // Zero-sized *destination*.
Some(dest_ptr) => dest_ptr,
};
@ -1032,26 +1052,21 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
// since we don't want to keep any relocations at the target.
// (`get_bytes_with_uninit_and_ptr` below checks that there are no
// relocations overlapping the edges; those would not be handled correctly).
let relocations = src_alloc.prepare_relocation_copy(
self,
alloc_range(src.offset, size),
dest.offset,
num_copies,
);
let relocations =
src_alloc.prepare_relocation_copy(self, src_range, dest_offset, num_copies);
// Prepare a copy of the initialization mask.
let compressed = src_alloc.compress_uninit_range(alloc_range(src.offset, size));
let compressed = src_alloc.compress_uninit_range(src_range);
// This checks relocation edges on the src.
let src_bytes = src_alloc
.get_bytes_with_uninit_and_ptr(&tcx, alloc_range(src.offset, size))
.map_err(|e| e.to_interp_error(src.alloc_id))?
.get_bytes_with_uninit_and_ptr(&tcx, src_range)
.map_err(|e| e.to_interp_error(src_alloc_id))?
.as_ptr(); // raw ptr, so we can also get a ptr to the destination allocation
// Destination alloc preparations and access hooks.
let (dest_alloc, extra) = self.get_raw_mut(dest.alloc_id)?;
M::memory_written(extra, &mut dest_alloc.extra, dest, size * num_copies)?;
let dest_bytes = dest_alloc
.get_bytes_mut_ptr(&tcx, alloc_range(dest.offset, size * num_copies))
.as_mut_ptr();
let (dest_alloc, extra) = self.get_raw_mut(dest_alloc_id)?;
let dest_range = alloc_range(dest_offset, size * num_copies);
M::memory_written(extra, &mut dest_alloc.extra, dest.provenance, dest_range)?;
let dest_bytes = dest_alloc.get_bytes_mut_ptr(&tcx, dest_range).as_mut_ptr();
if compressed.no_bytes_init() {
// Fast path: If all bytes are `uninit` then there is nothing to copy. The target range
@ -1060,7 +1075,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
// This also avoids writing to the target bytes so that the backing allocation is never
// touched if the bytes stay uninitialized for the whole interpreter execution. On contemporary
// operating systems this can avoid physically allocating the page.
dest_alloc.mark_init(alloc_range(dest.offset, size * num_copies), false); // `Size` multiplication
dest_alloc.mark_init(dest_range, false); // `Size` multiplication
dest_alloc.mark_relocation_range(relocations);
return Ok(());
}
@ -1071,11 +1086,11 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
// The pointers above remain valid even if the `HashMap` table is moved around because they
// point into the `Vec` storing the bytes.
unsafe {
if src.alloc_id == dest.alloc_id {
if src_alloc_id == dest_alloc_id {
if nonoverlapping {
// `Size` additions
if (src.offset <= dest.offset && src.offset + size > dest.offset)
|| (dest.offset <= src.offset && dest.offset + size > src.offset)
if (src_offset <= dest_offset && src_offset + size > dest_offset)
|| (dest_offset <= src_offset && dest_offset + size > src_offset)
{
throw_ub_format!("copy_nonoverlapping called on overlapping ranges")
}
@ -1102,7 +1117,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
// now fill in all the "init" data
dest_alloc.mark_compressed_init_range(
&compressed,
alloc_range(dest.offset, size),
alloc_range(dest_offset, size), // just a single copy (i.e., not full `dest_range`)
num_copies,
);
// copy the relocations to the destination
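`prepare_relocation_copy` gathers the relocations inside the source range and re-bases them at the destination, once per repetition, before the raw bytes themselves are copied. A simplified, self-contained model of that re-basing, with relocations reduced to `(offset, tag)` pairs and the edge handling and real provenance types left out:

```rust
fn prepare_relocation_copy(
    relocations: &[(u64, u64)], // (offset within the source allocation, tag)
    src_start: u64,
    size: u64,
    dest_start: u64,
    num_copies: u64,
) -> Vec<(u64, u64)> {
    let in_range: Vec<_> = relocations
        .iter()
        .copied()
        .filter(|&(offset, _)| offset >= src_start && offset < src_start + size)
        .collect();
    let mut out = Vec::with_capacity(in_range.len() * num_copies as usize);
    for i in 0..num_copies {
        for &(offset, tag) in &in_range {
            // Same position within the copied range, shifted to the i-th copy.
            out.push((dest_start + i * size + (offset - src_start), tag));
        }
    }
    out
}

fn main() {
    // One relocation at offset 4 of an 8-byte source range, copied twice to offset 32;
    // the relocation at offset 20 lies outside the range and is not copied.
    let copied = prepare_relocation_copy(&[(4, 7), (20, 9)], 0, 8, 32, 2);
    assert_eq!(copied, vec![(36, 7), (44, 7)]);
}
```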
@ -1114,24 +1129,41 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
/// Machine pointer introspection.
impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
pub fn force_ptr(
&self,
scalar: Scalar<M::PointerTag>,
) -> InterpResult<'tcx, Pointer<M::PointerTag>> {
match scalar {
Scalar::Ptr(ptr) => Ok(ptr),
_ => M::int_to_ptr(&self, scalar.to_machine_usize(self)?),
pub fn scalar_to_ptr(&self, scalar: Scalar<M::PointerTag>) -> Pointer<Option<M::PointerTag>> {
// We use `to_bits_or_ptr_internal` since we are just implementing the method people need to
// call to force getting out a pointer.
match scalar.to_bits_or_ptr_internal(self.pointer_size()) {
Err(ptr) => ptr.into(),
Ok(bits) => {
let addr = u64::try_from(bits).unwrap();
M::ptr_from_addr(&self, addr)
}
}
}
pub fn force_bits(
/// Turning a "maybe pointer" into a proper pointer (and some information
/// about where it points), or an absolute address.
pub fn ptr_try_get_alloc(
&self,
scalar: Scalar<M::PointerTag>,
size: Size,
) -> InterpResult<'tcx, u128> {
match scalar.to_bits_or_ptr(size, self) {
Ok(bits) => Ok(bits),
Err(ptr) => Ok(M::ptr_to_int(&self, ptr)?.into()),
ptr: Pointer<Option<M::PointerTag>>,
) -> Result<(AllocId, Size, Pointer<M::PointerTag>), u64> {
match ptr.into_pointer_or_addr() {
Ok(ptr) => {
let (alloc_id, offset) = M::ptr_get_alloc(self, ptr);
Ok((alloc_id, offset, ptr))
}
Err(addr) => Err(addr.bytes()),
}
}
/// Turning a "maybe pointer" into a proper pointer (and some information about where it points).
#[inline(always)]
pub fn ptr_get_alloc(
&self,
ptr: Pointer<Option<M::PointerTag>>,
) -> InterpResult<'tcx, (AllocId, Size, Pointer<M::PointerTag>)> {
self.ptr_try_get_alloc(ptr).map_err(|offset| {
err_ub!(DanglingIntPointer(offset, CheckInAllocMsg::InboundsTest)).into()
})
}
}
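For orientation, a minimal sketch of how a caller combines the new introspection helpers above. This is a hypothetical snippet based only on the signatures shown in this diff; the machine type `M` and the `trace!` logging are assumed to be in scope.

fn classify_scalar<'mir, 'tcx, M: Machine<'mir, 'tcx>>(
    mem: &Memory<'mir, 'tcx, M>,
    scalar: Scalar<M::PointerTag>,
) {
    // Unlike the old `force_ptr`, this is infallible and needs no `InterpResult`.
    let ptr = mem.scalar_to_ptr(scalar);
    match mem.ptr_try_get_alloc(ptr) {
        // Provenance known: we get the allocation and the offset relative to it.
        Ok((alloc_id, offset, _ptr)) => trace!("{:?}+{}", alloc_id, offset.bytes()),
        // No provenance: all that is left is an absolute address.
        Err(addr) => trace!("absolute address {:#x}", addr),
    }
}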

View File

@ -15,8 +15,9 @@ use rustc_target::abi::{Abi, HasDataLayout, LayoutOf, Size, TagEncoding};
use rustc_target::abi::{VariantIdx, Variants};
use super::{
alloc_range, from_known_layout, mir_assign_valid_types, ConstValue, GlobalId, InterpCx,
InterpResult, MPlaceTy, Machine, MemPlace, Place, PlaceTy, Pointer, Scalar, ScalarMaybeUninit,
alloc_range, from_known_layout, mir_assign_valid_types, AllocId, ConstValue, GlobalId,
InterpCx, InterpResult, MPlaceTy, Machine, MemPlace, Place, PlaceTy, Pointer, Provenance,
Scalar, ScalarMaybeUninit,
};
/// An `Immediate` represents a single immediate self-contained Rust value.
@ -26,8 +27,8 @@ use super::{
/// operations and wide pointers. This idea was taken from rustc's codegen.
/// In particular, thanks to `ScalarPair`, arithmetic operations and casts can be entirely
/// defined on `Immediate`, and do not have to work with a `Place`.
#[derive(Copy, Clone, Debug, PartialEq, Eq, HashStable, Hash)]
pub enum Immediate<Tag = ()> {
#[derive(Copy, Clone, PartialEq, Eq, HashStable, Hash, Debug)]
pub enum Immediate<Tag: Provenance = AllocId> {
Scalar(ScalarMaybeUninit<Tag>),
ScalarPair(ScalarMaybeUninit<Tag>, ScalarMaybeUninit<Tag>),
}
@ -35,34 +36,35 @@ pub enum Immediate<Tag = ()> {
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(Immediate, 56);
impl<Tag> From<ScalarMaybeUninit<Tag>> for Immediate<Tag> {
impl<Tag: Provenance> From<ScalarMaybeUninit<Tag>> for Immediate<Tag> {
#[inline(always)]
fn from(val: ScalarMaybeUninit<Tag>) -> Self {
Immediate::Scalar(val)
}
}
impl<Tag> From<Scalar<Tag>> for Immediate<Tag> {
impl<Tag: Provenance> From<Scalar<Tag>> for Immediate<Tag> {
#[inline(always)]
fn from(val: Scalar<Tag>) -> Self {
Immediate::Scalar(val.into())
}
}
impl<Tag> From<Pointer<Tag>> for Immediate<Tag> {
#[inline(always)]
fn from(val: Pointer<Tag>) -> Self {
Immediate::Scalar(Scalar::from(val).into())
}
impl<'tcx, Tag: Provenance> Immediate<Tag> {
pub fn from_pointer(p: Pointer<Tag>, cx: &impl HasDataLayout) -> Self {
Immediate::Scalar(ScalarMaybeUninit::from_pointer(p, cx))
}
pub fn from_maybe_pointer(p: Pointer<Option<Tag>>, cx: &impl HasDataLayout) -> Self {
Immediate::Scalar(ScalarMaybeUninit::from_maybe_pointer(p, cx))
}
impl<'tcx, Tag> Immediate<Tag> {
pub fn new_slice(val: Scalar<Tag>, len: u64, cx: &impl HasDataLayout) -> Self {
Immediate::ScalarPair(val.into(), Scalar::from_machine_usize(len, cx).into())
}
pub fn new_dyn_trait(val: Scalar<Tag>, vtable: Pointer<Tag>) -> Self {
Immediate::ScalarPair(val.into(), vtable.into())
pub fn new_dyn_trait(val: Scalar<Tag>, vtable: Pointer<Tag>, cx: &impl HasDataLayout) -> Self {
Immediate::ScalarPair(val.into(), ScalarMaybeUninit::from_pointer(vtable, cx))
}
#[inline]
@ -82,7 +84,7 @@ impl<'tcx, Tag> Immediate<Tag> {
// ScalarPair needs a type to interpret, so we often have an immediate and a type together
// as input for binary and cast operations.
#[derive(Copy, Clone, Debug)]
pub struct ImmTy<'tcx, Tag = ()> {
pub struct ImmTy<'tcx, Tag: Provenance = AllocId> {
imm: Immediate<Tag>,
pub layout: TyAndLayout<'tcx>,
}
@ -90,17 +92,23 @@ pub struct ImmTy<'tcx, Tag = ()> {
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(ImmTy<'_>, 72);
impl<Tag: Copy> std::fmt::Display for ImmTy<'tcx, Tag> {
impl<Tag: Provenance> std::fmt::Display for ImmTy<'tcx, Tag> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
/// Helper function for printing a scalar to a FmtPrinter
fn p<'a, 'tcx, F: std::fmt::Write, Tag>(
fn p<'a, 'tcx, F: std::fmt::Write, Tag: Provenance>(
cx: FmtPrinter<'a, 'tcx, F>,
s: ScalarMaybeUninit<Tag>,
ty: Ty<'tcx>,
) -> Result<FmtPrinter<'a, 'tcx, F>, std::fmt::Error> {
match s {
ScalarMaybeUninit::Scalar(s) => {
cx.pretty_print_const_scalar(s.erase_tag(), ty, true)
ScalarMaybeUninit::Scalar(Scalar::Int(int)) => {
cx.pretty_print_const_scalar_int(int, ty, true)
}
ScalarMaybeUninit::Scalar(Scalar::Ptr(ptr, _sz)) => {
// Just print the ptr value. `pretty_print_const_scalar_ptr` would also try to
// print what it points to, which would fail since it has no access to the local
// memory.
cx.pretty_print_const_pointer(ptr, ty, true)
}
ScalarMaybeUninit::Uninit => cx.typed_value(
|mut this| {
@ -120,18 +128,18 @@ impl<Tag: Copy> std::fmt::Display for ImmTy<'tcx, Tag> {
p(cx, s, ty)?;
return Ok(());
}
write!(f, "{}: {}", s.erase_tag(), self.layout.ty)
write!(f, "{}: {}", s, self.layout.ty)
}
Immediate::ScalarPair(a, b) => {
// FIXME(oli-obk): at least print tuples and slices nicely
write!(f, "({}, {}): {}", a.erase_tag(), b.erase_tag(), self.layout.ty,)
write!(f, "({}, {}): {}", a, b, self.layout.ty,)
}
}
})
}
}
impl<'tcx, Tag> std::ops::Deref for ImmTy<'tcx, Tag> {
impl<'tcx, Tag: Provenance> std::ops::Deref for ImmTy<'tcx, Tag> {
type Target = Immediate<Tag>;
#[inline(always)]
fn deref(&self) -> &Immediate<Tag> {
@ -142,22 +150,22 @@ impl<'tcx, Tag> std::ops::Deref for ImmTy<'tcx, Tag> {
/// An `Operand` is the result of computing a `mir::Operand`. It can be immediate,
/// or still in memory. The latter is an optimization, to delay reading that chunk of
/// memory and to avoid having to store arbitrary-sized data here.
#[derive(Copy, Clone, Debug, PartialEq, Eq, HashStable, Hash)]
pub enum Operand<Tag = ()> {
#[derive(Copy, Clone, PartialEq, Eq, HashStable, Hash, Debug)]
pub enum Operand<Tag: Provenance = AllocId> {
Immediate(Immediate<Tag>),
Indirect(MemPlace<Tag>),
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct OpTy<'tcx, Tag = ()> {
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct OpTy<'tcx, Tag: Provenance = AllocId> {
op: Operand<Tag>, // Keep this private; it helps enforce invariants.
pub layout: TyAndLayout<'tcx>,
}
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(OpTy<'_, ()>, 80);
rustc_data_structures::static_assert_size!(OpTy<'_>, 80);
impl<'tcx, Tag> std::ops::Deref for OpTy<'tcx, Tag> {
impl<'tcx, Tag: Provenance> std::ops::Deref for OpTy<'tcx, Tag> {
type Target = Operand<Tag>;
#[inline(always)]
fn deref(&self) -> &Operand<Tag> {
@ -165,28 +173,28 @@ impl<'tcx, Tag> std::ops::Deref for OpTy<'tcx, Tag> {
}
}
impl<'tcx, Tag: Copy> From<MPlaceTy<'tcx, Tag>> for OpTy<'tcx, Tag> {
impl<'tcx, Tag: Provenance> From<MPlaceTy<'tcx, Tag>> for OpTy<'tcx, Tag> {
#[inline(always)]
fn from(mplace: MPlaceTy<'tcx, Tag>) -> Self {
OpTy { op: Operand::Indirect(*mplace), layout: mplace.layout }
}
}
impl<'tcx, Tag: Copy> From<&'_ MPlaceTy<'tcx, Tag>> for OpTy<'tcx, Tag> {
impl<'tcx, Tag: Provenance> From<&'_ MPlaceTy<'tcx, Tag>> for OpTy<'tcx, Tag> {
#[inline(always)]
fn from(mplace: &MPlaceTy<'tcx, Tag>) -> Self {
OpTy { op: Operand::Indirect(**mplace), layout: mplace.layout }
}
}
impl<'tcx, Tag> From<ImmTy<'tcx, Tag>> for OpTy<'tcx, Tag> {
impl<'tcx, Tag: Provenance> From<ImmTy<'tcx, Tag>> for OpTy<'tcx, Tag> {
#[inline(always)]
fn from(val: ImmTy<'tcx, Tag>) -> Self {
OpTy { op: Operand::Immediate(val.imm), layout: val.layout }
}
}
impl<'tcx, Tag: Copy> ImmTy<'tcx, Tag> {
impl<'tcx, Tag: Provenance> ImmTy<'tcx, Tag> {
#[inline]
pub fn from_scalar(val: Scalar<Tag>, layout: TyAndLayout<'tcx>) -> Self {
ImmTy { imm: val.into(), layout }
@ -225,19 +233,6 @@ impl<'tcx, Tag: Copy> ImmTy<'tcx, Tag> {
}
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
/// Normalize `place.ptr` to a `Pointer` if this is a place and not a ZST.
/// Can be helpful to avoid lots of `force_ptr` calls later, if this place is used a lot.
#[inline]
pub fn force_op_ptr(
&self,
op: &OpTy<'tcx, M::PointerTag>,
) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
match op.try_as_mplace(self) {
Ok(mplace) => Ok(self.force_mplace_ptr(mplace)?.into()),
Err(imm) => Ok(imm.into()), // Nothing to cast/force
}
}
/// Try reading an immediate in memory; this is interesting particularly for `ScalarPair`.
/// Returns `None` if the layout does not permit loading this as a value.
fn try_read_immediate_from_mplace(
@ -291,7 +286,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
&self,
src: &OpTy<'tcx, M::PointerTag>,
) -> InterpResult<'tcx, Result<ImmTy<'tcx, M::PointerTag>, MPlaceTy<'tcx, M::PointerTag>>> {
Ok(match src.try_as_mplace(self) {
Ok(match src.try_as_mplace() {
Ok(ref mplace) => {
if let Some(val) = self.try_read_immediate_from_mplace(mplace)? {
Ok(val)
@ -324,6 +319,14 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
Ok(self.read_immediate(op)?.to_scalar_or_uninit())
}
/// Read a pointer from a place.
pub fn read_pointer(
&self,
op: &OpTy<'tcx, M::PointerTag>,
) -> InterpResult<'tcx, Pointer<Option<M::PointerTag>>> {
Ok(self.scalar_to_ptr(self.read_scalar(op)?.check_init()?))
}
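// A hypothetical caller (sketch; `op` is any pointer-typed OpTy in scope):
//
//     let ptr = self.read_pointer(&op)?;              // Pointer<Option<M::PointerTag>>
//     let may_be_null = self.memory.ptr_may_be_null(ptr);
//
// i.e. integer addresses come back with `None` provenance and can be inspected
// without any int-to-ptr "forcing" step.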
// Turn the wide MPlace into a string (must already be dereferenced!)
pub fn read_str(&self, mplace: &MPlaceTy<'tcx, M::PointerTag>) -> InterpResult<'tcx, &str> {
let len = mplace.len(self)?;
@ -338,7 +341,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
op: &OpTy<'tcx, M::PointerTag>,
field: usize,
) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
let base = match op.try_as_mplace(self) {
let base = match op.try_as_mplace() {
Ok(ref mplace) => {
// We can reuse the mplace field computation logic for indirect operands.
let field = self.mplace_field(mplace, field)?;
@ -381,7 +384,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
self.operand_field(op, index)
} else {
// Indexing into a big array. This must be an mplace.
let mplace = op.assert_mem_place(self);
let mplace = op.assert_mem_place();
Ok(self.mplace_index(&mplace, index)?.into())
}
}
@ -392,7 +395,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
variant: VariantIdx,
) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
// Downcasts only change the layout
Ok(match op.try_as_mplace(self) {
Ok(match op.try_as_mplace() {
Ok(ref mplace) => self.mplace_downcast(mplace, variant)?.into(),
Err(..) => {
let layout = op.layout.for_variant(self, variant);
@ -414,7 +417,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
Subslice { .. } | ConstantIndex { .. } | Index(_) => {
// The rest should only occur as mplace, we do not use Immediates for types
// allowing such operations. This matches place_projection forcing an allocation.
let mplace = base.assert_mem_place(self);
let mplace = base.assert_mem_place();
self.mplace_projection(&mplace, proj_elem)?.into()
}
})
@ -569,7 +572,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// Other cases need layout.
let tag_scalar = |scalar| -> InterpResult<'tcx, _> {
Ok(match scalar {
Scalar::Ptr(ptr) => Scalar::Ptr(self.global_base_pointer(ptr)?),
Scalar::Ptr(ptr, size) => Scalar::Ptr(self.global_base_pointer(ptr)?, size),
Scalar::Int(int) => Scalar::Int(int),
})
};
@ -580,9 +583,9 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// We rely on mutability being set correctly in that allocation to prevent writes
// where none should happen.
let ptr = self.global_base_pointer(Pointer::new(id, offset))?;
Operand::Indirect(MemPlace::from_ptr(ptr, layout.align.abi))
Operand::Indirect(MemPlace::from_ptr(ptr.into(), layout.align.abi))
}
ConstValue::Scalar(x) => Operand::Immediate(tag_scalar(x)?.into()),
ConstValue::Scalar(x) => Operand::Immediate(tag_scalar(x.into())?.into()),
ConstValue::Slice { data, start, end } => {
// We rely on mutability being set correctly in `data` to prevent writes
// where none should happen.
@ -591,7 +594,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
Size::from_bytes(start), // offset: `start`
);
Operand::Immediate(Immediate::new_slice(
self.global_base_pointer(ptr)?.into(),
Scalar::from_pointer(self.global_base_pointer(ptr)?, &*self.tcx),
u64::try_from(end.checked_sub(start).unwrap()).unwrap(), // len: `end - start`
self,
))
@ -658,9 +661,10 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// Figure out which discriminant and variant this corresponds to.
Ok(match *tag_encoding {
TagEncoding::Direct => {
let tag_bits = self
.force_bits(tag_val, tag_layout.size)
.map_err(|_| err_ub!(InvalidTag(tag_val.erase_tag())))?;
let tag_bits = tag_val
.try_to_int()
.map_err(|dbg_val| err_ub!(InvalidTag(dbg_val)))?
.assert_bits(tag_layout.size);
// Cast bits from tag layout to discriminant layout.
let discr_val = self.cast_from_scalar(tag_bits, tag_layout, discr_layout.ty);
let discr_bits = discr_val.assert_bits(discr_layout.size);
@ -677,7 +681,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
}
_ => span_bug!(self.cur_span(), "tagged layout for non-adt non-generator"),
}
.ok_or_else(|| err_ub!(InvalidTag(tag_val.erase_tag())))?;
.ok_or_else(|| err_ub!(InvalidTag(Scalar::from_uint(tag_bits, tag_layout.size))))?;
// Return the cast value, and the index.
(discr_val, index.0)
}
@ -686,18 +690,23 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// discriminant (encoded in niche/tag) and variant index are the same.
let variants_start = niche_variants.start().as_u32();
let variants_end = niche_variants.end().as_u32();
let variant = match tag_val.to_bits_or_ptr(tag_layout.size, self) {
Err(ptr) => {
// The niche must be just 0 (which an inbounds pointer value never is)
let variant = match tag_val.try_to_int() {
Err(dbg_val) => {
// So this is a pointer then, and casting to an int failed.
// Can only happen during CTFE.
let ptr = self.scalar_to_ptr(tag_val);
// The niche must be just 0, and the ptr must not be null; then we know this is
// okay. Everything else we conservatively reject.
let ptr_valid = niche_start == 0
&& variants_start == variants_end
&& !self.memory.ptr_may_be_null(ptr);
if !ptr_valid {
throw_ub!(InvalidTag(tag_val.erase_tag()))
throw_ub!(InvalidTag(dbg_val))
}
dataful_variant
}
Ok(tag_bits) => {
let tag_bits = tag_bits.assert_bits(tag_layout.size);
// We need to use machine arithmetic to get the relative variant idx:
// variant_index_relative = tag_val - niche_start_val
let tag_val = ImmTy::from_uint(tag_bits, tag_layout);
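// Worked example (hypothetical values): with niche_start = 2 and tag_bits = 5, the
// machine subtraction described above gives variant_index_relative = 5 - 2 = 3; the
// decoded variant is then variants_start + 3, provided that index lies within
// niche_variants.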

View File

@ -318,8 +318,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
right.layout.ty
);
let l = self.force_bits(left.to_scalar()?, left.layout.size)?;
let r = self.force_bits(right.to_scalar()?, right.layout.size)?;
let l = left.to_scalar()?.to_bits(left.layout.size)?;
let r = right.to_scalar()?.to_bits(right.layout.size)?;
self.binary_int_op(bin_op, l, left.layout, r, right.layout)
}
_ if left.layout.ty.is_any_ptr() => {
@ -386,7 +386,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
}
_ => {
assert!(layout.ty.is_integral());
let val = self.force_bits(val, layout.size)?;
let val = val.to_bits(layout.size)?;
let (res, overflow) = match un_op {
Not => (self.truncate(!val, layout), false), // bitwise negation, then truncate
Neg => {

View File

@ -3,7 +3,6 @@
//! All high-level functions to write to memory work on places as destinations.
use std::convert::TryFrom;
use std::fmt::Debug;
use std::hash::Hash;
use rustc_ast::Mutability;
@ -15,14 +14,14 @@ use rustc_target::abi::{Abi, Align, FieldsShape, TagEncoding};
use rustc_target::abi::{HasDataLayout, LayoutOf, Size, VariantIdx, Variants};
use super::{
alloc_range, mir_assign_valid_types, AllocRef, AllocRefMut, ConstAlloc, ImmTy, Immediate,
InterpCx, InterpResult, LocalValue, Machine, MemoryKind, OpTy, Operand, Pointer,
PointerArithmetic, Scalar, ScalarMaybeUninit,
alloc_range, mir_assign_valid_types, AllocId, AllocRef, AllocRefMut, CheckInAllocMsg,
ConstAlloc, ImmTy, Immediate, InterpCx, InterpResult, LocalValue, Machine, MemoryKind, OpTy,
Operand, Pointer, PointerArithmetic, Provenance, Scalar, ScalarMaybeUninit,
};
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, HashStable)]
#[derive(Copy, Clone, Hash, PartialEq, Eq, HashStable, Debug)]
/// Information required for the sound usage of a `MemPlace`.
pub enum MemPlaceMeta<Tag = ()> {
pub enum MemPlaceMeta<Tag: Provenance = AllocId> {
/// The unsized payload (e.g. length for slices or vtable pointer for trait objects).
Meta(Scalar<Tag>),
/// `Sized` types or unsized `extern type`
@ -37,7 +36,7 @@ pub enum MemPlaceMeta<Tag = ()> {
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(MemPlaceMeta, 24);
impl<Tag> MemPlaceMeta<Tag> {
impl<Tag: Provenance> MemPlaceMeta<Tag> {
pub fn unwrap_meta(self) -> Scalar<Tag> {
match self {
Self::Meta(s) => s,
@ -52,22 +51,12 @@ impl<Tag> MemPlaceMeta<Tag> {
Self::None | Self::Poison => false,
}
}
pub fn erase_tag(self) -> MemPlaceMeta<()> {
match self {
Self::Meta(s) => MemPlaceMeta::Meta(s.erase_tag()),
Self::None => MemPlaceMeta::None,
Self::Poison => MemPlaceMeta::Poison,
}
}
}
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, HashStable)]
pub struct MemPlace<Tag = ()> {
/// A place may have an integral pointer for ZSTs, and since it might
/// be turned back into a reference before ever being dereferenced.
/// However, it may never be uninit.
pub ptr: Scalar<Tag>,
#[derive(Copy, Clone, Hash, PartialEq, Eq, HashStable, Debug)]
pub struct MemPlace<Tag: Provenance = AllocId> {
/// The pointer can be a pure integer, with the `None` tag.
pub ptr: Pointer<Option<Tag>>,
pub align: Align,
/// Metadata for unsized places. Interpretation is up to the type.
/// Must not be present for sized types, but can be missing for unsized types
@ -76,10 +65,10 @@ pub struct MemPlace<Tag = ()> {
}
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(MemPlace, 56);
rustc_data_structures::static_assert_size!(MemPlace, 48);
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, HashStable)]
pub enum Place<Tag = ()> {
#[derive(Copy, Clone, Hash, PartialEq, Eq, HashStable, Debug)]
pub enum Place<Tag: Provenance = AllocId> {
/// A place referring to a value allocated in the `Memory` system.
Ptr(MemPlace<Tag>),
@ -89,18 +78,18 @@ pub enum Place<Tag = ()> {
}
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(Place, 64);
rustc_data_structures::static_assert_size!(Place, 56);
#[derive(Copy, Clone, Debug)]
pub struct PlaceTy<'tcx, Tag = ()> {
pub struct PlaceTy<'tcx, Tag: Provenance = AllocId> {
place: Place<Tag>, // Keep this private; it helps enforce invariants.
pub layout: TyAndLayout<'tcx>,
}
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(PlaceTy<'_>, 80);
rustc_data_structures::static_assert_size!(PlaceTy<'_>, 72);
impl<'tcx, Tag> std::ops::Deref for PlaceTy<'tcx, Tag> {
impl<'tcx, Tag: Provenance> std::ops::Deref for PlaceTy<'tcx, Tag> {
type Target = Place<Tag>;
#[inline(always)]
fn deref(&self) -> &Place<Tag> {
@ -109,16 +98,16 @@ impl<'tcx, Tag> std::ops::Deref for PlaceTy<'tcx, Tag> {
}
/// A MemPlace with its layout. Constructing it is only possible in this module.
#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
pub struct MPlaceTy<'tcx, Tag = ()> {
#[derive(Copy, Clone, Hash, Eq, PartialEq, Debug)]
pub struct MPlaceTy<'tcx, Tag: Provenance = AllocId> {
mplace: MemPlace<Tag>,
pub layout: TyAndLayout<'tcx>,
}
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(MPlaceTy<'_>, 72);
rustc_data_structures::static_assert_size!(MPlaceTy<'_>, 64);
impl<'tcx, Tag> std::ops::Deref for MPlaceTy<'tcx, Tag> {
impl<'tcx, Tag: Provenance> std::ops::Deref for MPlaceTy<'tcx, Tag> {
type Target = MemPlace<Tag>;
#[inline(always)]
fn deref(&self) -> &MemPlace<Tag> {
@ -126,42 +115,33 @@ impl<'tcx, Tag> std::ops::Deref for MPlaceTy<'tcx, Tag> {
}
}
impl<'tcx, Tag> From<MPlaceTy<'tcx, Tag>> for PlaceTy<'tcx, Tag> {
impl<'tcx, Tag: Provenance> From<MPlaceTy<'tcx, Tag>> for PlaceTy<'tcx, Tag> {
#[inline(always)]
fn from(mplace: MPlaceTy<'tcx, Tag>) -> Self {
PlaceTy { place: Place::Ptr(mplace.mplace), layout: mplace.layout }
}
}
impl<Tag> MemPlace<Tag> {
/// Replace ptr tag, maintain vtable tag (if any)
#[inline]
pub fn replace_tag(self, new_tag: Tag) -> Self {
MemPlace { ptr: self.ptr.erase_tag().with_tag(new_tag), align: self.align, meta: self.meta }
}
#[inline]
pub fn erase_tag(self) -> MemPlace {
MemPlace { ptr: self.ptr.erase_tag(), align: self.align, meta: self.meta.erase_tag() }
}
impl<Tag: Provenance> MemPlace<Tag> {
#[inline(always)]
fn from_scalar_ptr(ptr: Scalar<Tag>, align: Align) -> Self {
pub fn from_ptr(ptr: Pointer<Option<Tag>>, align: Align) -> Self {
MemPlace { ptr, align, meta: MemPlaceMeta::None }
}
#[inline(always)]
pub fn from_ptr(ptr: Pointer<Tag>, align: Align) -> Self {
Self::from_scalar_ptr(ptr.into(), align)
/// Adjust the provenance of the main pointer (metadata is unaffected).
pub fn map_provenance(self, f: impl FnOnce(Option<Tag>) -> Option<Tag>) -> Self {
MemPlace { ptr: self.ptr.map_provenance(f), ..self }
}
/// Turn an mplace into a (thin or wide) pointer, as a reference, pointing to the same space.
/// This is the inverse of `ref_to_mplace`.
#[inline(always)]
pub fn to_ref(self) -> Immediate<Tag> {
pub fn to_ref(self, cx: &impl HasDataLayout) -> Immediate<Tag> {
match self.meta {
MemPlaceMeta::None => Immediate::Scalar(self.ptr.into()),
MemPlaceMeta::Meta(meta) => Immediate::ScalarPair(self.ptr.into(), meta.into()),
MemPlaceMeta::None => Immediate::from(Scalar::from_maybe_pointer(self.ptr, cx)),
MemPlaceMeta::Meta(meta) => {
Immediate::ScalarPair(Scalar::from_maybe_pointer(self.ptr, cx).into(), meta.into())
}
MemPlaceMeta::Poison => bug!(
"MPlaceTy::dangling may never be used to produce a \
place that will have the address of its pointee taken"
@ -177,29 +157,23 @@ impl<Tag> MemPlace<Tag> {
cx: &impl HasDataLayout,
) -> InterpResult<'tcx, Self> {
Ok(MemPlace {
ptr: self.ptr.ptr_offset(offset, cx)?,
ptr: self.ptr.offset(offset, cx)?,
align: self.align.restrict_for_offset(offset),
meta,
})
}
}
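// A minimal sketch of how the reworked constructors compose (hypothetical `ptr`,
// `align` and `cx` in scope; only the signatures above are assumed):
//
//     let place = MemPlace::from_ptr(ptr, align);       // ptr: Pointer<Option<Tag>>
//     let stripped = place.map_provenance(|_| None);    // keep the address, drop the tag
//     let as_ref: Immediate<Tag> = stripped.to_ref(cx); // needs the data layout now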
impl<'tcx, Tag: Copy> MPlaceTy<'tcx, Tag> {
impl<'tcx, Tag: Provenance> MPlaceTy<'tcx, Tag> {
/// Produces a MemPlace that works for ZST but nothing else
#[inline]
pub fn dangling(layout: TyAndLayout<'tcx>, cx: &impl HasDataLayout) -> Self {
pub fn dangling(layout: TyAndLayout<'tcx>) -> Self {
let align = layout.align.abi;
let ptr = Scalar::from_machine_usize(align.bytes(), cx);
let ptr = Pointer::new(None, Size::from_bytes(align.bytes())); // no provenance, absolute address
// `Poison` this to make sure that the pointer value `ptr` is never observable by the program.
MPlaceTy { mplace: MemPlace { ptr, align, meta: MemPlaceMeta::Poison }, layout }
}
/// Replace ptr tag, maintain vtable tag (if any)
#[inline]
pub fn replace_tag(&self, new_tag: Tag) -> Self {
MPlaceTy { mplace: self.mplace.replace_tag(new_tag), layout: self.layout }
}
#[inline]
pub fn offset(
&self,
@ -212,7 +186,7 @@ impl<'tcx, Tag: Copy> MPlaceTy<'tcx, Tag> {
}
#[inline]
fn from_aligned_ptr(ptr: Pointer<Tag>, layout: TyAndLayout<'tcx>) -> Self {
pub fn from_aligned_ptr(ptr: Pointer<Option<Tag>>, layout: TyAndLayout<'tcx>) -> Self {
MPlaceTy { mplace: MemPlace::from_ptr(ptr, layout.align.abi), layout }
}
@ -244,19 +218,14 @@ impl<'tcx, Tag: Copy> MPlaceTy<'tcx, Tag> {
}
// These are defined here because they produce a place.
impl<'tcx, Tag: Debug + Copy> OpTy<'tcx, Tag> {
impl<'tcx, Tag: Provenance> OpTy<'tcx, Tag> {
#[inline(always)]
/// Note: do not call `as_ref` on the resulting place. This function should only be used to
/// read from the resulting mplace, not to get its address back.
pub fn try_as_mplace(
&self,
cx: &impl HasDataLayout,
) -> Result<MPlaceTy<'tcx, Tag>, ImmTy<'tcx, Tag>> {
pub fn try_as_mplace(&self) -> Result<MPlaceTy<'tcx, Tag>, ImmTy<'tcx, Tag>> {
match **self {
Operand::Indirect(mplace) => Ok(MPlaceTy { mplace, layout: self.layout }),
Operand::Immediate(_) if self.layout.is_zst() => {
Ok(MPlaceTy::dangling(self.layout, cx))
}
Operand::Immediate(_) if self.layout.is_zst() => Ok(MPlaceTy::dangling(self.layout)),
Operand::Immediate(imm) => Err(ImmTy::from_immediate(imm, self.layout)),
}
}
@ -264,12 +233,12 @@ impl<'tcx, Tag: Debug + Copy> OpTy<'tcx, Tag> {
#[inline(always)]
/// Note: do not call `as_ref` on the resulting place. This function should only be used to
/// read from the resulting mplace, not to get its address back.
pub fn assert_mem_place(&self, cx: &impl HasDataLayout) -> MPlaceTy<'tcx, Tag> {
self.try_as_mplace(cx).unwrap()
pub fn assert_mem_place(&self) -> MPlaceTy<'tcx, Tag> {
self.try_as_mplace().unwrap()
}
}
impl<Tag: Debug> Place<Tag> {
impl<Tag: Provenance> Place<Tag> {
#[inline]
pub fn assert_mem_place(self) -> MemPlace<Tag> {
match self {
@ -279,7 +248,7 @@ impl<Tag: Debug> Place<Tag> {
}
}
impl<'tcx, Tag: Debug> PlaceTy<'tcx, Tag> {
impl<'tcx, Tag: Provenance> PlaceTy<'tcx, Tag> {
#[inline]
pub fn assert_mem_place(self) -> MPlaceTy<'tcx, Tag> {
MPlaceTy { mplace: self.place.assert_mem_place(), layout: self.layout }
@ -290,7 +259,7 @@ impl<'tcx, Tag: Debug> PlaceTy<'tcx, Tag> {
impl<'mir, 'tcx: 'mir, Tag, M> InterpCx<'mir, 'tcx, M>
where
// FIXME: Working around https://github.com/rust-lang/rust/issues/54385
Tag: Debug + Copy + Eq + Hash + 'static,
Tag: Provenance + Eq + Hash + 'static,
M: Machine<'mir, 'tcx, PointerTag = Tag>,
{
/// Take a value, which represents a (thin or wide) reference, and make it a place.
@ -307,14 +276,12 @@ where
val.layout.ty.builtin_deref(true).expect("`ref_to_mplace` called on non-ptr type").ty;
let layout = self.layout_of(pointee_type)?;
let (ptr, meta) = match **val {
Immediate::Scalar(ptr) => (ptr.check_init()?, MemPlaceMeta::None),
Immediate::ScalarPair(ptr, meta) => {
(ptr.check_init()?, MemPlaceMeta::Meta(meta.check_init()?))
}
Immediate::Scalar(ptr) => (ptr, MemPlaceMeta::None),
Immediate::ScalarPair(ptr, meta) => (ptr, MemPlaceMeta::Meta(meta.check_init()?)),
};
let mplace = MemPlace {
ptr,
ptr: self.scalar_to_ptr(ptr.check_init()?),
// We could use the run-time alignment here. For now, we do not, because
// the point of tracking the alignment here is to make sure that the *static*
// alignment information emitted with the loads is correct. The run-time
@ -333,8 +300,9 @@ where
) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
let val = self.read_immediate(src)?;
trace!("deref to {} on {:?}", val.layout.ty, *val);
let place = self.ref_to_mplace(&val)?;
self.mplace_access_checked(place, None)
let mplace = self.ref_to_mplace(&val)?;
self.check_mplace_access(mplace, CheckInAllocMsg::DerefTest)?;
Ok(mplace)
}
#[inline]
@ -359,38 +327,19 @@ where
self.memory.get_mut(place.ptr, size, place.align)
}
/// Return the "access-checked" version of this `MPlace`, where for non-ZST
/// this is definitely a `Pointer`.
///
/// `force_align` must only be used when correct alignment does not matter,
/// like in Stacked Borrows.
pub fn mplace_access_checked(
/// Check if this mplace is dereferenceable and sufficiently aligned.
fn check_mplace_access(
&self,
mut place: MPlaceTy<'tcx, M::PointerTag>,
force_align: Option<Align>,
) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
mplace: MPlaceTy<'tcx, M::PointerTag>,
msg: CheckInAllocMsg,
) -> InterpResult<'tcx> {
let (size, align) = self
.size_and_align_of_mplace(&place)?
.unwrap_or((place.layout.size, place.layout.align.abi));
assert!(place.mplace.align <= align, "dynamic alignment less strict than static one?");
let align = force_align.unwrap_or(align);
// Record new (stricter, unless forced) alignment requirement in place.
place.mplace.align = align;
// When dereferencing a pointer, it must be non-null, aligned, and live.
if let Some(ptr) = self.memory.check_ptr_access(place.ptr, size, align)? {
place.mplace.ptr = ptr.into();
}
Ok(place)
}
/// Force `place.ptr` to a `Pointer`.
/// Can be helpful to avoid lots of `force_ptr` calls later, if this place is used a lot.
pub(super) fn force_mplace_ptr(
&self,
mut place: MPlaceTy<'tcx, M::PointerTag>,
) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
place.mplace.ptr = self.force_ptr(place.mplace.ptr)?.into();
Ok(place)
.size_and_align_of_mplace(&mplace)?
.unwrap_or((mplace.layout.size, mplace.layout.align.abi));
assert!(mplace.mplace.align <= align, "dynamic alignment less strict than static one?");
let align = M::enforce_alignment(&self.memory.extra).then_some(align);
self.memory.check_ptr_access_align(mplace.ptr, size, align.unwrap_or(Align::ONE), msg)?;
Ok(())
}
/// Offset a pointer to project to a field of a struct/union. Unlike `place_field`, this is
@ -558,10 +507,7 @@ where
let layout = self.layout_of(self.tcx.types.usize)?;
let n = self.access_local(self.frame(), local, Some(layout))?;
let n = self.read_scalar(&n)?;
let n = u64::try_from(
self.force_bits(n.check_init()?, self.tcx.data_layout.pointer_size)?,
)
.unwrap();
let n = n.to_machine_usize(self)?;
self.mplace_index(base, n)?
}
@ -677,16 +623,6 @@ where
Ok(place_ty)
}
/// Write a scalar to a place
#[inline(always)]
pub fn write_scalar(
&mut self,
val: impl Into<ScalarMaybeUninit<M::PointerTag>>,
dest: &PlaceTy<'tcx, M::PointerTag>,
) -> InterpResult<'tcx> {
self.write_immediate(Immediate::Scalar(val.into()), dest)
}
/// Write an immediate to a place
#[inline(always)]
pub fn write_immediate(
@ -704,21 +640,24 @@ where
Ok(())
}
/// Write an `Immediate` to memory.
/// Write a scalar to a place
#[inline(always)]
pub fn write_immediate_to_mplace(
pub fn write_scalar(
&mut self,
src: Immediate<M::PointerTag>,
dest: &MPlaceTy<'tcx, M::PointerTag>,
val: impl Into<ScalarMaybeUninit<M::PointerTag>>,
dest: &PlaceTy<'tcx, M::PointerTag>,
) -> InterpResult<'tcx> {
self.write_immediate_to_mplace_no_validate(src, dest)?;
if M::enforce_validity(self) {
// Data got changed, better make sure it matches the type!
self.validate_operand(&dest.into())?;
self.write_immediate(Immediate::Scalar(val.into()), dest)
}
Ok(())
/// Write a pointer to a place
#[inline(always)]
pub fn write_pointer(
&mut self,
ptr: impl Into<Pointer<Option<M::PointerTag>>>,
dest: &PlaceTy<'tcx, M::PointerTag>,
) -> InterpResult<'tcx> {
self.write_scalar(Scalar::from_maybe_pointer(ptr.into(), self), dest)
}
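// Together with `read_pointer` in operand.rs, this gives a symmetric pair for moving
// pointer values around without an intermediate integer. Hypothetical sketch:
//
//     let ptr = self.read_pointer(&src_op)?;   // Pointer<Option<M::PointerTag>>
//     self.write_pointer(ptr, &dest_place)?;   // provenance, if any, is preserved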
/// Write an immediate to a place.
@ -733,7 +672,7 @@ where
// This is a very common path, avoid some checks in release mode
assert!(!dest.layout.is_unsized(), "Cannot write unsized data");
match src {
Immediate::Scalar(ScalarMaybeUninit::Scalar(Scalar::Ptr(_))) => assert_eq!(
Immediate::Scalar(ScalarMaybeUninit::Scalar(Scalar::Ptr(..))) => assert_eq!(
self.pointer_size(),
dest.layout.size,
"Size mismatch when writing pointer"
@ -1020,7 +959,7 @@ where
kind: MemoryKind<M::MemoryKind>,
) -> InterpResult<'static, MPlaceTy<'tcx, M::PointerTag>> {
let ptr = self.memory.allocate(layout.size, layout.align.abi, kind)?;
Ok(MPlaceTy::from_aligned_ptr(ptr, layout))
Ok(MPlaceTy::from_aligned_ptr(ptr.into(), layout))
}
/// Returns a wide MPlace of type `&'static [mut] str` to a new 1-aligned allocation.
@ -1125,7 +1064,7 @@ where
let _ = self.tcx.global_alloc(raw.alloc_id);
let ptr = self.global_base_pointer(Pointer::from(raw.alloc_id))?;
let layout = self.layout_of(raw.ty)?;
Ok(MPlaceTy::from_aligned_ptr(ptr, layout))
Ok(MPlaceTy::from_aligned_ptr(ptr.into(), layout))
}
/// Turn a place with a `dyn Trait` type into a place with the actual dynamic type.
@ -1134,7 +1073,7 @@ where
&self,
mplace: &MPlaceTy<'tcx, M::PointerTag>,
) -> InterpResult<'tcx, (ty::Instance<'tcx>, MPlaceTy<'tcx, M::PointerTag>)> {
let vtable = mplace.vtable(); // also sanity checks the type
let vtable = self.scalar_to_ptr(mplace.vtable()); // also sanity checks the type
let (instance, ty) = self.read_drop_type_from_vtable(vtable)?;
let layout = self.layout_of(ty)?;

View File

@ -162,9 +162,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
use rustc_middle::mir::Rvalue::*;
match *rvalue {
ThreadLocalRef(did) => {
let id = M::thread_local_static_alloc_id(self, did)?;
let val = self.global_base_pointer(id.into())?;
self.write_scalar(val, &dest)?;
let ptr = M::thread_local_static_base_pointer(self, did)?;
self.write_pointer(ptr, &dest)?;
}
Use(ref operand) => {
@ -240,7 +239,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// of the first element.
let elem_size = first.layout.size;
let first_ptr = first.ptr;
let rest_ptr = first_ptr.ptr_offset(elem_size, self)?;
let rest_ptr = first_ptr.offset(elem_size, self)?;
self.memory.copy_repeatedly(
first_ptr,
first.align,
@ -264,11 +263,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
AddressOf(_, place) | Ref(_, _, place) => {
let src = self.eval_place(place)?;
let place = self.force_allocation(&src)?;
if place.layout.size.bytes() > 0 {
// definitely not a ZST
assert!(place.ptr.is_ptr(), "non-ZST places should be normalized to `Pointer`");
}
self.write_immediate(place.to_ref(), &dest)?;
self.write_immediate(place.to_ref(self), &dest)?;
}
NullaryOp(mir::NullOp::Box, _) => {

View File

@ -12,8 +12,8 @@ use rustc_target::abi::{self, LayoutOf as _};
use rustc_target::spec::abi::Abi;
use super::{
FnVal, ImmTy, InterpCx, InterpResult, MPlaceTy, Machine, OpTy, PlaceTy, StackPopCleanup,
StackPopUnwind,
FnVal, ImmTy, InterpCx, InterpResult, MPlaceTy, Machine, OpTy, PlaceTy, Scalar,
StackPopCleanup, StackPopUnwind,
};
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
@ -72,8 +72,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let (fn_val, abi, caller_can_unwind) = match *func.layout.ty.kind() {
ty::FnPtr(sig) => {
let caller_abi = sig.abi();
let fn_ptr = self.read_scalar(&func)?.check_init()?;
let fn_val = self.memory.get_fn(fn_ptr)?;
let fn_ptr = self.read_pointer(&func)?;
let fn_val = self.memory.get_fn(fn_ptr.into())?;
(
fn_val,
caller_abi,
@ -454,11 +454,11 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
}
None => {
// Unsized self.
args[0].assert_mem_place(self)
args[0].assert_mem_place()
}
};
// Find and consult vtable
let vtable = receiver_place.vtable();
let vtable = self.scalar_to_ptr(receiver_place.vtable());
let fn_val = self.get_vtable_slot(vtable, u64::try_from(idx).unwrap())?;
// `*mut receiver_place.layout.ty` is almost the layout that we
@ -468,8 +468,10 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let receiver_ptr_ty = self.tcx.mk_mut_ptr(receiver_place.layout.ty);
let this_receiver_ptr = self.layout_of(receiver_ptr_ty)?.field(self, 0)?;
// Adjust receiver argument.
args[0] =
OpTy::from(ImmTy::from_immediate(receiver_place.ptr.into(), this_receiver_ptr));
args[0] = OpTy::from(ImmTy::from_immediate(
Scalar::from_maybe_pointer(receiver_place.ptr, self).into(),
this_receiver_ptr,
));
trace!("Patched self operand to {:#?}", args[0]);
// recurse with concrete function
self.eval_fn_call(fn_val, caller_abi, &args, ret, unwind)
@ -499,12 +501,12 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
};
let arg = ImmTy::from_immediate(
place.to_ref(),
place.to_ref(self),
self.layout_of(self.tcx.mk_mut_ptr(place.layout.ty))?,
);
let ty = self.tcx.mk_unit(); // return type is ()
let dest = MPlaceTy::dangling(self.layout_of(ty)?, self);
let dest = MPlaceTy::dangling(self.layout_of(ty)?);
self.eval_fn_call(
FnVal::Instance(instance),

View File

@ -1,6 +1,6 @@
use std::convert::TryFrom;
use rustc_middle::mir::interpret::{InterpResult, Pointer, PointerArithmetic, Scalar};
use rustc_middle::mir::interpret::{InterpResult, Pointer, PointerArithmetic};
use rustc_middle::ty::{
self, Ty, COMMON_VTABLE_ENTRIES, COMMON_VTABLE_ENTRIES_ALIGN,
COMMON_VTABLE_ENTRIES_DROPINPLACE, COMMON_VTABLE_ENTRIES_SIZE,
@ -42,23 +42,23 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
/// corresponds to the first method declared in the trait of the provided vtable.
pub fn get_vtable_slot(
&self,
vtable: Scalar<M::PointerTag>,
vtable: Pointer<Option<M::PointerTag>>,
idx: u64,
) -> InterpResult<'tcx, FnVal<'tcx, M::ExtraFnVal>> {
let ptr_size = self.pointer_size();
let vtable_slot = vtable.ptr_offset(ptr_size * idx, self)?;
let vtable_slot = vtable.offset(ptr_size * idx, self)?;
let vtable_slot = self
.memory
.get(vtable_slot, ptr_size, self.tcx.data_layout.pointer_align.abi)?
.expect("cannot be a ZST");
let fn_ptr = vtable_slot.read_ptr_sized(Size::ZERO)?.check_init()?;
let fn_ptr = self.scalar_to_ptr(vtable_slot.read_ptr_sized(Size::ZERO)?.check_init()?);
self.memory.get_fn(fn_ptr)
}
/// Returns the drop fn instance as well as the actual dynamic type.
pub fn read_drop_type_from_vtable(
&self,
vtable: Scalar<M::PointerTag>,
vtable: Pointer<Option<M::PointerTag>>,
) -> InterpResult<'tcx, (ty::Instance<'tcx>, Ty<'tcx>)> {
let pointer_size = self.pointer_size();
// We don't care about the pointee type; we just want a pointer.
@ -77,7 +77,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
.check_init()?;
// We *need* an instance here, no other kind of function value, to be able
// to determine the type.
let drop_instance = self.memory.get_fn(drop_fn)?.as_instance()?;
let drop_instance = self.memory.get_fn(self.scalar_to_ptr(drop_fn))?.as_instance()?;
trace!("Found drop fn: {:?}", drop_instance);
let fn_sig = drop_instance.ty(*self.tcx, self.param_env).fn_sig(*self.tcx);
let fn_sig = self.tcx.normalize_erasing_late_bound_regions(self.param_env, fn_sig);
@ -93,7 +93,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
pub fn read_size_and_align_from_vtable(
&self,
vtable: Scalar<M::PointerTag>,
vtable: Pointer<Option<M::PointerTag>>,
) -> InterpResult<'tcx, (Size, Align)> {
let pointer_size = self.pointer_size();
// We check for `size = 3 * ptr_size`, which covers the drop fn (unused here),
@ -109,11 +109,11 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let size = vtable
.read_ptr_sized(pointer_size * u64::try_from(COMMON_VTABLE_ENTRIES_SIZE).unwrap())?
.check_init()?;
let size = u64::try_from(self.force_bits(size, pointer_size)?).unwrap();
let size = size.to_machine_usize(self)?;
let align = vtable
.read_ptr_sized(pointer_size * u64::try_from(COMMON_VTABLE_ENTRIES_ALIGN).unwrap())?
.check_init()?;
let align = u64::try_from(self.force_bits(align, pointer_size)?).unwrap();
let align = align.to_machine_usize(self)?;
let align = Align::from_bytes(align).map_err(|e| err_ub!(InvalidVtableAlignment(e)))?;
if size >= self.tcx.data_layout.obj_size_bound() {

View File

@ -21,7 +21,7 @@ use std::hash::Hash;
use super::{
alloc_range, CheckInAllocMsg, GlobalAlloc, InterpCx, InterpResult, MPlaceTy, Machine,
MemPlaceMeta, OpTy, Scalar, ScalarMaybeUninit, ValueVisitor,
MemPlaceMeta, OpTy, ScalarMaybeUninit, ValueVisitor,
};
macro_rules! throw_validation_failure {
@ -324,7 +324,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
let tail = self.ecx.tcx.struct_tail_erasing_lifetimes(pointee.ty, self.ecx.param_env);
match tail.kind() {
ty::Dynamic(..) => {
let vtable = meta.unwrap_meta();
let vtable = self.ecx.scalar_to_ptr(meta.unwrap_meta());
// Direct call to `check_ptr_access_align` checks alignment even on CTFE machines.
try_validation!(
self.ecx.memory.check_ptr_access_align(
@ -335,8 +335,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
),
self.path,
err_ub!(DanglingIntPointer(..)) |
err_ub!(PointerUseAfterFree(..)) |
err_unsup!(ReadBytesAsPointer) =>
err_ub!(PointerUseAfterFree(..)) =>
{ "dangling vtable pointer in wide pointer" },
err_ub!(AlignmentCheckFailed { .. }) =>
{ "unaligned vtable pointer in wide pointer" },
@ -347,8 +346,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
self.ecx.read_drop_type_from_vtable(vtable),
self.path,
err_ub!(DanglingIntPointer(..)) |
err_ub!(InvalidFunctionPointer(..)) |
err_unsup!(ReadBytesAsPointer) =>
err_ub!(InvalidFunctionPointer(..)) =>
{ "invalid drop function pointer in vtable (not pointing to a function)" },
err_ub!(InvalidVtableDropFn(..)) =>
{ "invalid drop function pointer in vtable (function has incompatible signature)" },
@ -437,8 +435,6 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
{ "a dangling {} (address 0x{:x} is unallocated)", kind, i },
err_ub!(PointerOutOfBounds { .. }) =>
{ "a dangling {} (going beyond the bounds of its allocation)", kind },
err_unsup!(ReadBytesAsPointer) =>
{ "a dangling {} (created from integer)", kind },
// This cannot happen during const-eval (because interning already detects
// dangling pointers), but it can happen in Miri.
err_ub!(PointerUseAfterFree(..)) =>
@ -448,17 +444,10 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
if let Some(ref mut ref_tracking) = self.ref_tracking {
// Proceed recursively even for ZST, no reason to skip them!
// `!` is a ZST and we want to validate it.
// Normalize before handing `place` to tracking because that will
// check for duplicates.
let place = if size.bytes() > 0 {
self.ecx.force_mplace_ptr(place).expect("we already bounds-checked")
} else {
place
};
// Skip validation entirely for some external statics
if let Scalar::Ptr(ptr) = place.ptr {
if let Ok((alloc_id, _offset, _ptr)) = self.ecx.memory.ptr_try_get_alloc(place.ptr) {
// not a ZST
let alloc_kind = self.ecx.tcx.get_global_alloc(ptr.alloc_id);
let alloc_kind = self.ecx.tcx.get_global_alloc(alloc_id);
if let Some(GlobalAlloc::Static(did)) = alloc_kind {
assert!(!self.ecx.tcx.is_thread_local_static(did));
assert!(self.ecx.tcx.is_static(did));
@ -546,7 +535,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
// types below!
if self.ctfe_mode.is_some() {
// Integers/floats in CTFE: Must be scalar bits, pointers are dangerous
let is_bits = value.check_init().map_or(false, |v| v.is_bits());
let is_bits = value.check_init().map_or(false, |v| v.try_to_int().is_ok());
if !is_bits {
throw_validation_failure!(self.path,
{ "{}", value } expected { "initialized plain (non-pointer) bytes" }
@ -601,12 +590,11 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
// message below.
let value = value.to_scalar_or_uninit();
let _fn = try_validation!(
value.check_init().and_then(|ptr| self.ecx.memory.get_fn(ptr)),
value.check_init().and_then(|ptr| self.ecx.memory.get_fn(self.ecx.scalar_to_ptr(ptr))),
self.path,
err_ub!(DanglingIntPointer(..)) |
err_ub!(InvalidFunctionPointer(..)) |
err_ub!(InvalidUninitBytes(None)) |
err_unsup!(ReadBytesAsPointer) =>
err_ub!(InvalidUninitBytes(None)) =>
{ "{}", value } expected { "a function pointer" },
);
// FIXME: Check if the signature matches
@ -664,8 +652,11 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
err_ub!(InvalidUninitBytes(None)) => { "{}", value }
expected { "something {}", wrapping_range_format(valid_range, max_hi) },
);
let bits = match value.to_bits_or_ptr(op.layout.size, self.ecx) {
Err(ptr) => {
let bits = match value.try_to_int() {
Err(_) => {
// So this is a pointer then, and casting to an int failed.
// Can only happen during CTFE.
let ptr = self.ecx.scalar_to_ptr(value);
if lo == 1 && hi == max_hi {
// Only null is the niche. So make sure the ptr is NOT null.
if self.ecx.memory.ptr_may_be_null(ptr) {
@ -690,7 +681,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
)
}
}
Ok(data) => data,
Ok(int) => int.assert_bits(op.layout.size),
};
// Now compare. This is slightly subtle because this is a special "wrap-around" range.
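// Worked example (hypothetical layout): for a u8 scalar with valid_range 254..=2 the
// range wraps around the end of the type, so the accepted bit patterns are
// {254, 255, 0, 1, 2}; any other value makes the check below fail.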
if wrapping_range_contains(&valid_range, bits) {
@ -832,7 +823,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
) -> InterpResult<'tcx> {
match op.layout.ty.kind() {
ty::Str => {
let mplace = op.assert_mem_place(self.ecx); // strings are never immediate
let mplace = op.assert_mem_place(); // strings are never immediate
let len = mplace.len(self.ecx)?;
try_validation!(
self.ecx.memory.read_bytes(mplace.ptr, Size::from_bytes(len)),
@ -853,7 +844,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
// Optimized handling for arrays of integer/float type.
// Arrays cannot be immediate, slices are never immediate.
let mplace = op.assert_mem_place(self.ecx);
let mplace = op.assert_mem_place();
// This is the length of the array/slice.
let len = mplace.len(self.ecx)?;
// This is the element type size.
@ -940,9 +931,6 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// Construct a visitor
let mut visitor = ValidityVisitor { path, ref_tracking, ctfe_mode, ecx: self };
// Try to cast to ptr *once* instead of all the time.
let op = self.force_op_ptr(&op).unwrap_or(*op);
// Run it.
match visitor.visit_value(&op) {
Ok(()) => Ok(()),

View File

@ -211,7 +211,8 @@ macro_rules! make_value_visitor {
// If it is a trait object, switch to the real type that was used to create it.
ty::Dynamic(..) => {
// immediate trait objects are not a thing
let dest = v.to_op(self.ecx())?.assert_mem_place(self.ecx());
let op = v.to_op(self.ecx())?;
let dest = op.assert_mem_place();
let inner = self.ecx().unpack_dyn_trait(&dest)?.1;
trace!("walk_value: dyn object layout: {:#?}", inner.layout);
// recurse with the inner type
@ -241,7 +242,8 @@ macro_rules! make_value_visitor {
},
FieldsShape::Array { .. } => {
// Let's get an mplace first.
let mplace = v.to_op(self.ecx())?.assert_mem_place(self.ecx());
let op = v.to_op(self.ecx())?;
let mplace = op.assert_mem_place();
// Now we can go over all the fields.
// This uses the *run-time length*, i.e., if we are a slice,
// the dynamic info from the metadata is used.

View File

@ -403,7 +403,7 @@ fn collect_items_rec<'tcx>(
recursion_depth_reset = None;
if let Ok(alloc) = tcx.eval_static_initializer(def_id) {
for &((), id) in alloc.relocations().values() {
for &id in alloc.relocations().values() {
collect_miri(tcx, id, &mut neighbors);
}
}
@ -1369,7 +1369,7 @@ fn collect_miri<'tcx>(
}
GlobalAlloc::Memory(alloc) => {
trace!("collecting {:?} with {:#?}", alloc_id, alloc);
for &((), inner) in alloc.relocations().values() {
for &inner in alloc.relocations().values() {
rustc_data_structures::stack::ensure_sufficient_stack(|| {
collect_miri(tcx, inner, output);
});
@ -1402,9 +1402,9 @@ fn collect_const_value<'tcx>(
output: &mut Vec<Spanned<MonoItem<'tcx>>>,
) {
match value {
ConstValue::Scalar(Scalar::Ptr(ptr)) => collect_miri(tcx, ptr.alloc_id, output),
ConstValue::Scalar(Scalar::Ptr(ptr, _size)) => collect_miri(tcx, ptr.provenance, output),
ConstValue::Slice { data: alloc, start: _, end: _ } | ConstValue::ByRef { alloc, .. } => {
for &((), id) in alloc.relocations().values() {
for &id in alloc.relocations().values() {
collect_miri(tcx, id, output);
}
}

View File

@ -31,9 +31,8 @@ use rustc_trait_selection::traits;
use crate::const_eval::ConstEvalErr;
use crate::interpret::{
self, compile_time_machine, AllocId, Allocation, ConstValue, CtfeValidationMode, Frame, ImmTy,
Immediate, InterpCx, InterpResult, LocalState, LocalValue, MemPlace, Memory, MemoryKind, OpTy,
Operand as InterpOperand, PlaceTy, Pointer, Scalar, ScalarMaybeUninit, StackPopCleanup,
StackPopUnwind,
Immediate, InterpCx, InterpResult, LocalState, LocalValue, MemPlace, MemoryKind, OpTy,
Operand as InterpOperand, PlaceTy, Scalar, ScalarMaybeUninit, StackPopCleanup, StackPopUnwind,
};
use crate::transform::MirPass;
@ -157,7 +156,7 @@ impl<'tcx> MirPass<'tcx> for ConstProp {
struct ConstPropMachine<'mir, 'tcx> {
/// The virtual call stack.
stack: Vec<Frame<'mir, 'tcx, (), ()>>,
stack: Vec<Frame<'mir, 'tcx>>,
/// `OnlyInsideOwnBlock` locals that were written in the current block get erased at the end.
written_only_inside_own_block_locals: FxHashSet<Local>,
/// Locals that need to be cleared after every block terminates.
@ -223,10 +222,6 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx>
bug!("panics terminators are not evaluated in ConstProp")
}
fn ptr_to_int(_mem: &Memory<'mir, 'tcx, Self>, _ptr: Pointer) -> InterpResult<'tcx, u64> {
throw_unsup!(ReadPointerAsBytes)
}
fn binary_ptr_op(
_ecx: &InterpCx<'mir, 'tcx, Self>,
_bin_op: BinOp,
@ -587,8 +582,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
let left_size = self.ecx.layout_of(left_ty).ok()?.size;
let right_size = r.layout.size;
let r_bits = r.to_scalar().ok();
// This is basically `force_bits`.
let r_bits = r_bits.and_then(|r| r.to_bits_or_ptr(right_size, &self.tcx).ok());
let r_bits = r_bits.and_then(|r| r.to_bits(right_size).ok());
if r_bits.map_or(false, |b| b >= left_size.bits() as u128) {
debug!("check_binary_op: reporting assert for {:?}", source_info);
self.report_assert_as_lint(
@ -759,8 +753,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
}
};
let arg_value =
this.ecx.force_bits(const_arg.to_scalar()?, const_arg.layout.size)?;
let arg_value = const_arg.to_scalar()?.to_bits(const_arg.layout.size)?;
let dest = this.ecx.eval_place(place)?;
match op {
@ -876,7 +869,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
let alloc = this
.ecx
.intern_with_temp_alloc(value.layout, |ecx, dest| {
ecx.write_immediate_to_mplace(*imm, dest)
ecx.write_immediate(*imm, dest)
})
.unwrap();
Ok(Some(alloc))
@ -928,12 +921,12 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
match **op {
interpret::Operand::Immediate(Immediate::Scalar(ScalarMaybeUninit::Scalar(s))) => {
s.is_bits()
s.try_to_int().is_ok()
}
interpret::Operand::Immediate(Immediate::ScalarPair(
ScalarMaybeUninit::Scalar(l),
ScalarMaybeUninit::Scalar(r),
)) => l.is_bits() && r.is_bits(),
)) => l.try_to_int().is_ok() && r.try_to_int().is_ok(),
_ => false,
}
}

View File

@ -46,7 +46,7 @@ impl<'tcx> MirPass<'tcx> for SimplifyComparisonIntegral {
.expect("if we have an evaluated constant we must know the layout");
int.assert_bits(layout.size)
}
Scalar::Ptr(_) => continue,
Scalar::Ptr(..) => continue,
};
const FALSE: u128 = 0;
@ -211,7 +211,7 @@ fn find_branch_value_info<'tcx>(
return None;
};
let branch_value_scalar = branch_value.literal.try_to_scalar()?;
Some((branch_value_scalar, branch_value_ty, *to_switch_on))
Some((branch_value_scalar.into(), branch_value_ty, *to_switch_on))
}
_ => None,
}

View File

@ -1,6 +1,6 @@
use std::collections::BTreeSet;
use std::fmt::Display;
use std::fmt::Write as _;
use std::fmt::{Debug, Display};
use std::fs;
use std::io::{self, Write};
use std::path::{Path, PathBuf};
@ -13,7 +13,7 @@ use rustc_data_structures::fx::FxHashMap;
use rustc_hir::def_id::DefId;
use rustc_index::vec::Idx;
use rustc_middle::mir::interpret::{
read_target_uint, AllocId, Allocation, ConstValue, GlobalAlloc, Pointer,
read_target_uint, AllocId, Allocation, ConstValue, GlobalAlloc, Pointer, Provenance,
};
use rustc_middle::mir::visit::Visitor;
use rustc_middle::mir::*;
@ -665,12 +665,12 @@ pub fn write_allocations<'tcx>(
w: &mut dyn Write,
) -> io::Result<()> {
fn alloc_ids_from_alloc(alloc: &Allocation) -> impl DoubleEndedIterator<Item = AllocId> + '_ {
alloc.relocations().values().map(|(_, id)| *id)
alloc.relocations().values().map(|id| *id)
}
fn alloc_ids_from_const(val: ConstValue<'_>) -> impl Iterator<Item = AllocId> + '_ {
match val {
ConstValue::Scalar(interpret::Scalar::Ptr(ptr)) => {
Either::Left(Either::Left(std::iter::once(ptr.alloc_id)))
ConstValue::Scalar(interpret::Scalar::Ptr(ptr, _size)) => {
Either::Left(Either::Left(std::iter::once(ptr.provenance)))
}
ConstValue::Scalar(interpret::Scalar::Int { .. }) => {
Either::Left(Either::Right(std::iter::empty()))
@ -755,7 +755,7 @@ pub fn write_allocations<'tcx>(
/// After the hex dump, an ascii dump follows, replacing all unprintable characters (control
/// characters or characters whose value is larger than 127) with a `.`
/// This also prints relocations adequately.
pub fn display_allocation<Tag: Copy + Debug, Extra>(
pub fn display_allocation<Tag, Extra>(
tcx: TyCtxt<'tcx>,
alloc: &'a Allocation<Tag, Extra>,
) -> RenderAllocation<'a, 'tcx, Tag, Extra> {
@ -768,7 +768,7 @@ pub struct RenderAllocation<'a, 'tcx, Tag, Extra> {
alloc: &'a Allocation<Tag, Extra>,
}
impl<Tag: Copy + Debug, Extra> std::fmt::Display for RenderAllocation<'a, 'tcx, Tag, Extra> {
impl<Tag: Provenance, Extra> std::fmt::Display for RenderAllocation<'a, 'tcx, Tag, Extra> {
fn fmt(&self, w: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let RenderAllocation { tcx, alloc } = *self;
write!(w, "size: {}, align: {})", alloc.size().bytes(), alloc.align.bytes())?;
@ -811,7 +811,7 @@ fn write_allocation_newline(
/// The `prefix` argument allows callers to add an arbitrary prefix before each line (even if there
/// is only one line). Note that your prefix should contain a trailing space as the lines are
/// printed directly after it.
fn write_allocation_bytes<Tag: Copy + Debug, Extra>(
fn write_allocation_bytes<Tag: Provenance, Extra>(
tcx: TyCtxt<'tcx>,
alloc: &Allocation<Tag, Extra>,
w: &mut dyn std::fmt::Write,
@ -847,7 +847,7 @@ fn write_allocation_bytes<Tag: Copy + Debug, Extra>(
if i != line_start {
write!(w, " ")?;
}
if let Some(&(tag, target_id)) = alloc.relocations().get(&i) {
if let Some(&tag) = alloc.relocations().get(&i) {
// Memory with a relocation must be defined
let j = i.bytes_usize();
let offset = alloc
@ -855,7 +855,7 @@ fn write_allocation_bytes<Tag: Copy + Debug, Extra>(
let offset = read_target_uint(tcx.data_layout.endian, offset).unwrap();
let offset = Size::from_bytes(offset);
let relocation_width = |bytes| bytes * 3;
let ptr = Pointer::new_with_tag(target_id, offset, tag);
let ptr = Pointer::new(tag, offset);
let mut target = format!("{:?}", ptr);
if target.len() > relocation_width(ptr_size.bytes_usize() - 1) {
// This is too long, try to save some space.

View File

@ -38,7 +38,7 @@ crate fn lit_to_const<'tcx>(
}
(ast::LitKind::ByteStr(data), ty::Ref(_, inner_ty, _)) if inner_ty.is_array() => {
let id = tcx.allocate_bytes(data);
ConstValue::Scalar(Scalar::Ptr(id.into()))
ConstValue::Scalar(Scalar::from_pointer(id.into(), &tcx))
}
(ast::LitKind::Byte(n), ty::Uint(ty::UintTy::U8)) => {
ConstValue::Scalar(Scalar::from_uint(*n, Size::from_bytes(1)))

View File

@ -928,7 +928,11 @@ impl<'tcx> Cx<'tcx> {
} else {
let ptr = self.tcx.create_static_alloc(id);
ExprKind::StaticRef {
literal: ty::Const::from_scalar(self.tcx, Scalar::Ptr(ptr.into()), ty),
literal: ty::Const::from_scalar(
self.tcx,
Scalar::from_pointer(ptr.into(), &self.tcx),
ty,
),
def_id: id,
}
};

View File

@ -123,7 +123,7 @@ impl IntRange {
// straight to the result, after doing a bit of checking. (We
// could remove this branch and just fall through, which
// is more general but much slower.)
if let Ok(bits) = scalar.to_bits_or_ptr(target_size, &tcx) {
if let Ok(bits) = scalar.to_bits_or_ptr_internal(target_size) {
return Some(bits);
}
}

View File

@ -10,7 +10,7 @@
// CHECK: @STATIC = {{.*}}, align 4
// This checks the constants from inline_enum_const
// CHECK: @alloc8 = {{.*}}, align 2
// CHECK: @alloc9 = {{.*}}, align 2
// This checks the constants from {low,high}_align_const, they share the same
// constant, but the alignment differs, so the higher one should be used

View File

@ -12,7 +12,7 @@ mod aux_mod;
include!("aux_mod.rs");
// Here we check that the expansion of the file!() macro is mapped.
// CHECK: @alloc1 = private unnamed_addr constant <{ [34 x i8] }> <{ [34 x i8] c"/the/src/remap_path_prefix/main.rs" }>, align 1
// CHECK: @alloc2 = private unnamed_addr constant <{ [34 x i8] }> <{ [34 x i8] c"/the/src/remap_path_prefix/main.rs" }>, align 1
pub static FILE_PATH: &'static str = file!();
fn main() {

View File

@ -16,10 +16,10 @@ fn main() -> () {
_1 = const b"foo"; // scope 0 at $DIR/byte_slice.rs:5:13: 5:19
// ty::Const
// + ty: &[u8; 3]
// + val: Value(Scalar(alloc0))
// + val: Value(Scalar(alloc1))
// mir::Constant
// + span: $DIR/byte_slice.rs:5:13: 5:19
// + literal: Const { ty: &[u8; 3], val: Value(Scalar(alloc0)) }
// + literal: Const { ty: &[u8; 3], val: Value(Scalar(alloc1)) }
StorageLive(_2); // scope 1 at $DIR/byte_slice.rs:6:9: 6:10
_2 = [const 5_u8, const 120_u8]; // scope 1 at $DIR/byte_slice.rs:6:13: 6:24
_0 = const (); // scope 0 at $DIR/byte_slice.rs:4:11: 7:2
@ -29,6 +29,6 @@ fn main() -> () {
}
}
alloc0 (size: 3, align: 1) {
alloc1 (size: 3, align: 1) {
66 6f 6f foo
}

View File

@ -8,13 +8,13 @@ fn main() -> () {
bb0: {
StorageLive(_1); // scope 0 at $DIR/const_allocation.rs:8:5: 8:8
StorageLive(_2); // scope 0 at $DIR/const_allocation.rs:8:5: 8:8
_2 = const {alloc0: &&[(Option<i32>, &[&str])]}; // scope 0 at $DIR/const_allocation.rs:8:5: 8:8
_2 = const {alloc1: &&[(Option<i32>, &[&str])]}; // scope 0 at $DIR/const_allocation.rs:8:5: 8:8
// ty::Const
// + ty: &&[(std::option::Option<i32>, &[&str])]
// + val: Value(Scalar(alloc0))
// + val: Value(Scalar(alloc1))
// mir::Constant
// + span: $DIR/const_allocation.rs:8:5: 8:8
// + literal: Const { ty: &&[(std::option::Option<i32>, &[&str])], val: Value(Scalar(alloc0)) }
// + literal: Const { ty: &&[(std::option::Option<i32>, &[&str])], val: Value(Scalar(alloc1)) }
_1 = (*_2); // scope 0 at $DIR/const_allocation.rs:8:5: 8:8
StorageDead(_2); // scope 0 at $DIR/const_allocation.rs:8:8: 8:9
StorageDead(_1); // scope 0 at $DIR/const_allocation.rs:8:8: 8:9
@ -23,43 +23,43 @@ fn main() -> () {
}
}
alloc0 (static: FOO, size: 8, align: 4) {
alloc17 03 00 00 00 ....
alloc1 (static: FOO, size: 8, align: 4) {
alloc18 03 00 00 00 ....
}
alloc17 (size: 48, align: 4) {
0x00 00 00 00 00 __ __ __ __ alloc4 00 00 00 00 ........
0x10 00 00 00 00 __ __ __ __ alloc8 02 00 00 00 ........
0x20 01 00 00 00 2a 00 00 00 alloc13 03 00 00 00 ....*.......
alloc18 (size: 48, align: 4) {
0x00 00 00 00 00 __ __ __ __ alloc5 00 00 00 00 ........
0x10 00 00 00 00 __ __ __ __ alloc9 02 00 00 00 ........
0x20 01 00 00 00 2a 00 00 00 alloc14 03 00 00 00 ....*.......
}
alloc4 (size: 0, align: 4) {}
alloc5 (size: 0, align: 4) {}
alloc8 (size: 16, align: 4) {
alloc7 03 00 00 00 alloc9 03 00 00 00 ........
alloc9 (size: 16, align: 4) {
alloc8 03 00 00 00 alloc10 03 00 00 00 ........
}
alloc7 (size: 3, align: 1) {
alloc8 (size: 3, align: 1) {
66 6f 6f foo
}
alloc9 (size: 3, align: 1) {
alloc10 (size: 3, align: 1) {
62 61 72 bar
}
alloc13 (size: 24, align: 4) {
0x00 alloc12 03 00 00 00 alloc14 03 00 00 00 ........
0x10 alloc15 04 00 00 00 ....
alloc14 (size: 24, align: 4) {
0x00 alloc13 03 00 00 00 alloc15 03 00 00 00 ........
0x10 alloc16 04 00 00 00 ....
}
alloc12 (size: 3, align: 1) {
alloc13 (size: 3, align: 1) {
6d 65 68 meh
}
alloc14 (size: 3, align: 1) {
alloc15 (size: 3, align: 1) {
6d 6f 70 mop
}
alloc15 (size: 4, align: 1) {
alloc16 (size: 4, align: 1) {
6d c3 b6 70 m..p
}

View File

@ -8,13 +8,13 @@ fn main() -> () {
bb0: {
StorageLive(_1); // scope 0 at $DIR/const_allocation.rs:8:5: 8:8
StorageLive(_2); // scope 0 at $DIR/const_allocation.rs:8:5: 8:8
_2 = const {alloc0: &&[(Option<i32>, &[&str])]}; // scope 0 at $DIR/const_allocation.rs:8:5: 8:8
_2 = const {alloc1: &&[(Option<i32>, &[&str])]}; // scope 0 at $DIR/const_allocation.rs:8:5: 8:8
// ty::Const
// + ty: &&[(std::option::Option<i32>, &[&str])]
// + val: Value(Scalar(alloc0))
// + val: Value(Scalar(alloc1))
// mir::Constant
// + span: $DIR/const_allocation.rs:8:5: 8:8
// + literal: Const { ty: &&[(std::option::Option<i32>, &[&str])], val: Value(Scalar(alloc0)) }
// + literal: Const { ty: &&[(std::option::Option<i32>, &[&str])], val: Value(Scalar(alloc1)) }
_1 = (*_2); // scope 0 at $DIR/const_allocation.rs:8:5: 8:8
StorageDead(_2); // scope 0 at $DIR/const_allocation.rs:8:8: 8:9
StorageDead(_1); // scope 0 at $DIR/const_allocation.rs:8:8: 8:9
@ -23,47 +23,47 @@ fn main() -> () {
}
}
alloc0 (static: FOO, size: 16, align: 8) {
alloc17 03 00 00 00 00 00 00 00 ........
alloc1 (static: FOO, size: 16, align: 8) {
alloc18 03 00 00 00 00 00 00 00 ........
}
alloc17 (size: 72, align: 8) {
0x00 00 00 00 00 __ __ __ __ alloc4 ....
alloc18 (size: 72, align: 8) {
0x00 00 00 00 00 __ __ __ __ alloc5 ....
0x10 00 00 00 00 00 00 00 00 00 00 00 00 __ __ __ __ ............
0x20 alloc8 02 00 00 00 00 00 00 00 ........
0x30 01 00 00 00 2a 00 00 00 alloc13 ....*...
0x20 alloc9 02 00 00 00 00 00 00 00 ........
0x30 01 00 00 00 2a 00 00 00 alloc14 ....*...
0x40 03 00 00 00 00 00 00 00 ........
}
alloc4 (size: 0, align: 8) {}
alloc5 (size: 0, align: 8) {}
alloc8 (size: 32, align: 8) {
0x00 alloc7 03 00 00 00 00 00 00 00 ........
0x10 alloc9 03 00 00 00 00 00 00 00 ........
alloc9 (size: 32, align: 8) {
0x00 alloc8 03 00 00 00 00 00 00 00 ........
0x10 alloc10 03 00 00 00 00 00 00 00 ........
}
alloc7 (size: 3, align: 1) {
alloc8 (size: 3, align: 1) {
66 6f 6f foo
}
alloc9 (size: 3, align: 1) {
alloc10 (size: 3, align: 1) {
62 61 72 bar
}
alloc13 (size: 48, align: 8) {
0x00 alloc12 03 00 00 00 00 00 00 00 ........
0x10 alloc14 03 00 00 00 00 00 00 00 ........
0x20 alloc15 04 00 00 00 00 00 00 00 ........
alloc14 (size: 48, align: 8) {
0x00 alloc13 03 00 00 00 00 00 00 00 ........
0x10 alloc15 03 00 00 00 00 00 00 00 ........
0x20 alloc16 04 00 00 00 00 00 00 00 ........
}
alloc12 (size: 3, align: 1) {
alloc13 (size: 3, align: 1) {
6d 65 68 meh
}
alloc14 (size: 3, align: 1) {
alloc15 (size: 3, align: 1) {
6d 6f 70 mop
}
alloc15 (size: 4, align: 1) {
alloc16 (size: 4, align: 1) {
6d c3 b6 70 m..p
}

View File

@ -8,13 +8,13 @@ fn main() -> () {
bb0: {
StorageLive(_1); // scope 0 at $DIR/const_allocation2.rs:5:5: 5:8
StorageLive(_2); // scope 0 at $DIR/const_allocation2.rs:5:5: 5:8
_2 = const {alloc0: &&[(Option<i32>, &[&u8])]}; // scope 0 at $DIR/const_allocation2.rs:5:5: 5:8
_2 = const {alloc1: &&[(Option<i32>, &[&u8])]}; // scope 0 at $DIR/const_allocation2.rs:5:5: 5:8
// ty::Const
// + ty: &&[(std::option::Option<i32>, &[&u8])]
// + val: Value(Scalar(alloc0))
// + val: Value(Scalar(alloc1))
// mir::Constant
// + span: $DIR/const_allocation2.rs:5:5: 5:8
// + literal: Const { ty: &&[(std::option::Option<i32>, &[&u8])], val: Value(Scalar(alloc0)) }
// + literal: Const { ty: &&[(std::option::Option<i32>, &[&u8])], val: Value(Scalar(alloc1)) }
_1 = (*_2); // scope 0 at $DIR/const_allocation2.rs:5:5: 5:8
StorageDead(_2); // scope 0 at $DIR/const_allocation2.rs:5:8: 5:9
StorageDead(_1); // scope 0 at $DIR/const_allocation2.rs:5:8: 5:9
@ -23,42 +23,42 @@ fn main() -> () {
}
}
alloc0 (static: FOO, size: 8, align: 4) {
alloc27 03 00 00 00 ....
alloc1 (static: FOO, size: 8, align: 4) {
alloc28 03 00 00 00 ....
}
alloc27 (size: 48, align: 4) {
0x00 00 00 00 00 __ __ __ __ alloc12 00 00 00 00 ........
0x10 00 00 00 00 __ __ __ __ alloc17 02 00 00 00 ........
0x20 01 00 00 00 2a 00 00 00 alloc25 03 00 00 00 ....*.......
alloc28 (size: 48, align: 4) {
0x00 00 00 00 00 __ __ __ __ alloc13 00 00 00 00 ........
0x10 00 00 00 00 __ __ __ __ alloc18 02 00 00 00 ........
0x20 01 00 00 00 2a 00 00 00 alloc26 03 00 00 00 ....*.......
}
alloc12 (size: 0, align: 4) {}
alloc13 (size: 0, align: 4) {}
alloc17 (size: 8, align: 4) {
alloc15 alloc16
}
alloc15 (size: 1, align: 1) {
05 .
alloc18 (size: 8, align: 4) {
alloc16 alloc17
}
alloc16 (size: 1, align: 1) {
05 .
}
alloc17 (size: 1, align: 1) {
06 .
}
alloc25 (size: 12, align: 4) {
a21+0x3 alloc22 a24+0x2
alloc26 (size: 12, align: 4) {
a22+0x3 alloc23 a25+0x2
}
alloc21 (size: 4, align: 1) {
alloc22 (size: 4, align: 1) {
2a 45 15 6f *E.o
}
alloc22 (size: 1, align: 1) {
alloc23 (size: 1, align: 1) {
2a *
}
alloc24 (size: 4, align: 1) {
alloc25 (size: 4, align: 1) {
2a 45 15 6f *E.o
}

View File

@ -8,13 +8,13 @@ fn main() -> () {
bb0: {
StorageLive(_1); // scope 0 at $DIR/const_allocation2.rs:5:5: 5:8
StorageLive(_2); // scope 0 at $DIR/const_allocation2.rs:5:5: 5:8
_2 = const {alloc0: &&[(Option<i32>, &[&u8])]}; // scope 0 at $DIR/const_allocation2.rs:5:5: 5:8
_2 = const {alloc1: &&[(Option<i32>, &[&u8])]}; // scope 0 at $DIR/const_allocation2.rs:5:5: 5:8
// ty::Const
// + ty: &&[(std::option::Option<i32>, &[&u8])]
// + val: Value(Scalar(alloc0))
// + val: Value(Scalar(alloc1))
// mir::Constant
// + span: $DIR/const_allocation2.rs:5:5: 5:8
// + literal: Const { ty: &&[(std::option::Option<i32>, &[&u8])], val: Value(Scalar(alloc0)) }
// + literal: Const { ty: &&[(std::option::Option<i32>, &[&u8])], val: Value(Scalar(alloc1)) }
_1 = (*_2); // scope 0 at $DIR/const_allocation2.rs:5:5: 5:8
StorageDead(_2); // scope 0 at $DIR/const_allocation2.rs:5:8: 5:9
StorageDead(_1); // scope 0 at $DIR/const_allocation2.rs:5:8: 5:9
@ -23,45 +23,45 @@ fn main() -> () {
}
}
alloc0 (static: FOO, size: 16, align: 8) {
alloc27 03 00 00 00 00 00 00 00 ........
alloc1 (static: FOO, size: 16, align: 8) {
alloc28 03 00 00 00 00 00 00 00 ........
}
alloc27 (size: 72, align: 8) {
0x00 00 00 00 00 __ __ __ __ alloc12 ....
alloc28 (size: 72, align: 8) {
0x00 00 00 00 00 __ __ __ __ alloc13 ....
0x10 00 00 00 00 00 00 00 00 00 00 00 00 __ __ __ __ ............
0x20 alloc17 02 00 00 00 00 00 00 00 ........
0x30 01 00 00 00 2a 00 00 00 alloc25 ....*...
0x20 alloc18 02 00 00 00 00 00 00 00 ........
0x30 01 00 00 00 2a 00 00 00 alloc26 ....*...
0x40 03 00 00 00 00 00 00 00 ........
}
alloc12 (size: 0, align: 8) {}
alloc13 (size: 0, align: 8) {}
alloc17 (size: 16, align: 8) {
alloc15 alloc16
}
alloc15 (size: 1, align: 1) {
05 .
alloc18 (size: 16, align: 8) {
alloc16 alloc17
}
alloc16 (size: 1, align: 1) {
05 .
}
alloc17 (size: 1, align: 1) {
06 .
}
alloc25 (size: 24, align: 8) {
0x00 alloc21+0x3 alloc22
0x10 alloc24+0x2
alloc26 (size: 24, align: 8) {
0x00 alloc22+0x3 alloc23
0x10 alloc25+0x2
}
alloc21 (size: 4, align: 1) {
alloc22 (size: 4, align: 1) {
2a 45 15 6f *E.o
}
alloc22 (size: 1, align: 1) {
alloc23 (size: 1, align: 1) {
2a *
}
alloc24 (size: 4, align: 1) {
alloc25 (size: 4, align: 1) {
2a 45 15 6f *E.o
}

View File

@ -8,13 +8,13 @@ fn main() -> () {
bb0: {
StorageLive(_1); // scope 0 at $DIR/const_allocation3.rs:5:5: 5:8
StorageLive(_2); // scope 0 at $DIR/const_allocation3.rs:5:5: 5:8
_2 = const {alloc0: &&Packed}; // scope 0 at $DIR/const_allocation3.rs:5:5: 5:8
_2 = const {alloc1: &&Packed}; // scope 0 at $DIR/const_allocation3.rs:5:5: 5:8
// ty::Const
// + ty: &&Packed
// + val: Value(Scalar(alloc0))
// + val: Value(Scalar(alloc1))
// mir::Constant
// + span: $DIR/const_allocation3.rs:5:5: 5:8
// + literal: Const { ty: &&Packed, val: Value(Scalar(alloc0)) }
// + literal: Const { ty: &&Packed, val: Value(Scalar(alloc1)) }
_1 = (*_2); // scope 0 at $DIR/const_allocation3.rs:5:5: 5:8
StorageDead(_2); // scope 0 at $DIR/const_allocation3.rs:5:8: 5:9
StorageDead(_1); // scope 0 at $DIR/const_allocation3.rs:5:8: 5:9
@ -23,31 +23,31 @@ fn main() -> () {
}
}
alloc0 (static: FOO, size: 4, align: 4) {
alloc10
alloc1 (static: FOO, size: 4, align: 4) {
alloc11
}
alloc10 (size: 168, align: 1) {
alloc11 (size: 168, align: 1) {
0x00 ab ab ab ab ab ab ab ab ab ab ab ab ab ab ab ab ................
0x10 ab ab ab ab ab ab ab ab ab ab ab ab alloc5 ............
0x10 ab ab ab ab ab ab ab ab ab ab ab ab alloc6 ............
0x20 01 ef cd ab 00 00 00 00 00 00 00 00 00 00 00 00 ................
0x30 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ................
0x40 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ................
0x50 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ................
0x60 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ................
0x70 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ................
0x80 00 00 00 00 00 00 00 00 00 00 alloc7 00 00 ............
0x90 a8+0x63 00 00 00 00 00 00 00 00 00 00 00 00 ............
0x80 00 00 00 00 00 00 00 00 00 00 alloc8 00 00 ............
0x90 a9+0x63 00 00 00 00 00 00 00 00 00 00 00 00 ............
0xa0 00 00 00 00 00 00 00 00 ........
}
alloc5 (size: 4, align: 4) {
alloc6 (size: 4, align: 4) {
2a 00 00 00 *...
}
alloc7 (fn: main)
alloc8 (fn: main)
alloc8 (size: 100, align: 1) {
alloc9 (size: 100, align: 1) {
0x00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ................
0x10 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ................
0x20 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ................

View File

@ -8,13 +8,13 @@ fn main() -> () {
bb0: {
StorageLive(_1); // scope 0 at $DIR/const_allocation3.rs:5:5: 5:8
StorageLive(_2); // scope 0 at $DIR/const_allocation3.rs:5:5: 5:8
_2 = const {alloc0: &&Packed}; // scope 0 at $DIR/const_allocation3.rs:5:5: 5:8
_2 = const {alloc1: &&Packed}; // scope 0 at $DIR/const_allocation3.rs:5:5: 5:8
// ty::Const
// + ty: &&Packed
// + val: Value(Scalar(alloc0))
// + val: Value(Scalar(alloc1))
// mir::Constant
// + span: $DIR/const_allocation3.rs:5:5: 5:8
// + literal: Const { ty: &&Packed, val: Value(Scalar(alloc0)) }
// + literal: Const { ty: &&Packed, val: Value(Scalar(alloc1)) }
_1 = (*_2); // scope 0 at $DIR/const_allocation3.rs:5:5: 5:8
StorageDead(_2); // scope 0 at $DIR/const_allocation3.rs:5:8: 5:9
StorageDead(_1); // scope 0 at $DIR/const_allocation3.rs:5:8: 5:9
@ -23,13 +23,13 @@ fn main() -> () {
}
}
alloc0 (static: FOO, size: 8, align: 8) {
alloc10
alloc1 (static: FOO, size: 8, align: 8) {
alloc11
}
alloc10 (size: 180, align: 1) {
alloc11 (size: 180, align: 1) {
0x00 ab ab ab ab ab ab ab ab ab ab ab ab ab ab ab ab ................
0x10 ab ab ab ab ab ab ab ab ab ab ab ab alloc5 ............
0x10 ab ab ab ab ab ab ab ab ab ab ab ab alloc6 ............
0x20 01 ef cd ab 00 00 00 00 00 00 00 00 ............
0x30 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ................
0x40 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ................
@ -37,18 +37,18 @@ alloc10 (size: 180, align: 1) {
0x60 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ................
0x70 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ................
0x80 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ..............
0x90 alloc7 00 00 alloc8+0x63 ..
0x90 alloc8 00 00 alloc9+0x63 ..
0xa0 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ................
0xb0 00 00 00 00 ....
}
alloc5 (size: 4, align: 4) {
alloc6 (size: 4, align: 4) {
2a 00 00 00 *...
}
alloc7 (fn: main)
alloc8 (fn: main)
alloc8 (size: 100, align: 1) {
alloc9 (size: 100, align: 1) {
0x00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ................
0x10 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ................
0x20 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 ................

View File

@ -7,13 +7,13 @@ promoted[0] in BAR: &[&i32; 1] = {
let mut _3: &i32; // in scope 0 at $DIR/const-promotion-extern-static.rs:9:33: 9:34
bb0: {
_3 = const {alloc0: &i32}; // scope 0 at $DIR/const-promotion-extern-static.rs:9:33: 9:34
_3 = const {alloc1: &i32}; // scope 0 at $DIR/const-promotion-extern-static.rs:9:33: 9:34
// ty::Const
// + ty: &i32
// + val: Value(Scalar(alloc0))
// + val: Value(Scalar(alloc1))
// mir::Constant
// + span: $DIR/const-promotion-extern-static.rs:9:33: 9:34
// + literal: Const { ty: &i32, val: Value(Scalar(alloc0)) }
// + literal: Const { ty: &i32, val: Value(Scalar(alloc1)) }
_2 = &(*_3); // scope 0 at $DIR/const-promotion-extern-static.rs:9:32: 9:34
_1 = [move _2]; // scope 0 at $DIR/const-promotion-extern-static.rs:9:31: 9:35
_0 = &_1; // scope 0 at $DIR/const-promotion-extern-static.rs:9:31: 9:35
@ -21,6 +21,6 @@ promoted[0] in BAR: &[&i32; 1] = {
}
}
alloc0 (static: Y, size: 4, align: 4) {
alloc1 (static: Y, size: 4, align: 4) {
2a 00 00 00 *...
}

View File

@ -16,16 +16,16 @@
- StorageLive(_3); // scope 0 at $DIR/const-promotion-extern-static.rs:9:31: 9:35
- StorageLive(_4); // scope 0 at $DIR/const-promotion-extern-static.rs:9:32: 9:34
- StorageLive(_5); // scope 0 at $DIR/const-promotion-extern-static.rs:9:33: 9:34
- _5 = const {alloc0: &i32}; // scope 0 at $DIR/const-promotion-extern-static.rs:9:33: 9:34
- _5 = const {alloc1: &i32}; // scope 0 at $DIR/const-promotion-extern-static.rs:9:33: 9:34
+ _6 = const BAR::promoted[0]; // scope 0 at $DIR/const-promotion-extern-static.rs:9:31: 9:35
// ty::Const
- // + ty: &i32
- // + val: Value(Scalar(alloc0))
- // + val: Value(Scalar(alloc1))
+ // + ty: &[&i32; 1]
+ // + val: Unevaluated(BAR, [], Some(promoted[0]))
// mir::Constant
- // + span: $DIR/const-promotion-extern-static.rs:9:33: 9:34
- // + literal: Const { ty: &i32, val: Value(Scalar(alloc0)) }
- // + literal: Const { ty: &i32, val: Value(Scalar(alloc1)) }
- _4 = &(*_5); // scope 0 at $DIR/const-promotion-extern-static.rs:9:32: 9:34
- _3 = [move _4]; // scope 0 at $DIR/const-promotion-extern-static.rs:9:31: 9:35
- _2 = &_3; // scope 0 at $DIR/const-promotion-extern-static.rs:9:31: 9:35
@ -53,7 +53,7 @@
}
- }
-
- alloc0 (static: Y, size: 4, align: 4) {
- alloc1 (static: Y, size: 4, align: 4) {
- 2a 00 00 00 │ *...
}

View File

@ -7,13 +7,13 @@ promoted[0] in FOO: &[&i32; 1] = {
let mut _3: *const i32; // in scope 0 at $DIR/const-promotion-extern-static.rs:13:42: 13:43
bb0: {
_3 = const {alloc2: *const i32}; // scope 0 at $DIR/const-promotion-extern-static.rs:13:42: 13:43
_3 = const {alloc3: *const i32}; // scope 0 at $DIR/const-promotion-extern-static.rs:13:42: 13:43
// ty::Const
// + ty: *const i32
// + val: Value(Scalar(alloc2))
// + val: Value(Scalar(alloc3))
// mir::Constant
// + span: $DIR/const-promotion-extern-static.rs:13:42: 13:43
// + literal: Const { ty: *const i32, val: Value(Scalar(alloc2)) }
// + literal: Const { ty: *const i32, val: Value(Scalar(alloc3)) }
_2 = &(*_3); // scope 0 at $DIR/const-promotion-extern-static.rs:13:41: 13:43
_1 = [move _2]; // scope 0 at $DIR/const-promotion-extern-static.rs:13:31: 13:46
_0 = &_1; // scope 0 at $DIR/const-promotion-extern-static.rs:13:31: 13:46
@ -21,4 +21,4 @@ promoted[0] in FOO: &[&i32; 1] = {
}
}
alloc2 (extern static: X)
alloc3 (extern static: X)

View File

@ -18,16 +18,16 @@
- StorageLive(_3); // scope 0 at $DIR/const-promotion-extern-static.rs:13:31: 13:46
- StorageLive(_4); // scope 0 at $DIR/const-promotion-extern-static.rs:13:32: 13:45
- StorageLive(_5); // scope 1 at $DIR/const-promotion-extern-static.rs:13:42: 13:43
- _5 = const {alloc2: *const i32}; // scope 1 at $DIR/const-promotion-extern-static.rs:13:42: 13:43
- _5 = const {alloc3: *const i32}; // scope 1 at $DIR/const-promotion-extern-static.rs:13:42: 13:43
+ _6 = const FOO::promoted[0]; // scope 0 at $DIR/const-promotion-extern-static.rs:13:31: 13:46
// ty::Const
- // + ty: *const i32
- // + val: Value(Scalar(alloc2))
- // + val: Value(Scalar(alloc3))
+ // + ty: &[&i32; 1]
+ // + val: Unevaluated(FOO, [], Some(promoted[0]))
// mir::Constant
- // + span: $DIR/const-promotion-extern-static.rs:13:42: 13:43
- // + literal: Const { ty: *const i32, val: Value(Scalar(alloc2)) }
- // + literal: Const { ty: *const i32, val: Value(Scalar(alloc3)) }
- _4 = &(*_5); // scope 1 at $DIR/const-promotion-extern-static.rs:13:41: 13:43
- _3 = [move _4]; // scope 0 at $DIR/const-promotion-extern-static.rs:13:31: 13:46
- _2 = &_3; // scope 0 at $DIR/const-promotion-extern-static.rs:13:31: 13:46
@ -55,5 +55,5 @@
}
}
-
- alloc2 (extern static: X)
- alloc3 (extern static: X)

View File

@ -23,13 +23,13 @@
StorageLive(_2); // scope 1 at $DIR/mutable_variable_no_prop.rs:8:5: 10:6
StorageLive(_3); // scope 2 at $DIR/mutable_variable_no_prop.rs:9:13: 9:19
StorageLive(_4); // scope 2 at $DIR/mutable_variable_no_prop.rs:9:13: 9:19
_4 = const {alloc0: *mut u32}; // scope 2 at $DIR/mutable_variable_no_prop.rs:9:13: 9:19
_4 = const {alloc1: *mut u32}; // scope 2 at $DIR/mutable_variable_no_prop.rs:9:13: 9:19
// ty::Const
// + ty: *mut u32
// + val: Value(Scalar(alloc0))
// + val: Value(Scalar(alloc1))
// mir::Constant
// + span: $DIR/mutable_variable_no_prop.rs:9:13: 9:19
// + literal: Const { ty: *mut u32, val: Value(Scalar(alloc0)) }
// + literal: Const { ty: *mut u32, val: Value(Scalar(alloc1)) }
_3 = (*_4); // scope 2 at $DIR/mutable_variable_no_prop.rs:9:13: 9:19
_1 = move _3; // scope 2 at $DIR/mutable_variable_no_prop.rs:9:9: 9:19
StorageDead(_3); // scope 2 at $DIR/mutable_variable_no_prop.rs:9:18: 9:19
@ -45,7 +45,7 @@
}
}
alloc0 (static: STATIC, size: 4, align: 4) {
alloc1 (static: STATIC, size: 4, align: 4) {
2a 00 00 00 │ *...
}

View File

@ -16,24 +16,24 @@
StorageLive(_1); // scope 0 at $DIR/read_immutable_static.rs:7:9: 7:10
StorageLive(_2); // scope 0 at $DIR/read_immutable_static.rs:7:13: 7:16
StorageLive(_3); // scope 0 at $DIR/read_immutable_static.rs:7:13: 7:16
_3 = const {alloc0: &u8}; // scope 0 at $DIR/read_immutable_static.rs:7:13: 7:16
_3 = const {alloc1: &u8}; // scope 0 at $DIR/read_immutable_static.rs:7:13: 7:16
// ty::Const
// + ty: &u8
// + val: Value(Scalar(alloc0))
// + val: Value(Scalar(alloc1))
// mir::Constant
// + span: $DIR/read_immutable_static.rs:7:13: 7:16
// + literal: Const { ty: &u8, val: Value(Scalar(alloc0)) }
// + literal: Const { ty: &u8, val: Value(Scalar(alloc1)) }
- _2 = (*_3); // scope 0 at $DIR/read_immutable_static.rs:7:13: 7:16
+ _2 = const 2_u8; // scope 0 at $DIR/read_immutable_static.rs:7:13: 7:16
StorageLive(_4); // scope 0 at $DIR/read_immutable_static.rs:7:19: 7:22
StorageLive(_5); // scope 0 at $DIR/read_immutable_static.rs:7:19: 7:22
_5 = const {alloc0: &u8}; // scope 0 at $DIR/read_immutable_static.rs:7:19: 7:22
_5 = const {alloc1: &u8}; // scope 0 at $DIR/read_immutable_static.rs:7:19: 7:22
// ty::Const
// + ty: &u8
// + val: Value(Scalar(alloc0))
// + val: Value(Scalar(alloc1))
// mir::Constant
// + span: $DIR/read_immutable_static.rs:7:19: 7:22
// + literal: Const { ty: &u8, val: Value(Scalar(alloc0)) }
// + literal: Const { ty: &u8, val: Value(Scalar(alloc1)) }
- _4 = (*_5); // scope 0 at $DIR/read_immutable_static.rs:7:19: 7:22
- _1 = Add(move _2, move _4); // scope 0 at $DIR/read_immutable_static.rs:7:13: 7:22
+ _4 = const 2_u8; // scope 0 at $DIR/read_immutable_static.rs:7:19: 7:22
@ -48,7 +48,7 @@
}
}
alloc0 (static: FOO, size: 1, align: 1) {
alloc1 (static: FOO, size: 1, align: 1) {
02 │ .
}

View File

@ -4,7 +4,7 @@ error[E0080]: evaluation of constant value failed
LL | unsafe { copy_nonoverlapping(src, dst, count) }
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| |
| memory access failed: pointer must be in-bounds at offset 8, but is outside bounds of alloc6 which has size 4
| memory access failed: alloc7 has size 4, so pointer to 4 bytes starting at offset 4 is out-of-bounds
| inside `copy_nonoverlapping::<u32>` at $SRC_DIR/core/src/intrinsics.rs:LL:COL
|
::: $SRC_DIR/core/src/ptr/mod.rs:LL:COL
@ -23,7 +23,7 @@ error[E0080]: evaluation of constant value failed
LL | unsafe { copy_nonoverlapping(src, dst, count) }
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| |
| memory access failed: pointer must be in-bounds at offset 8, but is outside bounds of alloc6 which has size 4
| memory access failed: alloc7 has size 4, so pointer to 4 bytes starting at offset 4 is out-of-bounds
| inside `copy_nonoverlapping::<u32>` at $SRC_DIR/core/src/intrinsics.rs:LL:COL
|
::: $SRC_DIR/core/src/ptr/mod.rs:LL:COL
@ -47,7 +47,7 @@ error[E0080]: evaluation of constant value failed
LL | unsafe { copy_nonoverlapping(src, dst, count) }
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| |
| memory access failed: pointer must be in-bounds at offset 8, but is outside bounds of alloc6 which has size 4
| memory access failed: alloc7 has size 4, so pointer to 4 bytes starting at offset 4 is out-of-bounds
| inside `copy_nonoverlapping::<u32>` at $SRC_DIR/core/src/intrinsics.rs:LL:COL
|
::: $SRC_DIR/core/src/ptr/mod.rs:LL:COL
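
The updated expectations above spell out the bounds arithmetic: the old "pointer must be in-bounds at offset 8" wording becomes "alloc7 has size 4, so pointer to 4 bytes starting at offset 4 is out-of-bounds". A minimal sketch of an inbounds check that produces a message of that shape is shown below; the function name and signature are illustrative only, not a rustc API.

```rust
// Hypothetical inbounds check shaped like the new diagnostics.
fn check_inbounds(alloc_size: u64, offset: u64, len: u64) -> Result<(), String> {
    // Guard against wraparound before comparing against the allocation size.
    let end = offset
        .checked_add(len)
        .ok_or_else(|| "offset + len overflows".to_string())?;
    if end > alloc_size {
        return Err(format!(
            "alloc has size {}, so pointer to {} bytes starting at offset {} is out-of-bounds",
            alloc_size, len, offset
        ));
    }
    Ok(())
}

fn main() {
    assert!(check_inbounds(4, 0, 4).is_ok());
    // Mirrors the failing access in the test above:
    // 4 bytes at offset 4 of a 4-byte allocation.
    println!("{}", check_inbounds(4, 4, 4).unwrap_err());
}
```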

View File

@ -2,11 +2,11 @@ error[E0080]: it is undefined behavior to use this value
--> $DIR/const-pointer-values-in-various-types.rs:26:5
|
LL | const I32_REF_USIZE_UNION: usize = unsafe { Nonsense { int_32_ref: &3 }.u };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered pointer to alloc2, but expected initialized plain (non-pointer) bytes
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered pointer to alloc3, but expected initialized plain (non-pointer) bytes
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
╾───────alloc2────────╼ │ ╾──────╼
╾───────alloc3────────╼ │ ╾──────╼
}
error: any use of this value will cause an error
@ -47,11 +47,11 @@ error[E0080]: it is undefined behavior to use this value
--> $DIR/const-pointer-values-in-various-types.rs:41:5
|
LL | const I32_REF_U64_UNION: u64 = unsafe { Nonsense { int_32_ref: &3 }.uint_64 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered pointer to alloc18, but expected initialized plain (non-pointer) bytes
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered pointer to alloc19, but expected initialized plain (non-pointer) bytes
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
╾───────alloc18───────╼ │ ╾──────╼
╾───────alloc19───────╼ │ ╾──────╼
}
error[E0080]: it is undefined behavior to use this value
@ -102,11 +102,11 @@ error[E0080]: it is undefined behavior to use this value
--> $DIR/const-pointer-values-in-various-types.rs:59:5
|
LL | const I32_REF_I64_UNION: i64 = unsafe { Nonsense { int_32_ref: &3 }.int_64 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered pointer to alloc38, but expected initialized plain (non-pointer) bytes
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered pointer to alloc39, but expected initialized plain (non-pointer) bytes
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
╾───────alloc38───────╼ │ ╾──────╼
╾───────alloc39───────╼ │ ╾──────╼
}
error[E0080]: it is undefined behavior to use this value
@ -135,11 +135,11 @@ error[E0080]: it is undefined behavior to use this value
--> $DIR/const-pointer-values-in-various-types.rs:69:5
|
LL | const I32_REF_F64_UNION: f64 = unsafe { Nonsense { int_32_ref: &3 }.float_64 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered pointer to alloc50, but expected initialized plain (non-pointer) bytes
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered pointer to alloc51, but expected initialized plain (non-pointer) bytes
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
╾───────alloc50───────╼ │ ╾──────╼
╾───────alloc51───────╼ │ ╾──────╼
}
error: any use of this value will cause an error
@ -201,11 +201,11 @@ error[E0080]: it is undefined behavior to use this value
--> $DIR/const-pointer-values-in-various-types.rs:92:5
|
LL | const STR_U64_UNION: u64 = unsafe { Nonsense { stringy: "3" }.uint_64 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered pointer to alloc71, but expected initialized plain (non-pointer) bytes
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered pointer to alloc72, but expected initialized plain (non-pointer) bytes
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
╾───────alloc71───────╼ │ ╾──────╼
╾───────alloc72───────╼ │ ╾──────╼
}
error: any use of this value will cause an error
@ -256,11 +256,11 @@ error[E0080]: it is undefined behavior to use this value
--> $DIR/const-pointer-values-in-various-types.rs:111:5
|
LL | const STR_I64_UNION: i64 = unsafe { Nonsense { stringy: "3" }.int_64 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered pointer to alloc86, but expected initialized plain (non-pointer) bytes
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered pointer to alloc87, but expected initialized plain (non-pointer) bytes
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
╾───────alloc86───────╼ │ ╾──────╼
╾───────alloc87───────╼ │ ╾──────╼
}
error: any use of this value will cause an error
@ -289,11 +289,11 @@ error[E0080]: it is undefined behavior to use this value
--> $DIR/const-pointer-values-in-various-types.rs:122:5
|
LL | const STR_F64_UNION: f64 = unsafe { Nonsense { stringy: "3" }.float_64 };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered pointer to alloc95, but expected initialized plain (non-pointer) bytes
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered pointer to alloc96, but expected initialized plain (non-pointer) bytes
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
╾───────alloc95───────╼ │ ╾──────╼
╾───────alloc96───────╼ │ ╾──────╼
}
error: any use of this value will cause an error

View File

@ -6,7 +6,7 @@ fn main() {}
const Z: i32 = unsafe { *(&1 as *const i32) };
// bad, will thus error in miri
const Z2: i32 = unsafe { *(42 as *const i32) }; //~ ERROR any use of this value will cause
//~| WARN this was previously accepted by the compiler but is being phased out
const Z3: i32 = unsafe { *(44 as *const i32) }; //~ ERROR any use of this value will cause
//~| WARN this was previously accepted by the compiler but is being phased out
const Z2: i32 = unsafe { *(42 as *const i32) }; //~ ERROR evaluation of constant value failed
//~| is not a valid pointer
const Z3: i32 = unsafe { *(44 as *const i32) }; //~ ERROR evaluation of constant value failed
//~| is not a valid pointer

View File

@ -1,25 +1,15 @@
error: any use of this value will cause an error
error[E0080]: evaluation of constant value failed
--> $DIR/const_raw_ptr_ops2.rs:9:26
|
LL | const Z2: i32 = unsafe { *(42 as *const i32) };
| -------------------------^^^^^^^^^^^^^^^^^^^---
| |
| unable to turn bytes into a pointer
|
= note: `#[deny(const_err)]` on by default
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
= note: for more information, see issue #71800 <https://github.com/rust-lang/rust/issues/71800>
| ^^^^^^^^^^^^^^^^^^^ 0x2a is not a valid pointer
error: any use of this value will cause an error
error[E0080]: evaluation of constant value failed
--> $DIR/const_raw_ptr_ops2.rs:11:26
|
LL | const Z3: i32 = unsafe { *(44 as *const i32) };
| -------------------------^^^^^^^^^^^^^^^^^^^---
| |
| unable to turn bytes into a pointer
|
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
= note: for more information, see issue #71800 <https://github.com/rust-lang/rust/issues/71800>
| ^^^^^^^^^^^^^^^^^^^ 0x2c is not a valid pointer
error: aborting due to 2 previous errors
For more information about this error, try `rustc --explain E0080`.
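
This stderr diff replaces the deny-by-default lint output ("any use of this value will cause an error" / "unable to turn bytes into a pointer") with a hard E0080 error whose message names the offending address, e.g. "0x2a is not a valid pointer". The sketch below is a hypothetical illustration of why dereferencing an integer address like `42 as *const i32` is rejected when no allocation is known at that address; `Memory` and `deref` are invented names, not rustc APIs.

```rust
use std::collections::HashMap;

struct Memory {
    // base address -> allocation size, for allocations whose address is known
    allocs: HashMap<u64, u64>,
}

impl Memory {
    fn deref(&self, addr: u64) -> Result<(), String> {
        // Without provenance, an address is only usable if it falls inside
        // some allocation the evaluator knows about.
        let known = self
            .allocs
            .iter()
            .any(|(&base, &size)| addr >= base && addr < base + size);
        if known {
            Ok(())
        } else {
            Err(format!("0x{:x} is not a valid pointer", addr))
        }
    }
}

fn main() {
    let mem = Memory { allocs: HashMap::new() };
    // Matches the new diagnostic for `*(42 as *const i32)` in the diff above.
    assert_eq!(mem.deref(0x2a).unwrap_err(), "0x2a is not a valid pointer");
}
```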

View File

@ -6,7 +6,7 @@ LL | const BAR: &i32 = unsafe { &*(intrinsics::const_allocate(4, 4) as *mut i32)
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 4, align: 4) {
╾─alloc1──╼ │ ╾──╼
╾─alloc2──╼ │ ╾──╼
}
error: aborting due to previous error

View File

@ -6,7 +6,7 @@ LL | const BAR: &i32 = unsafe { &*(intrinsics::const_allocate(4, 4) as *mut i32)
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
╾───────alloc1────────╼ │ ╾──────╼
╾───────alloc2────────╼ │ ╾──────╼
}
error: aborting due to previous error

View File

@ -2,7 +2,7 @@ error[E0080]: evaluation of constant value failed
--> $DIR/issue-49296.rs:19:16
|
LL | const X: u64 = *wat(42);
| ^^^^^^^^ pointer to alloc1 was dereferenced after this allocation got freed
| ^^^^^^^^ pointer to alloc2 was dereferenced after this allocation got freed
error: aborting due to previous error

View File

@ -2,11 +2,11 @@ error[E0080]: it is undefined behavior to use this value
--> $DIR/ref_to_int_match.rs:26:1
|
LL | const BAR: Int = unsafe { Foo { r: &42 }.f };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered pointer to alloc2, but expected initialized plain (non-pointer) bytes
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered pointer to alloc3, but expected initialized plain (non-pointer) bytes
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 4, align: 4) {
╾─alloc2──╼ │ ╾──╼
╾─alloc3──╼ │ ╾──╼
}
error: could not evaluate constant pattern

View File

@ -2,11 +2,11 @@ error[E0080]: it is undefined behavior to use this value
--> $DIR/ref_to_int_match.rs:26:1
|
LL | const BAR: Int = unsafe { Foo { r: &42 }.f };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered pointer to alloc2, but expected initialized plain (non-pointer) bytes
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered pointer to alloc3, but expected initialized plain (non-pointer) bytes
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
╾───────alloc2────────╼ │ ╾──────╼
╾───────alloc3────────╼ │ ╾──────╼
}
error: could not evaluate constant pattern

View File

@ -13,22 +13,22 @@ error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-enum.rs:27:1
|
LL | const BAD_ENUM_PTR: Enum = unsafe { mem::transmute(&1) };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed at .<enum-tag>: encountered pointer to alloc8, but expected initialized plain (non-pointer) bytes
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed at .<enum-tag>: encountered pointer to alloc9, but expected initialized plain (non-pointer) bytes
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 4, align: 4) {
╾─alloc8──╼ │ ╾──╼
╾─alloc9──╼ │ ╾──╼
}
error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-enum.rs:30:1
|
LL | const BAD_ENUM_WRAPPED: Wrap<Enum> = unsafe { mem::transmute(&1) };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed at .0.<enum-tag>: encountered pointer to alloc12, but expected initialized plain (non-pointer) bytes
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed at .0.<enum-tag>: encountered pointer to alloc13, but expected initialized plain (non-pointer) bytes
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 4, align: 4) {
╾─alloc12─╼ │ ╾──╼
╾─alloc13─╼ │ ╾──╼
}
error[E0080]: it is undefined behavior to use this value
@ -46,22 +46,22 @@ error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-enum.rs:44:1
|
LL | const BAD_ENUM2_PTR: Enum2 = unsafe { mem::transmute(&0) };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed at .<enum-tag>: encountered pointer to alloc18, but expected initialized plain (non-pointer) bytes
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed at .<enum-tag>: encountered pointer to alloc19, but expected initialized plain (non-pointer) bytes
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 4, align: 4) {
╾─alloc18─╼ │ ╾──╼
╾─alloc19─╼ │ ╾──╼
}
error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-enum.rs:47:1
|
LL | const BAD_ENUM2_WRAPPED: Wrap<Enum2> = unsafe { mem::transmute(&0) };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed at .0.<enum-tag>: encountered pointer to alloc22, but expected initialized plain (non-pointer) bytes
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed at .0.<enum-tag>: encountered pointer to alloc23, but expected initialized plain (non-pointer) bytes
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 4, align: 4) {
╾─alloc22─╼ │ ╾──╼
╾─alloc23─╼ │ ╾──╼
}
error[E0080]: it is undefined behavior to use this value
@ -79,11 +79,11 @@ error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-enum.rs:60:1
|
LL | const BAD_ENUM2_OPTION_PTR: Option<Enum2> = unsafe { mem::transmute(&0) };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed at .<enum-tag>: encountered pointer to alloc28, but expected initialized plain (non-pointer) bytes
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed at .<enum-tag>: encountered pointer to alloc29, but expected initialized plain (non-pointer) bytes
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 4, align: 4) {
╾─alloc28─╼ │ ╾──╼
╾─alloc29─╼ │ ╾──╼
}
error[E0080]: it is undefined behavior to use this value

View File

@ -13,22 +13,22 @@ error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-enum.rs:27:1
|
LL | const BAD_ENUM_PTR: Enum = unsafe { mem::transmute(&1) };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed at .<enum-tag>: encountered pointer to alloc8, but expected initialized plain (non-pointer) bytes
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed at .<enum-tag>: encountered pointer to alloc9, but expected initialized plain (non-pointer) bytes
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
╾───────alloc8────────╼ │ ╾──────╼
╾───────alloc9────────╼ │ ╾──────╼
}
error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-enum.rs:30:1
|
LL | const BAD_ENUM_WRAPPED: Wrap<Enum> = unsafe { mem::transmute(&1) };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed at .0.<enum-tag>: encountered pointer to alloc12, but expected initialized plain (non-pointer) bytes
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed at .0.<enum-tag>: encountered pointer to alloc13, but expected initialized plain (non-pointer) bytes
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
╾───────alloc12───────╼ │ ╾──────╼
╾───────alloc13───────╼ │ ╾──────╼
}
error[E0080]: it is undefined behavior to use this value
@ -46,22 +46,22 @@ error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-enum.rs:44:1
|
LL | const BAD_ENUM2_PTR: Enum2 = unsafe { mem::transmute(&0) };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed at .<enum-tag>: encountered pointer to alloc18, but expected initialized plain (non-pointer) bytes
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed at .<enum-tag>: encountered pointer to alloc19, but expected initialized plain (non-pointer) bytes
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
╾───────alloc18───────╼ │ ╾──────╼
╾───────alloc19───────╼ │ ╾──────╼
}
error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-enum.rs:47:1
|
LL | const BAD_ENUM2_WRAPPED: Wrap<Enum2> = unsafe { mem::transmute(&0) };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed at .0.<enum-tag>: encountered pointer to alloc22, but expected initialized plain (non-pointer) bytes
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed at .0.<enum-tag>: encountered pointer to alloc23, but expected initialized plain (non-pointer) bytes
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
╾───────alloc22───────╼ │ ╾──────╼
╾───────alloc23───────╼ │ ╾──────╼
}
error[E0080]: it is undefined behavior to use this value
@ -79,11 +79,11 @@ error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-enum.rs:60:1
|
LL | const BAD_ENUM2_OPTION_PTR: Option<Enum2> = unsafe { mem::transmute(&0) };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed at .<enum-tag>: encountered pointer to alloc28, but expected initialized plain (non-pointer) bytes
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed at .<enum-tag>: encountered pointer to alloc29, but expected initialized plain (non-pointer) bytes
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
╾───────alloc28───────╼ │ ╾──────╼
╾───────alloc29───────╼ │ ╾──────╼
}
error[E0080]: it is undefined behavior to use this value

View File

@ -13,7 +13,7 @@ error[E0080]: evaluation of constant value failed
--> $DIR/ub-nonnull.rs:19:30
|
LL | let out_of_bounds_ptr = &ptr[255];
| ^^^^^^^^ memory access failed: pointer must be in-bounds at offset 256, but is outside bounds of alloc10 which has size 1
| ^^^^^^^^ dereferencing pointer failed: alloc11 has size 1, so pointer to 256 bytes starting at offset 0 is out-of-bounds
error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-nonnull.rs:23:1

View File

@ -13,7 +13,7 @@ error[E0080]: evaluation of constant value failed
--> $DIR/ub-nonnull.rs:19:30
|
LL | let out_of_bounds_ptr = &ptr[255];
| ^^^^^^^^ memory access failed: pointer must be in-bounds at offset 256, but is outside bounds of alloc10 which has size 1
| ^^^^^^^^ dereferencing pointer failed: alloc11 has size 1, so pointer to 256 bytes starting at offset 0 is out-of-bounds
error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-nonnull.rs:23:1

View File

@ -6,7 +6,7 @@ LL | const UNALIGNED: &u16 = unsafe { mem::transmute(&[0u8; 4]) };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 4, align: 4) {
╾─alloc2──╼ │ ╾──╼
╾─alloc3──╼ │ ╾──╼
}
error[E0080]: it is undefined behavior to use this value
@ -17,7 +17,7 @@ LL | const UNALIGNED_BOX: Box<u16> = unsafe { mem::transmute(&[0u8; 4]) };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 4, align: 4) {
╾─alloc6──╼ │ ╾──╼
╾─alloc7──╼ │ ╾──╼
}
error[E0080]: it is undefined behavior to use this value
@ -46,11 +46,11 @@ error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-ref-ptr.rs:30:1
|
LL | const REF_AS_USIZE: usize = unsafe { mem::transmute(&0) };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered pointer to alloc14, but expected initialized plain (non-pointer) bytes
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered pointer to alloc15, but expected initialized plain (non-pointer) bytes
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 4, align: 4) {
╾─alloc14─╼ │ ╾──╼
╾─alloc15─╼ │ ╾──╼
}
error[E0080]: it is undefined behavior to use this value
@ -61,7 +61,7 @@ LL | const REF_AS_USIZE_SLICE: &[usize] = &[unsafe { mem::transmute(&0) }];
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 4, align: 4) {
╾─alloc20─╼ │ ╾──╼
╾─alloc21─╼ │ ╾──╼
}
error[E0080]: it is undefined behavior to use this value
@ -72,14 +72,14 @@ LL | const REF_AS_USIZE_BOX_SLICE: Box<[usize]> = unsafe { mem::transmute::<&[us
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 4, align: 4) {
╾─alloc25─╼ │ ╾──╼
╾─alloc26─╼ │ ╾──╼
}
error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-ref-ptr.rs:39:1
|
LL | const USIZE_AS_REF: &'static u8 = unsafe { mem::transmute(1337usize) };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered a dangling reference (created from integer)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered a dangling reference (address 0x539 is unallocated)
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 4, align: 4) {
@ -90,7 +90,7 @@ error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-ref-ptr.rs:42:1
|
LL | const USIZE_AS_BOX: Box<u8> = unsafe { mem::transmute(1337usize) };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered a dangling box (created from integer)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered a dangling box (address 0x539 is unallocated)
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 4, align: 4) {

View File

@ -6,7 +6,7 @@ LL | const UNALIGNED: &u16 = unsafe { mem::transmute(&[0u8; 4]) };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
╾───────alloc2────────╼ │ ╾──────╼
╾───────alloc3────────╼ │ ╾──────╼
}
error[E0080]: it is undefined behavior to use this value
@ -17,7 +17,7 @@ LL | const UNALIGNED_BOX: Box<u16> = unsafe { mem::transmute(&[0u8; 4]) };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
╾───────alloc6────────╼ │ ╾──────╼
╾───────alloc7────────╼ │ ╾──────╼
}
error[E0080]: it is undefined behavior to use this value
@ -46,11 +46,11 @@ error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-ref-ptr.rs:30:1
|
LL | const REF_AS_USIZE: usize = unsafe { mem::transmute(&0) };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered pointer to alloc14, but expected initialized plain (non-pointer) bytes
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered pointer to alloc15, but expected initialized plain (non-pointer) bytes
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
╾───────alloc14───────╼ │ ╾──────╼
╾───────alloc15───────╼ │ ╾──────╼
}
error[E0080]: it is undefined behavior to use this value
@ -61,7 +61,7 @@ LL | const REF_AS_USIZE_SLICE: &[usize] = &[unsafe { mem::transmute(&0) }];
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
╾───────alloc20───────╼ │ ╾──────╼
╾───────alloc21───────╼ │ ╾──────╼
}
error[E0080]: it is undefined behavior to use this value
@ -72,14 +72,14 @@ LL | const REF_AS_USIZE_BOX_SLICE: Box<[usize]> = unsafe { mem::transmute::<&[us
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
╾───────alloc25───────╼ │ ╾──────╼
╾───────alloc26───────╼ │ ╾──────╼
}
error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-ref-ptr.rs:39:1
|
LL | const USIZE_AS_REF: &'static u8 = unsafe { mem::transmute(1337usize) };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered a dangling reference (created from integer)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered a dangling reference (address 0x539 is unallocated)
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
@ -90,7 +90,7 @@ error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-ref-ptr.rs:42:1
|
LL | const USIZE_AS_BOX: Box<u8> = unsafe { mem::transmute(1337usize) };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered a dangling box (created from integer)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered a dangling box (address 0x539 is unallocated)
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {

View File

@ -10,7 +10,7 @@ LL | | };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 4) {
╾─alloc2──╼ ╾─alloc5──╼ │ ╾──╼╾──╼
╾─alloc3──╼ ╾─alloc6──╼ │ ╾──╼╾──╼
}
error: aborting due to previous error

View File

@ -10,7 +10,7 @@ LL | | };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 16, align: 8) {
╾───────alloc2────────╼ ╾───────alloc5────────╼ │ ╾──────╼╾──────╼
╾───────alloc3────────╼ ╾───────alloc6────────╼ │ ╾──────╼╾──────╼
}
error: aborting due to previous error

View File

@ -302,7 +302,7 @@ error[E0080]: could not evaluate static initializer
--> $DIR/ub-wide-ptr.rs:139:5
|
LL | mem::transmute::<_, &dyn Trait>((&92u8, &3u64))
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ memory access failed: pointer must be in-bounds at offset N, but is outside bounds of allocN which has size N
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ memory access failed: allocN has size N, so pointer to 12 bytes starting at offset N is out-of-bounds
error: aborting due to 28 previous errors

View File

@ -302,7 +302,7 @@ error[E0080]: could not evaluate static initializer
--> $DIR/ub-wide-ptr.rs:139:5
|
LL | mem::transmute::<_, &dyn Trait>((&92u8, &3u64))
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ memory access failed: pointer must be in-bounds at offset N, but is outside bounds of allocN which has size N
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ memory access failed: allocN has size N, so pointer to 24 bytes starting at offset N is out-of-bounds
error: aborting due to 28 previous errors

View File

@ -27,7 +27,7 @@ LL | const S: &'static mut str = &mut " hello ";
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 4) {
╾─alloc2──╼ 07 00 00 00 │ ╾──╼....
╾─alloc3──╼ 07 00 00 00 │ ╾──╼....
}
error: aborting due to 4 previous errors

View File

@ -27,7 +27,7 @@ LL | const S: &'static mut str = &mut " hello ";
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 16, align: 8) {
╾───────alloc2────────╼ 07 00 00 00 00 00 00 00 │ ╾──────╼........
╾───────alloc3────────╼ 07 00 00 00 00 00 00 00 │ ╾──────╼........
}
error: aborting due to 4 previous errors

View File

@ -11,8 +11,8 @@
const fn helper() -> Option<&'static mut i32> { unsafe {
// Undefined behaviour (integer as pointer), who doesn't love tests like this.
// This code never gets executed, because the static checks fail before that.
Some(&mut *(42 as *mut i32)) //~ ERROR any use of this value will cause an error
//~| WARN this was previously accepted by the compiler but is being phased out
Some(&mut *(42 as *mut i32)) //~ ERROR evaluation of constant value failed
//~| 0x2a is not a valid pointer
} }
// The error is an evaluation error and not a validation error, so the error is reported
// directly at the site where it occurs.
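A side note on the new diagnostic, not part of the diff itself: the integer 42 is printed in hexadecimal (42 == 0x2a), and a pointer forged from a plain integer carries no provenance, which is why dereferencing it is now reported as "0x2a is not a valid pointer" during const evaluation. A minimal runtime sketch of just the cast (the pointer is created but never dereferenced):

    fn main() {
        // Casting an arbitrary integer to a pointer is allowed; the new error
        // message simply renders its address in hex: 42 == 0x2a.
        let p = 42 as *mut i32;
        assert_eq!(p as usize, 0x2a);
        // Dereferencing `p` would be undefined behaviour at runtime and is the
        // operation the const evaluator now rejects outright.
    }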

View File

@ -1,19 +1,14 @@
error: any use of this value will cause an error
error[E0080]: evaluation of constant value failed
--> $DIR/mut_ref_in_final_dynamic_check.rs:14:10
|
LL | Some(&mut *(42 as *mut i32))
| ^^^^^^^^^^^^^^^^^^^^^^
| |
| unable to turn bytes into a pointer
| 0x2a is not a valid pointer
| inside `helper` at $DIR/mut_ref_in_final_dynamic_check.rs:14:10
| inside `A` at $DIR/mut_ref_in_final_dynamic_check.rs:19:29
...
LL | const A: Option<&mut i32> = helper();
| -------------------------------------
|
= note: `#[deny(const_err)]` on by default
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
= note: for more information, see issue #71800 <https://github.com/rust-lang/rust/issues/71800>
| -------- inside `A` at $DIR/mut_ref_in_final_dynamic_check.rs:19:29
error: encountered dangling pointer in final constant
--> $DIR/mut_ref_in_final_dynamic_check.rs:26:1
@ -23,3 +18,4 @@ LL | const B: Option<&mut i32> = helper2();
error: aborting due to 2 previous errors
For more information about this error, try `rustc --explain E0080`.

View File

@ -6,7 +6,7 @@ LL | const TEST: &u8 = &MY_STATIC;
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 4, align: 4) {
╾─alloc0──╼ │ ╾──╼
╾─alloc1──╼ │ ╾──╼
}
warning: skipping const checks

View File

@ -6,7 +6,7 @@ LL | const TEST: &u8 = &MY_STATIC;
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
╾───────alloc0────────╼ │ ╾──────╼
╾───────alloc1────────╼ │ ╾──────╼
}
warning: skipping const checks

View File

@ -24,20 +24,21 @@ const COPY_OOB_1: () = unsafe {
let mut x = 0i32;
let dangle = (&mut x as *mut i32).wrapping_add(10);
// Even if the first ptr is an int ptr and this is a ZST copy, we should detect dangling 2nd ptrs.
copy_nonoverlapping(0x100 as *const i32, dangle, 0); //~ evaluation of constant value failed [E0080]
copy_nonoverlapping(0x100 as *const i32, dangle, 0); //~ ERROR evaluation of constant value failed [E0080]
//~| pointer at offset 40 is out-of-bounds
};
const COPY_OOB_2: () = unsafe {
let x = 0i32;
let dangle = (&x as *const i32).wrapping_add(10);
// Even if the second ptr is an int ptr and this is a ZST copy, we should detect dangling 1st ptrs.
copy_nonoverlapping(dangle, 0x100 as *mut i32, 0); //~ evaluation of constant value failed [E0080]
//~| memory access failed: pointer must be in-bounds
copy_nonoverlapping(dangle, 0x100 as *mut i32, 0); //~ ERROR evaluation of constant value failed [E0080]
//~| pointer at offset 40 is out-of-bounds
};
const COPY_SIZE_OVERFLOW: () = unsafe {
let x = 0;
let mut y = 0;
copy(&x, &mut y, 1usize << (mem::size_of::<usize>() * 8 - 1)); //~ evaluation of constant value failed [E0080]
copy(&x, &mut y, 1usize << (mem::size_of::<usize>() * 8 - 1)); //~ ERROR evaluation of constant value failed [E0080]
//~| overflow computing total size of `copy`
};
const COPY_NONOVERLAPPING_SIZE_OVERFLOW: () = unsafe {
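A note on where the "offset 40" in the updated expectations comes from: `wrapping_add` counts in elements, so ten `i32`s are 40 bytes, well outside the 4-byte allocation that backs the local. A minimal runtime sketch of that arithmetic, using `wrapping_add`, which, unlike `add`, may leave the allocation as long as the result is never dereferenced:

    fn main() {
        let x = 0i32;
        let p = &x as *const i32;
        // 10 elements * size_of::<i32>() = 40 bytes past `x`, whose allocation
        // is only 4 bytes: hence "pointer at offset 40 is out-of-bounds".
        let dangle = p.wrapping_add(10);
        assert_eq!(dangle as usize - p as usize, 40);
    }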

View File

@ -2,22 +2,22 @@ error[E0080]: evaluation of constant value failed
--> $DIR/copy-intrinsic.rs:27:5
|
LL | copy_nonoverlapping(0x100 as *const i32, dangle, 0);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ memory access failed: pointer must be in-bounds at offset 40, but is outside bounds of alloc4 which has size 4
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ memory access failed: alloc5 has size 4, so pointer at offset 40 is out-of-bounds
error[E0080]: evaluation of constant value failed
--> $DIR/copy-intrinsic.rs:33:5
--> $DIR/copy-intrinsic.rs:34:5
|
LL | copy_nonoverlapping(dangle, 0x100 as *mut i32, 0);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ memory access failed: pointer must be in-bounds at offset 40, but is outside bounds of alloc6 which has size 4
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ memory access failed: alloc7 has size 4, so pointer at offset 40 is out-of-bounds
error[E0080]: evaluation of constant value failed
--> $DIR/copy-intrinsic.rs:40:5
--> $DIR/copy-intrinsic.rs:41:5
|
LL | copy(&x, &mut y, 1usize << (mem::size_of::<usize>() * 8 - 1));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ overflow computing total size of `copy`
error[E0080]: evaluation of constant value failed
--> $DIR/copy-intrinsic.rs:46:5
--> $DIR/copy-intrinsic.rs:47:5
|
LL | copy_nonoverlapping(&x, &mut y, 1usize << (mem::size_of::<usize>() * 8 - 1));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ overflow computing total size of `copy_nonoverlapping`
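For the size-overflow cases, the total size of the copy is `count * size_of::<T>()` bytes, and the error fires when that multiplication overflows. A quick runtime-only sketch of the arithmetic the test relies on, using `checked_mul` to make the overflow visible:

    use std::mem;

    fn main() {
        // The count used in the test: 1usize << (bits - 1), i.e. usize::MAX / 2 + 1.
        let count = 1usize << (mem::size_of::<usize>() * 8 - 1);
        // Multiplying by size_of::<i32>() overflows usize, which is what the
        // "overflow computing total size of `copy`" message reports.
        assert_eq!(count.checked_mul(mem::size_of::<i32>()), None);
    }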

View File

@ -12,7 +12,7 @@ LL | | };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 4) {
╾─alloc3──╼ ff ff ff ff │ ╾──╼....
╾─alloc4──╼ ff ff ff ff │ ╾──╼....
}
error: aborting due to previous error

View File

@ -12,7 +12,7 @@ LL | | };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 16, align: 8) {
╾───────alloc3────────╼ ff ff ff ff ff ff ff ff │ ╾──────╼........
╾───────alloc4────────╼ ff ff ff ff ff ff ff ff │ ╾──────╼........
}
error: aborting due to previous error

View File

@ -6,7 +6,7 @@ LL | const G: Fat = unsafe { Transmute { t: FOO }.u };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 16, align: 8) {
╾───────alloc2────────╼ ╾───────alloc5────────╼ │ ╾──────╼╾──────╼
╾───────alloc3────────╼ ╾───────alloc6────────╼ │ ╾──────╼╾──────╼
}
error: aborting due to previous error

View File

@ -6,7 +6,7 @@ LL | const MYSTR_NO_INIT: &MyStr = unsafe { mem::transmute::<&[_], _>(&[&()]) };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 4) {
╾─alloc3──╼ 01 00 00 00 │ ╾──╼....
╾─alloc4──╼ 01 00 00 00 │ ╾──╼....
}
error: aborting due to previous error

View File

@ -6,7 +6,7 @@ LL | const MYSTR_NO_INIT: &MyStr = unsafe { mem::transmute::<&[_], _>(&[&()]) };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 16, align: 8) {
╾───────alloc3────────╼ 01 00 00 00 00 00 00 00 │ ╾──────╼........
╾───────alloc4────────╼ 01 00 00 00 00 00 00 00 │ ╾──────╼........
}
error: aborting due to previous error

View File

@ -10,7 +10,7 @@ LL | | };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 4, align: 4) {
╾─alloc0──╼ │ ╾──╼
╾─alloc1──╼ │ ╾──╼
}
error[E0080]: it is undefined behavior to use this value
@ -25,7 +25,7 @@ LL | | };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 4, align: 4) {
╾─alloc1──╼ │ ╾──╼
╾─alloc2──╼ │ ╾──╼
}
warning: skipping const checks

View File

@ -10,7 +10,7 @@ LL | | };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
╾───────alloc0────────╼ │ ╾──────╼
╾───────alloc1────────╼ │ ╾──────╼
}
error[E0080]: it is undefined behavior to use this value
@ -25,7 +25,7 @@ LL | | };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
╾───────alloc1────────╼ │ ╾──────╼
╾───────alloc2────────╼ │ ╾──────╼
}
warning: skipping const checks

View File

@ -9,7 +9,7 @@ LL | | };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 4, align: 4) {
╾─alloc0──╼ │ ╾──╼
╾─alloc1──╼ │ ╾──╼
}
error: could not evaluate constant pattern
@ -29,7 +29,7 @@ LL | | };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 4, align: 4) {
╾─alloc0──╼ │ ╾──╼
╾─alloc1──╼ │ ╾──╼
}
error: could not evaluate constant pattern

View File

@ -9,7 +9,7 @@ LL | | };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
╾───────alloc0────────╼ │ ╾──────╼
╾───────alloc1────────╼ │ ╾──────╼
}
error: could not evaluate constant pattern
@ -29,7 +29,7 @@ LL | | };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
╾───────alloc0────────╼ │ ╾──────╼
╾───────alloc1────────╼ │ ╾──────╼
}
error: could not evaluate constant pattern

View File

@ -8,7 +8,7 @@ LL | | };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 4, align: 4) {
╾─alloc2──╼ │ ╾──╼
╾─alloc3──╼ │ ╾──╼
}
error[E0080]: it is undefined behavior to use this value
@ -19,7 +19,7 @@ LL | const SNEAKY: &dyn Sync = &Synced { x: UnsafeCell::new(42) };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 4) {
╾─alloc6──╼ ╾─alloc8──╼ │ ╾──╼╾──╼
╾─alloc7──╼ ╾─alloc9──╼ │ ╾──╼╾──╼
}
error[E0080]: it is undefined behavior to use this value
@ -30,7 +30,7 @@ LL | const BLUNT: &mut i32 = &mut 42;
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 4, align: 4) {
╾─alloc10─╼ │ ╾──╼
╾─alloc11─╼ │ ╾──╼
}
warning: skipping const checks

View File

@ -8,7 +8,7 @@ LL | | };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
╾───────alloc2────────╼ │ ╾──────╼
╾───────alloc3────────╼ │ ╾──────╼
}
error[E0080]: it is undefined behavior to use this value
@ -19,7 +19,7 @@ LL | const SNEAKY: &dyn Sync = &Synced { x: UnsafeCell::new(42) };
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 16, align: 8) {
╾───────alloc6────────╼ ╾───────alloc8────────╼ │ ╾──────╼╾──────╼
╾───────alloc7────────╼ ╾───────alloc9────────╼ │ ╾──────╼╾──────╼
}
error[E0080]: it is undefined behavior to use this value
@ -30,7 +30,7 @@ LL | const BLUNT: &mut i32 = &mut 42;
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
╾───────alloc10───────╼ │ ╾──────╼
╾───────alloc11───────╼ │ ╾──────╼
}
warning: skipping const checks

View File

@ -8,14 +8,21 @@ static CMP: () = {
let x = &0 as *const _;
let _v = x == x;
//~^ ERROR could not evaluate static initializer
//~| NOTE pointer arithmetic or comparison
//~| "pointer arithmetic or comparison" needs an rfc before being allowed inside constants
};
static INT_PTR_ARITH: () = unsafe {
static PTR_INT_CAST: () = {
let x = &0 as *const _ as usize;
//~^ ERROR could not evaluate static initializer
//~| unable to turn pointer into raw bytes
let _v = x == x;
};
static PTR_INT_TRANSMUTE: () = unsafe {
let x: usize = std::mem::transmute(&0);
let _v = x + 0;
//~^ ERROR could not evaluate static initializer
//~| NOTE cannot cast pointer to integer
//~| unable to turn pointer into raw bytes
};
fn main() {}
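The distinction the rewritten test draws: under the new pointer representation, const evaluation has no absolute address to expose, so any attempt to observe one fails with "unable to turn pointer into raw bytes", while the very same cast is well-defined at runtime. A small runtime-only sketch of that contrast:

    fn main() {
        let x = 0u8;
        // At runtime the pointer has a concrete address, so the cast is fine.
        let addr = &x as *const u8 as usize;
        assert_ne!(addr, 0);
        // Putting this cast in a `static` or `const` initializer is exactly what
        // the test above now expects to fail during const evaluation.
    }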

View File

@ -5,10 +5,16 @@ LL | let _v = x == x;
| ^^^^^^ "pointer arithmetic or comparison" needs an rfc before being allowed inside constants
error[E0080]: could not evaluate static initializer
--> $DIR/ptr_arith.rs:16:14
--> $DIR/ptr_arith.rs:15:13
|
LL | let x = &0 as *const _ as usize;
| ^^^^^^^^^^^^^^^^^^^^^^^ unable to turn pointer into raw bytes
error[E0080]: could not evaluate static initializer
--> $DIR/ptr_arith.rs:23:14
|
LL | let _v = x + 0;
| ^^^^^ cannot cast pointer to integer because it was not created by cast from integer
| ^^^^^ unable to turn pointer into raw bytes
warning: skipping const checks
|
@ -17,7 +23,12 @@ help: skipping check that does not even have a feature gate
|
LL | let _v = x == x;
| ^^^^^^
help: skipping check that does not even have a feature gate
--> $DIR/ptr_arith.rs:15:13
|
LL | let x = &0 as *const _ as usize;
| ^^^^^^^^^^^^^^^^^^^^^^^
error: aborting due to 2 previous errors; 1 warning emitted
error: aborting due to 3 previous errors; 1 warning emitted
For more information about this error, try `rustc --explain E0080`.

View File

@ -41,8 +41,8 @@ pub const OFFSET_FROM_NULL: isize = {
pub const DIFFERENT_INT: isize = { // offset_from with two different integers: like DIFFERENT_ALLOC
let ptr1 = 8 as *const u8;
let ptr2 = 16 as *const u8;
unsafe { ptr_offset_from(ptr2, ptr1) } //~ERROR any use of this value will cause an error
//~| WARN previously accepted
unsafe { ptr_offset_from(ptr2, ptr1) } //~ERROR evaluation of constant value failed
//~| 0x10 is not a valid pointer
};
fn main() {}
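In DIFFERENT_INT both operands are bare integers, so there is no allocation for `ptr_offset_from` to stay within, and the larger operand (16) is now reported as `0x10 is not a valid pointer`. The defined use keeps both pointers inside one allocation; a minimal runtime sketch:

    fn main() {
        let buf = [0u8; 16];
        let base = buf.as_ptr();
        // offset_from is only defined for two pointers into the same allocation.
        let p = unsafe { base.add(8) };
        let delta = unsafe { p.offset_from(base) };
        assert_eq!(delta, 8);
    }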

View File

@ -4,26 +4,19 @@ error[E0080]: evaluation of constant value failed
LL | let offset = unsafe { ptr_offset_from(field_ptr, base_ptr) };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ptr_offset_from cannot compute offset of pointers into different allocations.
error: any use of this value will cause an error
error[E0080]: evaluation of constant value failed
--> $SRC_DIR/core/src/ptr/const_ptr.rs:LL:COL
|
LL | unsafe { intrinsics::ptr_offset_from(self, origin) }
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| |
| unable to turn bytes into a pointer
| 0x2a is not a valid pointer
| inside `ptr::const_ptr::<impl *const u8>::offset_from` at $SRC_DIR/core/src/ptr/const_ptr.rs:LL:COL
| inside `NOT_PTR` at $DIR/offset_from_ub.rs:24:14
|
::: $DIR/offset_from_ub.rs:23:1
::: $DIR/offset_from_ub.rs:24:14
|
LL | / pub const NOT_PTR: usize = {
LL | | unsafe { (42 as *const u8).offset_from(&5u8) as usize }
LL | | };
| |__-
|
= note: `#[deny(const_err)]` on by default
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
= note: for more information, see issue #71800 <https://github.com/rust-lang/rust/issues/71800>
LL | unsafe { (42 as *const u8).offset_from(&5u8) as usize }
| ----------------------------------- inside `NOT_PTR` at $DIR/offset_from_ub.rs:24:14
error[E0080]: evaluation of constant value failed
--> $DIR/offset_from_ub.rs:31:14
@ -37,20 +30,11 @@ error[E0080]: evaluation of constant value failed
LL | unsafe { ptr_offset_from(ptr, ptr) }
| ^^^^^^^^^^^^^^^^^^^^^^^^^ null pointer is not a valid pointer for this operation
error: any use of this value will cause an error
error[E0080]: evaluation of constant value failed
--> $DIR/offset_from_ub.rs:44:14
|
LL | / pub const DIFFERENT_INT: isize = { // offset_from with two different integers: like DIFFERENT_ALLOC
LL | | let ptr1 = 8 as *const u8;
LL | | let ptr2 = 16 as *const u8;
LL | | unsafe { ptr_offset_from(ptr2, ptr1) }
| | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ unable to turn bytes into a pointer
LL | |
LL | | };
| |__-
|
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
= note: for more information, see issue #71800 <https://github.com/rust-lang/rust/issues/71800>
LL | unsafe { ptr_offset_from(ptr2, ptr1) }
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ 0x10 is not a valid pointer
error: aborting due to 5 previous errors

View File

@ -2,6 +2,8 @@
use std::ptr;
// normalize-stderr-test "alloc\d+" -> "allocN"
// normalize-stderr-test "0x7f+" -> "0x7f..f"
pub const BEFORE_START: *const u8 = unsafe { (&0u8 as *const u8).offset(-1) }; //~NOTE
pub const AFTER_END: *const u8 = unsafe { (&0u8 as *const u8).offset(2) }; //~NOTE
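BEFORE_START and AFTER_END step outside a one-byte allocation, which `offset` does not allow even if the result is never dereferenced; `wrapping_offset` is the defined way to compute such addresses. A runtime sketch of the distinction (illustrative only, not taken from the test):

    fn main() {
        let x = 0u8;
        let p = &x as *const u8;
        // `offset`/`add` require both the starting and the resulting pointer to
        // stay within (or one past the end of) the same allocation, which is why
        // the two constants above are expected to fail.
        // `wrapping_offset` has no such requirement as long as the result is
        // never dereferenced.
        let before = p.wrapping_offset(-1);
        let after = p.wrapping_offset(2);
        assert_eq!(after as usize - before as usize, 3);
    }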

Some files were not shown because too many files have changed in this diff.