mirror of https://github.com/rust-lang/rust.git
Make interning explicitly care about types and the mutability of memory

parent 8d5728a7c8
commit 4b6f3868b3
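In user-facing terms (a standalone illustration, not code from this commit): interning now decides per allocation whether the memory may be read-only, based on the type. Roughly, values without interior mutability intern as immutable, while `UnsafeCell` (and `static mut`) forces the allocation to stay mutable:

```rust
use std::sync::atomic::{AtomicU32, Ordering};

// No interior mutability anywhere in the type: the interner may mark the
// backing allocation immutable, so it can live in read-only memory.
static TABLE: &[u32] = &[1, 2, 3];

// `AtomicU32` wraps an `UnsafeCell`, so this allocation has to stay
// mutable even though the binding is a plain `static`.
static COUNTER: AtomicU32 = AtomicU32::new(0);

fn main() {
    COUNTER.fetch_add(1, Ordering::Relaxed); // mutation through a shared reference
    assert_eq!(TABLE[0], 1);
}
```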
src/librustc_mir/const_eval.rs
@@ -9,7 +9,7 @@ use std::convert::TryInto;
 use rustc::hir::def::DefKind;
 use rustc::hir::def_id::DefId;
-use rustc::mir::interpret::{ConstEvalErr, ErrorHandled};
+use rustc::mir::interpret::{ConstEvalErr, ErrorHandled, ScalarMaybeUndef};
 use rustc::mir;
 use rustc::ty::{self, TyCtxt, query::TyCtxtAt};
 use rustc::ty::layout::{self, LayoutOf, VariantIdx};
@@ -18,15 +18,14 @@ use rustc::traits::Reveal;
 use rustc::util::common::ErrorReported;
 use rustc_data_structures::fx::FxHashMap;

-use syntax::ast::Mutability;
 use syntax::source_map::{Span, DUMMY_SP};

 use crate::interpret::{self,
-    PlaceTy, MPlaceTy, MemPlace, OpTy, ImmTy, Immediate, Scalar,
+    PlaceTy, MPlaceTy, OpTy, ImmTy, Immediate, Scalar,
     RawConst, ConstValue,
     InterpResult, InterpErrorInfo, InterpError, GlobalId, InterpretCx, StackPopCleanup,
     Allocation, AllocId, MemoryKind,
-    snapshot, RefTracking,
+    snapshot, RefTracking, intern_const_alloc_recursive,
 };

 /// Number of steps until the detector even starts doing anything.
@@ -63,33 +62,19 @@ pub(crate) fn eval_promoted<'mir, 'tcx>(
     eval_body_using_ecx(&mut ecx, cid, body, param_env)
 }

-fn mplace_to_const<'tcx>(
-    ecx: &CompileTimeEvalContext<'_, 'tcx>,
-    mplace: MPlaceTy<'tcx>,
-) -> &'tcx ty::Const<'tcx> {
-    let MemPlace { ptr, align, meta } = *mplace;
-    // extract alloc-offset pair
-    assert!(meta.is_none());
-    let ptr = ptr.to_ptr().unwrap();
-    let alloc = ecx.memory.get(ptr.alloc_id).unwrap();
-    assert!(alloc.align >= align);
-    assert!(alloc.bytes.len() as u64 - ptr.offset.bytes() >= mplace.layout.size.bytes());
-    let mut alloc = alloc.clone();
-    alloc.align = align;
-    // FIXME shouldn't it be the case that `mark_static_initialized` has already
-    // interned this? I thought that is the entire point of that `FinishStatic` stuff?
-    let alloc = ecx.tcx.intern_const_alloc(alloc);
-    let val = ConstValue::ByRef(ptr, alloc);
-    ecx.tcx.mk_const(ty::Const { val, ty: mplace.layout.ty })
-}
-
 fn op_to_const<'tcx>(
     ecx: &CompileTimeEvalContext<'_, 'tcx>,
     op: OpTy<'tcx>,
 ) -> &'tcx ty::Const<'tcx> {
-    // We do not normalize just any data. Only non-union scalars and slices.
-    let normalize = match op.layout.abi {
-        layout::Abi::Scalar(..) => op.layout.ty.ty_adt_def().map_or(true, |adt| !adt.is_union()),
+    // We do not have value optimizations for everything.
+    // Only scalars and slices, since they are very common.
+    // Note that further down we turn scalars of undefined bits back to `ByRef`. These can result
+    // from scalar unions that are initialized with one of their zero sized variants. We could
+    // instead allow `ConstValue::Scalar` to store `ScalarMaybeUndef`, but that would affect all
+    // the usual cases of extracting e.g. a `usize`, without there being a real use case for the
+    // `Undef` situation.
+    let try_as_immediate = match op.layout.abi {
+        layout::Abi::Scalar(..) => true,
         layout::Abi::ScalarPair(..) => match op.layout.ty.sty {
             ty::Ref(_, inner, _) => match inner.sty {
                 ty::Slice(elem) => elem == ecx.tcx.types.u8,
@@ -100,16 +85,38 @@ fn op_to_const<'tcx>(
         },
         _ => false,
     };
-    let normalized_op = if normalize {
-        Err(*ecx.read_immediate(op).expect("normalization works on validated constants"))
+    let immediate = if try_as_immediate {
+        Err(ecx.read_immediate(op).expect("normalization works on validated constants"))
     } else {
+        // It is guaranteed that any non-slice scalar pair is actually ByRef here.
+        // When we come back from raw const eval, we are always by-ref. The only way our op here is
+        // by-val is if we are in const_field, i.e., if this is (a field of) something that we
+        // "tried to make immediate" before. We wouldn't do that for non-slice scalar pairs or
+        // structs containing such.
         op.try_as_mplace()
     };
-    let val = match normalized_op {
-        Ok(mplace) => return mplace_to_const(ecx, mplace),
-        Err(Immediate::Scalar(x)) =>
-            ConstValue::Scalar(x.not_undef().unwrap()),
-        Err(Immediate::ScalarPair(a, b)) => {
+    let val = match immediate {
+        Ok(mplace) => {
+            let ptr = mplace.ptr.to_ptr().unwrap();
+            let alloc = ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id);
+            ConstValue::ByRef(ptr, alloc)
+        },
+        // see comment on `let try_as_immediate` above
+        Err(ImmTy { imm: Immediate::Scalar(x), .. }) => match x {
+            ScalarMaybeUndef::Scalar(s) => ConstValue::Scalar(s),
+            ScalarMaybeUndef::Undef => {
+                // When coming out of "normal CTFE", we'll always have an `Indirect` operand as
+                // argument and we will not need this. The only way we can already have an
+                // `Immediate` is when we are called from `const_field`, and that `Immediate`
+                // comes from a constant, so it can have `Undef` here, because the indirect
+                // memory that was read had undefined bytes.
+                let mplace = op.to_mem_place();
+                let ptr = mplace.ptr.to_ptr().unwrap();
+                let alloc = ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id);
+                ConstValue::ByRef(ptr, alloc)
+            },
+        },
+        Err(ImmTy { imm: Immediate::ScalarPair(a, b), .. }) => {
             let (data, start) = match a.not_undef().unwrap() {
                 Scalar::Ptr(ptr) => (
                     ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id),
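A self-contained illustration of the `Undef` case described in the comment above (this union mirrors the `Uninit` test touched at the end of this commit): initializing a scalar-ABI union through its zero-sized variant leaves the scalar bytes undefined, so `op_to_const` has to fall back to `ByRef` rather than produce a `ConstValue::Scalar`.

```rust
// A scalar-ABI union initialized via its zero-sized variant: the `u8`
// bytes are never written, so CTFE reads back `Undef` for the scalar.
union Uninit {
    uninit: (),
    byte: u8,
}

const UNINIT: Uninit = Uninit { uninit: () };

fn main() {
    // Only the zero-sized view is touched; the constant still has to be
    // representable, which is why the by-ref fallback exists.
    let _ = unsafe { UNINIT.uninit };
}
```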
@@ -164,13 +171,12 @@ fn eval_body_using_ecx<'mir, 'tcx>(
     ecx.run()?;

     // Intern the result
-    let mutability = if tcx.is_mutable_static(cid.instance.def_id()) ||
-                        !layout.ty.is_freeze(tcx, param_env, body.span) {
-        Mutability::Mutable
-    } else {
-        Mutability::Immutable
-    };
-    ecx.memory.intern_static(ret.ptr.to_ptr()?.alloc_id, mutability)?;
+    intern_const_alloc_recursive(
+        ecx,
+        cid.instance.def_id(),
+        ret,
+        param_env,
+    )?;

     debug!("eval_body_using_ecx done: {:?}", *ret);
     Ok(ret)
@@ -297,7 +303,7 @@ impl<K: Hash + Eq, V> interpret::AllocMap<K, V> for FxHashMap<K, V> {
     }
 }

-type CompileTimeEvalContext<'mir, 'tcx> =
+crate type CompileTimeEvalContext<'mir, 'tcx> =
     InterpretCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>;

 impl interpret::MayLeak for ! {
@@ -526,13 +532,16 @@ fn validate_and_turn_into_const<'tcx>(
             mplace.into(),
             path,
             Some(&mut ref_tracking),
-            true, // const mode
         )?;
     }
     // Now that we validated, turn this into a proper constant.
     let def_id = cid.instance.def.def_id();
     if tcx.is_static(def_id) || cid.promoted.is_some() {
-        Ok(mplace_to_const(&ecx, mplace))
+        let ptr = mplace.ptr.to_ptr()?;
+        Ok(tcx.mk_const(ty::Const {
+            val: ConstValue::ByRef(ptr, ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id)),
+            ty: mplace.layout.ty,
+        }))
     } else {
         Ok(op_to_const(&ecx, mplace.into()))
     }
src/librustc_mir/interpret/eval_context.rs
@@ -576,7 +576,6 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
                 self.place_to_op(return_place)?,
                 vec![],
                 None,
-                /*const_mode*/false,
             )?;
         }
     } else {
src/librustc_mir/interpret/intern.rs (new file, 326 lines)
@@ -0,0 +1,326 @@
//! This module specifies the type-based interner for constants.
//!
//! After a const evaluation has computed a value, before we destroy the const evaluator's session
//! memory, we need to extract all memory allocations to the global memory pool so they stay around.

use rustc::ty::layout::LayoutOf;
use rustc::ty::{Ty, TyCtxt, ParamEnv, self};
use rustc::mir::interpret::{
    EvalResult, ErrorHandled,
};
use rustc::hir;
use rustc::hir::def_id::DefId;
use super::validity::RefTracking;
use rustc_data_structures::fx::FxHashSet;

use syntax::ast::Mutability;
use syntax_pos::Span;

use super::{
    ValueVisitor, MemoryKind, Pointer, AllocId, MPlaceTy, InterpError, Scalar,
};
use crate::const_eval::{CompileTimeInterpreter, CompileTimeEvalContext};

struct InternVisitor<'rt, 'a: 'rt, 'mir: 'rt, 'tcx: 'a+'rt+'mir> {
    /// previously encountered safe references
    ref_tracking: &'rt mut RefTracking<(MPlaceTy<'tcx>, Mutability, InternMode)>,
    ecx: &'rt mut CompileTimeEvalContext<'a, 'mir, 'tcx>,
    param_env: ParamEnv<'tcx>,
    /// The root node of the value that we're looking at. This field is never mutated and only used
    /// for sanity assertions that will ICE when `const_qualif` screws up.
    mode: InternMode,
    /// This field stores the mutability of the value *currently* being checked.
    /// It is set to mutable when an `UnsafeCell` is encountered.
    /// When recursing across a reference, we don't recurse but store the
    /// value to be checked in `ref_tracking` together with the mutability at which we are checking
    /// the value.
    /// When encountering an immutable reference, we treat everything behind it as immutable.
    mutability: Mutability,
    /// A list of all encountered relocations. After type-based interning, we traverse this list to
    /// also intern allocations that are only referenced by a raw pointer or inside a union.
    leftover_relocations: &'rt mut FxHashSet<AllocId>,
}

#[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
enum InternMode {
    /// Mutable references don't change the `mutability` field to `Immutable`
    StaticMut,
    /// Mutable references must in fact be immutable due to their surrounding immutability
    Static,
    /// `UnsafeCell` is OK in the value of a constant, but not behind references in a constant
    ConstBase,
    /// `UnsafeCell` ICEs
    Const,
}

/// Signalling data structure to ensure we don't recurse
/// into the memory of other constants or statics
struct IsStaticOrFn;

impl<'rt, 'a, 'mir, 'tcx> InternVisitor<'rt, 'a, 'mir, 'tcx> {
    fn intern(
        &mut self,
        ptr: Pointer,
        mutability: Mutability,
    ) -> EvalResult<'tcx, Option<IsStaticOrFn>> {
        trace!(
            "InternVisitor::intern {:?} with {:?}",
            ptr, mutability,
        );
        // remove allocation
        let tcx = self.ecx.tcx;
        let memory = self.ecx.memory_mut();
        let (kind, mut alloc) = match memory.alloc_map.remove(&ptr.alloc_id) {
            Some(entry) => entry,
            None => {
                // if the pointer is dangling (neither in local nor global memory), we leave it
                // to validation to error. The `delay_span_bug` ensures that we don't forget such
                // a check in validation.
                if tcx.alloc_map.lock().get(ptr.alloc_id).is_none() {
                    tcx.sess.delay_span_bug(self.ecx.tcx.span, "tried to intern dangling pointer");
                }
                // treat dangling pointers like other statics
                // just to stop trying to recurse into them
                return Ok(Some(IsStaticOrFn));
            },
        };
        // This match is just a canary for future changes to `MemoryKind`, which most likely need
        // changes in this function.
        match kind {
            MemoryKind::Stack | MemoryKind::Vtable => {},
        }
        // Ensure llvm knows to only put this into immutable memory if the value is immutable either
        // by being behind a reference or by being part of a static or const without interior
        // mutability
        alloc.mutability = mutability;
        // link the alloc id to the actual allocation
        let alloc = tcx.intern_const_alloc(alloc);
        self.leftover_relocations.extend(alloc.relocations.iter().map(|&(_, ((), reloc))| reloc));
        tcx.alloc_map.lock().set_alloc_id_memory(ptr.alloc_id, alloc);
        Ok(None)
    }
}

impl<'rt, 'a, 'mir, 'tcx>
    ValueVisitor<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>
for
    InternVisitor<'rt, 'a, 'mir, 'tcx>
{
    type V = MPlaceTy<'tcx>;

    #[inline(always)]
    fn ecx(&self) -> &CompileTimeEvalContext<'a, 'mir, 'tcx> {
        &self.ecx
    }

    fn visit_aggregate(
        &mut self,
        mplace: MPlaceTy<'tcx>,
        fields: impl Iterator<Item=EvalResult<'tcx, Self::V>>,
    ) -> EvalResult<'tcx> {
        if let Some(def) = mplace.layout.ty.ty_adt_def() {
            if Some(def.did) == self.ecx.tcx.lang_items().unsafe_cell_type() {
                // We are crossing over an `UnsafeCell`, so we can mutate again
                let old = std::mem::replace(&mut self.mutability, Mutability::Mutable);
                assert_ne!(
                    self.mode, InternMode::Const,
                    "UnsafeCells are not allowed behind references in constants. This should have \
                    been prevented statically by const qualification. If this were allowed one \
                    would be able to change a constant at one use site and other use sites may \
                    arbitrarily decide to change, too.",
                );
                let walked = self.walk_aggregate(mplace, fields);
                self.mutability = old;
                return walked;
            }
        }
        self.walk_aggregate(mplace, fields)
    }

    fn visit_primitive(&mut self, mplace: MPlaceTy<'tcx>) -> EvalResult<'tcx> {
        // Handle Reference types, as these are the only relocations supported by const eval.
        // Raw pointers (and boxes) are handled by the `leftover_relocations` logic.
        let ty = mplace.layout.ty;
        if let ty::Ref(_, _, mutability) = ty.sty {
            let value = self.ecx.read_immediate(mplace.into())?;
            // Handle trait object vtables
            if let Ok(meta) = value.to_meta() {
                let layout = self.ecx.layout_of(ty.builtin_deref(true).unwrap().ty)?;
                if layout.is_unsized() {
                    if let ty::Dynamic(..) = self.ecx.tcx.struct_tail(layout.ty).sty {
                        if let Ok(vtable) = meta.unwrap().to_ptr() {
                            // explicitly choose `Immutable` here, since vtables are immutable, even
                            // if the reference of the fat pointer is mutable
                            self.intern(vtable, Mutability::Immutable)?;
                        }
                    }
                }
            }
            let mplace = self.ecx.ref_to_mplace(value)?;
            // Check if we have encountered this pointer+layout combination before.
            // Only recurse for allocation-backed pointers.
            if let Scalar::Ptr(ptr) = mplace.ptr {
                // In the future we will probably allow `& &mut T`, and thus will want to merge
                // `mutability` with `self.mutability` to only choose `Mutable` if both are
                // `Mutable`.

                // We do not have any `frozen` logic here, because it's essentially equivalent to
                // the mutability except for the outermost item. Only `UnsafeCell` can "unfreeze",
                // and we check that in `visit_aggregate`.
                match (self.mode, mutability) {
                    // all is "good and well" in the unsoundness of `static mut`
                    (InternMode::StaticMut, _) => {},
                    // immutable references are fine everywhere
                    (_, hir::Mutability::MutImmutable) => {},
                    // mutable references are ok in `static`. Either they are treated as immutable
                    // because they are behind an immutable one, or they are behind an `UnsafeCell`
                    // and thus ok.
                    (InternMode::Static, hir::Mutability::MutMutable) => {},
                    // we statically prevent `&mut T` via `const_qualif` and double check this here
                    (InternMode::ConstBase, hir::Mutability::MutMutable) |
                    (InternMode::Const, hir::Mutability::MutMutable) =>
                        bug!("const qualif failed to prevent mutable references"),
                }
                let mutability = match (self.mutability, mutability) {
                    // The only way a mutable reference actually works as a mutable reference is
                    // by being in a `static mut` directly or behind another mutable reference.
                    // If there's an immutable reference or we are inside a static, then our
                    // mutable reference is equivalent to an immutable one. As an example:
                    // `&&mut Foo` is semantically equivalent to `&&Foo`
                    (Mutability::Mutable, hir::Mutability::MutMutable) => Mutability::Mutable,
                    _ => Mutability::Immutable,
                };
                let intern_mutability = intern_mutability(
                    self.ecx.tcx.tcx,
                    self.param_env,
                    mplace.layout.ty,
                    self.ecx.tcx.span,
                    mutability,
                );
                // Recursing behind references changes the intern mode for constants in order to
                // cause assertions to trigger if we encounter any `UnsafeCell`s.
                let mode = match self.mode {
                    InternMode::ConstBase => InternMode::Const,
                    other => other,
                };
                match self.intern(ptr, intern_mutability)? {
                    // No need to recurse, these are interned already and statics may have
                    // cycles, so we don't want to recurse there
                    Some(IsStaticOrFn) => {},
                    // intern everything referenced by this value. The mutability is taken from the
                    // reference. It is checked above that mutable references only happen in
                    // `static mut`
                    None => self.ref_tracking.track((mplace, mutability, mode), || ()),
                }
            }
        }
        Ok(())
    }
}

/// Figure out the mutability of the allocation.
/// Mutable if it has interior mutability *anywhere* in the type.
fn intern_mutability<'a, 'tcx>(
    tcx: TyCtxt<'a, 'tcx, 'tcx>,
    param_env: ParamEnv<'tcx>,
    ty: Ty<'tcx>,
    span: Span,
    mutability: Mutability,
) -> Mutability {
    let has_interior_mutability = !ty.is_freeze(tcx, param_env, span);
    if has_interior_mutability {
        Mutability::Mutable
    } else {
        mutability
    }
}

pub fn intern_const_alloc_recursive<'a, 'mir, 'tcx>(
    ecx: &mut CompileTimeEvalContext<'a, 'mir, 'tcx>,
    def_id: DefId,
    ret: MPlaceTy<'tcx>,
    // FIXME(oli-obk): can we scrap the param env? I think we can, the final value of a const eval
    // must always be monomorphic, right?
    param_env: ty::ParamEnv<'tcx>,
) -> EvalResult<'tcx> {
    let tcx = ecx.tcx;
    let (mutability, base_intern_mode) = match tcx.static_mutability(def_id) {
        Some(hir::Mutability::MutImmutable) => (Mutability::Immutable, InternMode::Static),
        None => (Mutability::Immutable, InternMode::ConstBase),
        // `static mut` doesn't care about interior mutability, it's mutable anyway
        Some(hir::Mutability::MutMutable) => (Mutability::Mutable, InternMode::StaticMut),
    };

    // type based interning
    let mut ref_tracking = RefTracking::new((ret, mutability, base_intern_mode));
    let leftover_relocations = &mut FxHashSet::default();

    let alloc_mutability = intern_mutability(
        tcx.tcx, param_env, ret.layout.ty, tcx.span, mutability,
    );

    // start with the outermost allocation
    InternVisitor {
        ref_tracking: &mut ref_tracking,
        ecx,
        mode: base_intern_mode,
        leftover_relocations,
        param_env,
        mutability,
    }.intern(ret.ptr.to_ptr()?, alloc_mutability)?;

    while let Some(((mplace, mutability, mode), _)) = ref_tracking.todo.pop() {
        let interned = InternVisitor {
            ref_tracking: &mut ref_tracking,
            ecx,
            mode,
            leftover_relocations,
            param_env,
            mutability,
        }.visit_value(mplace);
        if let Err(error) = interned {
            // This can happen when e.g. the tag of an enum is not a valid discriminant. We do have
            // to read enum discriminants in order to find references in enum variant fields.
            if let InterpError::ValidationFailure(_) = error.kind {
                let err = crate::const_eval::error_to_const_error(&ecx, error);
                match err.struct_error(ecx.tcx, "it is undefined behavior to use this value") {
                    Ok(mut diag) => {
                        diag.note("The rules on what exactly is undefined behavior aren't clear, \
                            so this check might be overzealous. Please open an issue on the rust \
                            compiler repository if you believe it should not be considered \
                            undefined behavior",
                        );
                        diag.emit();
                    }
                    Err(ErrorHandled::TooGeneric) |
                    Err(ErrorHandled::Reported) => {},
                }
            }
        }
    }

    // Intern the rest of the allocations as mutable. These might be inside unions, padding, raw
    // pointers, ... So we can't intern them according to their type rules.

    let mut todo: Vec<_> = leftover_relocations.iter().cloned().collect();
    while let Some(alloc_id) = todo.pop() {
        if let Some((_, alloc)) = ecx.memory_mut().alloc_map.remove(&alloc_id) {
            // We can't call the `intern` method here, as its logic is tailored to safe references.
            // So we hand-roll the interning logic here again.
            let alloc = tcx.intern_const_alloc(alloc);
            tcx.alloc_map.lock().set_alloc_id_memory(alloc_id, alloc);
            for &(_, ((), reloc)) in alloc.relocations.iter() {
                if leftover_relocations.insert(reloc) {
                    todo.push(reloc);
                }
            }
        } else if ecx.memory().dead_alloc_map.contains_key(&alloc_id) {
            // dangling pointer
            return err!(ValidationFailure(
                "encountered dangling pointer in final constant".into(),
            ))
        }
    }
    Ok(())
}
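Stripped of compiler types, `intern_const_alloc_recursive` runs two worklists: a type-driven pass over safe references, followed by a conservative pass over leftover relocations (raw pointers, unions, padding). A simplified standalone model of that control flow, with all types hypothetical rather than rustc APIs:

```rust
use std::collections::{HashMap, HashSet};

#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct AllocId(u32);

struct Alloc {
    relocations: Vec<AllocId>, // pointers stored inside this allocation
    mutable: bool,
}

// Phase 1: allocations reached through typed references get a mutability
// decided by the type. Phase 2: anything still reachable only through raw
// pointers, unions, or padding is interned conservatively, as-is.
fn intern_recursive(
    root: AllocId,
    heap: &mut HashMap<AllocId, Alloc>,
    interned: &mut HashMap<AllocId, Alloc>,
) {
    let mut leftover: HashSet<AllocId> = HashSet::new();
    let mut todo = vec![root];
    // Phase 1: follow typed references; for simplicity every relocation is
    // treated as a safe reference to a freeze type here.
    while let Some(id) = todo.pop() {
        if let Some(mut alloc) = heap.remove(&id) {
            alloc.mutable = false; // type says: no interior mutability
            leftover.extend(alloc.relocations.iter().copied());
            todo.extend(alloc.relocations.iter().copied());
            interned.insert(id, alloc);
        }
    }
    // Phase 2: leftover relocations keep their mutability, since unions and
    // raw pointers give no type information; already-interned ids are skipped
    // because `heap.remove` returns `None` for them.
    let mut todo: Vec<_> = leftover.into_iter().collect();
    while let Some(id) = todo.pop() {
        if let Some(alloc) = heap.remove(&id) {
            todo.extend(alloc.relocations.iter().copied());
            interned.insert(id, alloc);
        }
    }
}

fn main() {
    let mut heap = HashMap::new();
    heap.insert(AllocId(0), Alloc { relocations: vec![AllocId(1)], mutable: true });
    heap.insert(AllocId(1), Alloc { relocations: vec![], mutable: true });
    let mut interned = HashMap::new();
    intern_recursive(AllocId(0), &mut heap, &mut interned);
    assert_eq!(interned.len(), 2);
    assert!(!interned[&AllocId(0)].mutable);
}
```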
src/librustc_mir/interpret/memory.rs
@@ -20,6 +20,7 @@ use super::{
     Pointer, AllocId, Allocation, GlobalId, AllocationExtra,
     InterpResult, Scalar, InterpError, GlobalAlloc, PointerArithmetic,
     Machine, AllocMap, MayLeak, ErrorHandled, CheckInAllocMsg, InboundsCheck,
+    InterpError::ValidationFailure,
 };

 #[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)]
@@ -55,12 +56,12 @@ pub struct Memory<'mir, 'tcx, M: Machine<'mir, 'tcx>> {
     /// the wrong type), so we let the machine override this type.
     /// Either way, if the machine allows writing to a static, doing so will
     /// create a copy of the static allocation here.
-    alloc_map: M::MemoryMap,
+    pub(super) alloc_map: M::MemoryMap,

     /// To be able to compare pointers with NULL, and to check alignment for accesses
     /// to ZSTs (where pointers may dangle), we keep track of the size even for allocations
     /// that do not exist any more.
-    dead_alloc_map: FxHashMap<AllocId, (Size, Align)>,
+    pub(super) dead_alloc_map: FxHashMap<AllocId, (Size, Align)>,

     /// Extra data added by the machine.
     pub extra: M::MemoryExtra,
@@ -455,6 +456,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
         // Could also be a fn ptr or extern static
         match self.tcx.alloc_map.lock().get(id) {
             Some(GlobalAlloc::Function(..)) => Ok((Size::ZERO, Align::from_bytes(1).unwrap())),
+            // `self.get` would also work, but can cause cycles if a static refers to itself
             Some(GlobalAlloc::Static(did)) => {
                 // The only way `get` couldn't have worked here is if this is an extern static
                 assert!(self.tcx.is_foreign_item(did));
@@ -463,14 +465,20 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
                 let layout = self.tcx.layout_of(ParamEnv::empty().and(ty)).unwrap();
                 Ok((layout.size, layout.align.abi))
             }
-            _ => match liveness {
-                InboundsCheck::MaybeDead => {
-                    // Must be a deallocated pointer
-                    Ok(*self.dead_alloc_map.get(&id).expect(
-                        "allocation missing in dead_alloc_map"
-                    ))
-                },
-                InboundsCheck::Live => err!(DanglingPointerDeref),
+            _ => {
+                if let Ok(alloc) = self.get(id) {
+                    return Ok((Size::from_bytes(alloc.bytes.len() as u64), alloc.align));
+                }
+                match liveness {
+                    InboundsCheck::MaybeDead => {
+                        // Must be a deallocated pointer
+                        self.dead_alloc_map.get(&id).cloned().ok_or_else(||
+                            ValidationFailure("allocation missing in dead_alloc_map".to_string())
+                                .into()
+                        )
+                    },
+                    InboundsCheck::Live => err!(DanglingPointerDeref),
+                }
             },
         }
     }
@@ -633,56 +641,6 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
     }
 }

-/// Interning (for CTFE)
-impl<'mir, 'tcx, M> Memory<'mir, 'tcx, M>
-where
-    M: Machine<'mir, 'tcx, PointerTag = (), AllocExtra = (), MemoryExtra = ()>,
-    // FIXME: Working around https://github.com/rust-lang/rust/issues/24159
-    M::MemoryMap: AllocMap<AllocId, (MemoryKind<M::MemoryKinds>, Allocation)>,
-{
-    /// mark an allocation as static and initialized, either mutable or not
-    pub fn intern_static(
-        &mut self,
-        alloc_id: AllocId,
-        mutability: Mutability,
-    ) -> InterpResult<'tcx> {
-        trace!(
-            "mark_static_initialized {:?}, mutability: {:?}",
-            alloc_id,
-            mutability
-        );
-        // remove allocation
-        let (kind, mut alloc) = self.alloc_map.remove(&alloc_id).unwrap();
-        match kind {
-            MemoryKind::Machine(_) => bug!("Static cannot refer to machine memory"),
-            MemoryKind::Stack | MemoryKind::Vtable => {},
-        }
-        // ensure llvm knows not to put this into immutable memory
-        alloc.mutability = mutability;
-        let alloc = self.tcx.intern_const_alloc(alloc);
-        self.tcx.alloc_map.lock().set_alloc_id_memory(alloc_id, alloc);
-        // recurse into inner allocations
-        for &(_, alloc) in alloc.relocations.values() {
-            // FIXME: Reusing the mutability here is likely incorrect. It is originally
-            // determined via `is_freeze`, and data is considered frozen if there is no
-            // `UnsafeCell` *immediately* in that data -- however, this search stops
-            // at references. So whenever we follow a reference, we should likely
-            // assume immutability -- and we should make sure that the compiler
-            // does not permit code that would break this!
-            if self.alloc_map.contains_key(&alloc) {
-                // Not yet interned, so proceed recursively
-                self.intern_static(alloc, mutability)?;
-            } else if self.dead_alloc_map.contains_key(&alloc) {
-                // dangling pointer
-                return err!(ValidationFailure(
-                    "encountered dangling pointer in final constant".into(),
-                ))
-            }
-        }
-        Ok(())
-    }
-}
-
 /// Reading and writing.
 impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
     pub fn copy(
src/librustc_mir/interpret/mod.rs
@@ -14,6 +14,7 @@ mod traits;
 mod validity;
 mod intrinsics;
 mod visitor;
+mod intern;

 pub use rustc::mir::interpret::*; // have all the `interpret` symbols in one place: here

@@ -34,3 +35,5 @@ pub use self::visitor::{ValueVisitor, MutValueVisitor};
 pub use self::validity::RefTracking;

 pub(super) use self::intrinsics::type_name;
+
+pub use self::intern::intern_const_alloc_recursive;
src/librustc_mir/interpret/operand.rs
@@ -217,7 +217,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
     fn try_read_immediate_from_mplace(
         &self,
         mplace: MPlaceTy<'tcx, M::PointerTag>,
-    ) -> InterpResult<'tcx, Option<Immediate<M::PointerTag>>> {
+    ) -> InterpResult<'tcx, Option<ImmTy<'tcx, M::PointerTag>>> {
         if mplace.layout.is_unsized() {
             // Don't touch unsized
             return Ok(None);
@@ -228,7 +228,10 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
             // Not all ZSTs have a layout we would handle below, so just short-circuit them
             // all here.
             self.memory.check_align(ptr, ptr_align)?;
-            return Ok(Some(Immediate::Scalar(Scalar::zst().into())));
+            return Ok(Some(ImmTy {
+                imm: Immediate::Scalar(Scalar::zst().into()),
+                layout: mplace.layout,
+            }));
         }

         // check for integer pointers before alignment to report better errors
@@ -239,7 +242,10 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
                 let scalar = self.memory
                     .get(ptr.alloc_id)?
                     .read_scalar(self, ptr, mplace.layout.size)?;
-                Ok(Some(Immediate::Scalar(scalar)))
+                Ok(Some(ImmTy {
+                    imm: Immediate::Scalar(scalar),
+                    layout: mplace.layout,
+                }))
             }
             layout::Abi::ScalarPair(ref a, ref b) => {
                 let (a, b) = (&a.value, &b.value);
@@ -256,7 +262,10 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
                 let b_val = self.memory
                     .get(ptr.alloc_id)?
                     .read_scalar(self, b_ptr, b_size)?;
-                Ok(Some(Immediate::ScalarPair(a_val, b_val)))
+                Ok(Some(ImmTy {
+                    imm: Immediate::ScalarPair(a_val, b_val),
+                    layout: mplace.layout,
+                }))
             }
             _ => Ok(None),
         }
@@ -271,13 +280,13 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
     pub(crate) fn try_read_immediate(
         &self,
         src: OpTy<'tcx, M::PointerTag>,
-    ) -> InterpResult<'tcx, Result<Immediate<M::PointerTag>, MemPlace<M::PointerTag>>> {
+    ) -> InterpResult<'tcx, Result<ImmTy<'tcx, M::PointerTag>, MPlaceTy<'tcx, M::PointerTag>>> {
         Ok(match src.try_as_mplace() {
             Ok(mplace) => {
                 if let Some(val) = self.try_read_immediate_from_mplace(mplace)? {
                     Ok(val)
                 } else {
-                    Err(*mplace)
+                    Err(mplace)
                 }
             },
             Err(val) => Ok(val),
@@ -291,7 +300,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
         op: OpTy<'tcx, M::PointerTag>
     ) -> InterpResult<'tcx, ImmTy<'tcx, M::PointerTag>> {
         if let Ok(imm) = self.try_read_immediate(op)? {
-            Ok(ImmTy { imm, layout: op.layout })
+            Ok(imm)
         } else {
             bug!("primitive read failed for type: {:?}", op.layout.ty);
         }
@@ -339,9 +348,9 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
             return Ok(OpTy { op: Operand::Immediate(immediate), layout: field_layout });
         }
         let offset = op.layout.fields.offset(field);
-        let immediate = match base {
+        let immediate = match *base {
             // the field covers the entire type
-            _ if offset.bytes() == 0 && field_layout.size == op.layout.size => base,
+            _ if offset.bytes() == 0 && field_layout.size == op.layout.size => *base,
             // extract fields from types with `ScalarPair` ABI
             Immediate::ScalarPair(a, b) => {
                 let val = if offset.bytes() == 0 { a } else { b };
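All the operand changes follow one pattern: readers now return `ImmTy` (an `Immediate` bundled with its layout) instead of a bare `Immediate`, so the layout can no longer get lost on the way to callers such as `op_to_const`. A toy sketch of that API shape, with every type here hypothetical rather than a rustc type:

```rust
use std::convert::TryInto;

#[derive(Clone, Copy, Debug)]
struct Layout { size: u64 }

#[derive(Clone, Copy, Debug)]
#[allow(dead_code)]
enum Immediate { Scalar(u128), ScalarPair(u128, u128) }

// A value always travels together with its layout.
#[derive(Clone, Copy, Debug)]
struct ImmTy { imm: Immediate, layout: Layout }

fn try_read_immediate(bytes: &[u8]) -> Option<ImmTy> {
    // Toy reader: treat an 8-byte buffer as one scalar.
    match bytes.len() {
        8 => {
            let arr: [u8; 8] = bytes.try_into().ok()?;
            Some(ImmTy {
                imm: Immediate::Scalar(u64::from_le_bytes(arr) as u128),
                layout: Layout { size: 8 },
            })
        }
        _ => None,
    }
}

fn main() {
    let imm_ty = try_read_immediate(&42u64.to_le_bytes()).unwrap();
    // The layout is available wherever the value goes.
    println!("{:?} (size {})", imm_ty.imm, imm_ty.layout.size);
}
```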
src/librustc_mir/interpret/place.rs
@@ -232,10 +232,10 @@ impl<'tcx, Tag> MPlaceTy<'tcx, Tag> {

 impl<'tcx, Tag: ::std::fmt::Debug + Copy> OpTy<'tcx, Tag> {
     #[inline(always)]
-    pub fn try_as_mplace(self) -> Result<MPlaceTy<'tcx, Tag>, Immediate<Tag>> {
+    pub fn try_as_mplace(self) -> Result<MPlaceTy<'tcx, Tag>, ImmTy<'tcx, Tag>> {
         match *self {
             Operand::Indirect(mplace) => Ok(MPlaceTy { mplace, layout: self.layout }),
-            Operand::Immediate(imm) => Err(imm),
+            Operand::Immediate(imm) => Err(ImmTy { imm, layout: self.layout }),
         }
     }

@@ -660,7 +660,7 @@ where

         if M::enforce_validity(self) {
             // Data got changed, better make sure it matches the type!
-            self.validate_operand(self.place_to_op(dest)?, vec![], None, /*const_mode*/false)?;
+            self.validate_operand(self.place_to_op(dest)?, vec![], None)?;
         }

         Ok(())
@@ -809,7 +809,7 @@ where

         if M::enforce_validity(self) {
             // Data got changed, better make sure it matches the type!
-            self.validate_operand(self.place_to_op(dest)?, vec![], None, /*const_mode*/false)?;
+            self.validate_operand(self.place_to_op(dest)?, vec![], None)?;
         }

         Ok(())
@@ -836,7 +836,7 @@ where
                 // Yay, we got a value that we can write directly.
                 // FIXME: Add a check to make sure that if `src` is indirect,
                 // it does not overlap with `dest`.
-                return self.write_immediate_no_validate(src_val, dest);
+                return self.write_immediate_no_validate(*src_val, dest);
             }
             Err(mplace) => mplace,
         };
@@ -897,7 +897,7 @@ where

         if M::enforce_validity(self) {
             // Data got changed, better make sure it matches the type!
-            self.validate_operand(dest.into(), vec![], None, /*const_mode*/false)?;
+            self.validate_operand(dest.into(), vec![], None)?;
         }

         Ok(())
src/librustc_mir/interpret/validity.rs
@@ -1,5 +1,4 @@
 use std::fmt::Write;
-use std::hash::Hash;
 use std::ops::RangeInclusive;

 use syntax_pos::symbol::{sym, Symbol};
@@ -11,6 +10,8 @@ use rustc::mir::interpret::{
     Scalar, GlobalAlloc, InterpResult, InterpError, CheckInAllocMsg,
 };

+use std::hash::Hash;
+
 use super::{
     OpTy, Machine, InterpretCx, ValueVisitor, MPlaceTy,
 };
@@ -76,19 +77,34 @@ pub enum PathElem {
 }

 /// State for tracking recursive validation of references
-pub struct RefTracking<T> {
+pub struct RefTracking<T, PATH = ()> {
     pub seen: FxHashSet<T>,
-    pub todo: Vec<(T, Vec<PathElem>)>,
+    pub todo: Vec<(T, PATH)>,
 }

-impl<T: Copy + Eq + Hash> RefTracking<T> {
+impl<T: Copy + Eq + Hash + std::fmt::Debug, PATH: Default> RefTracking<T, PATH> {
+    pub fn empty() -> Self {
+        RefTracking {
+            seen: FxHashSet::default(),
+            todo: vec![],
+        }
+    }
     pub fn new(op: T) -> Self {
-        let mut ref_tracking = RefTracking {
+        let mut ref_tracking_for_consts = RefTracking {
             seen: FxHashSet::default(),
-            todo: vec![(op, Vec::new())],
+            todo: vec![(op, PATH::default())],
         };
-        ref_tracking.seen.insert(op);
-        ref_tracking
+        ref_tracking_for_consts.seen.insert(op);
+        ref_tracking_for_consts
     }
+
+    pub fn track(&mut self, op: T, path: impl FnOnce() -> PATH) {
+        if self.seen.insert(op) {
+            trace!("Recursing below ptr {:#?}", op);
+            let path = path();
+            // Remember to come back to this later.
+            self.todo.push((op, path));
+        }
+    }
 }
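`RefTracking` is now generic over the bookkeeping attached to each tracked place, and `track` builds that data lazily so duplicates cost nothing. A minimal standalone sketch of the same pattern (plain std types, not rustc's):

```rust
use std::collections::HashSet;
use std::hash::Hash;

// A worklist that remembers what it has seen and only materializes a
// "path" (for diagnostics) when an item is actually new.
struct RefTracking<T, PATH = ()> {
    seen: HashSet<T>,
    todo: Vec<(T, PATH)>,
}

impl<T: Copy + Eq + Hash, PATH: Default> RefTracking<T, PATH> {
    fn empty() -> Self {
        RefTracking { seen: HashSet::new(), todo: vec![] }
    }
    fn new(item: T) -> Self {
        let mut rt = Self::empty();
        rt.seen.insert(item);
        rt.todo.push((item, PATH::default()));
        rt
    }
    fn track(&mut self, item: T, path: impl FnOnce() -> PATH) {
        if self.seen.insert(item) {
            // The closure runs only for unseen items, so building an
            // expensive path is skipped for duplicates.
            self.todo.push((item, path()));
        }
    }
}

fn main() {
    let mut rt: RefTracking<u32, Vec<&'static str>> = RefTracking::new(0);
    rt.track(1, || vec!["deref"]);
    rt.track(1, || unreachable!("already seen, closure never runs"));
    while let Some((item, path)) = rt.todo.pop() {
        println!("visit {} via {:?}", item, path);
    }
}
```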
@@ -154,8 +170,10 @@ struct ValidityVisitor<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> {
     /// starts must not be changed! `visit_fields` and `visit_array` rely on
     /// this stack discipline.
     path: Vec<PathElem>,
-    ref_tracking: Option<&'rt mut RefTracking<MPlaceTy<'tcx, M::PointerTag>>>,
-    const_mode: bool,
+    ref_tracking_for_consts: Option<&'rt mut RefTracking<
+        MPlaceTy<'tcx, M::PointerTag>,
+        Vec<PathElem>,
+    >>,
     ecx: &'rt InterpretCx<'mir, 'tcx, M>,
 }

@@ -314,7 +332,7 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
             // types below!
             let size = value.layout.size;
             let value = value.to_scalar_or_undef();
-            if self.const_mode {
+            if self.ref_tracking_for_consts.is_some() {
                 // Integers/floats in CTFE: Must be scalar bits, pointers are dangerous
                 try_validation!(value.to_bits(size),
                     value, self.path, "initialized plain (non-pointer) bytes");
@@ -324,7 +342,7 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
                 }
             }
             ty::RawPtr(..) => {
-                if self.const_mode {
+                if self.ref_tracking_for_consts.is_some() {
                     // Integers/floats in CTFE: For consistency with integers, we do not
                     // accept undef.
                     let _ptr = try_validation!(value.to_scalar_ptr(),
@@ -393,8 +411,7 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
                     }
                 }
                 // Recursive checking
-                if let Some(ref mut ref_tracking) = self.ref_tracking {
-                    assert!(self.const_mode, "We should only do recursie checking in const mode");
+                if let Some(ref mut ref_tracking) = self.ref_tracking_for_consts {
                     let place = self.ecx.ref_to_mplace(value)?;
                     // FIXME(RalfJ): check ZST for inbound pointers
                     if size != Size::ZERO {
@@ -424,16 +441,15 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
                     // before. Proceed recursively even for integer pointers, no
                     // reason to skip them! They are (recursively) valid for some ZST,
                     // but not for others (e.g., `!` is a ZST).
-                    if ref_tracking.seen.insert(place) {
-                        trace!("Recursing below ptr {:#?}", *place);
+                    let path = &self.path;
+                    ref_tracking.track(place, || {
                         // We need to clone the path anyway, make sure it gets created
                         // with enough space for the additional `Deref`.
-                        let mut new_path = Vec::with_capacity(self.path.len()+1);
-                        new_path.clone_from(&self.path);
+                        let mut new_path = Vec::with_capacity(path.len() + 1);
+                        new_path.clone_from(path);
                         new_path.push(PathElem::Deref);
-                        // Remember to come back to this later.
-                        ref_tracking.todo.push((place, new_path));
-                    }
+                        new_path
+                    });
                 }
             }
             ty::FnPtr(_sig) => {
@@ -489,10 +505,17 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
                 let non_null =
                     self.ecx.memory.check_align(
                         Scalar::Ptr(ptr), Align::from_bytes(1).unwrap()
-                    ).is_ok() ||
-                    self.ecx.memory.get_fn(ptr).is_ok();
+                    ).is_ok();
                 if !non_null {
-                    // could be NULL
+                    // These conditions are just here to improve the diagnostics so we can
+                    // differentiate between null pointers and dangling pointers
+                    if self.ref_tracking_for_consts.is_some() &&
+                        self.ecx.memory.get(ptr.alloc_id).is_err() &&
+                        self.ecx.memory.get_fn(ptr).is_err() {
+                        return validation_failure!(
+                            "encountered dangling pointer", self.path
+                        );
+                    }
                     return validation_failure!("a potentially NULL pointer", self.path);
                 }
                 return Ok(());
@@ -575,7 +598,7 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
                     self.ecx,
                     ptr,
                     size,
-                    /*allow_ptr_and_undef*/!self.const_mode,
+                    /*allow_ptr_and_undef*/ self.ref_tracking_for_consts.is_none(),
                 ) {
                     // In the happy case, we needn't check anything else.
                     Ok(()) => {},
@@ -613,23 +636,25 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpretCx<'mir, 'tcx, M> {
     /// is an indirect operand.
     /// It will error if the bits at the destination do not match the ones described by the layout.
     ///
-    /// `ref_tracking` can be `None` to avoid recursive checking below references.
+    /// `ref_tracking_for_consts` can be `None` to avoid recursive checking below references.
     /// This also toggles between "run-time" (no recursion) and "compile-time" (with recursion)
-    /// validation (e.g., pointer values are fine in integers at runtime).
+    /// validation (e.g., pointer values are fine in integers at runtime) and various other const
+    /// specific validation checks.
     pub fn validate_operand(
         &self,
         op: OpTy<'tcx, M::PointerTag>,
         path: Vec<PathElem>,
-        ref_tracking: Option<&mut RefTracking<MPlaceTy<'tcx, M::PointerTag>>>,
-        const_mode: bool,
+        ref_tracking_for_consts: Option<&mut RefTracking<
+            MPlaceTy<'tcx, M::PointerTag>,
+            Vec<PathElem>,
+        >>,
     ) -> InterpResult<'tcx> {
         trace!("validate_operand: {:?}, {:?}", *op, op.layout.ty);

         // Construct a visitor
         let mut visitor = ValidityVisitor {
             path,
-            ref_tracking,
-            const_mode,
+            ref_tracking_for_consts,
             ecx: self,
         };
src/librustc_mir/transform/const_prop.rs
@@ -551,7 +551,12 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
         source_info: SourceInfo,
     ) {
         trace!("attepting to replace {:?} with {:?}", rval, value);
-        if let Err(e) = self.ecx.validate_operand(value, vec![], None, true) {
+        if let Err(e) = self.ecx.validate_operand(
+            value,
+            vec![],
+            // FIXME: is ref tracking too expensive?
+            Some(&mut interpret::RefTracking::empty()),
+        ) {
             trace!("validation error, attempt failed: {:?}", e);
             return;
         }
src/librustc_mir/transform/qualify_consts.rs
@@ -738,27 +738,29 @@ impl<'a, 'tcx> Checker<'a, 'tcx> {
         qualifs[IsNotPromotable] = true;

         if self.mode.requires_const_checking() {
-            if let BorrowKind::Mut { .. } = kind {
-                let mut err = struct_span_err!(self.tcx.sess, self.span, E0017,
-                                               "references in {}s may only refer \
-                                                to immutable values", self.mode);
-                err.span_label(self.span, format!("{}s require immutable values",
-                                                    self.mode));
-                if self.tcx.sess.teach(&err.get_code().unwrap()) {
-                    err.note("References in statics and constants may only refer to \
-                              immutable values.\n\n\
-                              Statics are shared everywhere, and if they refer to \
-                              mutable data one might violate memory safety since \
-                              holding multiple mutable references to shared data is \
-                              not allowed.\n\n\
-                              If you really want global mutable state, try using \
-                              static mut or a global UnsafeCell.");
+            if !self.tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you {
+                if let BorrowKind::Mut { .. } = kind {
+                    let mut err = struct_span_err!(self.tcx.sess, self.span, E0017,
+                                                   "references in {}s may only refer \
+                                                    to immutable values", self.mode);
+                    err.span_label(self.span, format!("{}s require immutable values",
+                                                        self.mode));
+                    if self.tcx.sess.teach(&err.get_code().unwrap()) {
+                        err.note("References in statics and constants may only refer to \
+                                  immutable values.\n\n\
+                                  Statics are shared everywhere, and if they refer to \
+                                  mutable data one might violate memory safety since \
+                                  holding multiple mutable references to shared data is \
+                                  not allowed.\n\n\
+                                  If you really want global mutable state, try using \
+                                  static mut or a global UnsafeCell.");
+                    }
+                    err.emit();
+                } else {
+                    span_err!(self.tcx.sess, self.span, E0492,
+                              "cannot borrow a constant which may contain \
+                               interior mutability, create a static instead");
                 }
-                err.emit();
-            } else {
-                span_err!(self.tcx.sess, self.span, E0492,
-                          "cannot borrow a constant which may contain \
-                           interior mutability, create a static instead");
             }
         }
     } else if let BorrowKind::Mut { .. } | BorrowKind::Shared = kind {
src/test/ui/consts/miri_unleashed/mutable_references.rs (new file, 35 lines)
@@ -0,0 +1,35 @@
// compile-flags: -Zunleash-the-miri-inside-of-you
#![allow(const_err)]

use std::cell::UnsafeCell;

// a test demonstrating what things we could allow with a smarter const qualification

static FOO: &&mut u32 = &&mut 42;

static BAR: &mut () = &mut ();

struct Foo<T>(T);

static BOO: &mut Foo<()> = &mut Foo(());

struct Meh {
    x: &'static UnsafeCell<i32>,
}

unsafe impl Sync for Meh {}

static MEH: Meh = Meh {
    x: &UnsafeCell::new(42),
};

static OH_YES: &mut i32 = &mut 42;

fn main() {
    unsafe {
        *MEH.x.get() = 99; //~ WARN skipping const checks
        //~^ WARN skipping const checks
    }
    *OH_YES = 99; //~ ERROR cannot assign to `*OH_YES`, as `OH_YES` is an immutable static item
    //~^ WARN skipping const checks
}
src/test/ui/consts/miri_unleashed/mutable_references.stderr (new file, 26 lines)
@@ -0,0 +1,26 @@
warning: skipping const checks
  --> $DIR/mutable_references.rs:30:10
   |
LL |         *MEH.x.get() = 99;
   |          ^^^^^

warning: skipping const checks
  --> $DIR/mutable_references.rs:30:9
   |
LL |         *MEH.x.get() = 99;
   |         ^^^^^^^^^^^^^^^^^

warning: skipping const checks
  --> $DIR/mutable_references.rs:33:5
   |
LL |     *OH_YES = 99;
   |     ^^^^^^^^^^^^

error[E0594]: cannot assign to `*OH_YES`, as `OH_YES` is an immutable static item
  --> $DIR/mutable_references.rs:33:5
   |
LL |     *OH_YES = 99;
   |     ^^^^^^^^^^^^ cannot assign

error: aborting due to previous error
src/test/ui/consts/miri_unleashed/mutable_references_ice.rs (new file, 28 lines)
@@ -0,0 +1,28 @@
// compile-flags: -Zunleash-the-miri-inside-of-you
// failure-status: 101
// rustc-env:RUST_BACKTRACE=0
// normalize-stderr-test "note: rustc 1.* running on .*" -> "note: rustc VERSION running on TARGET"
// normalize-stderr-test "note: compiler flags: .*" -> "note: compiler flags: FLAGS"

#![allow(const_err)]

use std::cell::UnsafeCell;

// this test ICEs to ensure that our mutability story is sound

struct Meh {
    x: &'static UnsafeCell<i32>,
}

unsafe impl Sync for Meh {}

// the following will never be ok!
const MUH: Meh = Meh {
    x: &UnsafeCell::new(42),
};

fn main() {
    unsafe {
        *MUH.x.get() = 99; //~ WARN skipping const checks
    }
}
src/test/ui/consts/miri_unleashed/mutable_references_ice.stderr (new file, 21 lines)
@@ -0,0 +1,21 @@
warning: skipping const checks
  --> $DIR/mutable_references_ice.rs:26:9
   |
LL |         *MUH.x.get() = 99;
   |         ^^^^^^^^^^^^^^^^^

thread 'rustc' panicked at 'assertion failed: `(left != right)`
  left: `Const`,
 right: `Const`: UnsafeCells are not allowed behind references in constants. This should have been prevented statically by const qualification. If this were allowed one would be able to change a constant at one use site and other use sites may arbitrarily decide to change, too.', src/librustc_mir/interpret/intern.rs:126:17
note: Run with `RUST_BACKTRACE=1` environment variable to display a backtrace.

error: internal compiler error: unexpected panic

note: the compiler unexpectedly panicked. this is a bug.

note: we would appreciate a bug report: https://github.com/rust-lang/rust/blob/master/CONTRIBUTING.md#bug-reports

note: rustc VERSION running on TARGET

note: compiler flags: FLAGS
src/test/ui/consts/packed_pattern.rs (new file, 19 lines)
@@ -0,0 +1,19 @@
// run-pass

#[derive(PartialEq, Eq, Copy, Clone)]
#[repr(packed)]
struct Foo {
    field: (i64, u32, u32, u32),
}

const FOO: Foo = Foo {
    field: (5, 6, 7, 8),
};

fn main() {
    match FOO {
        Foo { field: (5, 6, 7, 8) } => {},
        FOO => unreachable!(),
        _ => unreachable!(),
    }
}
src/test/ui/consts/static-raw-pointer-interning.rs (new file, 15 lines)
@@ -0,0 +1,15 @@
// run-pass

static FOO: Foo = Foo {
    field: &42 as *const i32,
};

struct Foo {
    field: *const i32,
}

unsafe impl Sync for Foo {}

fn main() {
    assert_eq!(unsafe { *FOO.field }, 42);
}
src/test/ui/consts/static-raw-pointer-interning2.rs (new file, 15 lines)
@@ -0,0 +1,15 @@
// run-pass

static mut FOO: Foo = Foo {
    field: &mut [42] as *mut [i32] as *mut i32,
};

struct Foo {
    field: *mut i32,
}

unsafe impl Sync for Foo {}

fn main() {
    assert_eq!(unsafe { *FOO.field = 69; *FOO.field }, 69);
}
@@ -6,5 +6,6 @@ union Uninit {
 }

 const UNINIT: Uninit = Uninit { uninit: () };
+const UNINIT2: (Uninit,) = (Uninit { uninit: () }, );

 fn main() {}