use rustc_middle::mir;
use rustc_middle::ty::layout::HasTyCtxt;
use rustc_middle::ty::{self, Ty};
use std::borrow::Borrow;
use std::collections::hash_map::Entry;
use std::hash::Hash;

use rustc_data_structures::fx::FxHashMap;

use rustc_ast::Mutability;
use rustc_hir::def_id::DefId;
use rustc_middle::mir::AssertMessage;
use rustc_session::Limit;
use rustc_span::symbol::{sym, Symbol};

use crate::interpret::{
    self, compile_time_machine, AllocId, Allocation, Frame, GlobalId, ImmTy, InterpCx,
    InterpResult, Memory, OpTy, PlaceTy, Pointer, Scalar,
};

use super::error::*;

impl<'mir, 'tcx> InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>> {
    /// Evaluate a const function where all arguments (if any) are zero-sized types.
    /// The evaluation is memoized thanks to the query system.
    ///
    /// Returns `true` if the call has been evaluated.
    fn try_eval_const_fn_call(
        &mut self,
        instance: ty::Instance<'tcx>,
        ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>,
        args: &[OpTy<'tcx>],
    ) -> InterpResult<'tcx, bool> {
        trace!("try_eval_const_fn_call: {:?}", instance);
        // Because `#[track_caller]` adds an implicit non-ZST argument, we also cannot
        // perform this optimization on items tagged with it.
        if instance.def.requires_caller_location(self.tcx()) {
            return Ok(false);
        }
        // For the moment we only do this for functions which take no arguments
        // (or all arguments are ZSTs) so that we don't memoize too much.
        if args.iter().any(|a| !a.layout.is_zst()) {
            return Ok(false);
        }

        let dest = match ret {
            Some((dest, _)) => dest,
            // Don't memoize diverging function calls.
            None => return Ok(false),
        };

        let gid = GlobalId { instance, promoted: None };

        let place = self.eval_to_allocation(gid)?;

        self.copy_op(place.into(), dest)?;

        self.return_to_block(ret.map(|r| r.1))?;
        trace!("{:?}", self.dump_place(*dest));
        Ok(true)
    }
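
    // As an illustration (hypothetical user code, not part of this module): a zero-argument
    // `const fn` call like the one below is exactly what the memoization above targets. The
    // result is computed once through the query system and reused for later calls to the same
    // instance.
    //
    //     const fn answer() -> u32 { 42 }
    //     const A: u32 = answer();
    //     const B: u32 = answer(); // served from the memoized query result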

    /// "Intercept" a function call to a panic-related function
    /// because we have something special to do for it.
    /// If this returns successfully (`Ok`), the function should just be evaluated normally.
    fn hook_panic_fn(
        &mut self,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
    ) -> InterpResult<'tcx> {
        let def_id = instance.def_id();
        if Some(def_id) == self.tcx.lang_items().panic_fn()
            || Some(def_id) == self.tcx.lang_items().begin_panic_fn()
        {
            // &'static str
            assert!(args.len() == 1);

            let msg_place = self.deref_operand(args[0])?;
            let msg = Symbol::intern(self.read_str(msg_place)?);
            let span = self.find_closest_untracked_caller_location();
            let (file, line, col) = self.location_triple_for_span(span);
            Err(ConstEvalErrKind::Panic { msg, file, line, col }.into())
        } else {
            Ok(())
        }
    }
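
    // Sketch of the user-visible effect (hypothetical const code; panicking in const contexts
    // was still feature-gated at this point): the panic is reported as a const-eval error
    // carrying the message and caller location, instead of invoking the panic runtime.
    //
    //     const _: () = panic!("boom"); // rejected at compile time, reporting the message "boom"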
}

/// Extra machine state for CTFE, and the Machine instance.
pub struct CompileTimeInterpreter<'mir, 'tcx> {
    /// For now, the number of terminators that can be evaluated before we throw a resource
    /// exhaustion error.
    ///
    /// Setting this to `0` disables the limit and allows the interpreter to run forever.
    pub steps_remaining: usize,

    /// The virtual call stack.
    pub(crate) stack: Vec<Frame<'mir, 'tcx, (), ()>>,
}
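
// Context (descriptive note): `steps_remaining` is seeded from the crate's const-eval limit in
// `CompileTimeInterpreter::new` below. At the time of writing, the limit could be raised with the
// unstable `const_eval_limit` crate attribute, e.g. (hypothetical user code):
//
//     #![feature(const_eval_limit)]
//     #![const_eval_limit = "10000000"]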

#[derive(Copy, Clone, Debug)]
pub struct MemoryExtra {
    /// We need to make sure consts never point to anything mutable, even recursively. That is
    /// relied on for pattern matching on consts with references.
    /// To achieve this, two pieces have to work together:
    /// * Interning makes everything outside of statics immutable.
    /// * Pointers to allocations inside of statics can never leak outside, to a non-static global.
    /// This boolean here controls the second part.
    pub(super) can_access_statics: bool,
}
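
// Context for `can_access_statics` (descriptive note, not load-bearing): the flag is set depending
// on what is being evaluated. Initializers of `static` items may read other statics, while `const`
// and promoted evaluation may not; `before_access_global` in the `Machine` impl below enforces the
// read side of this by returning `ConstEvalErrKind::ConstAccessesStatic`.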

impl<'mir, 'tcx> CompileTimeInterpreter<'mir, 'tcx> {
    pub(super) fn new(const_eval_limit: Limit) -> Self {
        CompileTimeInterpreter { steps_remaining: const_eval_limit.0, stack: Vec::new() }
    }
}

impl<K: Hash + Eq, V> interpret::AllocMap<K, V> for FxHashMap<K, V> {
    #[inline(always)]
    fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool
    where
        K: Borrow<Q>,
    {
        FxHashMap::contains_key(self, k)
    }

    #[inline(always)]
    fn insert(&mut self, k: K, v: V) -> Option<V> {
        FxHashMap::insert(self, k, v)
    }

    #[inline(always)]
    fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V>
    where
        K: Borrow<Q>,
    {
        FxHashMap::remove(self, k)
    }

    #[inline(always)]
    fn filter_map_collect<T>(&self, mut f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T> {
        self.iter().filter_map(move |(k, v)| f(k, &*v)).collect()
    }

    #[inline(always)]
    fn get_or<E>(&self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&V, E> {
        match self.get(&k) {
            Some(v) => Ok(v),
            None => {
                vacant()?;
                bug!("The CTFE machine shouldn't ever need to extend the alloc_map when reading")
            }
        }
    }

    #[inline(always)]
    fn get_mut_or<E>(&mut self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&mut V, E> {
        match self.entry(k) {
            Entry::Occupied(e) => Ok(e.into_mut()),
            Entry::Vacant(e) => {
                let v = vacant()?;
                Ok(e.insert(v))
            }
        }
    }
}

crate type CompileTimeEvalContext<'mir, 'tcx> =
    InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>;

impl interpret::MayLeak for ! {
    #[inline(always)]
    fn may_leak(self) -> bool {
        // `self` is uninhabited
        self
    }
}

impl<'mir, 'tcx: 'mir> CompileTimeEvalContext<'mir, 'tcx> {
    fn guaranteed_eq(&mut self, a: Scalar, b: Scalar) -> bool {
        match (a, b) {
            // Comparisons between integers are always known.
            (Scalar::Raw { .. }, Scalar::Raw { .. }) => a == b,
            // Equality between a pointer and an integer can never be known for sure.
            (Scalar::Raw { .. }, Scalar::Ptr(_)) | (Scalar::Ptr(_), Scalar::Raw { .. }) => false,
            // FIXME: return `true` when both sides are the same pointer, *except* that
            // some things (like functions and vtables) do not have stable addresses
            // so we need to be careful around them (see e.g. #73722).
            (Scalar::Ptr(_), Scalar::Ptr(_)) => false,
        }
    }

    fn guaranteed_ne(&mut self, a: Scalar, b: Scalar) -> bool {
        match (a, b) {
            // Comparisons between integers are always known.
            (Scalar::Raw { .. }, Scalar::Raw { .. }) => a != b,
            // Comparisons of abstract pointers with null pointers are known if the pointer
            // is in bounds, because a pointer that is in bounds cannot be null.
            (Scalar::Raw { data: 0, .. }, Scalar::Ptr(ptr))
            | (Scalar::Ptr(ptr), Scalar::Raw { data: 0, .. }) => !self.memory.ptr_may_be_null(ptr),
            // Inequality with integers other than null can never be known for sure.
            (Scalar::Raw { .. }, Scalar::Ptr(_)) | (Scalar::Ptr(_), Scalar::Raw { .. }) => false,
            // FIXME: return `true` for at least some comparisons where we can reliably
            // determine the result of runtime inequality tests at compile-time.
            // Examples include comparisons of addresses in different static items.
            (Scalar::Ptr(_), Scalar::Ptr(_)) => false,
        }
    }
}
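
// Summary of the conservative answers above (descriptive note): integer vs. integer comparisons
// are decided by the values; a pointer is never guaranteed equal to an integer, and is guaranteed
// unequal to null only when it is known to be in bounds; pointer vs. pointer comparisons guarantee
// nothing, because functions, vtables and other allocations need not have stable or distinct
// addresses at runtime (see #73722).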

impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, 'tcx> {
    compile_time_machine!(<'mir, 'tcx>);

    type MemoryExtra = MemoryExtra;

    fn find_mir_or_eval_fn(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
        ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>,
        _unwind: Option<mir::BasicBlock>, // unwinding is not supported in consts
    ) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>> {
        debug!("find_mir_or_eval_fn: {:?}", instance);

        // Only check non-glue functions.
        if let ty::InstanceDef::Item(def) = instance.def {
            // Execution might have wandered off into other crates, so we cannot do a stability-
            // sensitive check here. But we can at least rule out functions that are not const
            // at all.
            if ecx.tcx.is_const_fn_raw(def.did) {
                // If this function is a `const fn`, then under certain circumstances we
                // can evaluate the call via the query system, thus memoizing all future calls.
                if ecx.try_eval_const_fn_call(instance, ret, args)? {
                    return Ok(None);
                }
            } else {
                // Some functions we support even if they are non-const -- but avoid testing
                // that for const fn!
                ecx.hook_panic_fn(instance, args)?;
                // We certainly do *not* want to actually call the fn
                // though, so be sure we return here.
                throw_unsup_format!("calling non-const function `{}`", instance)
            }
        }
        // This is a const fn. Call it.
        Ok(Some(match ecx.load_mir(instance.def, None) {
            Ok(body) => body,
            Err(err) => {
                if let err_unsup!(NoMirFor(did)) = err.kind {
                    let path = ecx.tcx.def_path_str(did);
                    return Err(ConstEvalErrKind::NeedsRfc(format!(
                        "calling extern function `{}`",
                        path
                    ))
                    .into());
                }
                return Err(err);
            }
        }))
    }

    fn call_intrinsic(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
        ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>,
        _unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        // Shared intrinsics.
        if ecx.emulate_intrinsic(instance, args, ret)? {
            return Ok(());
        }
        let intrinsic_name = ecx.tcx.item_name(instance.def_id());

        // CTFE-specific intrinsics.
        let (dest, ret) = match ret {
            None => {
                return Err(ConstEvalErrKind::NeedsRfc(format!(
                    "calling intrinsic `{}`",
                    intrinsic_name
                ))
                .into());
            }
            Some(p) => p,
        };
        match intrinsic_name {
            sym::ptr_guaranteed_eq | sym::ptr_guaranteed_ne => {
                let a = ecx.read_immediate(args[0])?.to_scalar()?;
                let b = ecx.read_immediate(args[1])?.to_scalar()?;
                let cmp = if intrinsic_name == sym::ptr_guaranteed_eq {
                    ecx.guaranteed_eq(a, b)
                } else {
                    ecx.guaranteed_ne(a, b)
                };
                ecx.write_scalar(Scalar::from_bool(cmp), dest)?;
            }
            _ => {
                return Err(ConstEvalErrKind::NeedsRfc(format!(
                    "calling intrinsic `{}`",
                    intrinsic_name
                ))
                .into());
            }
        }

        ecx.go_to_block(ret);
        Ok(())
    }

    fn assert_panic(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        msg: &AssertMessage<'tcx>,
        _unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        use rustc_middle::mir::AssertKind::*;
        // Convert `AssertKind<Operand>` to `AssertKind<Scalar>`.
        let eval_to_int =
            |op| ecx.read_immediate(ecx.eval_operand(op, None)?).map(|x| x.to_const_int());
        let err = match msg {
            BoundsCheck { ref len, ref index } => {
                let len = eval_to_int(len)?;
                let index = eval_to_int(index)?;
                BoundsCheck { len, index }
            }
            Overflow(op, l, r) => Overflow(*op, eval_to_int(l)?, eval_to_int(r)?),
            OverflowNeg(op) => OverflowNeg(eval_to_int(op)?),
            DivisionByZero(op) => DivisionByZero(eval_to_int(op)?),
            RemainderByZero(op) => RemainderByZero(eval_to_int(op)?),
            ResumedAfterReturn(generator_kind) => ResumedAfterReturn(*generator_kind),
            ResumedAfterPanic(generator_kind) => ResumedAfterPanic(*generator_kind),
        };
        Err(ConstEvalErrKind::AssertFailure(err).into())
    }
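
    // Illustration (hypothetical user code; simple literal cases like these may also be caught
    // earlier by lints or const propagation): MIR `Assert` terminators from overflow and bounds
    // checks reach this hook during const evaluation and become errors instead of panics, e.g.
    //
    //     const A: u8 = u8::MAX + 1;   // Overflow(Add, ..)
    //     const B: i32 = [1, 2, 3][4]; // BoundsCheck { len: 3, index: 4 }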

    fn ptr_to_int(_mem: &Memory<'mir, 'tcx, Self>, _ptr: Pointer) -> InterpResult<'tcx, u64> {
        Err(ConstEvalErrKind::NeedsRfc("pointer-to-integer cast".to_string()).into())
    }

    fn binary_ptr_op(
        _ecx: &InterpCx<'mir, 'tcx, Self>,
        _bin_op: mir::BinOp,
        _left: ImmTy<'tcx>,
        _right: ImmTy<'tcx>,
    ) -> InterpResult<'tcx, (Scalar, bool, Ty<'tcx>)> {
        Err(ConstEvalErrKind::NeedsRfc("pointer arithmetic or comparison".to_string()).into())
    }

    fn box_alloc(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        _dest: PlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        Err(ConstEvalErrKind::NeedsRfc("heap allocations via `box` keyword".to_string()).into())
    }

    fn before_terminator(ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
        // Either the step limit is disabled (set to `0`), or it has already been hit in a
        // previous call to `before_terminator`.
        if ecx.machine.steps_remaining == 0 {
            return Ok(());
        }

        ecx.machine.steps_remaining -= 1;
        if ecx.machine.steps_remaining == 0 {
            throw_exhaust!(StepLimitReached)
        }

        Ok(())
    }
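
    // Descriptive note: `steps_remaining` is initialized from the crate's const-eval limit (see
    // `CompileTimeInterpreter::new`), so a long-running constant such as an unbounded counting
    // loop eventually aborts with `StepLimitReached` unless the crate raises or disables the
    // limit, e.g. (hypothetical user code):
    //
    //     const _: () = { let mut i = 0u64; loop { i += 1; } }; // exceeds the step limit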

    #[inline(always)]
    fn init_frame_extra(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        frame: Frame<'mir, 'tcx>,
    ) -> InterpResult<'tcx, Frame<'mir, 'tcx>> {
        // Enforce stack size limit. Add 1 because this is run before the new frame is pushed.
        if !ecx.tcx.sess.recursion_limit().value_within_limit(ecx.stack().len() + 1) {
            throw_exhaust!(StackFrameLimitReached)
        } else {
            Ok(frame)
        }
    }
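
    // Descriptive note: the frame limit is tied to the crate's `#![recursion_limit]`, so deeply
    // recursive const evaluation fails with `StackFrameLimitReached` instead of overflowing the
    // compiler's own stack. A sketch (hypothetical user code):
    //
    //     const fn rec(n: u64) -> u64 { if n == 0 { 0 } else { rec(n - 1) } }
    //     const X: u64 = rec(10_000_000); // exhausts the interpreter's frame limit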

    #[inline(always)]
    fn stack(
        ecx: &'a InterpCx<'mir, 'tcx, Self>,
    ) -> &'a [Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>] {
        &ecx.machine.stack
    }

    #[inline(always)]
    fn stack_mut(
        ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
    ) -> &'a mut Vec<Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>> {
        &mut ecx.machine.stack
    }

    fn before_access_global(
        memory_extra: &MemoryExtra,
        alloc_id: AllocId,
        allocation: &Allocation,
        static_def_id: Option<DefId>,
        is_write: bool,
    ) -> InterpResult<'tcx> {
        if is_write {
            // Write access. These are never allowed, but we give a targeted error message.
            if allocation.mutability == Mutability::Not {
                Err(err_ub!(WriteToReadOnly(alloc_id)).into())
            } else {
                Err(ConstEvalErrKind::ModifiedGlobal.into())
            }
        } else {
            // Read access. These are usually allowed, with some exceptions.
            if memory_extra.can_access_statics {
                // Machine configuration allows us to read from anything (e.g., `static`
                // initializer).
                Ok(())
            } else if static_def_id.is_some() {
                // Machine configuration does not allow us to read statics
                // (e.g., `const` initializer).
                // See const_eval::machine::MemoryExtra::can_access_statics for why
                // this check is so important: if we could read statics, we could read pointers
                // to mutable allocations *inside* statics. These allocations are not themselves
                // statics, so pointers to them can get around the check in `validity.rs`.
                Err(ConstEvalErrKind::ConstAccessesStatic.into())
            } else {
                // Immutable global, this read is fine.
                // But make sure we never accept a read from something mutable, as that would be
                // unsound: the content of such an allocation could differ between now and
                // run-time, so reading it now might return the wrong value.
                assert_eq!(allocation.mutability, Mutability::Not);
                Ok(())
            }
        }
    }
}

// Please do not add any code below the above `Machine` trait impl. I (oli-obk) plan more cleanups
// so we can end up having a file with just that impl, but for now, let's keep the impl
// discoverable at the bottom of this file.