use std::borrow::{Borrow, Cow};
use std::fmt;
use std::hash::Hash;

use rustc_abi::{Align, Size};
use rustc_ast::Mutability;
use rustc_data_structures::fx::{FxHashMap, FxIndexMap, IndexEntry};
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::{self as hir, CRATE_HIR_ID, LangItem};
use rustc_middle::mir::AssertMessage;
use rustc_middle::mir::interpret::ReportedErrorInfo;
use rustc_middle::query::TyCtxtAt;
use rustc_middle::ty::layout::{HasTypingEnv, TyAndLayout};
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_middle::{bug, mir};
use rustc_span::{Span, Symbol, sym};
use rustc_target::callconv::FnAbi;
use tracing::debug;

use super::error::*;
use crate::errors::{LongRunning, LongRunningWarn};
use crate::fluent_generated as fluent;
use crate::interpret::{
    self, AllocId, AllocInit, AllocRange, ConstAllocation, CtfeProvenance, FnArg, Frame,
    GlobalAlloc, ImmTy, InterpCx, InterpResult, MPlaceTy, OpTy, RangeSet, Scalar,
    compile_time_machine, interp_ok, throw_exhaust, throw_inval, throw_ub, throw_ub_custom,
    throw_unsup, throw_unsup_format,
};

/// When hitting this many interpreted terminators we emit a deny-by-default lint
/// that notifies the user that their constant takes a long time to evaluate. If that's
/// what they intended, they can just allow the lint.
const LINT_TERMINATOR_LIMIT: usize = 2_000_000;
/// The limit used by `-Z tiny-const-eval-limit`. This smaller limit is useful for internal
/// tests that should not need to run for 30s or more to demonstrate the behaviour.
const TINY_LINT_TERMINATOR_LIMIT: usize = 20;
/// After this many interpreted terminators, we emit a progress indicator whenever the
/// number of interpreted terminators reaches a power of two.
const PROGRESS_INDICATOR_START: usize = 4_000_000;
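
// For reference, a constant that intentionally runs for a long time can opt out of the
// deny-by-default lint that `LINT_TERMINATOR_LIMIT` triggers. A sketch (the item shape
// is illustrative):
//
//     #[allow(long_running_const_eval)]
//     const SLOW: u32 = {
//         let mut i = 0_u32;
//         while i < 100_000_000 { i += 1; } // far more than 2M terminators
//         i
//     };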

/// Extra machine state for CTFE, and the Machine instance.
//
// Should be public because out-of-tree rustc consumers need this
// if they want to interact with constant values.
pub struct CompileTimeMachine<'tcx> {
    /// The number of terminators that have been evaluated.
    ///
    /// This is used to produce lints informing the user that the compiler is not stuck.
    /// Set to `usize::MAX` to never report anything.
    pub(super) num_evaluated_steps: usize,

    /// The virtual call stack.
    pub(super) stack: Vec<Frame<'tcx>>,

    /// Pattern matching on consts with references would be unsound if those references
    /// could point to anything mutable. Therefore, when evaluating consts and when
    /// constructing valtrees, we ensure that only immutable global memory can be accessed.
    pub(super) can_access_mut_global: CanAccessMutGlobal,

    /// Whether to check alignment during evaluation.
    pub(super) check_alignment: CheckAlignment,

    /// If `Some`, we are evaluating the initializer of the static with the given `LocalDefId`,
    /// storing the result in the given `AllocId`.
    /// Used to prevent reads from a static's base allocation, as that may allow for
    /// self-initialization loops.
    pub(crate) static_root_ids: Option<(AllocId, LocalDefId)>,

    /// A cache of "data range" computations for unions (i.e., the offsets of non-padding bytes).
    union_data_ranges: FxHashMap<Ty<'tcx>, RangeSet>,
}

#[derive(Copy, Clone)]
pub enum CheckAlignment {
    /// Ignore all alignment requirements.
    /// This is mainly used in interning.
    No,
    /// Hard error when dereferencing a misaligned pointer.
    Error,
}

#[derive(Copy, Clone, PartialEq)]
pub(crate) enum CanAccessMutGlobal {
    No,
    Yes,
}

impl From<bool> for CanAccessMutGlobal {
    fn from(value: bool) -> Self {
        if value { Self::Yes } else { Self::No }
    }
}

impl<'tcx> CompileTimeMachine<'tcx> {
    pub(crate) fn new(
        can_access_mut_global: CanAccessMutGlobal,
        check_alignment: CheckAlignment,
    ) -> Self {
        CompileTimeMachine {
            num_evaluated_steps: 0,
            stack: Vec::new(),
            can_access_mut_global,
            check_alignment,
            static_root_ids: None,
            union_data_ranges: FxHashMap::default(),
        }
    }
}
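
// A hedged sketch of construction (the actual call sites live in this crate's eval-query
// code and may pass different configuration):
//
//     // Evaluating a `const` initializer: no reads from mutable globals, and
//     // misaligned dereferences are hard errors.
//     let machine = CompileTimeMachine::new(CanAccessMutGlobal::No, CheckAlignment::Error);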

impl<K: Hash + Eq, V> interpret::AllocMap<K, V> for FxIndexMap<K, V> {
    #[inline(always)]
    fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool
    where
        K: Borrow<Q>,
    {
        FxIndexMap::contains_key(self, k)
    }

    #[inline(always)]
    fn contains_key_ref<Q: ?Sized + Hash + Eq>(&self, k: &Q) -> bool
    where
        K: Borrow<Q>,
    {
        FxIndexMap::contains_key(self, k)
    }

    #[inline(always)]
    fn insert(&mut self, k: K, v: V) -> Option<V> {
        FxIndexMap::insert(self, k, v)
    }

    #[inline(always)]
    fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V>
    where
        K: Borrow<Q>,
    {
        // FIXME(#120456) - is `swap_remove` correct?
        FxIndexMap::swap_remove(self, k)
    }

    #[inline(always)]
    fn filter_map_collect<T>(&self, mut f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T> {
        self.iter().filter_map(move |(k, v)| f(k, v)).collect()
    }

    #[inline(always)]
    fn get_or<E>(&self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&V, E> {
        match self.get(&k) {
            Some(v) => Ok(v),
            None => {
                vacant()?;
                bug!("The CTFE machine shouldn't ever need to extend the alloc_map when reading")
            }
        }
    }

    #[inline(always)]
    fn get_mut_or<E>(&mut self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&mut V, E> {
        match self.entry(k) {
            IndexEntry::Occupied(e) => Ok(e.into_mut()),
            IndexEntry::Vacant(e) => {
                let v = vacant()?;
                Ok(e.insert(v))
            }
        }
    }
}
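
// Context for the `swap_remove` FIXME above: `IndexMap::swap_remove` fills the removed
// slot with the map's last entry, perturbing iteration order. A minimal illustration:
//
//     let mut m = FxIndexMap::default();
//     m.insert("a", 1);
//     m.insert("b", 2);
//     m.insert("c", 3);
//     m.swap_remove("a");
//     // Iteration order is now ["c", "b"], not the insertion order ["b", "c"].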

pub type CompileTimeInterpCx<'tcx> = InterpCx<'tcx, CompileTimeMachine<'tcx>>;

#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum MemoryKind {
    Heap,
}

impl fmt::Display for MemoryKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            MemoryKind::Heap => write!(f, "heap allocation"),
        }
    }
}

impl interpret::MayLeak for MemoryKind {
    #[inline(always)]
    fn may_leak(self) -> bool {
        match self {
            MemoryKind::Heap => false,
        }
    }
}

impl interpret::MayLeak for ! {
    #[inline(always)]
    fn may_leak(self) -> bool {
        // `self` is uninhabited
        self
    }
}

impl<'tcx> CompileTimeInterpCx<'tcx> {
    fn location_triple_for_span(&self, span: Span) -> (Symbol, u32, u32) {
        let topmost = span.ctxt().outer_expn().expansion_cause().unwrap_or(span);
        let caller = self.tcx.sess.source_map().lookup_char_pos(topmost.lo());

        use rustc_session::RemapFileNameExt;
        use rustc_session::config::RemapPathScopeComponents;
        (
            Symbol::intern(
                &caller
                    .file
                    .name
                    .for_scope(self.tcx.sess, RemapPathScopeComponents::DIAGNOSTICS)
                    .to_string_lossy(),
            ),
            u32::try_from(caller.line).unwrap(),
            u32::try_from(caller.col_display).unwrap().checked_add(1).unwrap(),
        )
    }
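
    // For orientation (illustrative values): a panic whose topmost expansion site is the
    // first column of line 3 in `src/lib.rs` yields `(Symbol::intern("src/lib.rs"), 3, 1)`;
    // the `checked_add(1)` above converts the zero-based `col_display` to a one-based column.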

    /// "Intercept" a function call, because we have something special to do for it.
    /// All `#[rustc_do_not_const_check]` functions MUST be hooked here.
    /// If this returns `Some(f)`, where `f` may be `instance` or a different function with
    /// compatible arguments, then evaluation should continue with `f`.
    /// If this returns `None`, the function call has been handled and the function has returned.
    fn hook_special_const_fn(
        &mut self,
        instance: ty::Instance<'tcx>,
        args: &[FnArg<'tcx>],
        _dest: &MPlaceTy<'tcx>,
        _ret: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>> {
        let def_id = instance.def_id();

        if self.tcx.has_attr(def_id, sym::rustc_const_panic_str)
            || self.tcx.is_lang_item(def_id, LangItem::BeginPanic)
        {
            let args = self.copy_fn_args(args);
            // &str or &&str
            assert!(args.len() == 1);

            let mut msg_place = self.deref_pointer(&args[0])?;
            while msg_place.layout.ty.is_ref() {
                msg_place = self.deref_pointer(&msg_place)?;
            }

            let msg = Symbol::intern(self.read_str(&msg_place)?);
            let span = self.find_closest_untracked_caller_location();
            let (file, line, col) = self.location_triple_for_span(span);
            return Err(ConstEvalErrKind::Panic { msg, file, line, col }).into();
        } else if self.tcx.is_lang_item(def_id, LangItem::PanicFmt) {
            // For panic_fmt, call const_panic_fmt instead.
            let const_def_id = self.tcx.require_lang_item(LangItem::ConstPanicFmt, None);
            let new_instance = ty::Instance::expect_resolve(
                *self.tcx,
                self.typing_env(),
                const_def_id,
                instance.args,
                self.cur_span(),
            );

            return interp_ok(Some(new_instance));
        }
        interp_ok(Some(instance))
    }
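
    // For orientation: because `begin_panic` and `#[rustc_const_panic_str]` functions are
    // hooked above (and `panic_fmt` is rerouted to `const_panic_fmt`), a constant like
    //
    //     const C: () = panic!("boom");
    //
    // never executes the panic runtime; the `&str` payload is read out here and surfaces
    // as `ConstEvalErrKind::Panic { msg: "boom", .. }` with the caller's file/line/column.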

    /// See documentation on the `ptr_guaranteed_cmp` intrinsic.
    /// Returns `2` if the result is unknown.
    /// Returns `1` if the pointers are guaranteed equal.
    /// Returns `0` if the pointers are guaranteed unequal.
    ///
    /// Note that this intrinsic is exposed on stable for comparison with null. In other words, any
    /// change to this function that affects comparison with null is insta-stable!
    fn guaranteed_cmp(&mut self, a: Scalar, b: Scalar) -> InterpResult<'tcx, u8> {
        interp_ok(match (a, b) {
            // Comparisons between integers are always known.
            (Scalar::Int { .. }, Scalar::Int { .. }) => {
                if a == b {
                    1
                } else {
                    0
                }
            }
            // Comparisons of abstract pointers with null pointers are known if the pointer
            // is in bounds, because if they are in bounds, the pointer can't be null.
            // Inequality with integers other than null can never be known for sure.
            (Scalar::Int(int), ptr @ Scalar::Ptr(..))
            | (ptr @ Scalar::Ptr(..), Scalar::Int(int))
                if int.is_null() && !self.scalar_may_be_null(ptr)? =>
            {
                0
            }
            // Equality with integers can never be known for sure.
            (Scalar::Int { .. }, Scalar::Ptr(..)) | (Scalar::Ptr(..), Scalar::Int { .. }) => 2,
            // FIXME: return a `1` for when both sides are the same pointer, *except* that
            // some things (like functions and vtables) do not have stable addresses
            // so we need to be careful around them (see e.g. #73722).
            // FIXME: return `0` for at least some comparisons where we can reliably
            // determine the result of runtime inequality tests at compile-time.
            // Examples include comparison of addresses in different static items.
            (Scalar::Ptr(..), Scalar::Ptr(..)) => 2,
        })
    }
}
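
// For orientation (a sketch, assuming `<*const T>::is_null` is const-callable): these
// rules are what make null checks work in const contexts,
//
//     const NOT_NULL: bool = !(&42 as *const i32).is_null(); // in bounds, so never null
//
// while comparing two distinct abstract pointers stays "unknown" (return value `2`).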

impl<'tcx> CompileTimeMachine<'tcx> {
    /// Find the first stack frame that is within the current crate, if any;
    /// otherwise, return the crate's root `HirId`.
    #[inline(always)]
    pub fn best_lint_scope(&self, tcx: TyCtxt<'tcx>) -> hir::HirId {
        self.stack.iter().find_map(|frame| frame.lint_root(tcx)).unwrap_or(CRATE_HIR_ID)
    }
}

impl<'tcx> interpret::Machine<'tcx> for CompileTimeMachine<'tcx> {
    compile_time_machine!(<'tcx>);

    type MemoryKind = MemoryKind;

    const PANIC_ON_ALLOC_FAIL: bool = false; // will be raised as a proper error

    #[inline(always)]
    fn enforce_alignment(ecx: &InterpCx<'tcx, Self>) -> bool {
        matches!(ecx.machine.check_alignment, CheckAlignment::Error)
    }

    #[inline(always)]
    fn enforce_validity(ecx: &InterpCx<'tcx, Self>, layout: TyAndLayout<'tcx>) -> bool {
        ecx.tcx.sess.opts.unstable_opts.extra_const_ub_checks || layout.is_uninhabited()
    }

    fn load_mir(
        ecx: &InterpCx<'tcx, Self>,
        instance: ty::InstanceKind<'tcx>,
    ) -> InterpResult<'tcx, &'tcx mir::Body<'tcx>> {
        match instance {
            ty::InstanceKind::Item(def) => interp_ok(ecx.tcx.mir_for_ctfe(def)),
            _ => interp_ok(ecx.tcx.instance_mir(instance)),
        }
    }

    fn find_mir_or_eval_fn(
        ecx: &mut InterpCx<'tcx, Self>,
        orig_instance: ty::Instance<'tcx>,
        _abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx>],
        dest: &MPlaceTy<'tcx>,
        ret: Option<mir::BasicBlock>,
        _unwind: mir::UnwindAction, // unwinding is not supported in consts
    ) -> InterpResult<'tcx, Option<(&'tcx mir::Body<'tcx>, ty::Instance<'tcx>)>> {
        debug!("find_mir_or_eval_fn: {:?}", orig_instance);

        // Replace some functions.
        let Some(instance) = ecx.hook_special_const_fn(orig_instance, args, dest, ret)? else {
            // Call has already been handled.
            return interp_ok(None);
        };

        // Only check non-glue functions.
        if let ty::InstanceKind::Item(def) = instance.def {
            // Execution might have wandered off into other crates, so we cannot do a stability-
            // sensitive check here. But we can at least rule out functions that are not const at
            // all. That said, we have to allow calling functions inside a trait marked with
            // `#[const_trait]`. These *are* const-checked!
            if !ecx.tcx.is_const_fn(def) || ecx.tcx.has_attr(def, sym::rustc_do_not_const_check) {
                // We certainly do *not* want to actually call the fn
                // though, so be sure we return here.
                throw_unsup_format!("calling non-const function `{}`", instance)
            }
        }

        // This is a const fn. Call it.
        // In case of replacement, we return the *original* instance to make backtraces work out
        // (and we hope this does not confuse the FnAbi checks too much).
        interp_ok(Some((ecx.load_mir(instance.def, None)?, orig_instance)))
    }

    fn panic_nounwind(ecx: &mut InterpCx<'tcx, Self>, msg: &str) -> InterpResult<'tcx> {
        let msg = Symbol::intern(msg);
        let span = ecx.find_closest_untracked_caller_location();
        let (file, line, col) = ecx.location_triple_for_span(span);
        Err(ConstEvalErrKind::Panic { msg, file, line, col }).into()
    }

    fn call_intrinsic(
        ecx: &mut InterpCx<'tcx, Self>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
        dest: &MPlaceTy<'tcx, Self::Provenance>,
        target: Option<mir::BasicBlock>,
        _unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>> {
        // Shared intrinsics.
        if ecx.eval_intrinsic(instance, args, dest, target)? {
            return interp_ok(None);
        }
        let intrinsic_name = ecx.tcx.item_name(instance.def_id());

        // CTFE-specific intrinsics.
        match intrinsic_name {
            sym::ptr_guaranteed_cmp => {
                let a = ecx.read_scalar(&args[0])?;
                let b = ecx.read_scalar(&args[1])?;
                let cmp = ecx.guaranteed_cmp(a, b)?;
                ecx.write_scalar(Scalar::from_u8(cmp), dest)?;
            }
            sym::const_allocate => {
                let size = ecx.read_scalar(&args[0])?.to_target_usize(ecx)?;
                let align = ecx.read_scalar(&args[1])?.to_target_usize(ecx)?;

                let align = match Align::from_bytes(align) {
                    Ok(a) => a,
                    Err(err) => throw_ub_custom!(
                        fluent::const_eval_invalid_align_details,
                        name = "const_allocate",
                        err_kind = err.diag_ident(),
                        align = err.align()
                    ),
                };

                let ptr = ecx.allocate_ptr(
                    Size::from_bytes(size),
                    align,
                    interpret::MemoryKind::Machine(MemoryKind::Heap),
                    AllocInit::Uninit,
                )?;
                ecx.write_pointer(ptr, dest)?;
            }
            sym::const_deallocate => {
                let ptr = ecx.read_pointer(&args[0])?;
                let size = ecx.read_scalar(&args[1])?.to_target_usize(ecx)?;
                let align = ecx.read_scalar(&args[2])?.to_target_usize(ecx)?;

                let size = Size::from_bytes(size);
                let align = match Align::from_bytes(align) {
                    Ok(a) => a,
                    Err(err) => throw_ub_custom!(
                        fluent::const_eval_invalid_align_details,
                        name = "const_deallocate",
                        err_kind = err.diag_ident(),
                        align = err.align()
                    ),
                };

                // If an allocation was created in another const,
                // we don't deallocate it.
                let (alloc_id, _, _) = ecx.ptr_get_alloc_id(ptr, 0)?;
                let is_allocated_in_another_const = matches!(
                    ecx.tcx.try_get_global_alloc(alloc_id),
                    Some(interpret::GlobalAlloc::Memory(_))
                );

                if !is_allocated_in_another_const {
                    ecx.deallocate_ptr(
                        ptr,
                        Some((size, align)),
                        interpret::MemoryKind::Machine(MemoryKind::Heap),
                    )?;
                }
            }
            // The intrinsic represents whether the value is known to the optimizer (LLVM).
            // We're not doing any optimizations here, so there is no optimizer that could know the value.
            // (We know the value here in the machine of course, but this is the runtime of that code,
            // not the optimization stage.)
            sym::is_val_statically_known => ecx.write_scalar(Scalar::from_bool(false), dest)?,
            _ => {
                // We haven't handled the intrinsic, let's see if we can use a fallback body.
                if ecx.tcx.intrinsic(instance.def_id()).unwrap().must_be_overridden {
                    throw_unsup_format!(
                        "intrinsic `{intrinsic_name}` is not supported at compile-time"
                    );
                }
                return interp_ok(Some(ty::Instance {
                    def: ty::InstanceKind::Item(instance.def_id()),
                    args: instance.args,
                }));
            }
        }

        // Intrinsic is done, jump to next block.
        ecx.return_to_block(target)?;
        interp_ok(None)
    }
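
    // A hedged sketch of the guest-side API backed by `const_allocate`/`const_deallocate`
    // (the unstable `const_heap` feature; the exact feature gates are illustrative):
    //
    //     #![feature(core_intrinsics, const_heap)]
    //     const C: u8 = unsafe {
    //         let p = core::intrinsics::const_allocate(1, 1);
    //         *p = 17;
    //         let v = *p;
    //         core::intrinsics::const_deallocate(p, 1, 1);
    //         v
    //     };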

    fn assert_panic(
        ecx: &mut InterpCx<'tcx, Self>,
        msg: &AssertMessage<'tcx>,
        _unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        use rustc_middle::mir::AssertKind::*;
        // Convert `AssertKind<Operand>` to `AssertKind<Scalar>`.
        let eval_to_int =
            |op| ecx.read_immediate(&ecx.eval_operand(op, None)?).map(|x| x.to_const_int());
        let err = match msg {
            BoundsCheck { len, index } => {
                let len = eval_to_int(len)?;
                let index = eval_to_int(index)?;
                BoundsCheck { len, index }
            }
            Overflow(op, l, r) => Overflow(*op, eval_to_int(l)?, eval_to_int(r)?),
            OverflowNeg(op) => OverflowNeg(eval_to_int(op)?),
            DivisionByZero(op) => DivisionByZero(eval_to_int(op)?),
            RemainderByZero(op) => RemainderByZero(eval_to_int(op)?),
            ResumedAfterReturn(coroutine_kind) => ResumedAfterReturn(*coroutine_kind),
            ResumedAfterPanic(coroutine_kind) => ResumedAfterPanic(*coroutine_kind),
            MisalignedPointerDereference { ref required, ref found } => {
                MisalignedPointerDereference {
                    required: eval_to_int(required)?,
                    found: eval_to_int(found)?,
                }
            }
            NullPointerDereference => NullPointerDereference,
        };
        Err(ConstEvalErrKind::AssertFailure(err)).into()
    }

    fn binary_ptr_op(
        _ecx: &InterpCx<'tcx, Self>,
        _bin_op: mir::BinOp,
        _left: &ImmTy<'tcx>,
        _right: &ImmTy<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx>> {
        throw_unsup_format!("pointer arithmetic or comparison is not supported at compile-time");
    }

    fn increment_const_eval_counter(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        // If the addition overflows, the counter is saturated at `usize::MAX`: either
        // reporting is disabled, or the step limit was already hit in a previous call to
        // `increment_const_eval_counter`. In that case there is nothing more to do.
        if let Some(new_steps) = ecx.machine.num_evaluated_steps.checked_add(1) {
            let (limit, start) = if ecx.tcx.sess.opts.unstable_opts.tiny_const_eval_limit {
                (TINY_LINT_TERMINATOR_LIMIT, TINY_LINT_TERMINATOR_LIMIT)
            } else {
                (LINT_TERMINATOR_LIMIT, PROGRESS_INDICATOR_START)
            };

            ecx.machine.num_evaluated_steps = new_steps;
            // By default, we have a *deny* lint kicking in after some time
            // to ensure `loop {}` doesn't just run forever.
            // In case that lint got reduced, in particular for `--cap-lints` situations, we also
            // have a hard warning shown every now and then for really long executions.
            if new_steps == limit {
                // By default, we stop after `LINT_TERMINATOR_LIMIT` steps, but the user can
                // disable this lint to be able to run until the heat death of the universe
                // or power loss, whichever comes first.
                let hir_id = ecx.machine.best_lint_scope(*ecx.tcx);
                let is_error = ecx
                    .tcx
                    .lint_level_at_node(
                        rustc_session::lint::builtin::LONG_RUNNING_CONST_EVAL,
                        hir_id,
                    )
                    .0
                    .is_error();
                let span = ecx.cur_span();
                ecx.tcx.emit_node_span_lint(
                    rustc_session::lint::builtin::LONG_RUNNING_CONST_EVAL,
                    hir_id,
                    span,
                    LongRunning { item_span: ecx.tcx.span },
                );
                // If this was a hard error, don't bother continuing evaluation.
                if is_error {
                    let guard = ecx
                        .tcx
                        .dcx()
                        .span_delayed_bug(span, "The deny lint should have already errored");
                    throw_inval!(AlreadyReported(ReportedErrorInfo::allowed_in_infallible(guard)));
                }
            } else if new_steps > start && new_steps.is_power_of_two() {
                // Only report after a certain number of terminators have been evaluated and the
                // current number of evaluated terminators is a power of 2. The latter gives us a
                // cheap way to implement exponential backoff.
                let span = ecx.cur_span();
                // We store a unique number in `force_duplicate` to evade `-Z deduplicate-diagnostics`.
                // `new_steps` is guaranteed to be unique because `ecx.machine.num_evaluated_steps` is
                // always increasing.
                ecx.tcx.dcx().emit_warn(LongRunningWarn {
                    span,
                    item_span: ecx.tcx.span,
                    force_duplicate: new_steps,
                });
            }
        }

        interp_ok(())
    }
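
    // For orientation, with the default limits: the deny-by-default lint fires once at
    // step 2_000_000 (`LINT_TERMINATOR_LIMIT`); if evaluation is allowed to continue,
    // progress warnings then fire at 8_388_608 (2^23), 16_777_216 (2^24), and so on:
    // every power of two above `PROGRESS_INDICATOR_START` (4_000_000).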

    #[inline(always)]
    fn expose_provenance(
        _ecx: &InterpCx<'tcx, Self>,
        _provenance: Self::Provenance,
    ) -> InterpResult<'tcx> {
        // This is only reachable with -Zunleash-the-miri-inside-of-you.
        throw_unsup_format!("exposing pointers is not possible at compile-time")
    }

    #[inline(always)]
    fn init_frame(
        ecx: &mut InterpCx<'tcx, Self>,
        frame: Frame<'tcx>,
    ) -> InterpResult<'tcx, Frame<'tcx>> {
        // Enforce stack size limit. Add 1 because this is run before the new frame is pushed.
        if !ecx.recursion_limit.value_within_limit(ecx.stack().len() + 1) {
            throw_exhaust!(StackFrameLimitReached)
        } else {
            interp_ok(frame)
        }
    }

    #[inline(always)]
    fn stack<'a>(
        ecx: &'a InterpCx<'tcx, Self>,
    ) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>] {
        &ecx.machine.stack
    }

    #[inline(always)]
    fn stack_mut<'a>(
        ecx: &'a mut InterpCx<'tcx, Self>,
    ) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>> {
        &mut ecx.machine.stack
    }

    fn before_access_global(
        _tcx: TyCtxtAt<'tcx>,
        machine: &Self,
        alloc_id: AllocId,
        alloc: ConstAllocation<'tcx>,
        _static_def_id: Option<DefId>,
        is_write: bool,
    ) -> InterpResult<'tcx> {
        let alloc = alloc.inner();
        if is_write {
            // Write access. These are never allowed, but we give a targeted error message.
            match alloc.mutability {
                Mutability::Not => throw_ub!(WriteToReadOnly(alloc_id)),
                Mutability::Mut => Err(ConstEvalErrKind::ModifiedGlobal).into(),
            }
        } else {
            // Read access. These are usually allowed, with some exceptions.
            if machine.can_access_mut_global == CanAccessMutGlobal::Yes {
                // Machine configuration allows us to read from anything (e.g., a `static`
                // initializer).
                interp_ok(())
            } else if alloc.mutability == Mutability::Mut {
                // Machine configuration does not allow us to read mutable globals (e.g., in a
                // `const` initializer).
                Err(ConstEvalErrKind::ConstAccessesMutGlobal).into()
            } else {
                // Immutable global, this read is fine.
                assert_eq!(alloc.mutability, Mutability::Not);
                interp_ok(())
            }
        }
    }
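
    // For orientation (illustrative): this hook is what rejects
    //
    //     static mut GLOBAL: u32 = 0;
    //     const C: u32 = unsafe { GLOBAL };
    //
    // with `ConstAccessesMutGlobal`, while the same read is permitted when evaluating
    // another `static` initializer (where `can_access_mut_global` is `Yes`).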

    fn retag_ptr_value(
        ecx: &mut InterpCx<'tcx, Self>,
        _kind: mir::RetagKind,
        val: &ImmTy<'tcx, CtfeProvenance>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, CtfeProvenance>> {
        // If it's a frozen shared reference that's not already immutable, potentially make it immutable.
        // (Do nothing on `None` provenance, that cannot store immutability anyway.)
        if let ty::Ref(_, ty, mutbl) = val.layout.ty.kind()
            && *mutbl == Mutability::Not
            && val
                .to_scalar_and_meta()
                .0
                .to_pointer(ecx)?
                .provenance
                .is_some_and(|p| !p.immutable())
        {
            // The next check is expensive, which is why we have all the guards above.
            let is_immutable = ty.is_freeze(*ecx.tcx, ecx.typing_env());
            let place = ecx.ref_to_mplace(val)?;
            let new_place = if is_immutable {
                place.map_provenance(CtfeProvenance::as_immutable)
            } else {
                // Even if it is not immutable, remember that it is a shared reference.
                // This allows it to become part of the final value of the constant.
                // (See <https://github.com/rust-lang/rust/pull/128543> for why we allow this
                // even when there is interior mutability.)
                place.map_provenance(CtfeProvenance::as_shared_ref)
            };
            interp_ok(ImmTy::from_immediate(new_place.to_ref(ecx), val.layout))
        } else {
            interp_ok(val.clone())
        }
    }

    fn before_memory_write(
        _tcx: TyCtxtAt<'tcx>,
        _machine: &mut Self,
        _alloc_extra: &mut Self::AllocExtra,
        (_alloc_id, immutable): (AllocId, bool),
        range: AllocRange,
    ) -> InterpResult<'tcx> {
        if range.size == Size::ZERO {
            // Nothing to check.
            return interp_ok(());
        }
        // Reject writes through immutable pointers.
        if immutable {
            return Err(ConstEvalErrKind::WriteThroughImmutablePointer).into();
        }
        // Everything else is fine.
        interp_ok(())
    }

    fn before_alloc_read(ecx: &InterpCx<'tcx, Self>, alloc_id: AllocId) -> InterpResult<'tcx> {
        // Check if this is the currently evaluated static.
        if Some(alloc_id) == ecx.machine.static_root_ids.map(|(id, _)| id) {
            return Err(ConstEvalErrKind::RecursiveStatic).into();
        }
        // If this is another static, make sure we fire off the query to detect cycles.
        // But only do that when checks for static recursion are enabled.
        if ecx.machine.static_root_ids.is_some() {
            if let Some(GlobalAlloc::Static(def_id)) = ecx.tcx.try_get_global_alloc(alloc_id) {
                if ecx.tcx.is_foreign_item(def_id) {
                    throw_unsup!(ExternStatic(def_id));
                }
                ecx.ctfe_query(|tcx| tcx.eval_static_initializer(def_id))?;
            }
        }
        interp_ok(())
    }
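
    // For orientation (illustrative): the `static_root_ids` check above is what turns a
    // self-initializing static such as
    //
    //     static S: u32 = S;
    //
    // into `ConstEvalErrKind::RecursiveStatic` rather than an endless evaluation.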

    fn cached_union_data_range<'e>(
        ecx: &'e mut InterpCx<'tcx, Self>,
        ty: Ty<'tcx>,
        compute_range: impl FnOnce() -> RangeSet,
    ) -> Cow<'e, RangeSet> {
        if ecx.tcx.sess.opts.unstable_opts.extra_const_ub_checks {
            Cow::Borrowed(ecx.machine.union_data_ranges.entry(ty).or_insert_with(compute_range))
        } else {
            // Don't bother caching, we're only doing one validation at the end anyway.
            Cow::Owned(compute_range())
        }
    }
}

// Please do not add any code below the above `Machine` trait impl. I (oli-obk) plan more cleanups
// so we can end up having a file with just that impl, but for now, let's keep the impl discoverable
// at the bottom of this file.