2019-08-30 06:37:02 +00:00
|
|
|
//! Check the validity invariant of a given value, and tell the user
|
|
|
|
//! where in the value it got violated.
|
|
|
|
//! In const context, this goes even further and tries to approximate const safety.
|
|
|
|
//! That's useful because it means other passes (e.g. promotion) can rely on `const`s
|
|
|
|
//! to be const-safe.
|
|
|
|
|
2023-05-17 10:30:14 +00:00
|
|
|
use std::fmt::Write;
|
2024-01-29 22:59:09 +00:00
|
|
|
use std::num::NonZero;
|
2018-08-17 10:18:02 +00:00
|
|
|
|
2022-11-18 09:18:32 +00:00
|
|
|
use either::{Left, Right};
|
|
|
|
|
2023-12-16 15:24:25 +00:00
|
|
|
use hir::def::DefKind;
|
2022-08-23 01:25:03 +00:00
|
|
|
use rustc_ast::Mutability;
|
2018-08-17 10:18:02 +00:00
|
|
|
use rustc_data_structures::fx::FxHashSet;
|
2020-01-05 01:37:57 +00:00
|
|
|
use rustc_hir as hir;
|
2024-05-08 09:03:14 +00:00
|
|
|
use rustc_middle::bug;
|
2023-05-17 10:30:14 +00:00
|
|
|
use rustc_middle::mir::interpret::{
|
2023-12-16 15:24:25 +00:00
|
|
|
ExpectedKind, InterpError, InvalidMetaKind, Misalignment, PointerKind, Provenance,
|
|
|
|
ValidationErrorInfo, ValidationErrorKind, ValidationErrorKind::*,
|
2023-05-17 10:30:14 +00:00
|
|
|
};
|
2021-08-30 14:38:27 +00:00
|
|
|
use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
|
2024-03-09 12:05:13 +00:00
|
|
|
use rustc_middle::ty::{self, Ty};
|
2019-12-31 17:15:40 +00:00
|
|
|
use rustc_span::symbol::{sym, Symbol};
|
2023-07-22 05:35:57 +00:00
|
|
|
use rustc_target::abi::{
|
2023-09-26 19:08:21 +00:00
|
|
|
Abi, FieldIdx, Scalar as ScalarAbi, Size, VariantIdx, Variants, WrappingRange,
|
2023-07-22 05:35:57 +00:00
|
|
|
};
|
2018-08-17 10:18:02 +00:00
|
|
|
|
2019-02-10 13:59:13 +00:00
|
|
|
use std::hash::Hash;
|
|
|
|
|
2018-08-17 10:18:02 +00:00
|
|
|
use super::{
|
2024-05-08 09:03:14 +00:00
|
|
|
err_ub, format_interp_error, machine::AllocMap, throw_ub, AllocId, CheckInAllocMsg,
|
|
|
|
GlobalAlloc, ImmTy, Immediate, InterpCx, InterpResult, MPlaceTy, Machine, MemPlaceMeta, OpTy,
|
|
|
|
Pointer, Projectable, Scalar, ValueVisitor,
|
2018-08-17 10:18:02 +00:00
|
|
|
};
|
|
|
|
|
2023-08-02 14:14:36 +00:00
|
|
|
// for the validation errors
|
|
|
|
use super::InterpError::UndefinedBehavior as Ub;
|
|
|
|
use super::InterpError::Unsupported as Unsup;
|
|
|
|
use super::UndefinedBehaviorInfo::*;
|
|
|
|
use super::UnsupportedOpInfo::*;
|
|
|
|
|
2019-07-31 07:18:54 +00:00
|
|
|
/// Raise a validation failure as an undefined-behavior error.
///
/// `$where` is the `Vec<PathElem>` describing where in the value the problem
/// was found (rendered via `write_path` only here, on the failure path, so the
/// happy path never does string work); `$kind` is the `ValidationErrorKind`.
macro_rules! throw_validation_failure {
    ($where:expr, $kind: expr) => {{
        let where_ = &$where;
        // Only build the path string if there is a non-empty path to report.
        let path = if !where_.is_empty() {
            let mut path = String::new();
            write_path(&mut path, where_);
            Some(path)
        } else {
            None
        };
        // Wrap everything up as a UB `ValidationError` and return early.
        throw_ub!(ValidationError(ValidationErrorInfo { path, kind: $kind }))
    }};
}
|
|
|
|
|
2020-05-06 07:22:52 +00:00
|
|
|
/// If $e throws an error matching the pattern, throw a validation failure.
/// Other errors are passed back to the caller, unchanged -- and if they reach the root of
/// the visitor, we make sure only validation errors and `InvalidProgram` errors are left.
/// This lets you use the patterns as a kind of validation list, asserting which errors
/// can possibly happen:
///
/// ```ignore(illustrative)
/// let v = try_validation!(some_fn(), some_path, {
///     Foo | Bar | Baz => { "some failure" },
/// });
/// ```
///
/// The patterns must be of type `UndefinedBehaviorInfo`.
/// An additional expected parameter can also be added to the failure message:
///
/// ```ignore(illustrative)
/// let v = try_validation!(some_fn(), some_path, {
///     Foo | Bar | Baz => { "some failure" } expected { "something that wasn't a failure" },
/// });
/// ```
///
/// An additional nicety is that both parameters actually take format args, so you can just write
/// the format string in directly:
///
/// ```ignore(illustrative)
/// let v = try_validation!(some_fn(), some_path, {
///     Foo | Bar | Baz => { "{:?}", some_failure } expected { "{}", expected_value },
/// });
/// ```
///
macro_rules! try_validation {
    ($e:expr, $where:expr,
    $( $( $p:pat_param )|+ => $kind: expr ),+ $(,)?
    ) => {{
        match $e {
            // Success: unwrap and continue.
            Ok(x) => x,
            // We catch the error and turn it into a validation failure. We are okay with
            // allocation here as this can only slow down builds that fail anyway.
            Err(e) => match e.kind() {
                $(
                    // Each listed pattern becomes a validation failure of the given kind.
                    $($p)|+ =>
                        throw_validation_failure!(
                            $where,
                            $kind
                        )
                ),+,
                // Anything not listed is propagated unchanged to the caller.
                #[allow(unreachable_patterns)]
                _ => Err::<!, _>(e)?,
            }
        }
    }};
}
|
|
|
|
|
2018-11-12 18:05:20 +00:00
|
|
|
/// We want to show a nice path to the invalid field for diagnostics,
/// but avoid string operations in the happy case where no error happens.
/// So we track a `Vec<PathElem>` where `PathElem` contains all the data we
/// need to later print something for the user.
///
/// The rendering of each variant is done by `write_path` below.
#[derive(Copy, Clone, Debug)]
pub enum PathElem {
    /// A named field (rendered as `.name`).
    Field(Symbol),
    /// An enum variant we projected into (rendered as `.<enum-variant(name)>`).
    Variant(Symbol),
    /// A coroutine state, identified by variant index
    /// (rendered as `.<coroutine-state(idx)>`).
    CoroutineState(VariantIdx),
    /// A variable captured by a closure/coroutine
    /// (rendered as `.<captured-var(name)>`).
    CapturedVar(Symbol),
    /// An array or slice element (rendered as `[idx]`).
    ArrayElem(usize),
    /// A positional tuple field (rendered as `.idx`).
    TupleElem(usize),
    /// A pointer dereference (rendered as `.<deref>`).
    Deref,
    /// The discriminant of an enum (rendered as `.<enum-tag>`).
    EnumTag,
    /// The discriminant of a coroutine (rendered as `.<coroutine-tag>`).
    CoroutineTag,
    /// The data behind a `dyn Trait` (rendered as `.<dyn-downcast>`).
    DynDowncast,
}
|
|
|
|
|
2020-10-24 18:49:17 +00:00
|
|
|
/// Extra things to check for during validation of CTFE results.
#[derive(Copy, Clone)]
pub enum CtfeValidationMode {
    /// Validation of a `static`
    Static { mutbl: Mutability },
    /// Validation of a promoted.
    Promoted,
    /// Validation of a `const`.
    /// `allow_immutable_unsafe_cell` says whether we allow `UnsafeCell` in immutable memory (which is the
    /// case for the top-level allocation of a `const`, where this is fine because the allocation will be
    /// copied at each use site).
    Const { allow_immutable_unsafe_cell: bool },
}
|
|
|
|
|
|
|
|
impl CtfeValidationMode {
|
|
|
|
fn allow_immutable_unsafe_cell(self) -> bool {
|
|
|
|
match self {
|
|
|
|
CtfeValidationMode::Static { .. } => false,
|
2024-02-12 07:51:41 +00:00
|
|
|
CtfeValidationMode::Promoted { .. } => false,
|
2023-12-16 15:24:25 +00:00
|
|
|
CtfeValidationMode::Const { allow_immutable_unsafe_cell, .. } => {
|
|
|
|
allow_immutable_unsafe_cell
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2020-10-24 18:49:17 +00:00
|
|
|
}
|
|
|
|
|
2018-10-02 14:06:50 +00:00
|
|
|
/// State for tracking recursive validation of references
pub struct RefTracking<T, PATH = ()> {
    /// Everything we have already encountered (so we never queue it twice).
    pub seen: FxHashSet<T>,
    /// Work list of values still to be validated, each with the path it was found at.
    pub todo: Vec<(T, PATH)>,
}
|
|
|
|
|
2023-07-25 20:35:07 +00:00
|
|
|
impl<T: Clone + Eq + Hash + std::fmt::Debug, PATH: Default> RefTracking<T, PATH> {
|
2019-02-10 13:59:13 +00:00
|
|
|
pub fn empty() -> Self {
|
|
|
|
RefTracking { seen: FxHashSet::default(), todo: vec![] }
|
|
|
|
}
|
2019-02-15 20:05:47 +00:00
|
|
|
pub fn new(op: T) -> Self {
|
2019-02-10 13:59:13 +00:00
|
|
|
let mut ref_tracking_for_consts =
|
2023-07-25 20:35:07 +00:00
|
|
|
RefTracking { seen: FxHashSet::default(), todo: vec![(op.clone(), PATH::default())] };
|
2019-02-10 13:59:13 +00:00
|
|
|
ref_tracking_for_consts.seen.insert(op);
|
|
|
|
ref_tracking_for_consts
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn track(&mut self, op: T, path: impl FnOnce() -> PATH) {
|
2023-07-25 20:35:07 +00:00
|
|
|
if self.seen.insert(op.clone()) {
|
2019-02-10 13:59:13 +00:00
|
|
|
trace!("Recursing below ptr {:#?}", op);
|
|
|
|
let path = path();
|
|
|
|
// Remember to come back to this later.
|
|
|
|
self.todo.push((op, path));
|
|
|
|
}
|
2018-10-02 14:06:50 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-05-17 10:30:14 +00:00
|
|
|
// FIXME make this translatable as well?
|
2018-08-18 11:46:52 +00:00
|
|
|
/// Format a path
|
2020-12-30 11:59:07 +00:00
|
|
|
fn write_path(out: &mut String, path: &[PathElem]) {
|
2018-08-18 11:46:52 +00:00
|
|
|
use self::PathElem::*;
|
|
|
|
|
|
|
|
for elem in path.iter() {
|
|
|
|
match elem {
|
2023-07-25 21:17:39 +00:00
|
|
|
Field(name) => write!(out, ".{name}"),
|
2020-02-26 11:00:33 +00:00
|
|
|
EnumTag => write!(out, ".<enum-tag>"),
|
2023-07-25 21:17:39 +00:00
|
|
|
Variant(name) => write!(out, ".<enum-variant({name})>"),
|
2023-10-19 21:46:28 +00:00
|
|
|
CoroutineTag => write!(out, ".<coroutine-tag>"),
|
|
|
|
CoroutineState(idx) => write!(out, ".<coroutine-state({})>", idx.index()),
|
2023-07-25 21:17:39 +00:00
|
|
|
CapturedVar(name) => write!(out, ".<captured-var({name})>"),
|
|
|
|
TupleElem(idx) => write!(out, ".{idx}"),
|
|
|
|
ArrayElem(idx) => write!(out, "[{idx}]"),
|
2020-01-06 10:35:55 +00:00
|
|
|
// `.<deref>` does not match Rust syntax, but it is more readable for long paths -- and
|
2022-11-16 20:34:16 +00:00
|
|
|
// some of the other items here also are not Rust syntax. Actually we can't
|
2018-08-18 11:46:52 +00:00
|
|
|
// even use the usual syntax because we are just showing the projections,
|
|
|
|
// not the root.
|
2020-01-06 10:35:55 +00:00
|
|
|
Deref => write!(out, ".<deref>"),
|
2018-10-31 17:44:00 +00:00
|
|
|
DynDowncast => write!(out, ".<dyn-downcast>"),
|
|
|
|
}
|
|
|
|
.unwrap()
|
2018-08-18 11:46:52 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-06-11 19:03:44 +00:00
|
|
|
/// The visitor that drives validation of a single value; see the module docs.
struct ValidityVisitor<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> {
    /// The `path` may be pushed to, but the part that is present when a function
    /// starts must not be changed! `visit_fields` and `visit_array` rely on
    /// this stack discipline.
    path: Vec<PathElem>,
    /// When `Some`, references encountered during validation are queued here
    /// (together with the path they were found at) for recursive validation.
    ref_tracking: Option<&'rt mut RefTracking<MPlaceTy<'tcx, M::Provenance>, Vec<PathElem>>>,
    /// `None` indicates this is not validating for CTFE (but for runtime).
    ctfe_mode: Option<CtfeValidationMode>,
    /// The interpreter context this validation run operates in.
    ecx: &'rt InterpCx<'mir, 'tcx, M>,
}
|
|
|
|
|
2020-03-16 22:12:42 +00:00
|
|
|
impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, 'tcx, M> {
|
2020-03-04 14:50:21 +00:00
|
|
|
/// Computes the `PathElem` for projecting to field number `field` of a value
/// with the given `layout`. Handles both "projecting to the tag/variant" and
/// "projecting to an actual field", depending on the type.
fn aggregate_field_path_elem(&mut self, layout: TyAndLayout<'tcx>, field: usize) -> PathElem {
    // First, check if we are projecting to a variant.
    match layout.variants {
        Variants::Multiple { tag_field, .. } => {
            if tag_field == field {
                // We are projecting to the tag; report that instead of a field.
                return match layout.ty.kind() {
                    ty::Adt(def, ..) if def.is_enum() => PathElem::EnumTag,
                    ty::Coroutine(..) => PathElem::CoroutineTag,
                    _ => bug!("non-variant type {:?}", layout.ty),
                };
            }
        }
        Variants::Single { .. } => {}
    }

    // Now we know we are projecting to a field, so figure out which one.
    match layout.ty.kind() {
        // coroutines, closures, and coroutine-closures all have upvars that may be named.
        ty::Closure(def_id, _) | ty::Coroutine(def_id, _) | ty::CoroutineClosure(def_id, _) => {
            let mut name = None;
            // FIXME this should be more descriptive i.e. CapturePlace instead of CapturedVar
            // https://github.com/rust-lang/project-rfc-2229/issues/46
            // Capture names are only available for local definitions.
            if let Some(local_def_id) = def_id.as_local() {
                let captures = self.ecx.tcx.closure_captures(local_def_id);
                if let Some(captured_place) = captures.get(field) {
                    // Sometimes the index is beyond the number of upvars (seen
                    // for a coroutine).
                    let var_hir_id = captured_place.get_root_variable();
                    let node = self.ecx.tcx.hir_node(var_hir_id);
                    // Only simple bindings give us a usable name.
                    if let hir::Node::Pat(pat) = node {
                        if let hir::PatKind::Binding(_, _, ident, _) = pat.kind {
                            name = Some(ident.name);
                        }
                    }
                }
            }

            PathElem::CapturedVar(name.unwrap_or_else(|| {
                // Fall back to showing the field index.
                sym::integer(field)
            }))
        }

        // tuples
        ty::Tuple(_) => PathElem::TupleElem(field),

        // enums
        ty::Adt(def, ..) if def.is_enum() => {
            // we might be projecting *to* a variant, or to a field *in* a variant.
            match layout.variants {
                Variants::Single { index } => {
                    // Inside a variant
                    PathElem::Field(def.variant(index).fields[FieldIdx::from_usize(field)].name)
                }
                Variants::Multiple { .. } => bug!("we handled variants above"),
            }
        }

        // other ADTs
        ty::Adt(def, _) => {
            PathElem::Field(def.non_enum_variant().fields[FieldIdx::from_usize(field)].name)
        }

        // arrays/slices
        ty::Array(..) | ty::Slice(..) => PathElem::ArrayElem(field),

        // dyn traits
        ty::Dynamic(..) => PathElem::DynDowncast,

        // nothing else has an aggregate layout
        _ => bug!("aggregate_field_path_elem: got non-aggregate type {:?}", layout.ty),
    }
}
|
|
|
|
|
2020-07-05 11:40:27 +00:00
|
|
|
fn with_elem<R>(
|
2018-11-08 16:06:27 +00:00
|
|
|
&mut self,
|
|
|
|
elem: PathElem,
|
2020-07-05 11:40:27 +00:00
|
|
|
f: impl FnOnce(&mut Self) -> InterpResult<'tcx, R>,
|
|
|
|
) -> InterpResult<'tcx, R> {
|
2018-11-08 16:06:27 +00:00
|
|
|
// Remember the old state
|
|
|
|
let path_len = self.path.len();
|
2020-07-05 11:40:27 +00:00
|
|
|
// Record new element
|
2018-10-31 17:44:00 +00:00
|
|
|
self.path.push(elem);
|
2020-07-05 11:40:27 +00:00
|
|
|
// Perform operation
|
|
|
|
let r = f(self)?;
|
2018-11-08 16:06:27 +00:00
|
|
|
// Undo changes
|
|
|
|
self.path.truncate(path_len);
|
2020-07-05 11:40:27 +00:00
|
|
|
// Done
|
|
|
|
Ok(r)
|
2018-10-31 15:46:33 +00:00
|
|
|
}
|
2019-08-25 11:57:46 +00:00
|
|
|
|
2022-08-01 23:05:20 +00:00
|
|
|
/// Reads `op` as an immediate, translating low-level read errors into
/// validation failures: uninitialized bytes and (during CTFE) pointer-as-int /
/// partial-pointer reads become errors tagged with `expected`.
fn read_immediate(
    &self,
    op: &OpTy<'tcx, M::Provenance>,
    expected: ExpectedKind,
) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
    Ok(try_validation!(
        self.ecx.read_immediate(op),
        self.path,
        Ub(InvalidUninitBytes(None)) =>
            Uninit { expected },
        // The `Unsup` cases can only occur during CTFE
        Unsup(ReadPointerAsInt(_)) =>
            PointerAsInt { expected },
        Unsup(ReadPartialPointer(_)) =>
            PartialPointer,
    ))
}
|
|
|
|
|
|
|
|
fn read_scalar(
|
|
|
|
&self,
|
|
|
|
op: &OpTy<'tcx, M::Provenance>,
|
2023-05-17 10:30:14 +00:00
|
|
|
expected: ExpectedKind,
|
2022-08-01 23:05:20 +00:00
|
|
|
) -> InterpResult<'tcx, Scalar<M::Provenance>> {
|
|
|
|
Ok(self.read_immediate(op, expected)?.to_scalar())
|
|
|
|
}
|
|
|
|
|
2019-08-25 11:57:46 +00:00
|
|
|
/// Validates the metadata of a wide pointer to a `pointee` of the given layout:
/// vtable pointers must be genuine vtables for the right trait, slice/str
/// lengths must be readable. `extern type` tails carry no metadata to check.
fn check_wide_ptr_meta(
    &mut self,
    meta: MemPlaceMeta<M::Provenance>,
    pointee: TyAndLayout<'tcx>,
) -> InterpResult<'tcx> {
    // The unsized tail determines what kind of metadata to expect.
    let tail = self.ecx.tcx.struct_tail_erasing_lifetimes(pointee.ty, self.ecx.param_env);
    match tail.kind() {
        ty::Dynamic(data, _, ty::Dyn) => {
            let vtable = meta.unwrap_meta().to_pointer(self.ecx)?;
            // Make sure it is a genuine vtable pointer.
            let (_dyn_ty, dyn_trait) = try_validation!(
                self.ecx.get_ptr_vtable(vtable),
                self.path,
                Ub(DanglingIntPointer(..) | InvalidVTablePointer(..)) =>
                    InvalidVTablePtr { value: format!("{vtable}") }
            );
            // Make sure it is for the right trait.
            if dyn_trait != data.principal() {
                throw_validation_failure!(
                    self.path,
                    InvalidMetaWrongTrait { expected_trait: data, vtable_trait: dyn_trait }
                );
            }
        }
        ty::Slice(..) | ty::Str => {
            // The metadata is the length; reading it validates it.
            let _len = meta.unwrap_meta().to_target_usize(self.ecx)?;
            // We do not check that `len * elem_size <= isize::MAX`:
            // that is only required for references, and there it falls out of the
            // "dereferenceable" check performed by Stacked Borrows.
        }
        ty::Foreign(..) => {
            // Unsized, but not wide.
        }
        _ => bug!("Unexpected unsized type tail: {:?}", tail),
    }

    Ok(())
}
|
2018-10-31 15:46:33 +00:00
|
|
|
|
2020-03-02 12:09:13 +00:00
|
|
|
/// Check a reference or `Box`.
///
/// Validates the pointer value itself (metadata, dereferenceability, alignment,
/// non-null, not pointing to an uninhabited type), performs the mode-specific
/// mutability checks, and -- when `ref_tracking` is active -- queues the pointee
/// for recursive validation.
fn check_safe_pointer(
    &mut self,
    value: &OpTy<'tcx, M::Provenance>,
    ptr_kind: PointerKind,
) -> InterpResult<'tcx> {
    // Not using `deref_pointer` since we want to use our `read_immediate` wrapper.
    let place = self.ecx.ref_to_mplace(&self.read_immediate(value, ptr_kind.into())?)?;
    // Handle wide pointers.
    // Check metadata early, for better diagnostics
    if place.layout.is_unsized() {
        self.check_wide_ptr_meta(place.meta(), place.layout)?;
    }
    // Make sure this is dereferenceable and all.
    let size_and_align = try_validation!(
        self.ecx.size_and_align_of_mplace(&place),
        self.path,
        Ub(InvalidMeta(msg)) => match msg {
            InvalidMetaKind::SliceTooBig => InvalidMetaSliceTooLarge { ptr_kind },
            InvalidMetaKind::TooBig => InvalidMetaTooLarge { ptr_kind },
        }
    );
    let (size, align) = size_and_align
        // for the purpose of validity, consider foreign types to have
        // alignment and size determined by the layout (size will be 0,
        // alignment should take attributes into account).
        .unwrap_or_else(|| (place.layout.size, place.layout.align.abi));
    // Direct call to `check_ptr_access_align` checks alignment even on CTFE machines.
    try_validation!(
        self.ecx.check_ptr_access(
            place.ptr(),
            size,
            CheckInAllocMsg::InboundsTest, // will anyway be replaced by validity message
        ),
        self.path,
        Ub(DanglingIntPointer(0, _)) => NullPtr { ptr_kind },
        Ub(DanglingIntPointer(i, _)) => DanglingPtrNoProvenance {
            ptr_kind,
            // FIXME this says "null pointer" when null but we need translate
            pointer: format!("{}", Pointer::<Option<AllocId>>::from_addr_invalid(*i))
        },
        Ub(PointerOutOfBounds { .. }) => DanglingPtrOutOfBounds {
            ptr_kind
        },
        // This cannot happen during const-eval (because interning already detects
        // dangling pointers), but it can happen in Miri.
        Ub(PointerUseAfterFree(..)) => DanglingPtrUseAfterFree {
            ptr_kind,
        },
    );
    try_validation!(
        self.ecx.check_ptr_align(
            place.ptr(),
            align,
        ),
        self.path,
        Ub(AlignmentCheckFailed(Misalignment { required, has }, _msg)) => UnalignedPtr {
            ptr_kind,
            required_bytes: required.bytes(),
            found_bytes: has.bytes()
        },
    );
    // Do not allow pointers to uninhabited types.
    if place.layout.abi.is_uninhabited() {
        let ty = place.layout.ty;
        throw_validation_failure!(self.path, PtrToUninhabited { ptr_kind, ty })
    }
    // Recursive checking
    if let Some(ref_tracking) = self.ref_tracking.as_deref_mut() {
        // Determine whether this pointer expects to be pointing to something mutable.
        let ptr_expected_mutbl = match ptr_kind {
            PointerKind::Box => Mutability::Mut,
            PointerKind::Ref(mutbl) => {
                // We do not take into account interior mutability here since we cannot know if
                // there really is an `UnsafeCell` inside `Option<UnsafeCell>` -- so we check
                // that in the recursive descent behind this reference (controlled by
                // `allow_immutable_unsafe_cell`).
                mutbl
            }
        };
        // Proceed recursively even for ZST, no reason to skip them!
        // `!` is a ZST and we want to validate it.
        if let Ok((alloc_id, _offset, _prov)) = self.ecx.ptr_try_get_alloc_id(place.ptr()) {
            let mut skip_recursive_check = false;
            let alloc_actual_mutbl = mutability(self.ecx, alloc_id);
            if let GlobalAlloc::Static(did) = self.ecx.tcx.global_alloc(alloc_id) {
                let DefKind::Static { nested, .. } = self.ecx.tcx.def_kind(did) else { bug!() };
                // Special handling for pointers to statics (irrespective of their type).
                assert!(!self.ecx.tcx.is_thread_local_static(did));
                assert!(self.ecx.tcx.is_static(did));
                // Mode-specific checks
                match self.ctfe_mode {
                    Some(
                        CtfeValidationMode::Static { .. } | CtfeValidationMode::Promoted { .. },
                    ) => {
                        // We skip recursively checking other statics. These statics must be sound by
                        // themselves, and the only way to get broken statics here is by using
                        // unsafe code.
                        // The reasons we don't check other statics is twofold. For one, in all
                        // sound cases, the static was already validated on its own, and second, we
                        // trigger cycle errors if we try to compute the value of the other static
                        // and that static refers back to us (potentially through a promoted).
                        // This could miss some UB, but that's fine.
                        // We still walk nested allocations, as they are fundamentally part of this validation run.
                        // This means we will also recurse into nested statics of *other*
                        // statics, even though we do not recurse into other statics directly.
                        // That's somewhat inconsistent but harmless.
                        skip_recursive_check = !nested;
                    }
                    Some(CtfeValidationMode::Const { .. }) => {
                        // We can't recursively validate `extern static`, so we better reject them.
                        if self.ecx.tcx.is_foreign_item(did) {
                            throw_validation_failure!(self.path, ConstRefToExtern);
                        }
                    }
                    None => {}
                }
            }

            // Mutability check.
            // If this allocation has size zero, there is no actual mutability here.
            let (size, _align, _alloc_kind) = self.ecx.get_alloc_info(alloc_id);
            if size != Size::ZERO {
                // Mutable pointer to immutable memory is no good.
                if ptr_expected_mutbl == Mutability::Mut
                    && alloc_actual_mutbl == Mutability::Not
                {
                    throw_validation_failure!(self.path, MutableRefToImmutable);
                }
                // In a const, everything must be completely immutable.
                if matches!(self.ctfe_mode, Some(CtfeValidationMode::Const { .. })) {
                    if ptr_expected_mutbl == Mutability::Mut
                        || alloc_actual_mutbl == Mutability::Mut
                    {
                        throw_validation_failure!(self.path, ConstRefToMutable);
                    }
                }
            }
            // Potentially skip recursive check.
            if skip_recursive_check {
                return Ok(());
            }
        }
        let path = &self.path;
        ref_tracking.track(place, || {
            // We need to clone the path anyway, make sure it gets created
            // with enough space for the additional `Deref`.
            let mut new_path = Vec::with_capacity(path.len() + 1);
            new_path.extend(path);
            new_path.push(PathElem::Deref);
            new_path
        });
    }
    Ok(())
}
|
|
|
|
|
|
|
|
/// Check if this is a value of primitive type, and if yes check the validity of the value
/// at that type. Return `true` if the type is indeed primitive.
///
/// Note that not all of these have `FieldsShape::Primitive`, e.g. wide references.
fn try_visit_primitive(
    &mut self,
    value: &OpTy<'tcx, M::Provenance>,
) -> InterpResult<'tcx, bool> {
    // Go over all the primitive types
    let ty = value.layout.ty;
    match ty.kind() {
        ty::Bool => {
            let value = self.read_scalar(value, ExpectedKind::Bool)?;
            // `to_bool` rejects anything that is not 0 or 1.
            try_validation!(
                value.to_bool(),
                self.path,
                Ub(InvalidBool(..)) => ValidationErrorKind::InvalidBool {
                    value: format!("{value:x}"),
                }
            );
            Ok(true)
        }
        ty::Char => {
            let value = self.read_scalar(value, ExpectedKind::Char)?;
            // `to_char` rejects invalid Unicode scalar values.
            try_validation!(
                value.to_char(),
                self.path,
                Ub(InvalidChar(..)) => ValidationErrorKind::InvalidChar {
                    value: format!("{value:x}"),
                }
            );
            Ok(true)
        }
        ty::Float(_) | ty::Int(_) | ty::Uint(_) => {
            // NOTE: Keep this in sync with the array optimization for int/float
            // types below!
            // Just reading the scalar performs the init/pointer checks; every
            // bit pattern is otherwise valid for these types.
            self.read_scalar(
                value,
                if matches!(ty.kind(), ty::Float(..)) {
                    ExpectedKind::Float
                } else {
                    ExpectedKind::Int
                },
            )?;
            Ok(true)
        }
        ty::RawPtr(..) => {
            // Raw pointers may be dangling, but wide raw pointers still need
            // valid metadata.
            let place =
                self.ecx.ref_to_mplace(&self.read_immediate(value, ExpectedKind::RawPtr)?)?;
            if place.layout.is_unsized() {
                self.check_wide_ptr_meta(place.meta(), place.layout)?;
            }
            Ok(true)
        }
        ty::Ref(_, _ty, mutbl) => {
            // Safe references get the full pointer check (and recursion).
            self.check_safe_pointer(value, PointerKind::Ref(*mutbl))?;
            Ok(true)
        }
        ty::FnPtr(_sig) => {
            let value = self.read_scalar(value, ExpectedKind::FnPtr)?;

            // If we check references recursively, also check that this points to a function.
            if let Some(_) = self.ref_tracking {
                let ptr = value.to_pointer(self.ecx)?;
                let _fn = try_validation!(
                    self.ecx.get_ptr_fn(ptr),
                    self.path,
                    Ub(DanglingIntPointer(..) | InvalidFunctionPointer(..)) =>
                        InvalidFnPtr { value: format!("{ptr}") },
                );
                // FIXME: Check if the signature matches
            } else {
                // Otherwise (for standalone Miri), we have to still check it to be non-null.
                if self.ecx.scalar_may_be_null(value)? {
                    throw_validation_failure!(self.path, NullFnPtr);
                }
            }
            Ok(true)
        }
        // A value of type `!` can never be valid.
        ty::Never => throw_validation_failure!(self.path, NeverVal),
        ty::Foreign(..) | ty::FnDef(..) => {
            // Nothing to check.
            Ok(true)
        }
        // The above should be all the primitive types. The rest is compound, we
        // check them by visiting their fields/variants.
        ty::Adt(..)
        | ty::Tuple(..)
        | ty::Array(..)
        | ty::Slice(..)
        | ty::Str
        | ty::Dynamic(..)
        | ty::Closure(..)
        | ty::Pat(..)
        | ty::CoroutineClosure(..)
        | ty::Coroutine(..) => Ok(false),
        // Some types only occur during typechecking, they have no layout.
        // We should not see them here and we could not check them anyway.
        ty::Error(_)
        | ty::Infer(..)
        | ty::Placeholder(..)
        | ty::Bound(..)
        | ty::Param(..)
        | ty::Alias(..)
        | ty::CoroutineWitness(..) => bug!("Encountered invalid type {:?}", ty),
    }
}
|
|
|
|
|
2018-11-02 08:33:26 +00:00
|
|
|
fn visit_scalar(
|
|
|
|
&mut self,
|
2022-08-01 23:05:20 +00:00
|
|
|
scalar: Scalar<M::Provenance>,
|
2021-08-29 09:06:55 +00:00
|
|
|
scalar_layout: ScalarAbi,
|
2019-06-07 16:56:27 +00:00
|
|
|
) -> InterpResult<'tcx> {
|
2022-05-04 20:47:46 +00:00
|
|
|
let size = scalar_layout.size(self.ecx);
|
2022-03-03 12:02:12 +00:00
|
|
|
let valid_range = scalar_layout.valid_range(self.ecx);
|
2023-07-22 05:35:57 +00:00
|
|
|
let WrappingRange { start, end } = valid_range;
|
2022-05-04 20:47:46 +00:00
|
|
|
let max_value = size.unsigned_int_max();
|
2023-07-22 05:35:57 +00:00
|
|
|
assert!(end <= max_value);
|
|
|
|
let bits = match scalar.try_to_int() {
|
|
|
|
Ok(int) => int.assert_bits(size),
|
2021-07-16 07:39:35 +00:00
|
|
|
Err(_) => {
|
|
|
|
// So this is a pointer then, and casting to an int failed.
|
|
|
|
// Can only happen during CTFE.
|
2023-07-22 05:35:57 +00:00
|
|
|
// We support 2 kinds of ranges here: full range, and excluding zero.
|
|
|
|
if start == 1 && end == max_value {
|
|
|
|
// Only null is the niche. So make sure the ptr is NOT null.
|
|
|
|
if self.ecx.scalar_may_be_null(scalar)? {
|
|
|
|
throw_validation_failure!(
|
|
|
|
self.path,
|
|
|
|
NullablePtrOutOfRange { range: valid_range, max_value }
|
|
|
|
)
|
|
|
|
} else {
|
|
|
|
return Ok(());
|
|
|
|
}
|
|
|
|
} else if scalar_layout.is_always_valid(self.ecx) {
|
|
|
|
// Easy. (This is reachable if `enforce_number_validity` is set.)
|
|
|
|
return Ok(());
|
2018-10-03 09:38:16 +00:00
|
|
|
} else {
|
2023-07-22 05:35:57 +00:00
|
|
|
// Conservatively, we reject, because the pointer *could* have a bad
|
|
|
|
// value.
|
2023-05-17 10:30:14 +00:00
|
|
|
throw_validation_failure!(
|
|
|
|
self.path,
|
|
|
|
PtrOutOfRange { range: valid_range, max_value }
|
2019-07-30 14:48:50 +00:00
|
|
|
)
|
2018-10-03 09:38:16 +00:00
|
|
|
}
|
2018-10-02 18:05:12 +00:00
|
|
|
}
|
2023-07-22 05:35:57 +00:00
|
|
|
};
|
|
|
|
// Now compare.
|
|
|
|
if valid_range.contains(bits) {
|
|
|
|
Ok(())
|
|
|
|
} else {
|
|
|
|
throw_validation_failure!(
|
|
|
|
self.path,
|
|
|
|
OutOfRange { value: format!("{bits}"), range: valid_range, max_value }
|
|
|
|
)
|
2018-10-02 14:06:50 +00:00
|
|
|
}
|
|
|
|
}
|
2023-12-16 15:24:25 +00:00
|
|
|
|
|
|
|
fn in_mutable_memory(&self, op: &OpTy<'tcx, M::Provenance>) -> bool {
|
|
|
|
if let Some(mplace) = op.as_mplace_or_imm().left() {
|
|
|
|
if let Some(alloc_id) = mplace.ptr().provenance.and_then(|p| p.get_alloc_id()) {
|
2024-04-08 10:57:46 +00:00
|
|
|
return mutability(self.ecx, alloc_id).is_mut();
|
2023-12-16 15:24:25 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
false
|
|
|
|
}
|
2020-02-17 21:59:16 +00:00
|
|
|
}
|
|
|
|
|
2024-04-02 15:15:48 +00:00
|
|
|
/// Returns whether the allocation behind `alloc_id` is mutable.
/// For "root" statics we look at the type to account for interior
/// mutability; for nested statics we have no type and directly use the annotated mutability.
fn mutability<'mir, 'tcx: 'mir>(
    ecx: &InterpCx<'mir, 'tcx, impl Machine<'mir, 'tcx>>,
    alloc_id: AllocId,
) -> Mutability {
    // Let's see what kind of memory this points to.
    // We're not using `try_global_alloc` since dangling pointers have already been handled.
    match ecx.tcx.global_alloc(alloc_id) {
        GlobalAlloc::Static(did) => {
            let DefKind::Static { mutability, nested } = ecx.tcx.def_kind(did) else { bug!() };
            if nested {
                assert!(
                    ecx.memory.alloc_map.get(alloc_id).is_none(),
                    "allocations of nested statics are already interned: {alloc_id:?}, {did:?}"
                );
                // Nested statics in a `static` are never interior mutable,
                // so just use the declared mutability.
                mutability
            } else {
                // For a root static, a declared-immutable static whose type is
                // *not* `Freeze` (i.e., it contains an `UnsafeCell`) must be
                // treated as mutable memory.
                let mutability = match mutability {
                    Mutability::Not
                        if !ecx
                            .tcx
                            .type_of(did)
                            .no_bound_vars()
                            .expect("statics should not have generic parameters")
                            .is_freeze(*ecx.tcx, ty::ParamEnv::reveal_all()) =>
                    {
                        Mutability::Mut
                    }
                    _ => mutability,
                };
                // If this allocation is still in the interpreter's alloc map,
                // its recorded mutability must agree with what we computed.
                if let Some((_, alloc)) = ecx.memory.alloc_map.get(alloc_id) {
                    assert_eq!(alloc.mutability, mutability);
                }
                mutability
            }
        }
        GlobalAlloc::Memory(alloc) => alloc.inner().mutability,
        GlobalAlloc::Function(..) | GlobalAlloc::VTable(..) => {
            // These are immutable, we better don't allow mutable pointers here.
            Mutability::Not
        }
    }
}
|
|
|
|
|
2020-03-16 22:12:42 +00:00
|
|
|
impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
|
2020-02-17 21:59:16 +00:00
|
|
|
for ValidityVisitor<'rt, 'mir, 'tcx, M>
|
|
|
|
{
|
2022-07-18 22:47:31 +00:00
|
|
|
type V = OpTy<'tcx, M::Provenance>;
|
2020-02-17 21:59:16 +00:00
|
|
|
|
|
|
|
/// Hands the interpreter context to the generic value-visiting machinery.
#[inline(always)]
fn ecx(&self) -> &InterpCx<'mir, 'tcx, M> {
    self.ecx
}
|
|
|
|
|
2020-07-05 14:01:18 +00:00
|
|
|
/// Reads the enum discriminant of `op`, converting interpreter errors into
/// validation failures that are attributed to the enum tag in the value path.
fn read_discriminant(
    &mut self,
    op: &OpTy<'tcx, M::Provenance>,
) -> InterpResult<'tcx, VariantIdx> {
    // Push `EnumTag` onto the path so any failure points at the tag.
    self.with_elem(PathElem::EnumTag, move |this| {
        Ok(try_validation!(
            this.ecx.read_discriminant(op),
            this.path,
            Ub(InvalidTag(val)) => InvalidEnumTag {
                value: format!("{val:x}"),
            },
            Ub(UninhabitedEnumVariantRead(_)) => UninhabitedEnumVariant,
            // Uninit / bad provenance are not possible since the field was already previously
            // checked at its integer type.
        ))
    })
}
|
|
|
|
|
2020-02-17 21:59:16 +00:00
|
|
|
#[inline]
|
|
|
|
fn visit_field(
|
|
|
|
&mut self,
|
2022-07-18 22:47:31 +00:00
|
|
|
old_op: &OpTy<'tcx, M::Provenance>,
|
2020-02-17 21:59:16 +00:00
|
|
|
field: usize,
|
2022-07-18 22:47:31 +00:00
|
|
|
new_op: &OpTy<'tcx, M::Provenance>,
|
2020-02-17 21:59:16 +00:00
|
|
|
) -> InterpResult<'tcx> {
|
|
|
|
let elem = self.aggregate_field_path_elem(old_op.layout, field);
|
2020-07-05 11:40:27 +00:00
|
|
|
self.with_elem(elem, move |this| this.visit_value(new_op))
|
2020-02-17 21:59:16 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
#[inline]
|
|
|
|
fn visit_variant(
|
|
|
|
&mut self,
|
2022-07-18 22:47:31 +00:00
|
|
|
old_op: &OpTy<'tcx, M::Provenance>,
|
2020-02-17 21:59:16 +00:00
|
|
|
variant_id: VariantIdx,
|
2022-07-18 22:47:31 +00:00
|
|
|
new_op: &OpTy<'tcx, M::Provenance>,
|
2020-02-17 21:59:16 +00:00
|
|
|
) -> InterpResult<'tcx> {
|
2020-08-02 22:49:11 +00:00
|
|
|
let name = match old_op.layout.ty.kind() {
|
2022-03-04 20:28:41 +00:00
|
|
|
ty::Adt(adt, _) => PathElem::Variant(adt.variant(variant_id).name),
|
2023-10-19 16:06:43 +00:00
|
|
|
// Coroutines also have variants
|
|
|
|
ty::Coroutine(..) => PathElem::CoroutineState(variant_id),
|
2020-02-17 21:59:16 +00:00
|
|
|
_ => bug!("Unexpected type with variant: {:?}", old_op.layout.ty),
|
|
|
|
};
|
2020-07-05 11:40:27 +00:00
|
|
|
self.with_elem(name, move |this| this.visit_value(new_op))
|
2020-02-17 21:59:16 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
#[inline(always)]
|
2020-04-16 15:15:46 +00:00
|
|
|
fn visit_union(
|
|
|
|
&mut self,
|
2022-07-18 22:47:31 +00:00
|
|
|
op: &OpTy<'tcx, M::Provenance>,
|
2024-01-29 22:59:09 +00:00
|
|
|
_fields: NonZero<usize>,
|
2020-04-16 15:15:46 +00:00
|
|
|
) -> InterpResult<'tcx> {
|
2023-12-16 15:24:25 +00:00
|
|
|
// Special check for CTFE validation, preventing `UnsafeCell` inside unions in immutable memory.
|
|
|
|
if self.ctfe_mode.is_some_and(|c| !c.allow_immutable_unsafe_cell()) {
|
|
|
|
if !op.layout.is_zst() && !op.layout.ty.is_freeze(*self.ecx.tcx, self.ecx.param_env) {
|
|
|
|
if !self.in_mutable_memory(op) {
|
|
|
|
throw_validation_failure!(self.path, UnsafeCellInImmutable);
|
|
|
|
}
|
2021-10-28 00:00:00 +00:00
|
|
|
}
|
|
|
|
}
|
2020-02-17 21:59:16 +00:00
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2022-07-04 02:55:25 +00:00
|
|
|
#[inline]
|
2024-03-09 12:05:13 +00:00
|
|
|
fn visit_box(
|
|
|
|
&mut self,
|
|
|
|
_box_ty: Ty<'tcx>,
|
|
|
|
op: &OpTy<'tcx, M::Provenance>,
|
|
|
|
) -> InterpResult<'tcx> {
|
2023-05-17 10:30:14 +00:00
|
|
|
self.check_safe_pointer(op, PointerKind::Box)?;
|
2022-07-04 02:55:25 +00:00
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2020-02-17 21:59:16 +00:00
|
|
|
/// The main entry point of validation for a single value: checks primitives,
/// applies the CTFE `UnsafeCell` restriction, then recurses into compound
/// values (with fast paths for `str` and int/float arrays), and finally
/// enforces the scalar validity ranges dictated by the layout's ABI.
#[inline]
fn visit_value(&mut self, op: &OpTy<'tcx, M::Provenance>) -> InterpResult<'tcx> {
    trace!("visit_value: {:?}, {:?}", *op, op.layout);

    // Check primitive types -- the leaves of our recursive descent.
    // Returns `true` if the type was fully handled as a primitive.
    if self.try_visit_primitive(op)? {
        return Ok(());
    }

    // Special check preventing `UnsafeCell` in the inner part of constants
    if self.ctfe_mode.is_some_and(|c| !c.allow_immutable_unsafe_cell()) {
        if !op.layout.is_zst()
            && let Some(def) = op.layout.ty.ty_adt_def()
            && def.is_unsafe_cell()
        {
            if !self.in_mutable_memory(op) {
                throw_validation_failure!(self.path, UnsafeCellInImmutable);
            }
        }
    }

    // Recursively walk the value at its type. Apply optimizations for some large types.
    match op.layout.ty.kind() {
        ty::Str => {
            let mplace = op.assert_mem_place(); // strings are unsized and hence never immediate
            let len = mplace.len(self.ecx)?;
            // Reading the bytes checks for uninit/pointer bytes; UTF-8 is
            // not verified here.
            try_validation!(
                self.ecx.read_bytes_ptr_strip_provenance(mplace.ptr(), Size::from_bytes(len)),
                self.path,
                Ub(InvalidUninitBytes(..)) => Uninit { expected: ExpectedKind::Str },
                Unsup(ReadPointerAsInt(_)) => PointerAsInt { expected: ExpectedKind::Str }
            );
        }
        ty::Array(tys, ..) | ty::Slice(tys)
            // This optimization applies for types that can hold arbitrary bytes (such as
            // integer and floating point types) or for structs or tuples with no fields.
            // FIXME(wesleywiser) This logic could be extended further to arbitrary structs
            // or tuples made up of integer/floating point types or inhabited ZSTs with no
            // padding.
            if matches!(tys.kind(), ty::Int(..) | ty::Uint(..) | ty::Float(..))
            =>
        {
            let expected = if tys.is_integral() { ExpectedKind::Int } else { ExpectedKind::Float };
            // Optimized handling for arrays of integer/float type.

            // This is the length of the array/slice.
            let len = op.len(self.ecx)?;
            // This is the element type size.
            let layout = self.ecx.layout_of(*tys)?;
            // This is the size in bytes of the whole array. (This checks for overflow.)
            let size = layout.size * len;
            // If the size is 0, there is nothing to check.
            // (`size` can only be 0 if `len` is 0, and empty arrays are always valid.)
            if size == Size::ZERO {
                return Ok(());
            }
            // Now that we definitely have a non-ZST array, we know it lives in memory.
            let mplace = match op.as_mplace_or_imm() {
                Left(mplace) => mplace,
                Right(imm) => match *imm {
                    Immediate::Uninit =>
                        throw_validation_failure!(self.path, Uninit { expected }),
                    Immediate::Scalar(..) | Immediate::ScalarPair(..) =>
                        bug!("arrays/slices can never have Scalar/ScalarPair layout"),
                }
            };

            // Optimization: we just check the entire range at once.
            // NOTE: Keep this in sync with the handling of integer and float
            // types above, in `visit_primitive`.
            // No need for an alignment check here, this is not an actual memory access.
            let alloc = self.ecx.get_ptr_alloc(mplace.ptr(), size)?.expect("we already excluded size 0");

            match alloc.get_bytes_strip_provenance() {
                // In the happy case, we needn't check anything else.
                Ok(_) => {}
                // Some error happened, try to provide a more detailed description.
                Err(err) => {
                    // For some errors we might be able to provide extra information.
                    // (This custom logic does not fit the `try_validation!` macro.)
                    match err.kind() {
                        Ub(InvalidUninitBytes(Some((_alloc_id, access)))) | Unsup(ReadPointerAsInt(Some((_alloc_id, access)))) => {
                            // Some byte was uninitialized, determine which
                            // element that byte belongs to so we can
                            // provide an index.
                            let i = usize::try_from(
                                access.bad.start.bytes() / layout.size.bytes(),
                            )
                            .unwrap();
                            self.path.push(PathElem::ArrayElem(i));

                            if matches!(err.kind(), Ub(InvalidUninitBytes(_))) {
                                throw_validation_failure!(self.path, Uninit { expected })
                            } else {
                                throw_validation_failure!(self.path, PointerAsInt { expected })
                            }
                        }

                        // Propagate upwards (that will also check for unexpected errors).
                        _ => return Err(err),
                    }
                }
            }
        }
        // Fast path for arrays and slices of ZSTs. We only need to check a single ZST element
        // of an array and not all of them, because there's only a single value of a specific
        // ZST type, so either validation fails for all elements or none.
        ty::Array(tys, ..) | ty::Slice(tys) if self.ecx.layout_of(*tys)?.is_zst() => {
            // Validate just the first element (if any).
            if op.len(self.ecx)? > 0 {
                self.visit_field(op, 0, &self.ecx.project_index(op, 0)?)?;
            }
        }
        _ => {
            // default handler
            try_validation!(
                self.walk_value(op),
                self.path,
                // It's not great to catch errors here, since we can't give a very good path,
                // but it's better than ICEing.
                Ub(InvalidVTableTrait { expected_trait, vtable_trait }) => {
                    InvalidMetaWrongTrait { expected_trait, vtable_trait: *vtable_trait }
                },
            );
        }
    }

    // *After* all of this, check the ABI. We need to check the ABI to handle
    // types like `NonNull` where the `Scalar` info is more restrictive than what
    // the fields say (`rustc_layout_scalar_valid_range_start`).
    // But in most cases, this will just propagate what the fields say,
    // and then we want the error to point at the field -- so, first recurse,
    // then check ABI.
    //
    // FIXME: We could avoid some redundant checks here. For newtypes wrapping
    // scalars, we do the same check on every "level" (e.g., first we check
    // MyNewtype and then the scalar in there).
    match op.layout.abi {
        Abi::Uninhabited => {
            let ty = op.layout.ty;
            throw_validation_failure!(self.path, UninhabitedVal { ty });
        }
        Abi::Scalar(scalar_layout) => {
            if !scalar_layout.is_uninit_valid() {
                // There is something to check here.
                let scalar = self.read_scalar(op, ExpectedKind::InitScalar)?;
                self.visit_scalar(scalar, scalar_layout)?;
            }
        }
        Abi::ScalarPair(a_layout, b_layout) => {
            // We can only proceed if *both* scalars need to be initialized.
            // FIXME: find a way to also check ScalarPair when one side can be uninit but
            // the other must be init.
            if !a_layout.is_uninit_valid() && !b_layout.is_uninit_valid() {
                let (a, b) =
                    self.read_immediate(op, ExpectedKind::InitScalar)?.to_scalar_pair();
                self.visit_scalar(a, a_layout)?;
                self.visit_scalar(b, b_layout)?;
            }
        }
        Abi::Vector { .. } => {
            // No checks here, we assume layout computation gets this right.
            // (This is harder to check since Miri does not represent these as `Immediate`. We
            // also cannot use field projections since this might be a newtype around a vector.)
        }
        Abi::Aggregate { .. } => {
            // Nothing to do.
        }
    }

    Ok(())
}
|
2018-10-31 15:46:33 +00:00
|
|
|
}
|
2018-08-17 10:18:02 +00:00
|
|
|
|
2020-03-16 22:12:42 +00:00
|
|
|
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
|
2020-03-05 22:31:39 +00:00
|
|
|
/// Shared implementation behind [`Self::const_validate_operand`] and
/// [`Self::validate_operand`]: builds a `ValidityVisitor` with the given
/// configuration, runs it over `op`, and classifies the outcome.
fn validate_operand_internal(
    &self,
    op: &OpTy<'tcx, M::Provenance>,
    path: Vec<PathElem>,
    ref_tracking: Option<&mut RefTracking<MPlaceTy<'tcx, M::Provenance>, Vec<PathElem>>>,
    ctfe_mode: Option<CtfeValidationMode>,
) -> InterpResult<'tcx> {
    trace!("validate_operand_internal: {:?}, {:?}", *op, op.layout.ty);

    // Construct a visitor
    let mut visitor = ValidityVisitor { path, ref_tracking, ctfe_mode, ecx: self };

    // Run it.
    match self.run_for_validation(|| visitor.visit_value(op)) {
        Ok(()) => Ok(()),
        // Pass through validation failures and "invalid program" issues.
        Err(err)
            if matches!(
                err.kind(),
                err_ub!(ValidationError { .. }) | InterpError::InvalidProgram(_)
            ) =>
        {
            Err(err)
        }
        // Complain about any other kind of error -- those are bad because we'd like to
        // report them in a way that shows *where* in the value the issue lies.
        Err(err) => {
            bug!(
                "Unexpected error during validation: {}",
                format_interp_error(self.tcx.dcx(), err)
            );
        }
    }
}
|
2020-03-05 22:31:39 +00:00
|
|
|
|
|
|
|
/// This function checks the data at `op` to be const-valid.
/// `op` is assumed to cover valid memory if it is an indirect operand.
/// It will error if the bits at the destination do not match the ones described by the layout.
///
/// `ref_tracking` is used to record references that we encounter so that they
/// can be checked recursively by an outside driving loop.
///
/// `ctfe_mode` controls which rules for constants are enforced, e.g.:
/// - no pointers to statics.
/// - no `UnsafeCell` or non-ZST `&mut`.
#[inline(always)]
pub(crate) fn const_validate_operand(
    &self,
    op: &OpTy<'tcx, M::Provenance>,
    path: Vec<PathElem>,
    ref_tracking: &mut RefTracking<MPlaceTy<'tcx, M::Provenance>, Vec<PathElem>>,
    ctfe_mode: CtfeValidationMode,
) -> InterpResult<'tcx> {
    self.validate_operand_internal(op, path, Some(ref_tracking), Some(ctfe_mode))
}
|
|
|
|
|
|
|
|
/// This function checks the data at `op` to be runtime-valid.
/// `op` is assumed to cover valid memory if it is an indirect operand.
/// It will error if the bits at the destination do not match the ones described by the layout.
#[inline(always)]
pub fn validate_operand(&self, op: &OpTy<'tcx, M::Provenance>) -> InterpResult<'tcx> {
    // Note that we *could* actually be in CTFE here with `-Zextra-const-ub-checks`, but it's
    // still correct to not use `ctfe_mode`: that mode is for validation of the final constant
    // value, it rules out things like `UnsafeCell` in awkward places. It also can make checking
    // recurse through references which, for now, we don't want here, either.
    self.validate_operand_internal(op, vec![], None, None)
}
|
2018-08-17 10:18:02 +00:00
|
|
|
}
|