// rust/compiler/rustc_const_eval/src/interpret/cast.rs
use std::assert_matches::assert_matches;

use rustc_apfloat::ieee::{Double, Half, Quad, Single};
use rustc_apfloat::{Float, FloatConvert};
use rustc_middle::mir::interpret::{InterpResult, PointerArithmetic, Scalar};
use rustc_middle::mir::CastKind;
use rustc_middle::ty::adjustment::PointerCoercion;
use rustc_middle::ty::layout::{IntegerExt, LayoutOf, TyAndLayout};
use rustc_middle::ty::{self, FloatTy, Ty};
use rustc_middle::{bug, span_bug};
use rustc_target::abi::Integer;
use rustc_type_ir::TyKind::*;
use tracing::trace;

use super::{
    err_inval, throw_ub, throw_ub_custom, util::ensure_monomorphic_enough, FnVal, ImmTy, Immediate,
    InterpCx, Machine, OpTy, PlaceTy,
};
use crate::fluent_generated as fluent;
impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
/// Top-level entry point for a MIR cast: reads `src`, performs the cast described by
/// `cast_kind`/`cast_ty`, and writes the result into `dest`.
pub fn cast(
    &mut self,
    src: &OpTy<'tcx, M::Provenance>,
    cast_kind: CastKind,
    cast_ty: Ty<'tcx>,
    dest: &PlaceTy<'tcx, M::Provenance>,
) -> InterpResult<'tcx> {
    // `cast_ty` will often be the same as `dest.ty`, but not always, since subtyping is still
    // possible.
    let cast_layout =
        if cast_ty == dest.layout.ty { dest.layout } else { self.layout_of(cast_ty)? };
    // FIXME: In which cases should we trigger UB when the source is uninit?
    match cast_kind {
        CastKind::PointerCoercion(PointerCoercion::Unsize) => {
            // Unsizing (e.g. `&[T; N]` -> `&[T]`) may recurse through struct fields.
            self.unsize_into(src, cast_layout, dest)?;
        }
        CastKind::PointerExposeProvenance => {
            let src = self.read_immediate(src)?;
            let res = self.pointer_expose_provenance_cast(&src, cast_layout)?;
            self.write_immediate(*res, dest)?;
        }
        CastKind::PointerWithExposedProvenance => {
            let src = self.read_immediate(src)?;
            let res = self.pointer_with_exposed_provenance_cast(&src, cast_layout)?;
            self.write_immediate(*res, dest)?;
        }
        CastKind::IntToInt | CastKind::IntToFloat => {
            let src = self.read_immediate(src)?;
            let res = self.int_to_int_or_float(&src, cast_layout)?;
            self.write_immediate(*res, dest)?;
        }
        CastKind::FloatToFloat | CastKind::FloatToInt => {
            let src = self.read_immediate(src)?;
            let res = self.float_to_float_or_int(&src, cast_layout)?;
            self.write_immediate(*res, dest)?;
        }
        CastKind::FnPtrToPtr | CastKind::PtrToPtr => {
            let src = self.read_immediate(src)?;
            let res = self.ptr_to_ptr(&src, cast_layout)?;
            self.write_immediate(*res, dest)?;
        }
        CastKind::PointerCoercion(
            PointerCoercion::MutToConstPointer | PointerCoercion::ArrayToPointer,
        ) => {
            bug!("{cast_kind:?} casts are for borrowck only, not runtime MIR");
        }
        CastKind::PointerCoercion(PointerCoercion::ReifyFnPointer) => {
            // All reifications must be monomorphic, bail out otherwise.
            ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;

            // The src operand does not matter, just its type
            match *src.layout.ty.kind() {
                ty::FnDef(def_id, args) => {
                    let instance = ty::Instance::resolve_for_fn_ptr(
                        *self.tcx,
                        self.param_env,
                        def_id,
                        args,
                    )
                    .ok_or_else(|| err_inval!(TooGeneric))?;

                    let fn_ptr = self.fn_ptr(FnVal::Instance(instance));
                    self.write_pointer(fn_ptr, dest)?;
                }
                _ => span_bug!(self.cur_span(), "reify fn pointer on {}", src.layout.ty),
            }
        }
        CastKind::PointerCoercion(PointerCoercion::UnsafeFnPointer) => {
            let src = self.read_immediate(src)?;
            match cast_ty.kind() {
                ty::FnPtr(_) => {
                    // No change to value
                    self.write_immediate(*src, dest)?;
                }
                _ => span_bug!(self.cur_span(), "fn to unsafe fn cast on {}", cast_ty),
            }
        }
        CastKind::PointerCoercion(PointerCoercion::ClosureFnPointer(_)) => {
            // All reifications must be monomorphic, bail out otherwise.
            ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;

            // The src operand does not matter, just its type
            match *src.layout.ty.kind() {
                ty::Closure(def_id, args) => {
                    let instance = ty::Instance::resolve_closure(
                        *self.tcx,
                        def_id,
                        args,
                        ty::ClosureKind::FnOnce,
                    );
                    let fn_ptr = self.fn_ptr(FnVal::Instance(instance));
                    self.write_pointer(fn_ptr, dest)?;
                }
                _ => span_bug!(self.cur_span(), "closure fn pointer on {}", src.layout.ty),
            }
        }
        CastKind::DynStar => {
            if let ty::Dynamic(data, _, ty::DynStar) = cast_ty.kind() {
                // Initial cast from sized to dyn trait
                let vtable = self.get_vtable_ptr(src.layout.ty, data.principal())?;
                let vtable = Scalar::from_maybe_pointer(vtable, self);
                let data = self.read_immediate(src)?.to_scalar();
                // A `dyn*` payload must itself be pointer-like; this read enforces that.
                let _assert_pointer_like = data.to_pointer(self)?;
                let val = Immediate::ScalarPair(data, vtable);
                self.write_immediate(val, dest)?;
            } else {
                bug!()
            }
        }
        CastKind::Transmute => {
            assert!(src.layout.is_sized());
            assert!(dest.layout.is_sized());
            assert_eq!(cast_ty, dest.layout.ty); // we otherwise ignore `cast_ty` entirely...
            if src.layout.size != dest.layout.size {
                // Size-changing transmutes are UB; report via a structured diagnostic.
                throw_ub_custom!(
                    fluent::const_eval_invalid_transmute,
                    src_bytes = src.layout.size.bytes(),
                    dest_bytes = dest.layout.size.bytes(),
                    src = src.layout.ty,
                    dest = dest.layout.ty,
                );
            }
            self.copy_op_allow_transmute(src, dest)?;
        }
    }
    Ok(())
}
2022-10-25 20:09:35 +00:00
/// Handles 'IntToInt' and 'IntToFloat' casts.
2022-10-16 18:30:32 +00:00
pub fn int_to_int_or_float(
&self,
2022-10-16 18:30:32 +00:00
src: &ImmTy<'tcx, M::Provenance>,
2023-09-21 05:26:11 +00:00
cast_to: TyAndLayout<'tcx>,
) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
2022-10-25 20:09:35 +00:00
assert!(src.layout.ty.is_integral() || src.layout.ty.is_char() || src.layout.ty.is_bool());
2023-09-21 05:26:11 +00:00
assert!(cast_to.ty.is_floating_point() || cast_to.ty.is_integral() || cast_to.ty.is_char());
2022-10-25 20:09:35 +00:00
Ok(ImmTy::from_scalar(
2023-09-21 05:26:11 +00:00
self.cast_from_int_like(src.to_scalar(), src.layout, cast_to.ty)?,
cast_to,
))
2022-10-16 18:30:32 +00:00
}
2022-10-25 20:09:35 +00:00
/// Handles 'FloatToFloat' and 'FloatToInt' casts.
2022-10-16 18:30:32 +00:00
pub fn float_to_float_or_int(
&self,
src: &ImmTy<'tcx, M::Provenance>,
2023-09-21 05:26:11 +00:00
cast_to: TyAndLayout<'tcx>,
) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
2023-10-19 18:10:19 +00:00
use rustc_type_ir::TyKind::*;
2016-11-27 06:58:01 +00:00
let Float(fty) = src.layout.ty.kind() else {
bug!("FloatToFloat/FloatToInt cast: source type {} is not a float type", src.layout.ty)
};
let val = match fty {
FloatTy::F16 => self.cast_from_float(src.to_scalar().to_f16()?, cast_to.ty),
FloatTy::F32 => self.cast_from_float(src.to_scalar().to_f32()?, cast_to.ty),
FloatTy::F64 => self.cast_from_float(src.to_scalar().to_f64()?, cast_to.ty),
FloatTy::F128 => self.cast_from_float(src.to_scalar().to_f128()?, cast_to.ty),
};
2023-09-21 05:26:11 +00:00
Ok(ImmTy::from_scalar(val, cast_to))
2022-10-16 18:30:32 +00:00
}
2022-10-16 18:30:32 +00:00
/// Handles 'FnPtrToPtr' and 'PtrToPtr' casts.
pub fn ptr_to_ptr(
&self,
2022-10-16 18:30:32 +00:00
src: &ImmTy<'tcx, M::Provenance>,
2023-09-21 05:26:11 +00:00
cast_to: TyAndLayout<'tcx>,
) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
2022-10-25 20:09:35 +00:00
assert!(src.layout.ty.is_any_ptr());
2023-09-21 05:26:11 +00:00
assert!(cast_to.ty.is_unsafe_ptr());
// Handle casting any ptr to raw ptr (might be a fat ptr).
2023-09-21 05:26:11 +00:00
if cast_to.size == src.layout.size {
2024-04-21 23:11:01 +00:00
// Thin or fat pointer that just has the ptr kind of target type changed.
2023-09-21 05:26:11 +00:00
return Ok(ImmTy::from_immediate(**src, cast_to));
2022-10-16 18:30:32 +00:00
} else {
2022-10-25 20:09:35 +00:00
// Casting the metadata away from a fat ptr.
assert_eq!(src.layout.size, 2 * self.pointer_size());
2023-09-21 05:26:11 +00:00
assert_eq!(cast_to.size, self.pointer_size());
2022-10-25 20:09:35 +00:00
assert!(src.layout.ty.is_unsafe_ptr());
return match **src {
2023-09-21 05:26:11 +00:00
Immediate::ScalarPair(data, _) => Ok(ImmTy::from_scalar(data, cast_to)),
2022-10-25 20:09:35 +00:00
Immediate::Scalar(..) => span_bug!(
self.cur_span(),
"{:?} input to a fat-to-thin cast ({} -> {})",
2022-10-25 20:09:35 +00:00
*src,
src.layout.ty,
2023-09-21 05:26:11 +00:00
cast_to.ty
2022-10-25 20:09:35 +00:00
),
Immediate::Uninit => throw_ub!(InvalidUninitBytes(None)),
};
2019-07-24 18:09:18 +00:00
}
}
pub fn pointer_expose_provenance_cast(
&mut self,
src: &ImmTy<'tcx, M::Provenance>,
2023-09-21 05:26:11 +00:00
cast_to: TyAndLayout<'tcx>,
) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
assert_matches!(src.layout.ty.kind(), ty::RawPtr(_, _) | ty::FnPtr(_));
2023-09-21 05:26:11 +00:00
assert!(cast_to.ty.is_integral());
let scalar = src.to_scalar();
let ptr = scalar.to_pointer(self)?;
match ptr.into_pointer_or_addr() {
Ok(ptr) => M::expose_ptr(self, ptr)?,
2022-07-01 21:57:32 +00:00
Err(_) => {} // Do nothing, exposing an invalid pointer (`None` provenance) is a NOP.
};
2023-09-21 05:26:11 +00:00
Ok(ImmTy::from_scalar(self.cast_from_int_like(scalar, src.layout, cast_to.ty)?, cast_to))
2016-09-07 16:34:59 +00:00
}
2024-03-23 22:00:53 +00:00
pub fn pointer_with_exposed_provenance_cast(
&self,
src: &ImmTy<'tcx, M::Provenance>,
2023-09-21 05:26:11 +00:00
cast_to: TyAndLayout<'tcx>,
) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
assert!(src.layout.ty.is_integral());
assert_matches!(cast_to.ty.kind(), ty::RawPtr(_, _));
// First cast to usize.
let scalar = src.to_scalar();
let addr = self.cast_from_int_like(scalar, src.layout, self.tcx.types.usize)?;
let addr = addr.to_target_usize(self)?;
// Then turn address into pointer.
let ptr = M::ptr_from_addr_cast(self, addr)?;
2023-09-21 05:26:11 +00:00
Ok(ImmTy::from_scalar(Scalar::from_maybe_pointer(ptr, self), cast_to))
}
2022-10-25 20:09:35 +00:00
/// Low-level cast helper function. This works directly on scalars and can take 'int-like' input
/// type (basically everything with a scalar layout) to int/float/char types.
fn cast_from_int_like(
&self,
scalar: Scalar<M::Provenance>, // input value (there is no ScalarTy so we separate data+layout)
2020-03-04 14:50:21 +00:00
src_layout: TyAndLayout<'tcx>,
cast_ty: Ty<'tcx>,
) -> InterpResult<'tcx, Scalar<M::Provenance>> {
// Let's make sure v is sign-extended *if* it has a signed type.
2020-05-24 17:28:44 +00:00
let signed = src_layout.abi.is_signed(); // Also asserts that abi is `Scalar`.
let v = match src_layout.ty.kind() {
Uint(_) | RawPtr(..) | FnPtr(..) => scalar.to_uint(src_layout.size)?,
Int(_) => scalar.to_int(src_layout.size)? as u128, // we will cast back to `i128` below if the sign matters
Bool => scalar.to_bool()?.into(),
Char => scalar.to_char()?.into(),
_ => span_bug!(self.cur_span(), "invalid int-like cast from {}", src_layout.ty),
};
Ok(match *cast_ty.kind() {
// int -> int
Int(_) | Uint(_) => {
2020-08-02 22:49:11 +00:00
let size = match *cast_ty.kind() {
Int(t) => Integer::from_int_ty(self, t).size(),
Uint(t) => Integer::from_uint_ty(self, t).size(),
_ => bug!(),
};
let v = size.truncate(v);
Scalar::from_uint(v, size)
}
// signed int -> float
Float(fty) if signed => {
let v = v as i128;
match fty {
FloatTy::F16 => Scalar::from_f16(Half::from_i128(v).value),
FloatTy::F32 => Scalar::from_f32(Single::from_i128(v).value),
FloatTy::F64 => Scalar::from_f64(Double::from_i128(v).value),
FloatTy::F128 => Scalar::from_f128(Quad::from_i128(v).value),
}
}
// unsigned int -> float
Float(fty) => match fty {
FloatTy::F16 => Scalar::from_f16(Half::from_u128(v).value),
FloatTy::F32 => Scalar::from_f32(Single::from_u128(v).value),
FloatTy::F64 => Scalar::from_f64(Double::from_u128(v).value),
FloatTy::F128 => Scalar::from_f128(Quad::from_u128(v).value),
},
// u8 -> char
Char => Scalar::from_u32(u8::try_from(v).unwrap().into()),
// Casts to bool are not permitted by rustc, no need to handle them here.
_ => span_bug!(self.cur_span(), "invalid int to {} cast", cast_ty),
})
2016-09-07 16:34:59 +00:00
}
2022-10-25 20:09:35 +00:00
/// Low-level cast helper function. Converts an apfloat `f` into int or float types.
fn cast_from_float<F>(&self, f: F, dest_ty: Ty<'tcx>) -> Scalar<M::Provenance>
where
F: Float
+ Into<Scalar<M::Provenance>>
+ FloatConvert<Half>
+ FloatConvert<Single>
+ FloatConvert<Double>
+ FloatConvert<Quad>,
{
2023-10-19 18:10:19 +00:00
use rustc_type_ir::TyKind::*;
fn adjust_nan<
'tcx,
M: Machine<'tcx>,
F1: rustc_apfloat::Float + FloatConvert<F2>,
F2: rustc_apfloat::Float,
>(
ecx: &InterpCx<'tcx, M>,
f1: F1,
f2: F2,
) -> F2 {
if f2.is_nan() { M::generate_nan(ecx, &[f1]) } else { f2 }
}
2020-08-02 22:49:11 +00:00
match *dest_ty.kind() {
// float -> uint
Uint(t) => {
let size = Integer::from_uint_ty(self, t).size();
// `to_u128` is a saturating cast, which is what we need
// (https://doc.rust-lang.org/nightly/nightly-rustc/rustc_apfloat/trait.Float.html#method.to_i128_r).
let v = f.to_u128(size.bits_usize()).value;
// This should already fit the bit width
Scalar::from_uint(v, size)
2017-12-06 08:25:29 +00:00
}
// float -> int
Int(t) => {
let size = Integer::from_int_ty(self, t).size();
// `to_i128` is a saturating cast, which is what we need
// (https://doc.rust-lang.org/nightly/nightly-rustc/rustc_apfloat/trait.Float.html#method.to_i128_r).
let v = f.to_i128(size.bits_usize()).value;
Scalar::from_int(v, size)
2017-12-06 08:25:29 +00:00
}
// float -> float
Float(fty) => match fty {
FloatTy::F16 => Scalar::from_f16(adjust_nan(self, f, f.convert(&mut false).value)),
FloatTy::F32 => Scalar::from_f32(adjust_nan(self, f, f.convert(&mut false).value)),
FloatTy::F64 => Scalar::from_f64(adjust_nan(self, f, f.convert(&mut false).value)),
FloatTy::F128 => {
Scalar::from_f128(adjust_nan(self, f, f.convert(&mut false).value))
}
},
// That's it.
_ => span_bug!(self.cur_span(), "invalid float to {} cast", dest_ty),
2016-09-07 16:34:59 +00:00
}
}
/// `src` is a *pointer to* a `source_ty`, and in `dest` we should store a pointer to the same
/// data at type `cast_ty`.
fn unsize_into_ptr(
    &mut self,
    src: &OpTy<'tcx, M::Provenance>,
    dest: &PlaceTy<'tcx, M::Provenance>,
    // The pointee types
    source_ty: Ty<'tcx>,
    cast_ty: Ty<'tcx>,
) -> InterpResult<'tcx> {
    // A<Struct> -> A<Trait> conversion
    let (src_pointee_ty, dest_pointee_ty) =
        self.tcx.struct_lockstep_tails_erasing_lifetimes(source_ty, cast_ty, self.param_env);

    match (&src_pointee_ty.kind(), &dest_pointee_ty.kind()) {
        // `&[T; N]` -> `&[T]`: attach the array length as slice metadata.
        (&ty::Array(_, length), &ty::Slice(_)) => {
            let ptr = self.read_pointer(src)?;
            let val = Immediate::new_slice(
                ptr,
                length.eval_target_usize(*self.tcx, self.param_env),
                self,
            );
            self.write_immediate(val, dest)
        }
        // `dyn TraitA` -> `dyn TraitB`: possibly requires building a new vtable.
        (ty::Dynamic(data_a, _, ty::Dyn), ty::Dynamic(data_b, _, ty::Dyn)) => {
            let val = self.read_immediate(src)?;
            if data_a.principal() == data_b.principal() {
                // A NOP cast that doesn't actually change anything, should be allowed even with mismatching vtables.
                // (But currently mismatching vtables violate the validity invariant so UB is triggered anyway.)
                return self.write_immediate(*val, dest);
            }
            // Recover the concrete type from the old vtable, then build the vtable for
            // the new principal trait.
            let (old_data, old_vptr) = val.to_scalar_pair();
            let old_data = old_data.to_pointer(self)?;
            let old_vptr = old_vptr.to_pointer(self)?;
            let ty = self.get_ptr_vtable_ty(old_vptr, Some(data_a))?;
            let new_vptr = self.get_vtable_ptr(ty, data_b.principal())?;
            self.write_immediate(Immediate::new_dyn_trait(old_data, new_vptr, self), dest)
        }
        (_, &ty::Dynamic(data, _, ty::Dyn)) => {
            // Initial cast from sized to dyn trait
            let vtable = self.get_vtable_ptr(src_pointee_ty, data.principal())?;
            let ptr = self.read_pointer(src)?;
            let val = Immediate::new_dyn_trait(ptr, vtable, &*self.tcx);
            self.write_immediate(val, dest)
        }
        _ => {
            // Do not ICE if we are not monomorphic enough.
            ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;
            ensure_monomorphic_enough(*self.tcx, cast_ty)?;

            span_bug!(
                self.cur_span(),
                "invalid pointer unsizing {} -> {}",
                src.layout.ty,
                cast_ty
            )
        }
    }
}
/// Performs an unsizing coercion of `src` into `dest` (of layout `cast_ty`), recursing
/// through struct fields (e.g. `Arc<T>` -> `Arc<dyn Trait>`) until a pointer is reached.
pub fn unsize_into(
    &mut self,
    src: &OpTy<'tcx, M::Provenance>,
    cast_ty: TyAndLayout<'tcx>,
    dest: &PlaceTy<'tcx, M::Provenance>,
) -> InterpResult<'tcx> {
    trace!("Unsizing {:?} of type {} into {}", *src, src.layout.ty, cast_ty.ty);
    match (&src.layout.ty.kind(), &cast_ty.ty.kind()) {
        // Pointer-to-pointer unsizing is handled by `unsize_into_ptr` on the pointee types.
        (&ty::Ref(_, s, _), &ty::Ref(_, c, _) | &ty::RawPtr(c, _))
        | (&ty::RawPtr(s, _), &ty::RawPtr(c, _)) => self.unsize_into_ptr(src, dest, *s, *c),
        (&ty::Adt(def_a, _), &ty::Adt(def_b, _)) => {
            assert_eq!(def_a, def_b); // implies same number of fields

            // Unsizing of generic struct with pointer fields, like `Arc<T>` -> `Arc<Trait>`.
            // There can be extra fields as long as they don't change their type or are 1-ZST.
            // There might also be no field that actually needs unsizing.
            let mut found_cast_field = false;
            for i in 0..src.layout.fields.count() {
                let cast_ty_field = cast_ty.field(self, i);
                let src_field = self.project_field(src, i)?;
                let dst_field = self.project_field(dest, i)?;
                if src_field.layout.is_1zst() && cast_ty_field.is_1zst() {
                    // Skip 1-ZST fields.
                } else if src_field.layout.ty == cast_ty_field.ty {
                    // Field type unchanged: plain copy.
                    self.copy_op(&src_field, &dst_field)?;
                } else {
                    // This is the (single) field that actually gets unsized.
                    if found_cast_field {
                        span_bug!(self.cur_span(), "unsize_into: more than one field to cast");
                    }
                    found_cast_field = true;
                    self.unsize_into(&src_field, cast_ty_field, &dst_field)?;
                }
            }
            Ok(())
        }
        _ => {
            // Do not ICE if we are not monomorphic enough.
            ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;
            ensure_monomorphic_enough(*self.tcx, cast_ty.ty)?;

            span_bug!(
                self.cur_span(),
                "unsize_into: invalid conversion: {:?} -> {:?}",
                src.layout,
                dest.layout
            )
        }
    }
}
}