Auto merge of #52712 - oli-obk:const_eval_cleanups, r=RalfJung

Reintroduce `Undef` and properly check constant value sizes

r? @RalfJung

cc @eddyb

Basically, all kinds of silent-failure paths that never actually occurred are now assertions.
bors 2018-08-03 14:28:12 +00:00
commit 59fa6bd6c1
28 changed files with 628 additions and 618 deletions
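
The heart of the change is visible in the hunks below: the old `Scalar::Bits { bits, defined }` encoding (where `defined` counted valid *bits* and `defined: 0` doubled as "undef") is replaced by a `size` field counted in *bytes* plus an explicit `ScalarMaybeUndef` wrapper. The following is a minimal self-contained sketch of the new shape; these are simplified stand-ins, not the actual `mir::interpret` definitions, which also carry a `Ptr` variant and `EvalResult` error plumbing:

```rust
// Minimal sketch (simplified stand-ins, not the real mir::interpret types).
#[derive(Clone, Copy, Debug, PartialEq)]
enum Scalar {
    /// The first `size` bytes of `bits` are the value.
    Bits { size: u8, bits: u128 },
    // The real enum also has Ptr(Pointer); omitted here.
}

#[derive(Clone, Copy, Debug, PartialEq)]
enum ScalarMaybeUndef {
    Scalar(Scalar),
    Undef,
}

impl From<Scalar> for ScalarMaybeUndef {
    fn from(s: Scalar) -> Self {
        ScalarMaybeUndef::Scalar(s)
    }
}

impl ScalarMaybeUndef {
    // The real method returns EvalResult<'static, Scalar> via err!(ReadUndefBytes);
    // a plain Result stands in for that here.
    fn unwrap_or_err(self) -> Result<Scalar, &'static str> {
        match self {
            ScalarMaybeUndef::Scalar(s) => Ok(s),
            ScalarMaybeUndef::Undef => Err("ReadUndefBytes"),
        }
    }
}

fn main() {
    // `size` is a byte count now: a bool is 1 and a char is 4 (previously 8/32 bits).
    let t: ScalarMaybeUndef = Scalar::Bits { bits: 1, size: 1 }.into();
    assert_eq!(t.unwrap_or_err(), Ok(Scalar::Bits { bits: 1, size: 1 }));
    // Undefined data is an explicit variant instead of `defined: 0`.
    assert!(ScalarMaybeUndef::Undef.unwrap_or_err().is_err());
}
```

Callers that previously branched on `defined` now either assert the size (internal invariants) or go through `unwrap_or_err` (user-visible reads of undefined bytes), as the diffs below show.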


@ -392,6 +392,11 @@ for ::mir::interpret::ConstValue<'gcx> {
}
}
impl_stable_hash_for!(enum mir::interpret::ScalarMaybeUndef {
Scalar(v),
Undef
});
impl_stable_hash_for!(enum mir::interpret::Value {
Scalar(v),
ScalarPair(a, b),
@ -466,9 +471,9 @@ for ::mir::interpret::Scalar {
mem::discriminant(self).hash_stable(hcx, hasher);
match *self {
Bits { bits, defined } => {
Bits { bits, size } => {
bits.hash_stable(hcx, hasher);
defined.hash_stable(hcx, hasher);
size.hash_stable(hcx, hasher);
},
Ptr(ptr) => ptr.hash_stable(hcx, hasher),
}


@ -13,7 +13,7 @@ pub use self::error::{
FrameInfo, ConstEvalResult,
};
pub use self::value::{Scalar, Value, ConstValue};
pub use self::value::{Scalar, Value, ConstValue, ScalarMaybeUndef};
use std::fmt;
use mir;


@ -15,22 +15,24 @@ pub enum ConstValue<'tcx> {
/// to allow HIR creation to happen for everything before needing to be able to run constant
/// evaluation
Unevaluated(DefId, &'tcx Substs<'tcx>),
/// Used only for types with layout::abi::Scalar ABI and ZSTs which use Scalar::undef()
/// Used only for types with layout::abi::Scalar ABI and ZSTs
Scalar(Scalar),
/// Used only for types with layout::abi::ScalarPair
ScalarPair(Scalar, Scalar),
///
/// The second field may be undef in case of `Option<usize>::None`
ScalarPair(Scalar, ScalarMaybeUndef),
/// Used only for the remaining cases. An allocation + offset into the allocation
ByRef(&'tcx Allocation, Size),
}
impl<'tcx> ConstValue<'tcx> {
#[inline]
pub fn from_byval_value(val: Value) -> Self {
match val {
pub fn from_byval_value(val: Value) -> EvalResult<'static, Self> {
Ok(match val {
Value::ByRef(..) => bug!(),
Value::ScalarPair(a, b) => ConstValue::ScalarPair(a, b),
Value::Scalar(val) => ConstValue::Scalar(val),
}
Value::ScalarPair(a, b) => ConstValue::ScalarPair(a.unwrap_or_err()?, b),
Value::Scalar(val) => ConstValue::Scalar(val.unwrap_or_err()?),
})
}
#[inline]
@ -38,18 +40,13 @@ impl<'tcx> ConstValue<'tcx> {
match *self {
ConstValue::Unevaluated(..) |
ConstValue::ByRef(..) => None,
ConstValue::ScalarPair(a, b) => Some(Value::ScalarPair(a, b)),
ConstValue::Scalar(val) => Some(Value::Scalar(val)),
ConstValue::ScalarPair(a, b) => Some(Value::ScalarPair(a.into(), b)),
ConstValue::Scalar(val) => Some(Value::Scalar(val.into())),
}
}
#[inline]
pub fn from_scalar(val: Scalar) -> Self {
ConstValue::Scalar(val)
}
#[inline]
pub fn to_scalar(&self) -> Option<Scalar> {
pub fn try_to_scalar(&self) -> Option<Scalar> {
match *self {
ConstValue::Unevaluated(..) |
ConstValue::ByRef(..) |
@ -60,12 +57,12 @@ impl<'tcx> ConstValue<'tcx> {
#[inline]
pub fn to_bits(&self, size: Size) -> Option<u128> {
self.to_scalar()?.to_bits(size).ok()
self.try_to_scalar()?.to_bits(size).ok()
}
#[inline]
pub fn to_ptr(&self) -> Option<Pointer> {
self.to_scalar()?.to_ptr().ok()
self.try_to_scalar()?.to_ptr().ok()
}
}
@ -81,8 +78,8 @@ impl<'tcx> ConstValue<'tcx> {
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash)]
pub enum Value {
ByRef(Scalar, Align),
Scalar(Scalar),
ScalarPair(Scalar, Scalar),
Scalar(ScalarMaybeUndef),
ScalarPair(ScalarMaybeUndef, ScalarMaybeUndef),
}
impl<'tcx> ty::TypeFoldable<'tcx> for Value {
@ -98,23 +95,27 @@ impl<'tcx> Scalar {
pub fn ptr_null<C: HasDataLayout>(cx: C) -> Self {
Scalar::Bits {
bits: 0,
defined: cx.data_layout().pointer_size.bits() as u8,
size: cx.data_layout().pointer_size.bytes() as u8,
}
}
pub fn to_value_with_len<C: HasDataLayout>(self, len: u64, cx: C) -> Value {
ScalarMaybeUndef::Scalar(self).to_value_with_len(len, cx)
}
pub fn to_value_with_vtable(self, vtable: Pointer) -> Value {
ScalarMaybeUndef::Scalar(self).to_value_with_vtable(vtable)
}
pub fn ptr_signed_offset<C: HasDataLayout>(self, i: i64, cx: C) -> EvalResult<'tcx, Self> {
let layout = cx.data_layout();
match self {
Scalar::Bits { bits, defined } => {
let pointer_size = layout.pointer_size.bits() as u8;
if defined < pointer_size {
err!(ReadUndefBytes)
} else {
Ok(Scalar::Bits {
bits: layout.signed_offset(bits as u64, i)? as u128,
defined: pointer_size,
})
}
Scalar::Bits { bits, size } => {
assert_eq!(size as u64, layout.pointer_size.bytes());
Ok(Scalar::Bits {
bits: layout.signed_offset(bits as u64, i)? as u128,
size,
})
}
Scalar::Ptr(ptr) => ptr.signed_offset(i, layout).map(Scalar::Ptr),
}
@ -123,65 +124,43 @@ impl<'tcx> Scalar {
pub fn ptr_offset<C: HasDataLayout>(self, i: Size, cx: C) -> EvalResult<'tcx, Self> {
let layout = cx.data_layout();
match self {
Scalar::Bits { bits, defined } => {
let pointer_size = layout.pointer_size.bits() as u8;
if defined < pointer_size {
err!(ReadUndefBytes)
} else {
Ok(Scalar::Bits {
bits: layout.offset(bits as u64, i.bytes())? as u128,
defined: pointer_size,
})
}
Scalar::Bits { bits, size } => {
assert_eq!(size as u64, layout.pointer_size.bytes());
Ok(Scalar::Bits {
bits: layout.offset(bits as u64, i.bytes())? as u128,
size,
})
}
Scalar::Ptr(ptr) => ptr.offset(i, layout).map(Scalar::Ptr),
}
}
pub fn ptr_wrapping_signed_offset<C: HasDataLayout>(self, i: i64, cx: C) -> EvalResult<'tcx, Self> {
pub fn ptr_wrapping_signed_offset<C: HasDataLayout>(self, i: i64, cx: C) -> Self {
let layout = cx.data_layout();
match self {
Scalar::Bits { bits, defined } => {
let pointer_size = layout.pointer_size.bits() as u8;
if defined < pointer_size {
err!(ReadUndefBytes)
} else {
Ok(Scalar::Bits {
bits: layout.wrapping_signed_offset(bits as u64, i) as u128,
defined: pointer_size,
})
Scalar::Bits { bits, size } => {
assert_eq!(size as u64, layout.pointer_size.bytes());
Scalar::Bits {
bits: layout.wrapping_signed_offset(bits as u64, i) as u128,
size,
}
}
}
Scalar::Ptr(ptr) => Ok(Scalar::Ptr(ptr.wrapping_signed_offset(i, layout))),
Scalar::Ptr(ptr) => Scalar::Ptr(ptr.wrapping_signed_offset(i, layout)),
}
}
pub fn is_null_ptr<C: HasDataLayout>(self, cx: C) -> EvalResult<'tcx, bool> {
pub fn is_null_ptr<C: HasDataLayout>(self, cx: C) -> bool {
match self {
Scalar::Bits {
bits, defined,
} => if defined < cx.data_layout().pointer_size.bits() as u8 {
err!(ReadUndefBytes)
} else {
Ok(bits == 0)
Scalar::Bits { bits, size } => {
assert_eq!(size as u64, cx.data_layout().pointer_size.bytes());
bits == 0
},
Scalar::Ptr(_) => Ok(false),
Scalar::Ptr(_) => false,
}
}
pub fn to_value_with_len<C: HasDataLayout>(self, len: u64, cx: C) -> Value {
Value::ScalarPair(self, Scalar::Bits {
bits: len as u128,
defined: cx.data_layout().pointer_size.bits() as u8,
})
}
pub fn to_value_with_vtable(self, vtable: Pointer) -> Value {
Value::ScalarPair(self, Scalar::Ptr(vtable))
}
pub fn to_value(self) -> Value {
Value::Scalar(self)
Value::Scalar(ScalarMaybeUndef::Scalar(self))
}
}
@ -199,8 +178,9 @@ impl From<Pointer> for Scalar {
pub enum Scalar {
/// The raw bytes of a simple value.
Bits {
/// The first `defined` number of bits are valid
defined: u8,
/// The first `size` bytes are the value.
/// Do not try to read fewer or more bytes than that
size: u8,
bits: u128,
},
@ -210,25 +190,63 @@ pub enum Scalar {
Ptr(Pointer),
}
impl<'tcx> Scalar {
pub fn undef() -> Self {
Scalar::Bits { bits: 0, defined: 0 }
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash)]
pub enum ScalarMaybeUndef {
Scalar(Scalar),
Undef,
}
impl From<Scalar> for ScalarMaybeUndef {
fn from(s: Scalar) -> Self {
ScalarMaybeUndef::Scalar(s)
}
}
impl ScalarMaybeUndef {
pub fn unwrap_or_err(self) -> EvalResult<'static, Scalar> {
match self {
ScalarMaybeUndef::Scalar(scalar) => Ok(scalar),
ScalarMaybeUndef::Undef => err!(ReadUndefBytes),
}
}
pub fn to_value_with_len<C: HasDataLayout>(self, len: u64, cx: C) -> Value {
Value::ScalarPair(self, Scalar::Bits {
bits: len as u128,
size: cx.data_layout().pointer_size.bytes() as u8,
}.into())
}
pub fn to_value_with_vtable(self, vtable: Pointer) -> Value {
Value::ScalarPair(self, Scalar::Ptr(vtable).into())
}
pub fn ptr_offset<C: HasDataLayout>(self, i: Size, cx: C) -> EvalResult<'tcx, Self> {
match self {
ScalarMaybeUndef::Scalar(scalar) => {
scalar.ptr_offset(i, cx).map(ScalarMaybeUndef::Scalar)
},
ScalarMaybeUndef::Undef => Ok(ScalarMaybeUndef::Undef)
}
}
}
impl<'tcx> Scalar {
pub fn from_bool(b: bool) -> Self {
// FIXME: can we make defined `1`?
Scalar::Bits { bits: b as u128, defined: 8 }
Scalar::Bits { bits: b as u128, size: 1 }
}
pub fn from_char(c: char) -> Self {
Scalar::Bits { bits: c as u128, defined: 32 }
Scalar::Bits { bits: c as u128, size: 4 }
}
pub fn to_bits(self, size: Size) -> EvalResult<'tcx, u128> {
pub fn to_bits(self, target_size: Size) -> EvalResult<'tcx, u128> {
match self {
Scalar::Bits { .. } if size.bits() == 0 => bug!("to_bits cannot be used with zsts"),
Scalar::Bits { bits, defined } if size.bits() <= defined as u64 => Ok(bits),
Scalar::Bits { .. } => err!(ReadUndefBytes),
Scalar::Bits { bits, size } => {
assert_eq!(target_size.bytes(), size as u64);
assert_ne!(size, 0, "to_bits cannot be used with zsts");
Ok(bits)
}
Scalar::Ptr(_) => err!(ReadPointerAsBytes),
}
}
@ -256,8 +274,8 @@ impl<'tcx> Scalar {
pub fn to_bool(self) -> EvalResult<'tcx, bool> {
match self {
Scalar::Bits { bits: 0, defined: 8 } => Ok(false),
Scalar::Bits { bits: 1, defined: 8 } => Ok(true),
Scalar::Bits { bits: 0, size: 1 } => Ok(false),
Scalar::Bits { bits: 1, size: 1 } => Ok(true),
_ => err!(InvalidBool),
}
}
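
As the `to_bits` and pointer-offset hunks above show, size mismatches are no longer reported as `ReadUndefBytes`; they are asserted, since they indicate interpreter bugs rather than bad input. A rough sketch of the new contract, with a plain `Result` and a bare byte count standing in for `EvalResult` and `Size`:

```rust
// Sketch only: `Size` and `EvalResult` are replaced by a byte count and Result.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Scalar {
    Bits { size: u8, bits: u128 },
    Ptr, // payload omitted in this sketch
}

impl Scalar {
    fn to_bits(self, target_size_bytes: u64) -> Result<u128, &'static str> {
        match self {
            Scalar::Bits { bits, size } => {
                // Previously an err!(ReadUndefBytes) when `defined` was too small;
                // now a mismatch is an interpreter bug and panics.
                assert_eq!(target_size_bytes, size as u64);
                assert_ne!(size, 0, "to_bits cannot be used with zsts");
                Ok(bits)
            }
            Scalar::Ptr => Err("ReadPointerAsBytes"),
        }
    }
}

fn main() {
    let c = Scalar::Bits { bits: 'x' as u128, size: 4 };
    assert_eq!(c.to_bits(4), Ok('x' as u128));
    // `c.to_bits(8)` would now panic instead of silently erroring.
}
```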


@ -17,7 +17,7 @@ use hir::def::CtorKind;
use hir::def_id::DefId;
use hir::{self, HirId, InlineAsm};
use middle::region;
use mir::interpret::{EvalErrorKind, Scalar, Value};
use mir::interpret::{EvalErrorKind, Scalar, Value, ScalarMaybeUndef};
use mir::visit::MirVisitable;
use rustc_apfloat::ieee::{Double, Single};
use rustc_apfloat::Float;
@ -1465,10 +1465,10 @@ impl<'tcx> TerminatorKind<'tcx> {
.map(|&u| {
let mut s = String::new();
print_miri_value(
Value::Scalar(Scalar::Bits {
Scalar::Bits {
bits: u,
defined: size.bits() as u8,
}),
size: size.bytes() as u8,
}.to_value(),
switch_ty,
&mut s,
).unwrap();
@ -2225,45 +2225,58 @@ pub fn fmt_const_val<W: Write>(fmt: &mut W, const_val: &ty::Const) -> fmt::Resul
pub fn print_miri_value<W: Write>(value: Value, ty: Ty, f: &mut W) -> fmt::Result {
use ty::TypeVariants::*;
match (value, &ty.sty) {
(Value::Scalar(Scalar::Bits { bits: 0, .. }), &TyBool) => write!(f, "false"),
(Value::Scalar(Scalar::Bits { bits: 1, .. }), &TyBool) => write!(f, "true"),
(Value::Scalar(Scalar::Bits { bits, .. }), &TyFloat(ast::FloatTy::F32)) => {
write!(f, "{}f32", Single::from_bits(bits))
}
(Value::Scalar(Scalar::Bits { bits, .. }), &TyFloat(ast::FloatTy::F64)) => {
write!(f, "{}f64", Double::from_bits(bits))
}
(Value::Scalar(Scalar::Bits { bits, .. }), &TyUint(ui)) => write!(f, "{:?}{}", bits, ui),
(Value::Scalar(Scalar::Bits { bits, .. }), &TyInt(i)) => {
let bit_width = ty::tls::with(|tcx| {
let ty = tcx.lift_to_global(&ty).unwrap();
tcx.layout_of(ty::ParamEnv::empty().and(ty))
.unwrap()
.size
.bits()
});
let shift = 128 - bit_width;
write!(f, "{:?}{}", ((bits as i128) << shift) >> shift, i)
}
(Value::Scalar(Scalar::Bits { bits, .. }), &TyChar) => {
write!(f, "{:?}", ::std::char::from_u32(bits as u32).unwrap())
}
(_, &TyFnDef(did, _)) => write!(f, "{}", item_path_str(did)),
(
Value::ScalarPair(Scalar::Ptr(ptr), Scalar::Bits { bits: len, .. }),
&TyRef(_, &ty::TyS { sty: TyStr, .. }, _),
) => ty::tls::with(|tcx| match tcx.alloc_map.lock().get(ptr.alloc_id) {
Some(interpret::AllocType::Memory(alloc)) => {
assert_eq!(len as usize as u128, len);
let slice = &alloc.bytes[(ptr.offset.bytes() as usize)..][..(len as usize)];
let s = ::std::str::from_utf8(slice).expect("non utf8 str from miri");
write!(f, "{:?}", s)
// print some primitives
if let Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits { bits, .. })) = value {
match ty.sty {
TyBool if bits == 0 => return write!(f, "false"),
TyBool if bits == 1 => return write!(f, "true"),
TyFloat(ast::FloatTy::F32) => return write!(f, "{}f32", Single::from_bits(bits)),
TyFloat(ast::FloatTy::F64) => return write!(f, "{}f64", Double::from_bits(bits)),
TyUint(ui) => return write!(f, "{:?}{}", bits, ui),
TyInt(i) => {
let bit_width = ty::tls::with(|tcx| {
let ty = tcx.lift_to_global(&ty).unwrap();
tcx.layout_of(ty::ParamEnv::empty().and(ty))
.unwrap()
.size
.bits()
});
let shift = 128 - bit_width;
return write!(f, "{:?}{}", ((bits as i128) << shift) >> shift, i);
}
_ => write!(f, "pointer to erroneous constant {:?}, {:?}", ptr, len),
}),
_ => write!(f, "{:?}:{}", value, ty),
TyChar => return write!(f, "{:?}", ::std::char::from_u32(bits as u32).unwrap()),
_ => {},
}
}
// print function definitions
if let TyFnDef(did, _) = ty.sty {
return write!(f, "{}", item_path_str(did));
}
// print string literals
if let Value::ScalarPair(ptr, len) = value {
if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = ptr {
if let ScalarMaybeUndef::Scalar(Scalar::Bits { bits: len, .. }) = len {
if let TyRef(_, &ty::TyS { sty: TyStr, .. }, _) = ty.sty {
return ty::tls::with(|tcx| {
let alloc = tcx.alloc_map.lock().get(ptr.alloc_id);
if let Some(interpret::AllocType::Memory(alloc)) = alloc {
assert_eq!(len as usize as u128, len);
let slice = &alloc
.bytes
[(ptr.offset.bytes() as usize)..]
[..(len as usize)];
let s = ::std::str::from_utf8(slice).expect("non utf8 str from miri");
write!(f, "{:?}", s)
} else {
write!(f, "pointer to erroneous constant {:?}, {:?}", ptr, len)
}
});
}
}
}
}
// just raw dump everything else
write!(f, "{:?}:{}", value, ty)
}
fn item_path_str(def_id: DefId) -> String {


@ -1887,22 +1887,13 @@ impl<'tcx> Const<'tcx> {
})
}
#[inline]
pub fn from_byval_value(
tcx: TyCtxt<'_, '_, 'tcx>,
val: Value,
ty: Ty<'tcx>,
) -> &'tcx Self {
Self::from_const_value(tcx, ConstValue::from_byval_value(val), ty)
}
#[inline]
pub fn from_scalar(
tcx: TyCtxt<'_, '_, 'tcx>,
val: Scalar,
ty: Ty<'tcx>,
) -> &'tcx Self {
Self::from_const_value(tcx, ConstValue::from_scalar(val), ty)
Self::from_const_value(tcx, ConstValue::Scalar(val), ty)
}
#[inline]
@ -1918,12 +1909,12 @@ impl<'tcx> Const<'tcx> {
let shift = 128 - size.bits();
let truncated = (bits << shift) >> shift;
assert_eq!(truncated, bits, "from_bits called with untruncated value");
Self::from_scalar(tcx, Scalar::Bits { bits, defined: size.bits() as u8 }, ty.value)
Self::from_scalar(tcx, Scalar::Bits { bits, size: size.bytes() as u8 }, ty.value)
}
#[inline]
pub fn zero_sized(tcx: TyCtxt<'_, '_, 'tcx>, ty: Ty<'tcx>) -> &'tcx Self {
Self::from_scalar(tcx, Scalar::undef(), ty)
Self::from_scalar(tcx, Scalar::Bits { bits: 0, size: 0 }, ty)
}
#[inline]
@ -1960,11 +1951,6 @@ impl<'tcx> Const<'tcx> {
self.val.to_byval_value()
}
#[inline]
pub fn to_scalar(&self) -> Option<Scalar> {
self.val.to_scalar()
}
#[inline]
pub fn assert_bits(
&self,


@ -39,10 +39,12 @@ pub fn scalar_to_llvm(
) -> &'ll Value {
let bitsize = if layout.is_bool() { 1 } else { layout.value.size(cx).bits() };
match cv {
Scalar::Bits { defined, .. } if (defined as u64) < bitsize || defined == 0 => {
C_undef(Type::ix(cx, bitsize))
Scalar::Bits { size: 0, .. } => {
assert_eq!(0, layout.value.size(cx).bytes());
C_undef(Type::ix(cx, 0))
},
Scalar::Bits { bits, .. } => {
Scalar::Bits { bits, size } => {
assert_eq!(size as u64, layout.value.size(cx).bytes());
let llval = C_uint_big(Type::ix(cx, bitsize), bits);
if layout.value == layout::Pointer {
unsafe { llvm::LLVMConstIntToPtr(llval, llty) }
@ -192,7 +194,7 @@ impl FunctionCx<'a, 'll, 'tcx> {
mir::Field::new(field as usize),
c,
)?;
if let Some(prim) = field.to_scalar() {
if let Some(prim) = field.val.try_to_scalar() {
let layout = bx.cx.layout_of(field_ty);
let scalar = match layout.abi {
layout::Abi::Scalar(ref x) => x,


@ -10,7 +10,7 @@
use rustc::mir::interpret::ConstEvalErr;
use rustc::mir;
use rustc::mir::interpret::ConstValue;
use rustc::mir::interpret::{ConstValue, ScalarMaybeUndef};
use rustc::ty;
use rustc::ty::layout::{self, Align, LayoutOf, TyLayout};
use rustc_data_structures::indexed_vec::Idx;
@ -110,12 +110,16 @@ impl OperandRef<'ll, 'tcx> {
a_scalar,
layout.scalar_pair_element_llvm_type(bx.cx, 0, true),
);
let b_llval = scalar_to_llvm(
bx.cx,
b,
b_scalar,
layout.scalar_pair_element_llvm_type(bx.cx, 1, true),
);
let b_layout = layout.scalar_pair_element_llvm_type(bx.cx, 1, true);
let b_llval = match b {
ScalarMaybeUndef::Scalar(b) => scalar_to_llvm(
bx.cx,
b,
b_scalar,
b_layout,
),
ScalarMaybeUndef::Undef => C_undef(b_layout),
};
OperandValue::Pair(a_llval, b_llval)
},
ConstValue::ByRef(alloc, offset) => {


@ -151,14 +151,14 @@ impl<'a, 'gcx, 'tcx> Cx<'a, 'gcx, 'tcx> {
let trunc = |n| {
let param_ty = self.param_env.and(self.tcx.lift_to_global(&ty).unwrap());
let bit_width = self.tcx.layout_of(param_ty).unwrap().size.bits();
trace!("trunc {} with size {} and shift {}", n, bit_width, 128 - bit_width);
let shift = 128 - bit_width;
let width = self.tcx.layout_of(param_ty).unwrap().size;
trace!("trunc {} with size {} and shift {}", n, width.bits(), 128 - width.bits());
let shift = 128 - width.bits();
let result = (n << shift) >> shift;
trace!("trunc result: {}", result);
ConstValue::Scalar(Scalar::Bits {
bits: result,
defined: bit_width as u8,
size: width.bytes() as u8,
})
};
@ -168,7 +168,7 @@ impl<'a, 'gcx, 'tcx> Cx<'a, 'gcx, 'tcx> {
let s = s.as_str();
let id = self.tcx.allocate_bytes(s.as_bytes());
let value = Scalar::Ptr(id.into()).to_value_with_len(s.len() as u64, self.tcx);
ConstValue::from_byval_value(value)
ConstValue::from_byval_value(value).unwrap()
},
LitKind::ByteStr(ref data) => {
let id = self.tcx.allocate_bytes(data);
@ -176,7 +176,7 @@ impl<'a, 'gcx, 'tcx> Cx<'a, 'gcx, 'tcx> {
},
LitKind::Byte(n) => ConstValue::Scalar(Scalar::Bits {
bits: n as u128,
defined: 8,
size: 1,
}),
LitKind::Int(n, _) if neg => {
let n = n as i128;
@ -194,14 +194,8 @@ impl<'a, 'gcx, 'tcx> Cx<'a, 'gcx, 'tcx> {
};
parse_float(n, fty)
}
LitKind::Bool(b) => ConstValue::Scalar(Scalar::Bits {
bits: b as u128,
defined: 8,
}),
LitKind::Char(c) => ConstValue::Scalar(Scalar::Bits {
bits: c as u128,
defined: 32,
}),
LitKind::Bool(b) => ConstValue::Scalar(Scalar::from_bool(b)),
LitKind::Char(c) => ConstValue::Scalar(Scalar::from_char(c)),
};
ty::Const::from_const_value(self.tcx, lit, ty)
}


@ -19,7 +19,7 @@ pub(crate) use self::check_match::check_match;
use interpret::{const_val_field, const_variant_index, self};
use rustc::mir::{fmt_const_val, Field, BorrowKind, Mutability};
use rustc::mir::interpret::{Scalar, GlobalId, ConstValue, Value};
use rustc::mir::interpret::{Scalar, GlobalId, ConstValue};
use rustc::ty::{self, TyCtxt, AdtDef, Ty, Region};
use rustc::ty::subst::{Substs, Kind};
use rustc::hir::{self, PatKind, RangeEnd};
@ -1080,8 +1080,9 @@ pub fn compare_const_vals<'a, 'tcx>(
l.partial_cmp(&r)
},
ty::TyInt(_) => {
let a = interpret::sign_extend(tcx, a, ty.value).expect("layout error for TyInt");
let b = interpret::sign_extend(tcx, b, ty.value).expect("layout error for TyInt");
let layout = tcx.layout_of(ty).ok()?;
let a = interpret::sign_extend(a, layout);
let b = interpret::sign_extend(b, layout);
Some((a as i128).cmp(&(b as i128)))
},
_ => Some(a.cmp(&b)),
@ -1090,17 +1091,24 @@ pub fn compare_const_vals<'a, 'tcx>(
if let ty::TyRef(_, rty, _) = ty.value.sty {
if let ty::TyStr = rty.sty {
match (a.to_byval_value(), b.to_byval_value()) {
match (a.val, b.val) {
(
Some(Value::ScalarPair(
ConstValue::ScalarPair(
Scalar::Ptr(ptr_a),
len_a,
)),
Some(Value::ScalarPair(
),
ConstValue::ScalarPair(
Scalar::Ptr(ptr_b),
len_b,
))
),
) if ptr_a.offset.bytes() == 0 && ptr_b.offset.bytes() == 0 => {
let len_a = len_a.unwrap_or_err().ok();
let len_b = len_b.unwrap_or_err().ok();
if len_a.is_none() || len_b.is_none() {
tcx.sess.struct_err("str slice len is undef").delay_as_bug();
}
let len_a = len_a?;
let len_b = len_b?;
if let Ok(len_a) = len_a.to_bits(tcx.data_layout.pointer_size) {
if let Ok(len_b) = len_b.to_bits(tcx.data_layout.pointer_size) {
if len_a == len_b {
@ -1142,7 +1150,7 @@ fn lit_to_const<'a, 'tcx>(lit: &'tcx ast::LitKind,
let s = s.as_str();
let id = tcx.allocate_bytes(s.as_bytes());
let value = Scalar::Ptr(id.into()).to_value_with_len(s.len() as u64, tcx);
ConstValue::from_byval_value(value)
ConstValue::from_byval_value(value).unwrap()
},
LitKind::ByteStr(ref data) => {
let id = tcx.allocate_bytes(data);
@ -1150,7 +1158,7 @@ fn lit_to_const<'a, 'tcx>(lit: &'tcx ast::LitKind,
},
LitKind::Byte(n) => ConstValue::Scalar(Scalar::Bits {
bits: n as u128,
defined: 8,
size: 1,
}),
LitKind::Int(n, _) => {
enum Int {
@ -1188,10 +1196,10 @@ fn lit_to_const<'a, 'tcx>(lit: &'tcx ast::LitKind,
Int::Signed(IntTy::I128)| Int::Unsigned(UintTy::U128) => n,
_ => bug!(),
};
let defined = tcx.layout_of(ty::ParamEnv::empty().and(ty)).unwrap().size.bits() as u8;
let size = tcx.layout_of(ty::ParamEnv::empty().and(ty)).unwrap().size.bytes() as u8;
ConstValue::Scalar(Scalar::Bits {
bits: n,
defined,
size,
})
},
LitKind::Float(n, fty) => {
@ -1204,14 +1212,8 @@ fn lit_to_const<'a, 'tcx>(lit: &'tcx ast::LitKind,
};
parse_float(n, fty, neg).map_err(|_| LitToConstError::UnparseableFloat)?
}
LitKind::Bool(b) => ConstValue::Scalar(Scalar::Bits {
bits: b as u128,
defined: 8,
}),
LitKind::Char(c) => ConstValue::Scalar(Scalar::Bits {
bits: c as u128,
defined: 32,
}),
LitKind::Bool(b) => ConstValue::Scalar(Scalar::from_bool(b)),
LitKind::Char(c) => ConstValue::Scalar(Scalar::from_char(c)),
};
Ok(ty::Const::from_const_value(tcx, lit, ty))
}
@ -1224,7 +1226,7 @@ pub fn parse_float<'tcx>(
let num = num.as_str();
use rustc_apfloat::ieee::{Single, Double};
use rustc_apfloat::Float;
let (bits, defined) = match fty {
let (bits, size) = match fty {
ast::FloatTy::F32 => {
num.parse::<f32>().map_err(|_| ())?;
let mut f = num.parse::<Single>().unwrap_or_else(|e| {
@ -1233,7 +1235,7 @@ pub fn parse_float<'tcx>(
if neg {
f = -f;
}
(f.to_bits(), 32)
(f.to_bits(), 4)
}
ast::FloatTy::F64 => {
num.parse::<f64>().map_err(|_| ())?;
@ -1243,9 +1245,9 @@ pub fn parse_float<'tcx>(
if neg {
f = -f;
}
(f.to_bits(), 64)
(f.to_bits(), 8)
}
};
Ok(ConstValue::Scalar(Scalar::Bits { bits, defined }))
Ok(ConstValue::Scalar(Scalar::Bits { bits, size }))
}
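
`compare_const_vals` above now calls `interpret::sign_extend` with a layout instead of a `tcx` and type; that helper (and the `truncate` counterpart behind the literal-lowering `trunc` closure) is a pure bit manipulation once the width is known. A self-contained sketch, taking the width in bits directly rather than a `TyLayout`:

```rust
// Sketch: the real helpers take a TyLayout and read layout.size; here the
// bit width is passed directly.
fn truncate(value: u128, size_bits: u64) -> u128 {
    let shift = 128 - size_bits;
    (value << shift) >> shift
}

fn sign_extend(value: u128, size_bits: u64) -> u128 {
    let shift = 128 - size_bits;
    (((value << shift) as i128) >> shift) as u128
}

fn main() {
    // -1i8 is stored as 0xff; sign-extending its 8 bits fills the upper bits.
    assert_eq!(sign_extend(0xff, 8) as i128, -1);
    // Truncating 0x1ff to 8 bits drops the ninth bit.
    assert_eq!(truncate(0x1ff, 8), 0xff);
}
```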


@ -1,5 +1,5 @@
use rustc::ty::{self, Ty};
use rustc::ty::layout::{self, LayoutOf};
use rustc::ty::layout::{self, LayoutOf, TyLayout};
use syntax::ast::{FloatTy, IntTy, UintTy};
use rustc_apfloat::ieee::{Single, Double};
@ -18,11 +18,11 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
dest_ty: Ty<'tcx>,
dest: Place,
) -> EvalResult<'tcx> {
let src_layout = self.layout_of(src.ty)?;
let dst_layout = self.layout_of(dest_ty)?;
use rustc::mir::CastKind::*;
match kind {
Unsize => {
let src_layout = self.layout_of(src.ty)?;
let dst_layout = self.layout_of(dest_ty)?;
self.unsize_into(src.value, src_layout, dest, dst_layout)?;
}
@ -57,16 +57,11 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
let discr_val = def
.discriminant_for_variant(*self.tcx, index)
.val;
let defined = self
.layout_of(dest_ty)
.unwrap()
.size
.bits() as u8;
return self.write_scalar(
dest,
Scalar::Bits {
bits: discr_val,
defined,
size: dst_layout.size.bytes() as u8,
},
dest_ty);
}
@ -76,9 +71,9 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
}
let src_val = self.value_to_scalar(src)?;
let dest_val = self.cast_scalar(src_val, src.ty, dest_ty)?;
let dest_val = self.cast_scalar(src_val, src_layout, dst_layout)?;
let valty = ValTy {
value: Value::Scalar(dest_val),
value: Value::Scalar(dest_val.into()),
ty: dest_ty,
};
self.write_value(valty, dest)?;
@ -100,7 +95,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
).ok_or_else(|| EvalErrorKind::TooGeneric.into());
let fn_ptr = self.memory.create_fn_alloc(instance?);
let valty = ValTy {
value: Value::Scalar(fn_ptr.into()),
value: Value::Scalar(Scalar::Ptr(fn_ptr.into()).into()),
ty: dest_ty,
};
self.write_value(valty, dest)?;
@ -136,7 +131,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
);
let fn_ptr = self.memory.create_fn_alloc(instance);
let valty = ValTy {
value: Value::Scalar(fn_ptr.into()),
value: Value::Scalar(Scalar::Ptr(fn_ptr.into()).into()),
ty: dest_ty,
};
self.write_value(valty, dest)?;
@ -151,20 +146,19 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
pub(super) fn cast_scalar(
&self,
val: Scalar,
src_ty: Ty<'tcx>,
dest_ty: Ty<'tcx>,
src_layout: TyLayout<'tcx>,
dest_layout: TyLayout<'tcx>,
) -> EvalResult<'tcx, Scalar> {
use rustc::ty::TypeVariants::*;
trace!("Casting {:?}: {:?} to {:?}", val, src_ty, dest_ty);
trace!("Casting {:?}: {:?} to {:?}", val, src_layout.ty, dest_layout.ty);
match val {
Scalar::Bits { defined: 0, .. } => Ok(val),
Scalar::Ptr(ptr) => self.cast_from_ptr(ptr, dest_ty),
Scalar::Bits { bits, .. } => {
// TODO(oli-obk): check defined bits here
match src_ty.sty {
TyFloat(fty) => self.cast_from_float(bits, fty, dest_ty),
_ => self.cast_from_int(bits, src_ty, dest_ty),
Scalar::Ptr(ptr) => self.cast_from_ptr(ptr, dest_layout.ty),
Scalar::Bits { bits, size } => {
assert_eq!(size as u64, src_layout.size.bytes());
match src_layout.ty.sty {
TyFloat(fty) => self.cast_from_float(bits, fty, dest_layout.ty),
_ => self.cast_from_int(bits, src_layout, dest_layout),
}
}
}
@ -173,56 +167,58 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
fn cast_from_int(
&self,
v: u128,
src_ty: Ty<'tcx>,
dest_ty: Ty<'tcx>,
src_layout: TyLayout<'tcx>,
dest_layout: TyLayout<'tcx>,
) -> EvalResult<'tcx, Scalar> {
let signed = self.layout_of(src_ty)?.abi.is_signed();
let signed = src_layout.abi.is_signed();
let v = if signed {
self.sign_extend(v, src_ty)?
self.sign_extend(v, src_layout)
} else {
v
};
trace!("cast_from_int: {}, {}, {}", v, src_ty, dest_ty);
trace!("cast_from_int: {}, {}, {}", v, src_layout.ty, dest_layout.ty);
use rustc::ty::TypeVariants::*;
match dest_ty.sty {
match dest_layout.ty.sty {
TyInt(_) | TyUint(_) => {
let v = self.truncate(v, dest_ty)?;
let v = self.truncate(v, dest_layout);
Ok(Scalar::Bits {
bits: v,
defined: self.layout_of(dest_ty).unwrap().size.bits() as u8,
size: dest_layout.size.bytes() as u8,
})
}
TyFloat(FloatTy::F32) if signed => Ok(Scalar::Bits {
bits: Single::from_i128(v as i128).value.to_bits(),
defined: 32,
size: 4,
}),
TyFloat(FloatTy::F64) if signed => Ok(Scalar::Bits {
bits: Double::from_i128(v as i128).value.to_bits(),
defined: 64,
size: 8,
}),
TyFloat(FloatTy::F32) => Ok(Scalar::Bits {
bits: Single::from_u128(v).value.to_bits(),
defined: 32,
size: 4,
}),
TyFloat(FloatTy::F64) => Ok(Scalar::Bits {
bits: Double::from_u128(v).value.to_bits(),
defined: 64,
size: 8,
}),
TyChar if v as u8 as u128 == v => Ok(Scalar::Bits { bits: v, defined: 32 }),
TyChar => err!(InvalidChar(v)),
TyChar => {
assert_eq!(v as u8 as u128, v);
Ok(Scalar::Bits { bits: v, size: 4 })
},
// No alignment check needed for raw pointers. But we have to truncate to target ptr size.
TyRawPtr(_) => {
Ok(Scalar::Bits {
bits: self.memory.truncate_to_ptr(v).0 as u128,
defined: self.memory.pointer_size().bits() as u8,
size: self.memory.pointer_size().bytes() as u8,
})
},
// Casts to bool are not permitted by rustc, no need to handle them here.
_ => err!(Unimplemented(format!("int to {:?} cast", dest_ty))),
_ => err!(Unimplemented(format!("int to {:?} cast", dest_layout.ty))),
}
}
@ -236,11 +232,11 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
match fty {
FloatTy::F32 => Ok(Scalar::Bits {
bits: Single::from_bits(bits).to_u128(width).value,
defined: width as u8,
size: (width / 8) as u8,
}),
FloatTy::F64 => Ok(Scalar::Bits {
bits: Double::from_bits(bits).to_u128(width).value,
defined: width as u8,
size: (width / 8) as u8,
}),
}
},
@ -250,11 +246,11 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
match fty {
FloatTy::F32 => Ok(Scalar::Bits {
bits: Single::from_bits(bits).to_i128(width).value as u128,
defined: width as u8,
size: (width / 8) as u8,
}),
FloatTy::F64 => Ok(Scalar::Bits {
bits: Double::from_bits(bits).to_i128(width).value as u128,
defined: width as u8,
size: (width / 8) as u8,
}),
}
},
@ -262,24 +258,24 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
TyFloat(FloatTy::F32) if fty == FloatTy::F64 => {
Ok(Scalar::Bits {
bits: Single::to_bits(Double::from_bits(bits).convert(&mut false).value),
defined: 32,
size: 4,
})
},
// f32 -> f64
TyFloat(FloatTy::F64) if fty == FloatTy::F32 => {
Ok(Scalar::Bits {
bits: Double::to_bits(Single::from_bits(bits).convert(&mut false).value),
defined: 64,
size: 8,
})
},
// identity cast
TyFloat(FloatTy:: F64) => Ok(Scalar::Bits {
bits,
defined: 64,
size: 8,
}),
TyFloat(FloatTy:: F32) => Ok(Scalar::Bits {
bits,
defined: 32,
size: 4,
}),
_ => err!(Unimplemented(format!("float to {:?} cast", dest_ty))),
}


@ -2,11 +2,12 @@ use std::fmt;
use std::error::Error;
use rustc::hir;
use rustc::mir::interpret::{ConstEvalErr};
use rustc::mir::interpret::{ConstEvalErr, ScalarMaybeUndef};
use rustc::mir;
use rustc::ty::{self, TyCtxt, Ty, Instance};
use rustc::ty::layout::{self, LayoutOf, Primitive, TyLayout};
use rustc::ty::subst::Subst;
use rustc_data_structures::indexed_vec::IndexVec;
use syntax::ast::Mutability;
use syntax::codemap::Span;
@ -28,13 +29,16 @@ pub fn mk_borrowck_eval_cx<'a, 'mir, 'tcx>(
let param_env = tcx.param_env(instance.def_id());
let mut ecx = EvalContext::new(tcx.at(span), param_env, CompileTimeEvaluator, ());
// insert a stack frame so any queries have the correct substs
ecx.push_stack_frame(
ecx.stack.push(super::eval_context::Frame {
block: mir::START_BLOCK,
locals: IndexVec::new(),
instance,
span,
mir,
Place::undef(),
StackPopCleanup::None,
)?;
return_place: Place::undef(),
return_to_block: StackPopCleanup::None,
stmt: 0,
});
Ok(ecx)
}
@ -72,48 +76,30 @@ pub fn eval_promoted<'a, 'mir, 'tcx>(
pub fn value_to_const_value<'tcx>(
ecx: &EvalContext<'_, '_, 'tcx, CompileTimeEvaluator>,
val: Value,
ty: Ty<'tcx>,
) -> &'tcx ty::Const<'tcx> {
let layout = ecx.layout_of(ty).unwrap();
layout: TyLayout<'tcx>,
) -> EvalResult<'tcx, &'tcx ty::Const<'tcx>> {
match (val, &layout.abi) {
(Value::Scalar(Scalar::Bits { defined: 0, ..}), _) if layout.is_zst() => {},
(Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits { size: 0, ..})), _) if layout.is_zst() => {},
(Value::ByRef(..), _) |
(Value::Scalar(_), &layout::Abi::Scalar(_)) |
(Value::ScalarPair(..), &layout::Abi::ScalarPair(..)) => {},
_ => bug!("bad value/layout combo: {:#?}, {:#?}", val, layout),
}
let val = (|| {
match val {
Value::Scalar(val) => Ok(ConstValue::Scalar(val)),
Value::ScalarPair(a, b) => Ok(ConstValue::ScalarPair(a, b)),
Value::ByRef(ptr, align) => {
let ptr = ptr.to_ptr().unwrap();
let alloc = ecx.memory.get(ptr.alloc_id)?;
assert!(alloc.align.abi() >= align.abi());
assert!(alloc.bytes.len() as u64 - ptr.offset.bytes() >= layout.size.bytes());
let mut alloc = alloc.clone();
alloc.align = align;
let alloc = ecx.tcx.intern_const_alloc(alloc);
Ok(ConstValue::ByRef(alloc, ptr.offset))
}
let val = match val {
Value::Scalar(val) => ConstValue::Scalar(val.unwrap_or_err()?),
Value::ScalarPair(a, b) => ConstValue::ScalarPair(a.unwrap_or_err()?, b),
Value::ByRef(ptr, align) => {
let ptr = ptr.to_ptr().unwrap();
let alloc = ecx.memory.get(ptr.alloc_id)?;
assert!(alloc.align.abi() >= align.abi());
assert!(alloc.bytes.len() as u64 - ptr.offset.bytes() >= layout.size.bytes());
let mut alloc = alloc.clone();
alloc.align = align;
let alloc = ecx.tcx.intern_const_alloc(alloc);
ConstValue::ByRef(alloc, ptr.offset)
}
})();
match val {
Ok(val) => ty::Const::from_const_value(ecx.tcx.tcx, val, ty),
Err(err) => {
let (frames, span) = ecx.generate_stacktrace(None);
let err = ConstEvalErr {
span,
error: err,
stacktrace: frames,
};
err.report_as_error(
ecx.tcx,
"failed to convert Value to ConstValue, this is a bug",
);
span_bug!(span, "miri error occured when converting Value to ConstValue")
}
}
};
Ok(ty::Const::from_const_value(ecx.tcx.tcx, val, layout.ty))
}
fn eval_body_and_ecx<'a, 'mir, 'tcx>(
@ -307,7 +293,7 @@ impl<'mir, 'tcx> super::Machine<'mir, 'tcx> for CompileTimeEvaluator {
let elem_align = ecx.layout_of(elem_ty)?.align.abi();
let align_val = Scalar::Bits {
bits: elem_align as u128,
defined: dest_layout.size.bits() as u8,
size: dest_layout.size.bytes() as u8,
};
ecx.write_scalar(dest, align_val, dest_layout.ty)?;
}
@ -317,7 +303,7 @@ impl<'mir, 'tcx> super::Machine<'mir, 'tcx> for CompileTimeEvaluator {
let size = ecx.layout_of(ty)?.size.bytes() as u128;
let size_val = Scalar::Bits {
bits: size,
defined: dest_layout.size.bits() as u8,
size: dest_layout.size.bytes() as u8,
};
ecx.write_scalar(dest, size_val, dest_layout.ty)?;
}
@ -327,7 +313,7 @@ impl<'mir, 'tcx> super::Machine<'mir, 'tcx> for CompileTimeEvaluator {
let type_id = ecx.tcx.type_id_hash(ty) as u128;
let id_val = Scalar::Bits {
bits: type_id,
defined: dest_layout.size.bits() as u8,
size: dest_layout.size.bytes() as u8,
};
ecx.write_scalar(dest, id_val, dest_layout.ty)?;
}
@ -437,7 +423,7 @@ pub fn const_val_field<'a, 'tcx>(
let place = ecx.allocate_place_for_value(value, layout, variant)?;
let (place, layout) = ecx.place_field(place, field, layout)?;
let (ptr, align) = place.to_ptr_align();
let mut new_value = Value::ByRef(ptr, align);
let mut new_value = Value::ByRef(ptr.unwrap_or_err()?, align);
new_value = ecx.try_read_by_ref(new_value, layout.ty)?;
use rustc_data_structures::indexed_vec::Idx;
match (value, new_value) {
@ -451,7 +437,7 @@ pub fn const_val_field<'a, 'tcx>(
),
_ => {},
}
Ok(value_to_const_value(&ecx, new_value, layout.ty))
value_to_const_value(&ecx, new_value, layout)
})();
result.map_err(|err| {
let (trace, span) = ecx.generate_stacktrace(None);
@ -481,7 +467,7 @@ pub fn const_variant_index<'a, 'tcx>(
},
Value::ByRef(ptr, align) => (ptr, align),
};
let place = Place::from_scalar_ptr(ptr, align);
let place = Place::from_scalar_ptr(ptr.into(), align);
ecx.read_discriminant_as_variant_index(place, layout)
}
@ -552,7 +538,7 @@ pub fn const_eval_provider<'a, 'tcx>(
if tcx.is_static(def_id).is_none() && cid.promoted.is_none() {
val = ecx.try_read_by_ref(val, layout.ty)?;
}
Ok(value_to_const_value(&ecx, val, layout.ty))
value_to_const_value(&ecx, val, layout)
}).map_err(|err| {
let (trace, span) = ecx.generate_stacktrace(None);
let err = ConstEvalErr {
@ -562,6 +548,9 @@ pub fn const_eval_provider<'a, 'tcx>(
};
if tcx.is_static(def_id).is_some() {
err.report_as_error(ecx.tcx, "could not evaluate static initializer");
if tcx.sess.err_count() == 0 {
span_bug!(span, "static eval failure didn't emit an error: {:#?}", err);
}
}
err.into()
})
@ -572,11 +561,11 @@ fn numeric_intrinsic<'tcx>(
bits: u128,
kind: Primitive,
) -> EvalResult<'tcx, Scalar> {
let defined = match kind {
Primitive::Int(integer, _) => integer.size().bits() as u8,
let size = match kind {
Primitive::Int(integer, _) => integer.size(),
_ => bug!("invalid `{}` argument: {:?}", name, bits),
};
let extra = 128 - defined as u128;
let extra = 128 - size.bits() as u128;
let bits_out = match name {
"ctpop" => bits.count_ones() as u128,
"ctlz" => bits.leading_zeros() as u128 - extra,
@ -584,5 +573,5 @@ fn numeric_intrinsic<'tcx>(
"bswap" => (bits << extra).swap_bytes(),
_ => bug!("not a numeric intrinsic: {}", name),
};
Ok(Scalar::Bits { bits: bits_out, defined })
Ok(Scalar::Bits { bits: bits_out, size: size.bytes() as u8 })
}
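
`numeric_intrinsic` above now derives the operand width from the primitive's `Size` instead of a `defined` bit count. A standalone sketch of the same bit-fiddling (the intrinsic names and the `extra`-bits trick are taken from the hunk; the `Size` and `Scalar` wrappers are dropped):

```rust
// Sketch: width is passed as a byte count, mirroring Size::bytes().
fn numeric_intrinsic(name: &str, bits: u128, size_bytes: u32) -> u128 {
    let extra = 128 - u128::from(size_bytes) * 8;
    match name {
        "ctpop" => bits.count_ones() as u128,
        "ctlz" => bits.leading_zeros() as u128 - extra,
        "bswap" => (bits << extra).swap_bytes(),
        _ => panic!("not a numeric intrinsic: {}", name),
    }
}

fn main() {
    // Leading zeros of 0b10 within a u8: 6.
    assert_eq!(numeric_intrinsic("ctlz", 0b10, 1), 6);
    // Byte-swapping a u16: 0x1234 -> 0x3412.
    assert_eq!(numeric_intrinsic("bswap", 0x1234, 2), 0x3412);
}
```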


@ -15,6 +15,7 @@ use rustc_data_structures::indexed_vec::{IndexVec, Idx};
use rustc::mir::interpret::{
GlobalId, Value, Scalar, FrameInfo, AllocType,
EvalResult, EvalErrorKind, Pointer, ConstValue,
ScalarMaybeUndef,
};
use syntax::codemap::{self, Span};
@ -105,9 +106,7 @@ pub struct Frame<'mir, 'tcx: 'mir> {
/// `[return_ptr, arguments..., variables..., temporaries...]`. The locals are stored as `Option<Value>`s.
/// `None` represents a local that is currently dead, while a live local
/// can either directly contain `Scalar` or refer to some part of an `Allocation`.
///
/// Before being initialized, arguments are `Value::Scalar(Scalar::undef())` and other locals are `None`.
pub locals: IndexVec<mir::Local, Option<Value>>,
pub locals: IndexVec<mir::Local, LocalValue>,
////////////////////////////////////////////////////////////////////////////////
// Current position within the function
@ -120,6 +119,21 @@ pub struct Frame<'mir, 'tcx: 'mir> {
pub stmt: usize,
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub enum LocalValue {
Dead,
Live(Value),
}
impl LocalValue {
pub fn access(self) -> EvalResult<'static, Value> {
match self {
LocalValue::Dead => err!(DeadLocal),
LocalValue::Live(val) => Ok(val),
}
}
}
impl<'mir, 'tcx: 'mir> Eq for Frame<'mir, 'tcx> {}
impl<'mir, 'tcx: 'mir> PartialEq for Frame<'mir, 'tcx> {
@ -395,8 +409,8 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
let id = self.memory.allocate_value(alloc.clone(), MemoryKind::Stack)?;
Ok(Value::ByRef(Pointer::new(id, offset).into(), alloc.align))
},
ConstValue::ScalarPair(a, b) => Ok(Value::ScalarPair(a, b)),
ConstValue::Scalar(val) => Ok(Value::Scalar(val)),
ConstValue::ScalarPair(a, b) => Ok(Value::ScalarPair(a.into(), b.into())),
ConstValue::Scalar(val) => Ok(Value::Scalar(val.into())),
}
}
@ -452,7 +466,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
/// Note that the value does not matter if the type is sized. For unsized types,
/// the value has to be a fat pointer, and we only care about the "extra" data in it.
pub fn size_and_align_of_dst(
&mut self,
&self,
ty: Ty<'tcx>,
value: Value,
) -> EvalResult<'tcx, (Size, Align)> {
@ -480,7 +494,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
// Recurse to get the size of the dynamically sized field (must be
// the last field).
let field_ty = layout.field(&self, layout.fields.count() - 1)?.ty;
let field_ty = layout.field(self, layout.fields.count() - 1)?.ty;
let (unsized_size, unsized_align) =
self.size_and_align_of_dst(field_ty, value)?;
@ -518,7 +532,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
}
ty::TySlice(_) | ty::TyStr => {
let (elem_size, align) = layout.field(&self, 0)?.size_and_align();
let (elem_size, align) = layout.field(self, 0)?.size_and_align();
let (_, len) = self.into_slice(value)?;
Ok((elem_size * len, align))
}
@ -538,8 +552,26 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
) -> EvalResult<'tcx> {
::log_settings::settings().indentation += 1;
let locals = if mir.local_decls.len() > 1 {
let mut locals = IndexVec::from_elem(Some(Value::Scalar(Scalar::undef())), &mir.local_decls);
// first push a stack frame so we have access to the local substs
self.stack.push(Frame {
mir,
block: mir::START_BLOCK,
return_to_block,
return_place,
// empty local array, we fill it in below, after we are inside the stack frame and
// all methods actually know about the frame
locals: IndexVec::new(),
span,
instance,
stmt: 0,
});
// don't allocate at all for trivial constants
if mir.local_decls.len() > 1 {
let mut locals = IndexVec::from_elem(LocalValue::Dead, &mir.local_decls);
for (local, decl) in locals.iter_mut().zip(mir.local_decls.iter()) {
*local = LocalValue::Live(self.init_value(decl.ty)?);
}
match self.tcx.describe_def(instance.def_id()) {
// statics and constants don't have `Storage*` statements, no need to look for them
Some(Def::Static(..)) | Some(Def::Const(..)) | Some(Def::AssociatedConst(..)) => {},
@ -550,29 +582,15 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
use rustc::mir::StatementKind::{StorageDead, StorageLive};
match stmt.kind {
StorageLive(local) |
StorageDead(local) => locals[local] = None,
StorageDead(local) => locals[local] = LocalValue::Dead,
_ => {}
}
}
}
},
}
locals
} else {
// don't allocate at all for trivial constants
IndexVec::new()
};
self.stack.push(Frame {
mir,
block: mir::START_BLOCK,
return_to_block,
return_place,
locals,
span,
instance,
stmt: 0,
});
self.frame_mut().locals = locals;
}
self.memory.cur_frame = self.cur_frame();
@ -598,7 +616,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
if let Place::Ptr { ptr, .. } = frame.return_place {
// FIXME: to_ptr()? might be too extreme here, static zsts might reach this under certain conditions
self.memory.mark_static_initialized(
ptr.to_ptr()?.alloc_id,
ptr.unwrap_or_err()?.to_ptr()?.alloc_id,
mutable,
)?
} else {
@ -616,8 +634,9 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
Ok(())
}
pub fn deallocate_local(&mut self, local: Option<Value>) -> EvalResult<'tcx> {
if let Some(Value::ByRef(ptr, _align)) = local {
pub fn deallocate_local(&mut self, local: LocalValue) -> EvalResult<'tcx> {
// FIXME: should we tell the user that there was a local which was never written to?
if let LocalValue::Live(Value::ByRef(ptr, _align)) = local {
trace!("deallocating local");
let ptr = ptr.to_ptr()?;
self.memory.dump_alloc(ptr.alloc_id);
@ -637,6 +656,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
) -> EvalResult<'tcx> {
let dest = self.eval_place(place)?;
let dest_ty = self.place_ty(place);
let dest_layout = self.layout_of(dest_ty)?;
use rustc::mir::Rvalue::*;
match *rvalue {
@ -675,7 +695,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
UnaryOp(un_op, ref operand) => {
let val = self.eval_operand_to_scalar(operand)?;
let val = self.unary_op(un_op, val, dest_ty)?;
let val = self.unary_op(un_op, val, dest_layout)?;
self.write_scalar(
dest,
val,
@ -724,6 +744,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
let (dest, dest_align) = self.force_allocation(dest)?.to_ptr_align();
if length > 0 {
let dest = dest.unwrap_or_err()?;
//write the first value
self.write_value_to_ptr(value, dest, dest_align, elem_ty)?;
@ -739,12 +760,12 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
let src = self.eval_place(place)?;
let ty = self.place_ty(place);
let (_, len) = src.elem_ty_and_len(ty, self.tcx.tcx);
let defined = self.memory.pointer_size().bits() as u8;
let size = self.memory.pointer_size().bytes() as u8;
self.write_scalar(
dest,
Scalar::Bits {
bits: len as u128,
defined,
size,
},
dest_ty,
)?;
@ -757,7 +778,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
let (ptr, _align, extra) = self.force_allocation(src)?.to_ptr_align_extra();
let val = match extra {
PlaceExtra::None => ptr.to_value(),
PlaceExtra::None => Value::Scalar(ptr),
PlaceExtra::Length(len) => ptr.to_value_with_len(len, self.tcx.tcx),
PlaceExtra::Vtable(vtable) => ptr.to_value_with_vtable(vtable),
PlaceExtra::DowncastVariant(..) => {
@ -781,12 +802,12 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
let layout = self.layout_of(ty)?;
assert!(!layout.is_unsized(),
"SizeOf nullary MIR operator called for unsized type");
let defined = self.memory.pointer_size().bits() as u8;
let size = self.memory.pointer_size().bytes() as u8;
self.write_scalar(
dest,
Scalar::Bits {
bits: layout.size.bytes() as u128,
defined,
size,
},
dest_ty,
)?;
@ -803,10 +824,10 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
let layout = self.layout_of(ty)?;
let place = self.eval_place(place)?;
let discr_val = self.read_discriminant_value(place, layout)?;
let defined = self.layout_of(dest_ty).unwrap().size.bits() as u8;
let size = self.layout_of(dest_ty).unwrap().size.bytes() as u8;
self.write_scalar(dest, Scalar::Bits {
bits: discr_val,
defined,
size,
}, dest_ty)?;
}
}
@ -957,10 +978,8 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
assert!(variants_start == variants_end);
dataful_variant as u128
},
Scalar::Bits { bits: raw_discr, defined } => {
if defined < discr.size.bits() as u8 {
return err!(ReadUndefBytes);
}
Scalar::Bits { bits: raw_discr, size } => {
assert_eq!(size as u64, discr.size.bytes());
let discr = raw_discr.wrapping_sub(niche_start)
.wrapping_add(variants_start);
if variants_start <= discr && discr <= variants_end {
@ -1002,14 +1021,14 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
// raw discriminants for enums are isize or bigger during
// their computation, but the in-memory tag is the smallest possible
// representation
let size = tag.value.size(self.tcx.tcx).bits();
let shift = 128 - size;
let size = tag.value.size(self.tcx.tcx);
let shift = 128 - size.bits();
let discr_val = (discr_val << shift) >> shift;
let (discr_dest, tag) = self.place_field(dest, mir::Field::new(0), layout)?;
self.write_scalar(discr_dest, Scalar::Bits {
bits: discr_val,
defined: size as u8,
size: size.bytes() as u8,
}, tag.ty)?;
}
layout::Variants::NicheFilling {
@ -1025,7 +1044,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
.wrapping_add(niche_start);
self.write_scalar(niche_dest, Scalar::Bits {
bits: niche_value,
defined: niche.size.bits() as u8,
size: niche.size.bytes() as u8,
}, niche.ty)?;
}
}
@ -1063,7 +1082,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
},
};
Ok(Place::Ptr {
ptr,
ptr: ptr.into(),
align,
extra: variant.map_or(PlaceExtra::None, PlaceExtra::DowncastVariant),
})
@ -1072,22 +1091,22 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
pub fn force_allocation(&mut self, place: Place) -> EvalResult<'tcx, Place> {
let new_place = match place {
Place::Local { frame, local } => {
match self.stack[frame].locals[local] {
None => return err!(DeadLocal),
Some(Value::ByRef(ptr, align)) => {
match self.stack[frame].locals[local].access()? {
Value::ByRef(ptr, align) => {
Place::Ptr {
ptr,
ptr: ptr.into(),
align,
extra: PlaceExtra::None,
}
}
Some(val) => {
val => {
let ty = self.stack[frame].mir.local_decls[local].ty;
let ty = self.monomorphize(ty, self.stack[frame].instance.substs);
let layout = self.layout_of(ty)?;
let ptr = self.alloc_ptr(layout)?;
self.stack[frame].locals[local] =
Some(Value::ByRef(ptr.into(), layout.align)); // it stays live
LocalValue::Live(Value::ByRef(ptr.into(), layout.align)); // it stays live
let place = Place::from_ptr(ptr, layout.align);
self.write_value(ValTy { value: val, ty }, place)?;
place
@ -1120,7 +1139,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
match self.follow_by_ref_value(value, ty)? {
Value::ByRef { .. } => bug!("follow_by_ref_value can't result in `ByRef`"),
Value::Scalar(scalar) => Ok(scalar),
Value::Scalar(scalar) => scalar.unwrap_or_err(),
Value::ScalarPair(..) => bug!("value_to_scalar can't work with fat pointers"),
}
@ -1137,11 +1156,11 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
pub fn write_scalar(
&mut self,
dest: Place,
val: Scalar,
val: impl Into<ScalarMaybeUndef>,
dest_ty: Ty<'tcx>,
) -> EvalResult<'tcx> {
let valty = ValTy {
value: Value::Scalar(val),
value: Value::Scalar(val.into()),
ty: dest_ty,
};
self.write_value(valty, dest)
@ -1160,15 +1179,15 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
match dest {
Place::Ptr { ptr, align, extra } => {
assert_eq!(extra, PlaceExtra::None);
self.write_value_to_ptr(src_val, ptr, align, dest_ty)
self.write_value_to_ptr(src_val, ptr.unwrap_or_err()?, align, dest_ty)
}
Place::Local { frame, local } => {
let dest = self.stack[frame].get_local(local)?;
let old_val = self.stack[frame].locals[local].access()?;
self.write_value_possibly_by_val(
src_val,
|this, val| this.stack[frame].set_local(local, val),
dest,
old_val,
dest_ty,
)
}
@ -1183,6 +1202,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
old_dest_val: Value,
dest_ty: Ty<'tcx>,
) -> EvalResult<'tcx> {
// FIXME: this should be a layout check, not underlying value
if let Value::ByRef(dest_ptr, align) = old_dest_val {
// If the value is already `ByRef` (that is, backed by an `Allocation`),
// then we must write the new value into this allocation, because there may be
@ -1239,12 +1259,9 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
layout::Primitive::Int(_, signed) => signed,
_ => false,
},
_ => match scalar {
Scalar::Bits { defined: 0, .. } => false,
_ => bug!("write_value_to_ptr: invalid ByVal layout: {:#?}", layout),
}
_ => false,
};
self.memory.write_scalar(dest, dest_align, scalar, layout.size, signed)
self.memory.write_scalar(dest, dest_align, scalar, layout.size, layout.align, signed)
}
Value::ScalarPair(a_val, b_val) => {
trace!("write_value_to_ptr valpair: {:#?}", layout);
@ -1253,12 +1270,13 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
_ => bug!("write_value_to_ptr: invalid ScalarPair layout: {:#?}", layout)
};
let (a_size, b_size) = (a.size(&self), b.size(&self));
let (a_align, b_align) = (a.align(&self), b.align(&self));
let a_ptr = dest;
let b_offset = a_size.abi_align(b.align(&self));
let b_offset = a_size.abi_align(b_align);
let b_ptr = dest.ptr_offset(b_offset, &self)?.into();
// TODO: What about signedness?
self.memory.write_scalar(a_ptr, dest_align, a_val, a_size, false)?;
self.memory.write_scalar(b_ptr, dest_align, b_val, b_size, false)
self.memory.write_scalar(a_ptr, dest_align, a_val, a_size, a_align, false)?;
self.memory.write_scalar(b_ptr, dest_align, b_val, b_size, b_align, false)
}
}
}
@ -1271,38 +1289,9 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
}
}
pub(crate) fn read_ptr(
&self,
ptr: Pointer,
ptr_align: Align,
pointee_ty: Ty<'tcx>,
) -> EvalResult<'tcx, Value> {
let ptr_size = self.memory.pointer_size();
let p: Scalar = self.memory.read_ptr_sized(ptr, ptr_align)?.into();
if self.type_is_sized(pointee_ty) {
Ok(p.to_value())
} else {
trace!("reading fat pointer extra of type {}", pointee_ty);
let extra = ptr.offset(ptr_size, self)?;
match self.tcx.struct_tail(pointee_ty).sty {
ty::TyDynamic(..) => Ok(p.to_value_with_vtable(
self.memory.read_ptr_sized(extra, ptr_align)?.to_ptr()?,
)),
ty::TySlice(..) | ty::TyStr => {
let len = self
.memory
.read_ptr_sized(extra, ptr_align)?
.to_bits(ptr_size)?;
Ok(p.to_value_with_len(len as u64, self.tcx.tcx))
},
_ => bug!("unsized scalar ptr read from {:?}", pointee_ty),
}
}
}
fn validate_scalar(
&self,
value: Scalar,
value: ScalarMaybeUndef,
size: Size,
scalar: &layout::Scalar,
path: &str,
@ -1311,8 +1300,16 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
trace!("validate scalar: {:#?}, {:#?}, {:#?}, {}", value, size, scalar, ty);
let (lo, hi) = scalar.valid_range.clone().into_inner();
let (bits, defined) = match value {
Scalar::Bits { bits, defined } => (bits, defined),
let value = match value {
ScalarMaybeUndef::Scalar(scalar) => scalar,
ScalarMaybeUndef::Undef => return validation_failure!("undefined bytes", path),
};
let bits = match value {
Scalar::Bits { bits, size: value_size } => {
assert_eq!(value_size as u64, size.bytes());
bits
},
Scalar::Ptr(_) => {
let ptr_size = self.memory.pointer_size();
let ptr_max = u128::max_value() >> (128 - ptr_size.bits());
@ -1346,7 +1343,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
// has no special checks for chars
match ty.sty {
ty::TyChar => {
assert_eq!(size.bytes(), 4);
debug_assert_eq!(size.bytes(), 4);
if ::std::char::from_u32(bits as u32).is_none() {
return err!(InvalidChar(bits));
}
@ -1355,32 +1352,26 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
}
use std::ops::RangeInclusive;
let in_range = |bound: RangeInclusive<u128>| {
defined as u64 >= size.bits() && bound.contains(&bits)
};
let in_range = |bound: RangeInclusive<u128>| bound.contains(&bits);
if lo > hi {
if in_range(0..=hi) || in_range(lo..=u128::max_value()) {
Ok(())
} else if defined as u64 >= size.bits() {
} else {
validation_failure!(
bits,
path,
format!("something in the range {:?} or {:?}", ..=hi, lo..)
)
} else {
validation_failure!("undefined bytes", path)
}
} else {
if in_range(scalar.valid_range.clone()) {
Ok(())
} else if defined as u64 >= size.bits() {
} else {
validation_failure!(
bits,
path,
format!("something in the range {:?}", scalar.valid_range)
)
} else {
validation_failure!("undefined bytes", path)
}
}
}
@ -1410,10 +1401,10 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
mir::Field::new(0),
layout,
)?;
let tag_value = self.value_to_scalar(ValTy {
value: tag_value,
ty: tag_layout.ty,
})?;
let tag_value = match self.follow_by_ref_value(tag_value, tag_layout.ty)? {
Value::Scalar(val) => val,
_ => bug!("tag must be scalar"),
};
let path = format!("{}.TAG", path);
self.validate_scalar(tag_value, size, tag, &path, tag_layout.ty)?;
let variant_index = self.read_discriminant_as_variant_index(
@ -1440,7 +1431,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
self.validate_scalar(value, size, scalar, &path, layout.ty)?;
if scalar.value == Primitive::Pointer {
// ignore integer pointers, we can't reason about the final hardware
if let Scalar::Ptr(ptr) = value {
if let Scalar::Ptr(ptr) = value.unwrap_or_err()? {
let alloc_kind = self.tcx.alloc_map.lock().get(ptr.alloc_id);
if let Some(AllocType::Static(did)) = alloc_kind {
// statics from other crates are already checked
@ -1534,7 +1525,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
self.memory.check_align(ptr, ptr_align)?;
if layout.size.bytes() == 0 {
return Ok(Some(Value::Scalar(Scalar::undef())));
return Ok(Some(Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits { bits: 0, size: 0 }))));
}
let ptr = ptr.to_ptr()?;
@ -1670,7 +1661,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
}
let (src_f_value, src_field) = match src {
Value::ByRef(ptr, align) => {
let src_place = Place::from_scalar_ptr(ptr, align);
let src_place = Place::from_scalar_ptr(ptr.into(), align);
let (src_f_place, src_field) =
self.place_field(src_place, mir::Field::new(i), src_layout)?;
(self.read_place(src_f_place)?, src_field)
@ -1717,7 +1708,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
}
write!(msg, ":").unwrap();
match self.stack[frame].get_local(local) {
match self.stack[frame].locals[local].access() {
Err(err) => {
if let EvalErrorKind::DeadLocal = err.kind {
write!(msg, " is dead").unwrap();
@ -1736,16 +1727,16 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
}
Ok(Value::Scalar(val)) => {
write!(msg, " {:?}", val).unwrap();
if let Scalar::Ptr(ptr) = val {
if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val {
allocs.push(ptr.alloc_id);
}
}
Ok(Value::ScalarPair(val1, val2)) => {
write!(msg, " ({:?}, {:?})", val1, val2).unwrap();
if let Scalar::Ptr(ptr) = val1 {
if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val1 {
allocs.push(ptr.alloc_id);
}
if let Scalar::Ptr(ptr) = val2 {
if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val2 {
allocs.push(ptr.alloc_id);
}
}
@ -1756,7 +1747,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
}
Place::Ptr { ptr, align, .. } => {
match ptr {
Scalar::Ptr(ptr) => {
ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) => {
trace!("by align({}) ref:", align.abi());
self.memory.dump_alloc(ptr.alloc_id);
}
@ -1766,21 +1757,6 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
}
}
/// Convenience function to ensure correct usage of locals
pub fn modify_local<F>(&mut self, frame: usize, local: mir::Local, f: F) -> EvalResult<'tcx>
where
F: FnOnce(&mut Self, Value) -> EvalResult<'tcx, Value>,
{
let val = self.stack[frame].get_local(local)?;
let new_val = f(self, val)?;
self.stack[frame].set_local(local, new_val)?;
// FIXME(solson): Run this when setting to Undef? (See previous version of this code.)
// if let Value::ByRef(ptr) = self.stack[frame].get_local(local) {
// self.memory.deallocate(ptr)?;
// }
Ok(())
}
pub fn generate_stacktrace(&self, explicit_span: Option<Span>) -> (Vec<FrameInfo>, Span) {
let mut last_span = None;
let mut frames = Vec::new();
@ -1819,12 +1795,12 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
(frames, self.tcx.span)
}
pub fn sign_extend(&self, value: u128, ty: Ty<'tcx>) -> EvalResult<'tcx, u128> {
super::sign_extend(self.tcx.tcx, value, ty)
pub fn sign_extend(&self, value: u128, ty: TyLayout<'_>) -> u128 {
super::sign_extend(value, ty)
}
pub fn truncate(&self, value: u128, ty: Ty<'tcx>) -> EvalResult<'tcx, u128> {
super::truncate(self.tcx.tcx, value, ty)
pub fn truncate(&self, value: u128, ty: TyLayout<'_>) -> u128 {
super::truncate(value, ty)
}
fn write_field_name(&self, s: &mut String, ty: Ty<'tcx>, i: usize, variant: usize) -> ::std::fmt::Result {
@ -1893,34 +1869,45 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
}
}
}
pub fn storage_live(&mut self, local: mir::Local) -> EvalResult<'tcx, LocalValue> {
trace!("{:?} is now live", local);
let ty = self.frame().mir.local_decls[local].ty;
let init = self.init_value(ty)?;
// StorageLive *always* kills the value that's currently stored
Ok(mem::replace(&mut self.frame_mut().locals[local], LocalValue::Live(init)))
}
fn init_value(&mut self, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
let ty = self.monomorphize(ty, self.substs());
let layout = self.layout_of(ty)?;
Ok(match layout.abi {
layout::Abi::Scalar(..) => Value::Scalar(ScalarMaybeUndef::Undef),
layout::Abi::ScalarPair(..) => Value::ScalarPair(
ScalarMaybeUndef::Undef,
ScalarMaybeUndef::Undef,
),
_ => Value::ByRef(self.alloc_ptr(layout)?.into(), layout.align),
})
}
}
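The locals model above can be summarized in a minimal, standalone sketch (the `LocalValue`, `Value`, and `Frame` types below are simplified stand-ins, not the rustc definitions): a dead local cannot be accessed, `storage_live` always replaces whatever was stored with a fresh undefined value, and `storage_dead` hands back the old slot so the caller can clean up a by-ref local.

// Standalone sketch, not the rustc types: models Dead/Live locals and
// shows that StorageLive unconditionally resets the slot to Undef.
#[derive(Clone, Debug, PartialEq)]
enum Value { Undef, Bits(u128) }

#[derive(Clone, Debug, PartialEq)]
enum LocalValue { Dead, Live(Value) }

struct Frame { locals: Vec<LocalValue> }

impl Frame {
    fn access(&self, local: usize) -> Result<&Value, &'static str> {
        match &self.locals[local] {
            LocalValue::Dead => Err("DeadLocal"),
            LocalValue::Live(v) => Ok(v),
        }
    }
    /// StorageLive *always* kills the value that's currently stored.
    fn storage_live(&mut self, local: usize) -> LocalValue {
        std::mem::replace(&mut self.locals[local], LocalValue::Live(Value::Undef))
    }
    fn storage_dead(&mut self, local: usize) -> LocalValue {
        std::mem::replace(&mut self.locals[local], LocalValue::Dead)
    }
}

fn main() {
    let mut frame = Frame { locals: vec![LocalValue::Dead; 2] };
    assert!(frame.access(0).is_err());               // reading a dead local fails
    frame.storage_live(0);
    assert_eq!(frame.access(0), Ok(&Value::Undef));  // live but still undefined
    frame.locals[0] = LocalValue::Live(Value::Bits(42));
    frame.storage_dead(0);
    assert!(frame.access(0).is_err());               // dead again
}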
impl<'mir, 'tcx> Frame<'mir, 'tcx> {
pub fn get_local(&self, local: mir::Local) -> EvalResult<'tcx, Value> {
self.locals[local].ok_or_else(|| EvalErrorKind::DeadLocal.into())
}
fn set_local(&mut self, local: mir::Local, value: Value) -> EvalResult<'tcx> {
match self.locals[local] {
None => err!(DeadLocal),
Some(ref mut local) => {
LocalValue::Dead => err!(DeadLocal),
LocalValue::Live(ref mut local) => {
*local = value;
Ok(())
}
}
}
pub fn storage_live(&mut self, local: mir::Local) -> Option<Value> {
trace!("{:?} is now live", local);
// StorageLive *always* kills the value that's currently stored
mem::replace(&mut self.locals[local], Some(Value::Scalar(Scalar::undef())))
}
/// Returns the old value of the local
pub fn storage_dead(&mut self, local: mir::Local) -> Option<Value> {
pub fn storage_dead(&mut self, local: mir::Local) -> LocalValue {
trace!("{:?} is now dead", local);
self.locals[local].take()
mem::replace(&mut self.locals[local], LocalValue::Dead)
}
}

View File

@ -7,7 +7,7 @@ use rustc::ty::Instance;
use rustc::ty::ParamEnv;
use rustc::ty::query::TyCtxtAt;
use rustc::ty::layout::{self, Align, TargetDataLayout, Size};
use rustc::mir::interpret::{Pointer, AllocId, Allocation, AccessKind, Value,
use rustc::mir::interpret::{Pointer, AllocId, Allocation, AccessKind, Value, ScalarMaybeUndef,
EvalResult, Scalar, EvalErrorKind, GlobalId, AllocType};
pub use rustc::mir::interpret::{write_target_uint, write_target_int, read_target_uint};
use rustc_data_structures::fx::{FxHashSet, FxHashMap, FxHasher};
@ -272,10 +272,8 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
let alloc = self.get(ptr.alloc_id)?;
(ptr.offset.bytes(), alloc.align)
}
Scalar::Bits { bits, defined } => {
if (defined as u64) < self.pointer_size().bits() {
return err!(ReadUndefBytes);
}
Scalar::Bits { bits, size } => {
assert_eq!(size as u64, self.pointer_size().bytes());
// FIXME: what on earth does this line do? docs or fix needed!
let v = ((bits as u128) % (1 << self.pointer_size().bytes())) as u64;
if v == 0 {
@ -756,7 +754,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
Ok(())
}
pub fn read_scalar(&self, ptr: Pointer, ptr_align: Align, size: Size) -> EvalResult<'tcx, Scalar> {
pub fn read_scalar(&self, ptr: Pointer, ptr_align: Align, size: Size) -> EvalResult<'tcx, ScalarMaybeUndef> {
self.check_relocation_edges(ptr, size)?; // Make sure we don't read part of a pointer as a pointer
let endianness = self.endianness();
let bytes = self.get_bytes_unchecked(ptr, size, ptr_align.min(self.int_align(size)))?;
@ -764,7 +762,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
// We must not return Ok() for unaligned pointers!
if self.check_defined(ptr, size).is_err() {
// this inflates undefined bytes to the entire scalar, even if only a few bytes are undefined
return Ok(Scalar::undef().into());
return Ok(ScalarMaybeUndef::Undef);
}
// Now we do the actual reading
let bits = read_target_uint(endianness, bytes).unwrap();
@ -776,44 +774,60 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
} else {
let alloc = self.get(ptr.alloc_id)?;
match alloc.relocations.get(&ptr.offset) {
Some(&alloc_id) => return Ok(Pointer::new(alloc_id, Size::from_bytes(bits as u64)).into()),
Some(&alloc_id) => return Ok(ScalarMaybeUndef::Scalar(Pointer::new(alloc_id, Size::from_bytes(bits as u64)).into())),
None => {},
}
}
// We don't. Just return the bits.
Ok(Scalar::Bits {
Ok(ScalarMaybeUndef::Scalar(Scalar::Bits {
bits,
defined: size.bits() as u8,
})
size: size.bytes() as u8,
}))
}
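As the comment notes, a read that touches even a single undefined byte is inflated to `ScalarMaybeUndef::Undef` for the whole scalar. A minimal sketch of that behaviour, using simplified stand-ins for `ScalarMaybeUndef` and the allocation's definedness mask (the `read_scalar` helper below is hypothetical and only mirrors the shape of the real API):

// Standalone sketch: any undefined byte in the range inflates the
// whole read to Undef, which callers surface via unwrap_or_err().
#[derive(Debug, PartialEq)]
enum ScalarMaybeUndef {
    Scalar(u128),
    Undef,
}

impl ScalarMaybeUndef {
    // Analogue of unwrap_or_err(): turn Undef into an error at the use site.
    fn unwrap_or_err(self) -> Result<u128, &'static str> {
        match self {
            ScalarMaybeUndef::Scalar(v) => Ok(v),
            ScalarMaybeUndef::Undef => Err("ReadUndefBytes"),
        }
    }
}

// `defined` marks which bytes of `bytes` have been initialized.
fn read_scalar(bytes: &[u8], defined: &[bool], offset: usize, size: usize) -> ScalarMaybeUndef {
    if defined[offset..offset + size].iter().any(|d| !d) {
        // Even one undefined byte makes the whole scalar undefined.
        return ScalarMaybeUndef::Undef;
    }
    let mut bits = 0u128;
    for (i, &b) in bytes[offset..offset + size].iter().enumerate() {
        bits |= (b as u128) << (8 * i); // assemble little-endian
    }
    ScalarMaybeUndef::Scalar(bits)
}

fn main() {
    let bytes = [0xff, 0x00, 0x12, 0x34];
    let defined = [true, true, false, true];
    assert_eq!(read_scalar(&bytes, &defined, 0, 2), ScalarMaybeUndef::Scalar(0x00ff));
    assert_eq!(read_scalar(&bytes, &defined, 0, 4), ScalarMaybeUndef::Undef);
    assert!(read_scalar(&bytes, &defined, 2, 1).unwrap_or_err().is_err());
}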
pub fn read_ptr_sized(&self, ptr: Pointer, ptr_align: Align) -> EvalResult<'tcx, Scalar> {
pub fn read_ptr_sized(&self, ptr: Pointer, ptr_align: Align) -> EvalResult<'tcx, ScalarMaybeUndef> {
self.read_scalar(ptr, ptr_align, self.pointer_size())
}
pub fn write_scalar(&mut self, ptr: Scalar, ptr_align: Align, val: Scalar, size: Size, signed: bool) -> EvalResult<'tcx> {
pub fn write_scalar(
&mut self,
ptr: Scalar,
ptr_align: Align,
val: ScalarMaybeUndef,
type_size: Size,
type_align: Align,
signed: bool,
) -> EvalResult<'tcx> {
let endianness = self.endianness();
self.check_align(ptr, ptr_align)?;
let val = match val {
ScalarMaybeUndef::Scalar(scalar) => scalar,
ScalarMaybeUndef::Undef => return self.mark_definedness(ptr, type_size, false),
};
let bytes = match val {
Scalar::Ptr(val) => {
assert_eq!(size, self.pointer_size());
assert_eq!(type_size, self.pointer_size());
val.offset.bytes() as u128
}
Scalar::Bits { bits, defined } if defined as u64 >= size.bits() && size.bits() != 0 => bits,
Scalar::Bits { .. } => {
self.check_align(ptr.into(), ptr_align)?;
self.mark_definedness(ptr, size, false)?;
Scalar::Bits { size: 0, .. } => {
// nothing to do for ZSTs
assert_eq!(type_size.bytes(), 0);
return Ok(());
}
Scalar::Bits { bits, size } => {
assert_eq!(size as u64, type_size.bytes());
bits
},
};
let ptr = ptr.to_ptr()?;
{
let align = self.int_align(size);
let dst = self.get_bytes_mut(ptr, size, ptr_align.min(align))?;
let dst = self.get_bytes_mut(ptr, type_size, ptr_align.min(type_align))?;
if signed {
write_target_int(endianness, dst, bytes as i128).unwrap();
} else {
@ -835,9 +849,9 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
Ok(())
}
pub fn write_ptr_sized_unsigned(&mut self, ptr: Pointer, ptr_align: Align, val: Scalar) -> EvalResult<'tcx> {
pub fn write_ptr_sized_unsigned(&mut self, ptr: Pointer, ptr_align: Align, val: ScalarMaybeUndef) -> EvalResult<'tcx> {
let ptr_size = self.pointer_size();
self.write_scalar(ptr.into(), ptr_align, val, ptr_size, false)
self.write_scalar(ptr.into(), ptr_align, val, ptr_size, ptr_align, false)
}
fn int_align(&self, size: Size) -> Align {
@ -984,7 +998,7 @@ pub trait HasMemory<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
fn into_ptr(
&self,
value: Value,
) -> EvalResult<'tcx, Scalar> {
) -> EvalResult<'tcx, ScalarMaybeUndef> {
Ok(match value {
Value::ByRef(ptr, align) => {
self.memory().read_ptr_sized(ptr.to_ptr()?, align)?
@ -997,7 +1011,7 @@ pub trait HasMemory<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
fn into_ptr_vtable_pair(
&self,
value: Value,
) -> EvalResult<'tcx, (Scalar, Pointer)> {
) -> EvalResult<'tcx, (ScalarMaybeUndef, Pointer)> {
match value {
Value::ByRef(ref_ptr, align) => {
let mem = self.memory();
@ -1005,11 +1019,11 @@ pub trait HasMemory<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
let vtable = mem.read_ptr_sized(
ref_ptr.ptr_offset(mem.pointer_size(), &mem.tcx.data_layout)?.to_ptr()?,
align
)?.to_ptr()?;
)?.unwrap_or_err()?.to_ptr()?;
Ok((ptr, vtable))
}
Value::ScalarPair(ptr, vtable) => Ok((ptr.into(), vtable.to_ptr()?)),
Value::ScalarPair(ptr, vtable) => Ok((ptr, vtable.unwrap_or_err()?.to_ptr()?)),
_ => bug!("expected ptr and vtable, got {:?}", value),
}
}
@ -1017,7 +1031,7 @@ pub trait HasMemory<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
fn into_slice(
&self,
value: Value,
) -> EvalResult<'tcx, (Scalar, u64)> {
) -> EvalResult<'tcx, (ScalarMaybeUndef, u64)> {
match value {
Value::ByRef(ref_ptr, align) => {
let mem = self.memory();
@ -1025,12 +1039,12 @@ pub trait HasMemory<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
let len = mem.read_ptr_sized(
ref_ptr.ptr_offset(mem.pointer_size(), &mem.tcx.data_layout)?.to_ptr()?,
align
)?.to_bits(mem.pointer_size())? as u64;
)?.unwrap_or_err()?.to_bits(mem.pointer_size())? as u64;
Ok((ptr, len))
}
Value::ScalarPair(ptr, val) => {
let len = val.to_bits(self.memory().pointer_size())?;
Ok((ptr.into(), len as u64))
let len = val.unwrap_or_err()?.to_bits(self.memory().pointer_size())?;
Ok((ptr, len as u64))
}
Value::Scalar(_) => bug!("expected ptr and length, got {:?}", value),
}
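Both helpers above rely on the fat-pointer layout: the data pointer sits at offset 0 and the metadata (length or vtable pointer) one `pointer_size` further. A small sketch, assuming a 64-bit little-endian target and a hypothetical in-memory buffer, recovers a slice length from such a pair the same way the `ByRef` branch of `into_slice` does:

// Sketch: a &[T] fat pointer stored in memory is two pointer-sized
// words, [data_ptr][len]. On a 64-bit target each word is 8 bytes.
fn read_usize_le(buf: &[u8], offset: usize) -> u64 {
    let mut bytes = [0u8; 8];
    bytes.copy_from_slice(&buf[offset..offset + 8]);
    u64::from_le_bytes(bytes)
}

fn main() {
    const POINTER_SIZE: usize = 8;
    // Pretend this is the allocation a `&[u8]` reference points into.
    let mut fat_ptr = [0u8; 2 * POINTER_SIZE];
    fat_ptr[..POINTER_SIZE].copy_from_slice(&0x1000u64.to_le_bytes()); // data pointer
    fat_ptr[POINTER_SIZE..].copy_from_slice(&3u64.to_le_bytes());      // length

    let ptr = read_usize_le(&fat_ptr, 0);
    let len = read_usize_le(&fat_ptr, POINTER_SIZE);
    assert_eq!((ptr, len), (0x1000, 3));
}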

View File

@ -11,8 +11,10 @@ mod step;
mod terminator;
mod traits;
pub use self::eval_context::{EvalContext, Frame, StackPopCleanup,
TyAndPacked, ValTy};
pub use self::eval_context::{
EvalContext, Frame, StackPopCleanup,
TyAndPacked, ValTy,
};
pub use self::place::{Place, PlaceExtra};
@ -34,26 +36,21 @@ pub use self::machine::Machine;
pub use self::memory::{write_target_uint, write_target_int, read_target_uint};
use rustc::mir::interpret::{EvalResult, EvalErrorKind};
use rustc::ty::{Ty, TyCtxt, ParamEnv};
use rustc::ty::layout::TyLayout;
pub fn sign_extend<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, value: u128, ty: Ty<'tcx>) -> EvalResult<'tcx, u128> {
let param_env = ParamEnv::empty();
let layout = tcx.layout_of(param_env.and(ty)).map_err(|layout| EvalErrorKind::Layout(layout))?;
pub fn sign_extend(value: u128, layout: TyLayout<'_>) -> u128 {
let size = layout.size.bits();
assert!(layout.abi.is_signed());
// sign extend
let shift = 128 - size;
// shift the unsigned value to the left
// and back to the right as signed (essentially fills with FF on the left)
Ok((((value << shift) as i128) >> shift) as u128)
(((value << shift) as i128) >> shift) as u128
}
pub fn truncate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, value: u128, ty: Ty<'tcx>) -> EvalResult<'tcx, u128> {
let param_env = ParamEnv::empty();
let layout = tcx.layout_of(param_env.and(ty)).map_err(|layout| EvalErrorKind::Layout(layout))?;
pub fn truncate(value: u128, layout: TyLayout<'_>) -> u128 {
let size = layout.size.bits();
let shift = 128 - size;
// truncate (shift left to drop out leftover values, shift right to fill with zeroes)
Ok((value << shift) >> shift)
(value << shift) >> shift
}
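The effect of the now layout-based `sign_extend` and `truncate` is easiest to see on concrete values. A standalone sketch of the same shift trick, with `size_bits` standing in for `layout.size.bits()`:

// Sketch of the shift trick used by sign_extend/truncate above:
// shifting left to bit 127 and back fills (or clears) the high bits.
fn sign_extend(value: u128, size_bits: u64) -> u128 {
    let shift = 128 - size_bits;
    (((value << shift) as i128) >> shift) as u128
}

fn truncate(value: u128, size_bits: u64) -> u128 {
    let shift = 128 - size_bits;
    (value << shift) >> shift
}

fn main() {
    // 0xFF as an i8 is -1; sign-extending to 128 bits fills with FF.
    assert_eq!(sign_extend(0xff, 8) as i128, -1);
    // 0x7F stays positive.
    assert_eq!(sign_extend(0x7f, 8), 0x7f);
    // Truncating 0x100 to 8 bits drops the carry bit.
    assert_eq!(truncate(0x100, 8), 0);
}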

View File

@ -1,7 +1,7 @@
use rustc::mir;
use rustc::ty::{self, Ty, layout};
use syntax::ast::FloatTy;
use rustc::ty::layout::LayoutOf;
use rustc::ty::layout::{LayoutOf, TyLayout};
use rustc_apfloat::ieee::{Double, Single};
use rustc_apfloat::Float;
@ -11,7 +11,7 @@ use rustc::mir::interpret::{EvalResult, Scalar, Value};
impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
fn binop_with_overflow(
&mut self,
&self,
op: mir::BinOp,
left: ValTy<'tcx>,
right: ValTy<'tcx>,
@ -32,7 +32,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
dest_ty: Ty<'tcx>,
) -> EvalResult<'tcx> {
let (val, overflowed) = self.binop_with_overflow(op, left, right)?;
let val = Value::ScalarPair(val, Scalar::from_bool(overflowed));
let val = Value::ScalarPair(val.into(), Scalar::from_bool(overflowed).into());
let valty = ValTy {
value: val,
ty: dest_ty,
@ -97,13 +97,13 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
let signed = left_layout.abi.is_signed();
let mut oflo = (r as u32 as u128) != r;
let mut r = r as u32;
let size = left_layout.size.bits() as u32;
oflo |= r >= size;
let size = left_layout.size;
oflo |= r >= size.bits() as u32;
if oflo {
r %= size;
r %= size.bits() as u32;
}
let result = if signed {
let l = self.sign_extend(l, left_ty)? as i128;
let l = self.sign_extend(l, left_layout) as i128;
let result = match bin_op {
Shl => l << r,
Shr => l >> r,
@ -117,10 +117,10 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
_ => bug!("it has already been checked that this is a shift op"),
}
};
let truncated = self.truncate(result, left_ty)?;
let truncated = self.truncate(result, left_layout);
return Ok((Scalar::Bits {
bits: truncated,
defined: size as u8,
size: size.bytes() as u8,
}, oflo));
}
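The shift arm above reduces an oversized shift amount modulo the bit width and records that reduction in the overflow flag, matching the behaviour of the checked shift operators. A standalone sketch of the same bookkeeping for a 32-bit left shift:

// Sketch: shifting a 32-bit value by 34 sets the overflow flag and
// actually shifts by 34 % 32 == 2, mirroring the oflo logic above.
fn checked_shl_32(l: u32, r: u128) -> (u32, bool) {
    let mut oflo = (r as u32 as u128) != r; // r did not even fit in 32 bits
    let mut r = r as u32;
    let size_bits = 32;
    oflo |= r >= size_bits;
    if oflo {
        r %= size_bits;
    }
    (l << r, oflo)
}

fn main() {
    assert_eq!(checked_shl_32(1, 3), (8, false));
    assert_eq!(checked_shl_32(1, 34), (4, true)); // 34 % 32 == 2
}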
@ -145,8 +145,8 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
_ => None,
};
if let Some(op) = op {
let l = self.sign_extend(l, left_ty)? as i128;
let r = self.sign_extend(r, right_ty)? as i128;
let l = self.sign_extend(l, left_layout) as i128;
let r = self.sign_extend(r, right_layout) as i128;
return Ok((Scalar::from_bool(op(&l, &r)), false));
}
let op: Option<fn(i128, i128) -> (i128, bool)> = match bin_op {
@ -160,14 +160,14 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
_ => None,
};
if let Some(op) = op {
let l128 = self.sign_extend(l, left_ty)? as i128;
let r = self.sign_extend(r, right_ty)? as i128;
let size = left_layout.size.bits();
let l128 = self.sign_extend(l, left_layout) as i128;
let r = self.sign_extend(r, right_layout) as i128;
let size = left_layout.size;
match bin_op {
Rem | Div => {
// int_min / -1
if r == -1 && l == (1 << (size - 1)) {
return Ok((Scalar::Bits { bits: l, defined: size as u8 }, true));
if r == -1 && l == (1 << (size.bits() - 1)) {
return Ok((Scalar::Bits { bits: l, size: size.bytes() as u8 }, true));
}
},
_ => {},
@ -175,27 +175,27 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
trace!("{}, {}, {}", l, l128, r);
let (result, mut oflo) = op(l128, r);
trace!("{}, {}", result, oflo);
if !oflo && size != 128 {
let max = 1 << (size - 1);
if !oflo && size.bits() != 128 {
let max = 1 << (size.bits() - 1);
oflo = result >= max || result < -max;
}
let result = result as u128;
let truncated = self.truncate(result, left_ty)?;
let truncated = self.truncate(result, left_layout);
return Ok((Scalar::Bits {
bits: truncated,
defined: size as u8,
size: size.bytes() as u8,
}, oflo));
}
}
if let ty::TyFloat(fty) = left_ty.sty {
macro_rules! float_math {
($ty:path, $bitsize:expr) => {{
($ty:path, $size:expr) => {{
let l = <$ty>::from_bits(l);
let r = <$ty>::from_bits(r);
let bitify = |res: ::rustc_apfloat::StatusAnd<$ty>| Scalar::Bits {
bits: res.value.to_bits(),
defined: $bitsize,
size: $size,
};
let val = match bin_op {
Eq => Scalar::from_bool(l == r),
@ -215,12 +215,12 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
}};
}
match fty {
FloatTy::F32 => float_math!(Single, 32),
FloatTy::F64 => float_math!(Double, 64),
FloatTy::F32 => float_math!(Single, 4),
FloatTy::F64 => float_math!(Double, 8),
}
}
let bit_width = self.layout_of(left_ty).unwrap().size.bits() as u8;
let size = self.layout_of(left_ty).unwrap().size.bytes() as u8;
// only ints left
let val = match bin_op {
@ -232,9 +232,9 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
Gt => Scalar::from_bool(l > r),
Ge => Scalar::from_bool(l >= r),
BitOr => Scalar::Bits { bits: l | r, defined: bit_width },
BitAnd => Scalar::Bits { bits: l & r, defined: bit_width },
BitXor => Scalar::Bits { bits: l ^ r, defined: bit_width },
BitOr => Scalar::Bits { bits: l | r, size },
BitAnd => Scalar::Bits { bits: l & r, size },
BitXor => Scalar::Bits { bits: l ^ r, size },
Add | Sub | Mul | Rem | Div => {
let op: fn(u128, u128) -> (u128, bool) = match bin_op {
@ -248,10 +248,10 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
_ => bug!(),
};
let (result, oflo) = op(l, r);
let truncated = self.truncate(result, left_ty)?;
let truncated = self.truncate(result, left_layout);
return Ok((Scalar::Bits {
bits: truncated,
defined: bit_width,
size,
}, oflo || truncated != result));
}
@ -275,17 +275,16 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
&self,
un_op: mir::UnOp,
val: Scalar,
ty: Ty<'tcx>,
layout: TyLayout<'tcx>,
) -> EvalResult<'tcx, Scalar> {
use rustc::mir::UnOp::*;
use rustc_apfloat::ieee::{Single, Double};
use rustc_apfloat::Float;
let size = self.layout_of(ty)?.size;
let size = layout.size;
let bytes = val.to_bits(size)?;
let size = size.bits();
let result_bytes = match (un_op, &ty.sty) {
let result_bytes = match (un_op, &layout.ty.sty) {
(Not, ty::TyBool) => !val.to_bool()? as u128,
@ -294,13 +293,13 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
(Neg, ty::TyFloat(FloatTy::F32)) => Single::to_bits(-Single::from_bits(bytes)),
(Neg, ty::TyFloat(FloatTy::F64)) => Double::to_bits(-Double::from_bits(bytes)),
(Neg, _) if bytes == (1 << (size - 1)) => return err!(OverflowNeg),
(Neg, _) if bytes == (1 << (size.bits() - 1)) => return err!(OverflowNeg),
(Neg, _) => (-(bytes as i128)) as u128,
};
Ok(Scalar::Bits {
bits: self.truncate(result_bytes, ty)?,
defined: size as u8,
bits: self.truncate(result_bytes, layout),
size: size.bytes() as u8,
})
}
}
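The `OverflowNeg` guard in `unary_op` fires exactly for the most negative value of the type, the one value whose negation does not fit. A small sketch with 8-bit values (the `neg_overflows` helper is made up for the example):

// Sketch: for an 8-bit integer the bit pattern 1 << 7 (i.e. -128)
// is the only value whose negation overflows.
fn neg_overflows(bytes: u128, size_bits: u64) -> bool {
    bytes == 1u128 << (size_bits - 1)
}

fn main() {
    assert!(neg_overflows(0x80, 8));              // -128_i8 cannot be negated
    assert!(!neg_overflows(0x7f, 8));             // negating 127 is fine
    assert_eq!((-128i8).checked_neg(), None);     // what the guard protects against
    assert_eq!((-127i8).checked_neg(), Some(127));
}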

View File

@ -3,7 +3,7 @@ use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::layout::{self, Align, LayoutOf, TyLayout};
use rustc_data_structures::indexed_vec::Idx;
use rustc::mir::interpret::{GlobalId, Value, Scalar, EvalResult, Pointer};
use rustc::mir::interpret::{GlobalId, Value, Scalar, EvalResult, Pointer, ScalarMaybeUndef};
use super::{EvalContext, Machine, ValTy};
use interpret::memory::HasMemory;
@ -14,7 +14,7 @@ pub enum Place {
/// A place may have an invalid (integral or undef) pointer,
/// since it might be turned back into a reference
/// before ever being dereferenced.
ptr: Scalar,
ptr: ScalarMaybeUndef,
align: Align,
extra: PlaceExtra,
},
@ -35,10 +35,10 @@ pub enum PlaceExtra {
impl<'tcx> Place {
/// Produces a Place that will error if attempted to be read from
pub fn undef() -> Self {
Self::from_scalar_ptr(Scalar::undef().into(), Align::from_bytes(1, 1).unwrap())
Self::from_scalar_ptr(ScalarMaybeUndef::Undef, Align::from_bytes(1, 1).unwrap())
}
pub fn from_scalar_ptr(ptr: Scalar, align: Align) -> Self {
pub fn from_scalar_ptr(ptr: ScalarMaybeUndef, align: Align) -> Self {
Place::Ptr {
ptr,
align,
@ -47,10 +47,10 @@ impl<'tcx> Place {
}
pub fn from_ptr(ptr: Pointer, align: Align) -> Self {
Self::from_scalar_ptr(ptr.into(), align)
Self::from_scalar_ptr(ScalarMaybeUndef::Scalar(ptr.into()), align)
}
pub fn to_ptr_align_extra(self) -> (Scalar, Align, PlaceExtra) {
pub fn to_ptr_align_extra(self) -> (ScalarMaybeUndef, Align, PlaceExtra) {
match self {
Place::Ptr { ptr, align, extra } => (ptr, align, extra),
_ => bug!("to_ptr_and_extra: expected Place::Ptr, got {:?}", self),
@ -58,7 +58,7 @@ impl<'tcx> Place {
}
}
pub fn to_ptr_align(self) -> (Scalar, Align) {
pub fn to_ptr_align(self) -> (ScalarMaybeUndef, Align) {
let (ptr, align, _extra) = self.to_ptr_align_extra();
(ptr, align)
}
@ -66,7 +66,7 @@ impl<'tcx> Place {
pub fn to_ptr(self) -> EvalResult<'tcx, Pointer> {
// At this point, we forget about the alignment information -- the place has been turned into a reference,
// and no matter where it came from, it now must be aligned.
self.to_ptr_align().0.to_ptr()
self.to_ptr_align().0.unwrap_or_err()?.to_ptr()
}
pub(super) fn elem_ty_and_len(
@ -106,7 +106,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
// Might allow this in the future, right now there's no way to do this from Rust code anyway
Local(mir::RETURN_PLACE) => err!(ReadFromReturnPointer),
// Directly reading a local will always succeed
Local(local) => self.frame().get_local(local).map(Some),
Local(local) => self.frame().locals[local].access().map(Some),
// No fast path for statics. Reading from statics is rare and would require another
// Machine function to handle differently in miri.
Promoted(_) |
@ -129,7 +129,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
let field = base_layout.field(self, field_index)?;
if field.size.bytes() == 0 {
return Ok((
Value::Scalar(Scalar::undef()),
Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits { bits: 0, size: 0 })),
field,
));
}
@ -197,9 +197,9 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
match place {
Place::Ptr { ptr, align, extra } => {
assert_eq!(extra, PlaceExtra::None);
Ok(Value::ByRef(ptr, align))
Ok(Value::ByRef(ptr.unwrap_or_err()?, align))
}
Place::Local { frame, local } => self.stack[frame].get_local(local),
Place::Local { frame, local } => self.stack[frame].locals[local].access(),
}
}
@ -220,7 +220,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
})?;
if let Value::ByRef(ptr, align) = val {
Place::Ptr {
ptr,
ptr: ptr.into(),
align,
extra: PlaceExtra::None,
}
@ -238,7 +238,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
};
let alloc = Machine::init_static(self, cid)?;
Place::Ptr {
ptr: Scalar::Ptr(alloc.into()),
ptr: ScalarMaybeUndef::Scalar(Scalar::Ptr(alloc.into())),
align: layout.align,
extra: PlaceExtra::None,
}
@ -276,14 +276,13 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
let (base_ptr, base_align, base_extra) = match base {
Place::Ptr { ptr, align, extra } => (ptr, align, extra),
Place::Local { frame, local } => {
match (&self.stack[frame].get_local(local)?, &base_layout.abi) {
match (self.stack[frame].locals[local].access()?, &base_layout.abi) {
// in case the field covers the entire type, just return the value
(&Value::Scalar(_), &layout::Abi::Scalar(_)) |
(&Value::ScalarPair(..), &layout::Abi::ScalarPair(..))
if offset.bytes() == 0 && field.size == base_layout.size =>
{
return Ok((base, field));
}
(Value::Scalar(_), &layout::Abi::Scalar(_)) |
(Value::ScalarPair(..), &layout::Abi::ScalarPair(..))
if offset.bytes() == 0 && field.size == base_layout.size => {
return Ok((base, field))
},
_ => self.force_allocation(base)?.to_ptr_align_extra(),
}
}
@ -413,7 +412,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
}
Index(local) => {
let value = self.frame().get_local(local)?;
let value = self.frame().locals[local].access()?;
let ty = self.tcx.types.usize;
let n = self
.value_to_scalar(ValTy { value, ty })?

View File

@ -90,7 +90,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
// Mark locals as alive
StorageLive(local) => {
let old_val = self.frame_mut().storage_live(local);
let old_val = self.storage_live(local)?;
self.deallocate_local(old_val)?;
}

View File

@ -2,7 +2,7 @@ use rustc::mir::BasicBlock;
use rustc::ty::{self, Ty};
use syntax::codemap::Span;
use rustc::mir::interpret::{EvalResult, Scalar, Value};
use rustc::mir::interpret::{EvalResult, Value};
use interpret::{Machine, ValTy, EvalContext, Place, PlaceExtra};
impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
@ -33,7 +33,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
ptr,
align: _,
extra: PlaceExtra::None,
} => ptr.to_value(),
} => Value::Scalar(ptr),
_ => bug!("force_allocation broken"),
};
self.drop(val, instance, ty, span, target)
@ -51,17 +51,10 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
let instance = match ty.sty {
ty::TyDynamic(..) => {
let vtable = match arg {
Value::ScalarPair(_, Scalar::Ptr(vtable)) => vtable,
_ => bug!("expected fat ptr, got {:?}", arg),
};
match self.read_drop_type_from_vtable(vtable)? {
Some(func) => func,
// no drop fn -> bail out
None => {
self.goto_block(target);
return Ok(())
},
if let Value::ScalarPair(_, vtable) = arg {
self.read_drop_type_from_vtable(vtable.unwrap_or_err()?.to_ptr()?)?
} else {
bug!("expected fat ptr, got {:?}", arg);
}
}
_ => instance,

View File

@ -4,7 +4,7 @@ use rustc::ty::layout::{LayoutOf, Size};
use syntax::codemap::Span;
use rustc_target::spec::abi::Abi;
use rustc::mir::interpret::{EvalResult, Scalar};
use rustc::mir::interpret::{EvalResult, Scalar, Value};
use super::{EvalContext, Place, Machine, ValTy};
use rustc_data_structures::indexed_vec::Idx;
@ -47,7 +47,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
for (index, &const_int) in values.iter().enumerate() {
// Compare using binary_op
let const_int = Scalar::Bits { bits: const_int, defined: 128 };
let const_int = Scalar::Bits { bits: const_int, size: discr_layout.size.bytes() as u8 };
let res = self.binary_op(mir::BinOp::Eq,
discr_prim, discr_val.ty,
const_int, discr_val.ty
@ -392,12 +392,12 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
let fn_ptr = self.memory.read_ptr_sized(
vtable.offset(ptr_size * (idx as u64 + 3), &self)?,
ptr_align
)?.to_ptr()?;
)?.unwrap_or_err()?.to_ptr()?;
let instance = self.memory.get_fn(fn_ptr)?;
let mut args = args.to_vec();
let ty = self.layout_of(args[0].ty)?.field(&self, 0)?.ty;
args[0].ty = ty;
args[0].value = ptr.to_value();
args[0].value = Value::Scalar(ptr);
// recurse with concrete function
self.eval_fn_call(instance, destination, &args, span, sig)
}

View File

@ -1,6 +1,6 @@
use rustc::ty::{self, Ty};
use rustc::ty::layout::{Size, Align, LayoutOf};
use rustc::mir::interpret::{Scalar, Value, Pointer, EvalResult};
use rustc::mir::interpret::{Scalar, Pointer, EvalResult};
use syntax::ast::Mutability;
@ -36,25 +36,25 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
let drop = ::monomorphize::resolve_drop_in_place(*self.tcx, ty);
let drop = self.memory.create_fn_alloc(drop);
self.memory.write_ptr_sized_unsigned(vtable, ptr_align, drop.into())?;
self.memory.write_ptr_sized_unsigned(vtable, ptr_align, Scalar::Ptr(drop).into())?;
let size_ptr = vtable.offset(ptr_size, &self)?;
self.memory.write_ptr_sized_unsigned(size_ptr, ptr_align, Scalar::Bits {
bits: size as u128,
defined: ptr_size.bits() as u8,
})?;
size: ptr_size.bytes() as u8,
}.into())?;
let align_ptr = vtable.offset(ptr_size * 2, &self)?;
self.memory.write_ptr_sized_unsigned(align_ptr, ptr_align, Scalar::Bits {
bits: align as u128,
defined: ptr_size.bits() as u8,
})?;
size: ptr_size.bytes() as u8,
}.into())?;
for (i, method) in methods.iter().enumerate() {
if let Some((def_id, substs)) = *method {
let instance = self.resolve(def_id, substs)?;
let fn_ptr = self.memory.create_fn_alloc(instance);
let method_ptr = vtable.offset(ptr_size * (3 + i as u64), &self)?;
self.memory.write_ptr_sized_unsigned(method_ptr, ptr_align, fn_ptr.into())?;
self.memory.write_ptr_sized_unsigned(method_ptr, ptr_align, Scalar::Ptr(fn_ptr).into())?;
}
}
@ -69,16 +69,11 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
pub fn read_drop_type_from_vtable(
&self,
vtable: Pointer,
) -> EvalResult<'tcx, Option<ty::Instance<'tcx>>> {
) -> EvalResult<'tcx, ty::Instance<'tcx>> {
// we don't care about the pointee type, we just want a pointer
let pointer_align = self.tcx.data_layout.pointer_align;
let pointer_size = self.tcx.data_layout.pointer_size.bits() as u8;
match self.read_ptr(vtable, pointer_align, self.tcx.mk_nil_ptr())? {
// some values don't need to call a drop impl, so the value is null
Value::Scalar(Scalar::Bits { bits: 0, defined} ) if defined == pointer_size => Ok(None),
Value::Scalar(Scalar::Ptr(drop_fn)) => self.memory.get_fn(drop_fn).map(Some),
_ => err!(ReadBytesAsPointer),
}
let drop_fn = self.memory.read_ptr_sized(vtable, pointer_align)?.unwrap_or_err()?.to_ptr()?;
self.memory.get_fn(drop_fn)
}
pub fn read_size_and_align_from_vtable(
@ -87,11 +82,11 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
) -> EvalResult<'tcx, (Size, Align)> {
let pointer_size = self.memory.pointer_size();
let pointer_align = self.tcx.data_layout.pointer_align;
let size = self.memory.read_ptr_sized(vtable.offset(pointer_size, self)?, pointer_align)?.to_bits(pointer_size)? as u64;
let size = self.memory.read_ptr_sized(vtable.offset(pointer_size, self)?, pointer_align)?.unwrap_or_err()?.to_bits(pointer_size)? as u64;
let align = self.memory.read_ptr_sized(
vtable.offset(pointer_size * 2, self)?,
pointer_align
)?.to_bits(pointer_size)? as u64;
)?.unwrap_or_err()?.to_bits(pointer_size)? as u64;
Ok((Size::from_bytes(size), Align::from_bytes(align, align).unwrap()))
}
}
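The vtable writer and both readers above agree on a single layout: slot 0 holds the drop-glue pointer, slot 1 the size, slot 2 the alignment, and slots 3 and up the trait methods, each one `pointer_size` apart. A sketch of just that offset arithmetic, assuming an 8-byte pointer:

// Sketch of the vtable slot offsets used above (pointer size assumed 8 bytes):
//   [ drop fn | size | align | method 0 | method 1 | ... ]
fn vtable_offsets(pointer_size: u64, num_methods: u64) -> Vec<(String, u64)> {
    let mut slots = vec![
        ("drop".to_string(), 0),
        ("size".to_string(), pointer_size),
        ("align".to_string(), pointer_size * 2),
    ];
    for i in 0..num_methods {
        slots.push((format!("method {}", i), pointer_size * (3 + i)));
    }
    slots
}

fn main() {
    for (name, offset) in vtable_offsets(8, 2) {
        println!("{:>8} at byte offset {}", name, offset);
    }
    // drop at 0, size at 8, align at 16, method 0 at 24, method 1 at 32
}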

View File

@ -193,7 +193,7 @@ use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc::hir::map as hir_map;
use rustc::hir::def_id::DefId;
use rustc::mir::interpret::{AllocId, ConstValue};
use rustc::mir::interpret::{AllocId, ConstValue, ScalarMaybeUndef};
use rustc::middle::lang_items::{ExchangeMallocFnLangItem, StartFnLangItem};
use rustc::ty::subst::Substs;
use rustc::ty::{self, TypeFoldable, Ty, TyCtxt, GenericParamDefKind};
@ -1264,11 +1264,11 @@ fn collect_const<'a, 'tcx>(
};
match val {
ConstValue::Unevaluated(..) => bug!("const eval yielded unevaluated const"),
ConstValue::ScalarPair(Scalar::Ptr(a), Scalar::Ptr(b)) => {
ConstValue::ScalarPair(Scalar::Ptr(a), ScalarMaybeUndef::Scalar(Scalar::Ptr(b))) => {
collect_miri(tcx, a.alloc_id, output);
collect_miri(tcx, b.alloc_id, output);
}
ConstValue::ScalarPair(_, Scalar::Ptr(ptr)) |
ConstValue::ScalarPair(_, ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr))) |
ConstValue::ScalarPair(Scalar::Ptr(ptr), _) |
ConstValue::Scalar(Scalar::Ptr(ptr)) =>
collect_miri(tcx, ptr.alloc_id, output),

View File

@ -17,7 +17,7 @@ use rustc::mir::{Constant, Location, Place, Mir, Operand, Rvalue, Local};
use rustc::mir::{NullOp, StatementKind, Statement, BasicBlock, LocalKind};
use rustc::mir::{TerminatorKind, ClearCrossCrate, SourceInfo, BinOp, ProjectionElem};
use rustc::mir::visit::{Visitor, PlaceContext};
use rustc::mir::interpret::{ConstEvalErr, EvalErrorKind};
use rustc::mir::interpret::{ConstEvalErr, EvalErrorKind, ScalarMaybeUndef};
use rustc::ty::{TyCtxt, self, Instance};
use rustc::mir::interpret::{Value, Scalar, GlobalId, EvalResult};
use interpret::EvalContext;
@ -368,8 +368,8 @@ impl<'b, 'a, 'tcx:'b> ConstPropagator<'b, 'a, 'tcx> {
type_size_of(self.tcx, self.param_env, ty).and_then(|n| Some((
Value::Scalar(Scalar::Bits {
bits: n as u128,
defined: self.tcx.data_layout.pointer_size.bits() as u8,
}),
size: self.tcx.data_layout.pointer_size.bytes() as u8,
}.into()),
self.tcx.layout_of(self.param_env.and(self.tcx.types.usize)).ok()?,
span,
)))
@ -390,8 +390,8 @@ impl<'b, 'a, 'tcx:'b> ConstPropagator<'b, 'a, 'tcx> {
let prim = self.use_ecx(source_info, |this| {
this.ecx.value_to_scalar(ValTy { value: val.0, ty: val.1.ty })
})?;
let val = self.use_ecx(source_info, |this| this.ecx.unary_op(op, prim, val.1.ty))?;
Some((Value::Scalar(val), place_layout, span))
let val = self.use_ecx(source_info, |this| this.ecx.unary_op(op, prim, val.1))?;
Some((Value::Scalar(val.into()), place_layout, span))
}
Rvalue::CheckedBinaryOp(op, ref left, ref right) |
Rvalue::BinaryOp(op, ref left, ref right) => {
@ -449,8 +449,8 @@ impl<'b, 'a, 'tcx:'b> ConstPropagator<'b, 'a, 'tcx> {
})?;
let val = if let Rvalue::CheckedBinaryOp(..) = *rvalue {
Value::ScalarPair(
val,
Scalar::from_bool(overflow),
val.into(),
Scalar::from_bool(overflow).into(),
)
} else {
if overflow {
@ -458,7 +458,7 @@ impl<'b, 'a, 'tcx:'b> ConstPropagator<'b, 'a, 'tcx> {
let _: Option<()> = self.use_ecx(source_info, |_| Err(err));
return None;
}
Value::Scalar(val)
Value::Scalar(val.into())
};
Some((val, place_layout, span))
},
@ -576,7 +576,7 @@ impl<'b, 'a, 'tcx> Visitor<'tcx> for ConstPropagator<'b, 'a, 'tcx> {
if let TerminatorKind::Assert { expected, msg, cond, .. } = kind {
if let Some(value) = self.eval_operand(cond, source_info) {
trace!("assertion on {:?} should be {:?}", value, expected);
if Value::Scalar(Scalar::from_bool(*expected)) != value.0 {
if Value::Scalar(Scalar::from_bool(*expected).into()) != value.0 {
// poison all places this operand references so that further code
// doesn't use the invalid value
match cond {
@ -613,14 +613,18 @@ impl<'b, 'a, 'tcx> Visitor<'tcx> for ConstPropagator<'b, 'a, 'tcx> {
.eval_operand(len, source_info)
.expect("len must be const");
let len = match len.0 {
Value::Scalar(Scalar::Bits { bits, ..}) => bits,
Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits {
bits, ..
})) => bits,
_ => bug!("const len not primitive: {:?}", len),
};
let index = self
.eval_operand(index, source_info)
.expect("index must be const");
let index = match index.0 {
Value::Scalar(Scalar::Bits { bits, .. }) => bits,
Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits {
bits, ..
})) => bits,
_ => bug!("const index not primitive: {:?}", index),
};
format!(

View File

@ -16,7 +16,7 @@ union Foo {
enum Bar {
Boo = [unsafe { Foo { b: () }.a }; 4][3],
//~^ ERROR constant evaluation of enum discriminant resulted in non-integer
//~^ ERROR could not evaluate enum discriminant
}
fn main() {

View File

@ -0,0 +1,16 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-pass
const PARSE_BOOL: Option<&'static str> = None;
static FOO: (Option<&str>, u32) = (PARSE_BOOL, 42);
fn main() {}

View File

@ -34,7 +34,7 @@ const fn read_field2() -> Field2 {
}
const fn read_field3() -> Field3 {
const FIELD3: Field3 = unsafe { UNION.field3 }; //~ ERROR exhibits undefined behavior
const FIELD3: Field3 = unsafe { UNION.field3 }; //~ ERROR cannot be used
FIELD3
}

View File

@ -1,11 +1,10 @@
error[E0080]: this constant likely exhibits undefined behavior
error: this constant cannot be used
--> $DIR/union-const-eval-field.rs:37:5
|
LL | const FIELD3: Field3 = unsafe { UNION.field3 }; //~ ERROR exhibits undefined behavior
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered undefined bytes
LL | const FIELD3: Field3 = unsafe { UNION.field3 }; //~ ERROR cannot be used
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ attempted to read undefined bytes
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rust compiler repository if you believe it should not be considered undefined behavior
= note: #[deny(const_err)] on by default
error: aborting due to previous error
For more information about this error, try `rustc --explain E0080`.

View File

@ -20,9 +20,9 @@ union DummyUnion {
const UNION: DummyUnion = DummyUnion { field1: 1065353216 };
const FIELD3: Field3 = unsafe { UNION.field3 }; //~ ERROR this constant likely exhibits undefined
const FIELD3: Field3 = unsafe { UNION.field3 }; //~ ERROR this constant cannot be used
const FIELD_PATH: Struct = Struct { //~ ERROR this constant likely exhibits undefined behavior
const FIELD_PATH: Struct = Struct { //~ ERROR this constant cannot be used
a: 42,
b: unsafe { UNION.field3 },
};

View File

@ -1,21 +1,19 @@
error[E0080]: this constant likely exhibits undefined behavior
error: this constant cannot be used
--> $DIR/union-ice.rs:23:1
|
LL | const FIELD3: Field3 = unsafe { UNION.field3 }; //~ ERROR this constant likely exhibits undefined
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered undefined bytes
LL | const FIELD3: Field3 = unsafe { UNION.field3 }; //~ ERROR this constant cannot be used
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ attempted to read undefined bytes
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rust compiler repository if you believe it should not be considered undefined behavior
= note: #[deny(const_err)] on by default
error[E0080]: this constant likely exhibits undefined behavior
error: this constant cannot be used
--> $DIR/union-ice.rs:25:1
|
LL | / const FIELD_PATH: Struct = Struct { //~ ERROR this constant likely exhibits undefined behavior
LL | / const FIELD_PATH: Struct = Struct { //~ ERROR this constant cannot be used
LL | | a: 42,
LL | | b: unsafe { UNION.field3 },
LL | | };
| |__^ type validation failed: encountered undefined bytes at .b
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rust compiler repository if you believe it should not be considered undefined behavior
| |__^ attempted to read undefined bytes
error[E0080]: this constant likely exhibits undefined behavior
--> $DIR/union-ice.rs:35:1