don't allow ZST in ScalarInt

There are several indications that we should not allow ZST as a ScalarInt:
- We had two ways to have ZST valtrees, either an empty `Branch` or a `Leaf` with a ZST in it.
  `ValTree::zst()` used the former, but the latter could possibly arise as well.
- Likewise, the interpreter had `Immediate::Uninit` and `Immediate::Scalar(Scalar::ZST)`.
- LLVM codegen already had to special-case ZST ScalarInt.

So instead add new ZST variants to those types that did not have other variants
which could be used for this purpose.
This commit is contained in:
Ralf Jung 2022-07-03 11:17:23 -04:00
parent c4693bc946
commit a422b42159
21 changed files with 78 additions and 61 deletions

View File

@ -13,7 +13,6 @@ use rustc_codegen_ssa::traits::*;
use rustc_middle::bug; use rustc_middle::bug;
use rustc_middle::mir::interpret::{ConstAllocation, GlobalAlloc, Scalar}; use rustc_middle::mir::interpret::{ConstAllocation, GlobalAlloc, Scalar};
use rustc_middle::ty::layout::{LayoutOf, TyAndLayout}; use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
use rustc_middle::ty::ScalarInt;
use rustc_target::abi::{self, AddressSpace, HasDataLayout, Pointer, Size}; use rustc_target::abi::{self, AddressSpace, HasDataLayout, Pointer, Size};
use libc::{c_char, c_uint}; use libc::{c_char, c_uint};
@ -223,13 +222,13 @@ impl<'ll, 'tcx> ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> {
}) })
} }
fn zst_to_backend(&self, _llty: &'ll Type) -> &'ll Value {
self.const_undef(self.type_ix(0))
}
fn scalar_to_backend(&self, cv: Scalar, layout: abi::Scalar, llty: &'ll Type) -> &'ll Value { fn scalar_to_backend(&self, cv: Scalar, layout: abi::Scalar, llty: &'ll Type) -> &'ll Value {
let bitsize = if layout.is_bool() { 1 } else { layout.size(self).bits() }; let bitsize = if layout.is_bool() { 1 } else { layout.size(self).bits() };
match cv { match cv {
Scalar::Int(ScalarInt::ZST) => {
assert_eq!(0, layout.size(self).bytes());
self.const_undef(self.type_ix(0))
}
Scalar::Int(int) => { Scalar::Int(int) => {
let data = int.assert_bits(layout.size(self)); let data = int.assert_bits(layout.size(self));
let llval = self.const_uint_big(self.type_ix(bitsize), data); let llval = self.const_uint_big(self.type_ix(bitsize), data);

View File

@ -84,6 +84,10 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
let llval = bx.scalar_to_backend(x, scalar, bx.immediate_backend_type(layout)); let llval = bx.scalar_to_backend(x, scalar, bx.immediate_backend_type(layout));
OperandValue::Immediate(llval) OperandValue::Immediate(llval)
} }
ConstValue::ZST => {
let llval = bx.zst_to_backend(bx.immediate_backend_type(layout));
OperandValue::Immediate(llval)
}
ConstValue::Slice { data, start, end } => { ConstValue::Slice { data, start, end } => {
let Abi::ScalarPair(a_scalar, _) = layout.abi else { let Abi::ScalarPair(a_scalar, _) = layout.abi else {
bug!("from_const: invalid ScalarPair layout: {:#?}", layout); bug!("from_const: invalid ScalarPair layout: {:#?}", layout);

View File

@ -29,6 +29,7 @@ pub trait ConstMethods<'tcx>: BackendTypes {
fn const_data_from_alloc(&self, alloc: ConstAllocation<'tcx>) -> Self::Value; fn const_data_from_alloc(&self, alloc: ConstAllocation<'tcx>) -> Self::Value;
fn scalar_to_backend(&self, cv: Scalar, layout: abi::Scalar, llty: Self::Type) -> Self::Value; fn scalar_to_backend(&self, cv: Scalar, layout: abi::Scalar, llty: Self::Type) -> Self::Value;
fn zst_to_backend(&self, llty: Self::Type) -> Self::Value;
fn from_const_alloc( fn from_const_alloc(
&self, &self,
layout: TyAndLayout<'tcx>, layout: TyAndLayout<'tcx>,

View File

@ -2,7 +2,7 @@ use super::{CompileTimeEvalContext, CompileTimeInterpreter, ConstEvalErr};
use crate::interpret::eval_nullary_intrinsic; use crate::interpret::eval_nullary_intrinsic;
use crate::interpret::{ use crate::interpret::{
intern_const_alloc_recursive, Allocation, ConstAlloc, ConstValue, CtfeValidationMode, GlobalId, intern_const_alloc_recursive, Allocation, ConstAlloc, ConstValue, CtfeValidationMode, GlobalId,
Immediate, InternKind, InterpCx, InterpResult, MPlaceTy, MemoryKind, OpTy, RefTracking, Scalar, Immediate, InternKind, InterpCx, InterpResult, MPlaceTy, MemoryKind, OpTy, RefTracking,
ScalarMaybeUninit, StackPopCleanup, ScalarMaybeUninit, StackPopCleanup,
}; };
@ -157,7 +157,7 @@ pub(super) fn op_to_const<'tcx>(
"this MPlaceTy must come from a validated constant, thus we can assume the \ "this MPlaceTy must come from a validated constant, thus we can assume the \
alignment is correct", alignment is correct",
); );
ConstValue::Scalar(Scalar::ZST) ConstValue::ZST
} }
} }
}; };

View File

@ -272,7 +272,7 @@ pub fn valtree_to_const_value<'tcx>(
match ty.kind() { match ty.kind() {
ty::FnDef(..) => { ty::FnDef(..) => {
assert!(valtree.unwrap_branch().is_empty()); assert!(valtree.unwrap_branch().is_empty());
ConstValue::Scalar(Scalar::ZST) ConstValue::ZST
} }
ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char => match valtree { ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char => match valtree {
ty::ValTree::Leaf(scalar_int) => ConstValue::Scalar(Scalar::Int(scalar_int)), ty::ValTree::Leaf(scalar_int) => ConstValue::Scalar(Scalar::Int(scalar_int)),
@ -344,11 +344,7 @@ fn valtree_into_mplace<'tcx>(
match ty.kind() { match ty.kind() {
ty::FnDef(_, _) => { ty::FnDef(_, _) => {
ecx.write_immediate( ecx.write_immediate(Immediate::Uninit, &place.into()).unwrap();
Immediate::Scalar(ScalarMaybeUninit::Scalar(Scalar::ZST)),
&place.into(),
)
.unwrap();
} }
ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char => { ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char => {
let scalar_int = valtree.unwrap_leaf(); let scalar_int = valtree.unwrap_leaf();

View File

@ -297,8 +297,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let Some(alloc) = self.get_place_alloc(mplace)? else { let Some(alloc) = self.get_place_alloc(mplace)? else {
return Ok(Some(ImmTy { return Ok(Some(ImmTy {
// zero-sized type // zero-sized type can be left uninit
imm: Scalar::ZST.into(), imm: Immediate::Uninit,
layout: mplace.layout, layout: mplace.layout,
})); }));
}; };
@ -441,8 +441,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// This makes several assumptions about what layouts we will encounter; we match what // This makes several assumptions about what layouts we will encounter; we match what
// codegen does as good as we can (see `extract_field` in `rustc_codegen_ssa/src/mir/operand.rs`). // codegen does as good as we can (see `extract_field` in `rustc_codegen_ssa/src/mir/operand.rs`).
let field_val: Immediate<_> = match (*base, base.layout.abi) { let field_val: Immediate<_> = match (*base, base.layout.abi) {
// the field contains no information // the field contains no information, can be left uninit
_ if field_layout.is_zst() => Scalar::ZST.into(), _ if field_layout.is_zst() => Immediate::Uninit,
// the field covers the entire type // the field covers the entire type
_ if field_layout.size == base.layout.size => { _ if field_layout.size == base.layout.size => {
assert!(match (base.layout.abi, field_layout.abi) { assert!(match (base.layout.abi, field_layout.abi) {
@ -553,8 +553,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> { ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
let layout = self.layout_of_local(frame, local, layout)?; let layout = self.layout_of_local(frame, local, layout)?;
let op = if layout.is_zst() { let op = if layout.is_zst() {
// Do not read from ZST, they might not be initialized // Bypass `access_local` (helps in ConstProp)
Operand::Immediate(Scalar::ZST.into()) Operand::Immediate(Immediate::Uninit)
} else { } else {
*M::access_local(frame, local)? *M::access_local(frame, local)?
}; };
@ -709,6 +709,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
Operand::Indirect(MemPlace::from_ptr(ptr.into())) Operand::Indirect(MemPlace::from_ptr(ptr.into()))
} }
ConstValue::Scalar(x) => Operand::Immediate(tag_scalar(x)?.into()), ConstValue::Scalar(x) => Operand::Immediate(tag_scalar(x)?.into()),
ConstValue::ZST => Operand::Immediate(Immediate::Uninit),
ConstValue::Slice { data, start, end } => { ConstValue::Slice { data, start, end } => {
// We rely on mutability being set correctly in `data` to prevent writes // We rely on mutability being set correctly in `data` to prevent writes
// where none should happen. // where none should happen.

View File

@ -59,6 +59,7 @@
#![feature(drain_filter)] #![feature(drain_filter)]
#![feature(intra_doc_pointers)] #![feature(intra_doc_pointers)]
#![feature(yeet_expr)] #![feature(yeet_expr)]
#![feature(const_option)]
#![recursion_limit = "512"] #![recursion_limit = "512"]
#![allow(rustc::potential_query_instability)] #![allow(rustc::potential_query_instability)]

View File

@ -29,11 +29,14 @@ pub struct ConstAlloc<'tcx> {
#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, TyEncodable, TyDecodable, Hash)] #[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, TyEncodable, TyDecodable, Hash)]
#[derive(HashStable)] #[derive(HashStable)]
pub enum ConstValue<'tcx> { pub enum ConstValue<'tcx> {
/// Used only for types with `layout::abi::Scalar` ABI and ZSTs. /// Used only for types with `layout::abi::Scalar` ABI.
/// ///
/// Not using the enum `Value` to encode that this must not be `Uninit`. /// Not using the enum `Value` to encode that this must not be `Uninit`.
Scalar(Scalar), Scalar(Scalar),
/// Only used for ZSTs.
ZST,
/// Used only for `&[u8]` and `&str` /// Used only for `&[u8]` and `&str`
Slice { data: ConstAllocation<'tcx>, start: usize, end: usize }, Slice { data: ConstAllocation<'tcx>, start: usize, end: usize },
@ -55,6 +58,7 @@ impl<'a, 'tcx> Lift<'tcx> for ConstValue<'a> {
fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option<ConstValue<'tcx>> { fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option<ConstValue<'tcx>> {
Some(match self { Some(match self {
ConstValue::Scalar(s) => ConstValue::Scalar(s), ConstValue::Scalar(s) => ConstValue::Scalar(s),
ConstValue::ZST => ConstValue::ZST,
ConstValue::Slice { data, start, end } => { ConstValue::Slice { data, start, end } => {
ConstValue::Slice { data: tcx.lift(data)?, start, end } ConstValue::Slice { data: tcx.lift(data)?, start, end }
} }
@ -69,7 +73,7 @@ impl<'tcx> ConstValue<'tcx> {
#[inline] #[inline]
pub fn try_to_scalar(&self) -> Option<Scalar<AllocId>> { pub fn try_to_scalar(&self) -> Option<Scalar<AllocId>> {
match *self { match *self {
ConstValue::ByRef { .. } | ConstValue::Slice { .. } => None, ConstValue::ByRef { .. } | ConstValue::Slice { .. } | ConstValue::ZST => None,
ConstValue::Scalar(val) => Some(val), ConstValue::Scalar(val) => Some(val),
} }
} }
@ -111,10 +115,6 @@ impl<'tcx> ConstValue<'tcx> {
pub fn from_machine_usize(i: u64, cx: &impl HasDataLayout) -> Self { pub fn from_machine_usize(i: u64, cx: &impl HasDataLayout) -> Self {
ConstValue::Scalar(Scalar::from_machine_usize(i, cx)) ConstValue::Scalar(Scalar::from_machine_usize(i, cx))
} }
pub fn zst() -> Self {
Self::Scalar(Scalar::ZST)
}
} }
/// A `Scalar` represents an immediate, primitive value existing outside of a /// A `Scalar` represents an immediate, primitive value existing outside of a
@ -194,8 +194,6 @@ impl<Tag> From<ScalarInt> for Scalar<Tag> {
} }
impl<Tag> Scalar<Tag> { impl<Tag> Scalar<Tag> {
pub const ZST: Self = Scalar::Int(ScalarInt::ZST);
#[inline(always)] #[inline(always)]
pub fn from_pointer(ptr: Pointer<Tag>, cx: &impl HasDataLayout) -> Self { pub fn from_pointer(ptr: Pointer<Tag>, cx: &impl HasDataLayout) -> Self {
Scalar::Ptr(ptr, u8::try_from(cx.pointer_size().bytes()).unwrap()) Scalar::Ptr(ptr, u8::try_from(cx.pointer_size().bytes()).unwrap())

View File

@ -1711,7 +1711,7 @@ impl<'tcx> Operand<'tcx> {
Operand::Constant(Box::new(Constant { Operand::Constant(Box::new(Constant {
span, span,
user_ty: None, user_ty: None,
literal: ConstantKind::Val(ConstValue::zst(), ty), literal: ConstantKind::Val(ConstValue::ZST, ty),
})) }))
} }
@ -2196,7 +2196,7 @@ impl<'tcx> ConstantKind<'tcx> {
#[inline] #[inline]
pub fn zero_sized(ty: Ty<'tcx>) -> Self { pub fn zero_sized(ty: Ty<'tcx>) -> Self {
let cv = ConstValue::Scalar(Scalar::ZST); let cv = ConstValue::ZST;
Self::Val(cv, ty) Self::Val(cv, ty)
} }

View File

@ -449,6 +449,7 @@ impl<'tcx> Visitor<'tcx> for ExtraComments<'tcx> {
} }
let fmt_val = |val: &ConstValue<'tcx>| match val { let fmt_val = |val: &ConstValue<'tcx>| match val {
ConstValue::ZST => format!("ZST"),
ConstValue::Scalar(s) => format!("Scalar({:?})", s), ConstValue::Scalar(s) => format!("Scalar({:?})", s),
ConstValue::Slice { .. } => format!("Slice(..)"), ConstValue::Slice { .. } => format!("Slice(..)"),
ConstValue::ByRef { .. } => format!("ByRef(..)"), ConstValue::ByRef { .. } => format!("ByRef(..)"),
@ -679,6 +680,7 @@ pub fn write_allocations<'tcx>(
ConstValue::Scalar(interpret::Scalar::Int { .. }) => { ConstValue::Scalar(interpret::Scalar::Int { .. }) => {
Either::Left(Either::Right(std::iter::empty())) Either::Left(Either::Right(std::iter::empty()))
} }
ConstValue::ZST => Either::Left(Either::Right(std::iter::empty())),
ConstValue::ByRef { alloc, .. } | ConstValue::Slice { data: alloc, .. } => { ConstValue::ByRef { alloc, .. } | ConstValue::Slice { data: alloc, .. } => {
Either::Right(alloc_ids_from_alloc(alloc)) Either::Right(alloc_ids_from_alloc(alloc))
} }

View File

@ -419,6 +419,10 @@ pub enum ExprKind<'tcx> {
lit: ty::ScalarInt, lit: ty::ScalarInt,
user_ty: Option<Canonical<'tcx, UserType<'tcx>>>, user_ty: Option<Canonical<'tcx, UserType<'tcx>>>,
}, },
/// A literal of a ZST type.
ZstLiteral {
user_ty: Option<Canonical<'tcx, UserType<'tcx>>>,
},
/// Associated constants and named constants /// Associated constants and named constants
NamedConst { NamedConst {
def_id: DefId, def_id: DefId,
@ -456,7 +460,7 @@ pub enum ExprKind<'tcx> {
impl<'tcx> ExprKind<'tcx> { impl<'tcx> ExprKind<'tcx> {
pub fn zero_sized_literal(user_ty: Option<Canonical<'tcx, UserType<'tcx>>>) -> Self { pub fn zero_sized_literal(user_ty: Option<Canonical<'tcx, UserType<'tcx>>>) -> Self {
ExprKind::NonHirLiteral { lit: ty::ScalarInt::ZST, user_ty } ExprKind::ZstLiteral { user_ty }
} }
} }

View File

@ -129,6 +129,7 @@ pub fn walk_expr<'a, 'tcx: 'a, V: Visitor<'a, 'tcx>>(visitor: &mut V, expr: &Exp
Closure { closure_id: _, substs: _, upvars: _, movability: _, fake_reads: _ } => {} Closure { closure_id: _, substs: _, upvars: _, movability: _, fake_reads: _ } => {}
Literal { lit: _, neg: _ } => {} Literal { lit: _, neg: _ } => {}
NonHirLiteral { lit: _, user_ty: _ } => {} NonHirLiteral { lit: _, user_ty: _ } => {}
ZstLiteral { user_ty: _ } => {}
NamedConst { def_id: _, substs: _, user_ty: _ } => {} NamedConst { def_id: _, substs: _, user_ty: _ } => {}
ConstParam { param: _, def_id: _ } => {} ConstParam { param: _, def_id: _ } => {}
StaticRef { alloc_id: _, ty: _, def_id: _ } => {} StaticRef { alloc_id: _, ty: _, def_id: _ } => {}

View File

@ -4,6 +4,7 @@ use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
use rustc_target::abi::Size; use rustc_target::abi::Size;
use std::convert::{TryFrom, TryInto}; use std::convert::{TryFrom, TryInto};
use std::fmt; use std::fmt;
use std::num::NonZeroU8;
use crate::ty::TyCtxt; use crate::ty::TyCtxt;
@ -123,7 +124,7 @@ pub struct ScalarInt {
/// The first `size` bytes of `data` are the value. /// The first `size` bytes of `data` are the value.
/// Do not try to read less or more bytes than that. The remaining bytes must be 0. /// Do not try to read less or more bytes than that. The remaining bytes must be 0.
data: u128, data: u128,
size: u8, size: NonZeroU8,
} }
// Cannot derive these, as the derives take references to the fields, and we // Cannot derive these, as the derives take references to the fields, and we
@ -135,33 +136,31 @@ impl<CTX> crate::ty::HashStable<CTX> for ScalarInt {
// Since `Self` is a packed struct, that would create a possibly unaligned reference, // Since `Self` is a packed struct, that would create a possibly unaligned reference,
// which is UB. // which is UB.
{ self.data }.hash_stable(hcx, hasher); { self.data }.hash_stable(hcx, hasher);
self.size.hash_stable(hcx, hasher); self.size.get().hash_stable(hcx, hasher);
} }
} }
impl<S: Encoder> Encodable<S> for ScalarInt { impl<S: Encoder> Encodable<S> for ScalarInt {
fn encode(&self, s: &mut S) { fn encode(&self, s: &mut S) {
s.emit_u128(self.data); s.emit_u128(self.data);
s.emit_u8(self.size); s.emit_u8(self.size.get());
} }
} }
impl<D: Decoder> Decodable<D> for ScalarInt { impl<D: Decoder> Decodable<D> for ScalarInt {
fn decode(d: &mut D) -> ScalarInt { fn decode(d: &mut D) -> ScalarInt {
ScalarInt { data: d.read_u128(), size: d.read_u8() } ScalarInt { data: d.read_u128(), size: NonZeroU8::new(d.read_u8()).unwrap() }
} }
} }
impl ScalarInt { impl ScalarInt {
pub const TRUE: ScalarInt = ScalarInt { data: 1_u128, size: 1 }; pub const TRUE: ScalarInt = ScalarInt { data: 1_u128, size: NonZeroU8::new(1).unwrap() };
pub const FALSE: ScalarInt = ScalarInt { data: 0_u128, size: 1 }; pub const FALSE: ScalarInt = ScalarInt { data: 0_u128, size: NonZeroU8::new(1).unwrap() };
pub const ZST: ScalarInt = ScalarInt { data: 0_u128, size: 0 };
#[inline] #[inline]
pub fn size(self) -> Size { pub fn size(self) -> Size {
Size::from_bytes(self.size) Size::from_bytes(self.size.get())
} }
/// Make sure the `data` fits in `size`. /// Make sure the `data` fits in `size`.
@ -185,7 +184,7 @@ impl ScalarInt {
#[inline] #[inline]
pub fn null(size: Size) -> Self { pub fn null(size: Size) -> Self {
Self { data: 0, size: size.bytes() as u8 } Self { data: 0, size: NonZeroU8::new(size.bytes() as u8).unwrap() }
} }
#[inline] #[inline]
@ -197,7 +196,7 @@ impl ScalarInt {
pub fn try_from_uint(i: impl Into<u128>, size: Size) -> Option<Self> { pub fn try_from_uint(i: impl Into<u128>, size: Size) -> Option<Self> {
let data = i.into(); let data = i.into();
if size.truncate(data) == data { if size.truncate(data) == data {
Some(Self { data, size: size.bytes() as u8 }) Some(Self { data, size: NonZeroU8::new(size.bytes() as u8).unwrap() })
} else { } else {
None None
} }
@ -209,7 +208,7 @@ impl ScalarInt {
// `into` performed sign extension, we have to truncate // `into` performed sign extension, we have to truncate
let truncated = size.truncate(i as u128); let truncated = size.truncate(i as u128);
if size.sign_extend(truncated) as i128 == i { if size.sign_extend(truncated) as i128 == i {
Some(Self { data: truncated, size: size.bytes() as u8 }) Some(Self { data: truncated, size: NonZeroU8::new(size.bytes() as u8).unwrap() })
} else { } else {
None None
} }
@ -225,7 +224,7 @@ impl ScalarInt {
#[inline] #[inline]
pub fn to_bits(self, target_size: Size) -> Result<u128, Size> { pub fn to_bits(self, target_size: Size) -> Result<u128, Size> {
assert_ne!(target_size.bytes(), 0, "you should never look at the bits of a ZST"); assert_ne!(target_size.bytes(), 0, "you should never look at the bits of a ZST");
if target_size.bytes() == u64::from(self.size) { if target_size.bytes() == u64::from(self.size.get()) {
self.check_data(); self.check_data();
Ok(self.data) Ok(self.data)
} else { } else {
@ -339,7 +338,7 @@ macro_rules! from {
fn from(u: $ty) -> Self { fn from(u: $ty) -> Self {
Self { Self {
data: u128::from(u), data: u128::from(u),
size: std::mem::size_of::<$ty>() as u8, size: NonZeroU8::new(std::mem::size_of::<$ty>() as u8).unwrap(),
} }
} }
} }
@ -382,7 +381,7 @@ impl TryFrom<ScalarInt> for bool {
impl From<char> for ScalarInt { impl From<char> for ScalarInt {
#[inline] #[inline]
fn from(c: char) -> Self { fn from(c: char) -> Self {
Self { data: c as u128, size: std::mem::size_of::<char>() as u8 } Self { data: c as u128, size: NonZeroU8::new(std::mem::size_of::<char>() as u8).unwrap() }
} }
} }
@ -409,7 +408,7 @@ impl From<Single> for ScalarInt {
#[inline] #[inline]
fn from(f: Single) -> Self { fn from(f: Single) -> Self {
// We trust apfloat to give us properly truncated data. // We trust apfloat to give us properly truncated data.
Self { data: f.to_bits(), size: 4 } Self { data: f.to_bits(), size: NonZeroU8::new((Single::BITS / 8) as u8).unwrap() }
} }
} }
@ -425,7 +424,7 @@ impl From<Double> for ScalarInt {
#[inline] #[inline]
fn from(f: Double) -> Self { fn from(f: Double) -> Self {
// We trust apfloat to give us properly truncated data. // We trust apfloat to give us properly truncated data.
Self { data: f.to_bits(), size: 8 } Self { data: f.to_bits(), size: NonZeroU8::new((Double::BITS / 8) as u8).unwrap() }
} }
} }
@ -439,13 +438,8 @@ impl TryFrom<ScalarInt> for Double {
impl fmt::Debug for ScalarInt { impl fmt::Debug for ScalarInt {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if self.size == 0 { // Dispatch to LowerHex below.
self.check_data(); write!(f, "0x{:x}", self)
write!(f, "<ZST>")
} else {
// Dispatch to LowerHex below.
write!(f, "0x{:x}", self)
}
} }
} }
@ -463,7 +457,7 @@ impl fmt::LowerHex for ScalarInt {
// would thus borrow `self.data`. Since `Self` // would thus borrow `self.data`. Since `Self`
// is a packed struct, that would create a possibly unaligned reference, which // is a packed struct, that would create a possibly unaligned reference, which
// is UB. // is UB.
write!(f, "{:01$x}", { self.data }, self.size as usize * 2) write!(f, "{:01$x}", { self.data }, self.size.get() as usize * 2)
} }
} }
@ -477,7 +471,7 @@ impl fmt::UpperHex for ScalarInt {
// would thus borrow `self.data`. Since `Self` // would thus borrow `self.data`. Since `Self`
// is a packed struct, that would create a possibly unaligned reference, which // is a packed struct, that would create a possibly unaligned reference, which
// is UB. // is UB.
write!(f, "{:01$X}", { self.data }, self.size as usize * 2) write!(f, "{:01$X}", { self.data }, self.size.get() as usize * 2)
} }
} }

View File

@ -1355,10 +1355,6 @@ pub trait PrettyPrinter<'tcx>:
" as ", " as ",
)?; )?;
} }
// For function type zsts just printing the path is enough
ty::FnDef(d, s) if int == ScalarInt::ZST => {
p!(print_value_path(*d, s))
}
// Nontrivial types with scalar bit representation // Nontrivial types with scalar bit representation
_ => { _ => {
let print = |mut this: Self| { let print = |mut this: Self| {

View File

@ -49,11 +49,22 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
inferred_ty: ty, inferred_ty: ty,
}) })
}); });
let literal = ConstantKind::Val(ConstValue::Scalar(Scalar::Int(lit)), ty); let literal = ConstantKind::Val(ConstValue::Scalar(Scalar::Int(lit)), ty);
Constant { span, user_ty: user_ty, literal } Constant { span, user_ty: user_ty, literal }
} }
ExprKind::ZstLiteral { user_ty } => {
let user_ty = user_ty.map(|user_ty| {
this.canonical_user_type_annotations.push(CanonicalUserTypeAnnotation {
span,
user_ty,
inferred_ty: ty,
})
});
let literal = ConstantKind::Val(ConstValue::ZST, ty);
Constant { span, user_ty: user_ty, literal }
}
ExprKind::NamedConst { def_id, substs, user_ty } => { ExprKind::NamedConst { def_id, substs, user_ty } => {
let user_ty = user_ty.map(|user_ty| { let user_ty = user_ty.map(|user_ty| {
this.canonical_user_type_annotations.push(CanonicalUserTypeAnnotation { this.canonical_user_type_annotations.push(CanonicalUserTypeAnnotation {

View File

@ -603,6 +603,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
| ExprKind::Literal { .. } | ExprKind::Literal { .. }
| ExprKind::NamedConst { .. } | ExprKind::NamedConst { .. }
| ExprKind::NonHirLiteral { .. } | ExprKind::NonHirLiteral { .. }
| ExprKind::ZstLiteral { .. }
| ExprKind::ConstParam { .. } | ExprKind::ConstParam { .. }
| ExprKind::ConstBlock { .. } | ExprKind::ConstBlock { .. }
| ExprKind::StaticRef { .. } | ExprKind::StaticRef { .. }

View File

@ -415,6 +415,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
ExprKind::Literal { .. } ExprKind::Literal { .. }
| ExprKind::NamedConst { .. } | ExprKind::NamedConst { .. }
| ExprKind::NonHirLiteral { .. } | ExprKind::NonHirLiteral { .. }
| ExprKind::ZstLiteral { .. }
| ExprKind::ConstParam { .. } | ExprKind::ConstParam { .. }
| ExprKind::ConstBlock { .. } | ExprKind::ConstBlock { .. }
| ExprKind::StaticRef { .. } => { | ExprKind::StaticRef { .. } => {

View File

@ -72,6 +72,7 @@ impl Category {
ExprKind::ConstBlock { .. } ExprKind::ConstBlock { .. }
| ExprKind::Literal { .. } | ExprKind::Literal { .. }
| ExprKind::NonHirLiteral { .. } | ExprKind::NonHirLiteral { .. }
| ExprKind::ZstLiteral { .. }
| ExprKind::ConstParam { .. } | ExprKind::ConstParam { .. }
| ExprKind::StaticRef { .. } | ExprKind::StaticRef { .. }
| ExprKind::NamedConst { .. } => Some(Category::Constant), | ExprKind::NamedConst { .. } => Some(Category::Constant),

View File

@ -559,6 +559,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
| ExprKind::Literal { .. } | ExprKind::Literal { .. }
| ExprKind::NamedConst { .. } | ExprKind::NamedConst { .. }
| ExprKind::NonHirLiteral { .. } | ExprKind::NonHirLiteral { .. }
| ExprKind::ZstLiteral { .. }
| ExprKind::ConstParam { .. } | ExprKind::ConstParam { .. }
| ExprKind::ThreadLocalRef(_) | ExprKind::ThreadLocalRef(_)
| ExprKind::StaticRef { .. } => { | ExprKind::StaticRef { .. } => {

View File

@ -307,6 +307,7 @@ impl<'a, 'tcx> Visitor<'a, 'tcx> for UnsafetyVisitor<'a, 'tcx> {
| ExprKind::Literal { .. } | ExprKind::Literal { .. }
| ExprKind::NamedConst { .. } | ExprKind::NamedConst { .. }
| ExprKind::NonHirLiteral { .. } | ExprKind::NonHirLiteral { .. }
| ExprKind::ZstLiteral { .. }
| ExprKind::ConstParam { .. } | ExprKind::ConstParam { .. }
| ExprKind::ConstBlock { .. } | ExprKind::ConstBlock { .. }
| ExprKind::Deref { .. } | ExprKind::Deref { .. }

View File

@ -451,6 +451,10 @@ impl<'a, 'tcx> AbstractConstBuilder<'a, 'tcx> {
let val = ty::ValTree::from_scalar_int(lit); let val = ty::ValTree::from_scalar_int(lit);
self.nodes.push(Node::Leaf(ty::Const::from_value(self.tcx, val, node.ty))) self.nodes.push(Node::Leaf(ty::Const::from_value(self.tcx, val, node.ty)))
} }
&ExprKind::ZstLiteral { user_ty: _ } => {
let val = ty::ValTree::zst();
self.nodes.push(Node::Leaf(ty::Const::from_value(self.tcx, val, node.ty)))
}
&ExprKind::NamedConst { def_id, substs, user_ty: _ } => { &ExprKind::NamedConst { def_id, substs, user_ty: _ } => {
let uneval = ty::Unevaluated::new(ty::WithOptConstParam::unknown(def_id), substs); let uneval = ty::Unevaluated::new(ty::WithOptConstParam::unknown(def_id), substs);