Add a distinct OperandValue::ZeroSized variant for ZSTs

ZSTs tend to have special handling in a bunch of places anyway, so a dedicated variant helps us remember that. I also think it's easier to grok than non-`Scalar` `Aggregate`s sometimes being `Immediate`s (which is exactly what I got wrong, causing issue 109992). As a minor bonus, it means we no longer need to generate poison LLVM values for ZSTs just to have something to pass around in an `OperandValue::Immediate`.
Scott McMurray 2023-05-07 03:00:41 -07:00
parent 9af3865dec
commit bf36193ef6
11 changed files with 157 additions and 76 deletions
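
In sketch form, the core change looks like this (a simplified rendition of the `OperandValue` enum touched below, not the literal definition; `V` is the backend value type, and the `Ref` payload shape matches the `Ref(ptr, extra, align)` uses visible in the hunks):

    // Simplified sketch; see the operand.rs hunks below for the real
    // definition and its full doc comments.
    enum OperandValue<V> {
        Ref(V, Option<V>, Align), // in memory: pointer, unsized metadata, alignment
        Immediate(V),             // a single backend immediate value
        Pair(V, V),               // two immediates, e.g. a fat pointer's parts
        ZeroSized,                // new: no bytes, so no backend value at all
    }

Previously a ZST operand was represented as `Immediate(const_poison(..))`; that is the representation this commit retires.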


@@ -758,7 +758,7 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
         assert_eq!(place.llextra.is_some(), place.layout.is_unsized());
         if place.layout.is_zst() {
-            return OperandRef::new_zst(self, place.layout);
+            return OperandRef::zero_sized(place.layout);
         }
         fn scalar_load_metadata<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>, load: RValue<'gcc>, scalar: &abi::Scalar) {


@@ -159,8 +159,7 @@ impl<'tcx> LayoutGccExt<'tcx> for TyAndLayout<'tcx> {
     fn is_gcc_immediate(&self) -> bool {
         match self.abi {
             Abi::Scalar(_) | Abi::Vector { .. } => true,
-            Abi::ScalarPair(..) => false,
-            Abi::Uninhabited | Abi::Aggregate { .. } => self.is_zst(),
+            Abi::ScalarPair(..) | Abi::Uninhabited | Abi::Aggregate { .. } => false,
         }
     }


@@ -486,7 +486,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         assert_eq!(place.llextra.is_some(), place.layout.is_unsized());
         if place.layout.is_zst() {
-            return OperandRef::new_zst(self, place.layout);
+            return OperandRef::zero_sized(place.layout);
         }
         #[instrument(level = "trace", skip(bx))]


@@ -198,8 +198,7 @@ impl<'tcx> LayoutLlvmExt<'tcx> for TyAndLayout<'tcx> {
     fn is_llvm_immediate(&self) -> bool {
         match self.abi {
             Abi::Scalar(_) | Abi::Vector { .. } => true,
-            Abi::ScalarPair(..) => false,
-            Abi::Uninhabited | Abi::Aggregate { .. } => self.is_zst(),
+            Abi::ScalarPair(..) | Abi::Uninhabited | Abi::Aggregate { .. } => false,
         }
     }

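Both backends get the same classification change: an `Aggregate` or `Uninhabited` layout is now never "immediate", even when zero-sized. Illustratively (the type here is hypothetical, not from the diff):

    // A unit struct has Abi::Aggregate { .. } and layout.is_zst() == true.
    struct Zst;

    // Before: is_llvm_immediate / is_gcc_immediate returned true for `Zst`,
    //         so it traveled as a poison OperandValue::Immediate.
    // After:  they return false, and `Zst` travels as OperandValue::ZeroSized.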

@@ -295,7 +295,7 @@ pub fn coerce_unsized_into<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
     let (base, info) = match bx.load_operand(src).val {
         OperandValue::Pair(base, info) => unsize_ptr(bx, base, src_ty, dst_ty, Some(info)),
         OperandValue::Immediate(base) => unsize_ptr(bx, base, src_ty, dst_ty, None),
-        OperandValue::Ref(..) => bug!(),
+        OperandValue::Ref(..) | OperandValue::ZeroSized => bug!(),
     };
     OperandValue::Pair(base, info).store(bx, dst);
 }


@@ -1,5 +1,5 @@
 use super::operand::OperandRef;
-use super::operand::OperandValue::{Immediate, Pair, Ref};
+use super::operand::OperandValue::{Immediate, Pair, Ref, ZeroSized};
 use super::place::PlaceRef;
 use super::{CachedLlbb, FunctionCx, LocalRef};
@@ -427,6 +427,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             assert_eq!(align, op.layout.align.abi, "return place is unaligned!");
             llval
         }
+        ZeroSized => bug!("ZST return value shouldn't be in PassMode::Cast"),
     };
     let ty = bx.cast_backend_type(cast_ty);
     let addr = bx.pointercast(llslot, bx.type_ptr_to(ty));
@@ -1386,6 +1387,16 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                 (llval, align, true)
             }
         }
+        ZeroSized => match arg.mode {
+            PassMode::Indirect { .. } => {
+                // Though `extern "Rust"` doesn't pass ZSTs, some ABIs pass
+                // a pointer for `repr(C)` structs even when empty, so get
+                // one from an `alloca` (which can be left uninitialized).
+                let scratch = PlaceRef::alloca(bx, arg.layout);
+                (scratch.llval, scratch.align, true)
+            }
+            _ => bug!("ZST {op:?} wasn't ignored, but was passed with abi {arg:?}"),
+        },
     };
     if by_ref && !arg.is_indirect() {

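The comment in that hunk is the subtle ABI point: a zero-sized argument can still require a pointer on some targets. A hedged illustration of the kind of signature that reaches the `PassMode::Indirect { .. }` arm (`Empty` and `takes_empty` are invented for this example):

    // Illustrative only: a ZST that a foreign ABI may still pass by pointer.
    #[repr(C)]
    pub struct Empty {}

    extern "C" {
        // On ABIs that pass empty structs indirectly, the caller must hand
        // over *some* valid pointer, hence the uninitialized `alloca`
        // scratch slot created in the hunk above.
        fn takes_empty(e: Empty);
    }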

@@ -352,6 +352,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             bx.set_var_name(a, &(name.clone() + ".0"));
             bx.set_var_name(b, &(name.clone() + ".1"));
         }
+        OperandValue::ZeroSized => {
+            // These never have a value to talk about
+        }
     },
     LocalRef::PendingOperand => {}


@@ -129,16 +129,13 @@ enum LocalRef<'tcx, V> {
     PendingOperand,
 }
-impl<'a, 'tcx, V: CodegenObject> LocalRef<'tcx, V> {
-    fn new_operand<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
-        bx: &mut Bx,
-        layout: TyAndLayout<'tcx>,
-    ) -> LocalRef<'tcx, V> {
+impl<'tcx, V: CodegenObject> LocalRef<'tcx, V> {
+    fn new_operand(layout: TyAndLayout<'tcx>) -> LocalRef<'tcx, V> {
         if layout.is_zst() {
             // Zero-size temporaries aren't always initialized, which
             // doesn't matter because they don't contain data, but
             // we need something in the operand.
-            LocalRef::Operand(OperandRef::new_zst(bx, layout))
+            LocalRef::Operand(OperandRef::zero_sized(layout))
         } else {
             LocalRef::PendingOperand
         }
@@ -249,7 +246,7 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
             }
         } else {
             debug!("alloc: {:?} -> operand", local);
-            LocalRef::new_operand(&mut start_bx, layout)
+            LocalRef::new_operand(layout)
         }
     };
@@ -355,7 +352,7 @@ fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
     let local = |op| LocalRef::Operand(op);
     match arg.mode {
         PassMode::Ignore => {
-            return local(OperandRef::new_zst(bx, arg.layout));
+            return local(OperandRef::zero_sized(arg.layout));
         }
         PassMode::Direct(_) => {
             let llarg = bx.get_param(llarg_idx);


@@ -45,6 +45,14 @@ pub enum OperandValue<V> {
     /// as returned by [`LayoutTypeMethods::scalar_pair_element_backend_type`]
     /// with `immediate: true`.
     Pair(V, V),
+    /// A value taking no bytes, and which therefore needs no LLVM value at all.
+    ///
+    /// If you ever need a `V` to pass to something, get a fresh poison value
+    /// from [`ConstMethods::const_poison`].
+    ///
+    /// An `OperandValue` *must* be this variant for any type for which
+    /// `is_zst` on its `Layout` returns `true`.
+    ZeroSized,
 }
 /// An `OperandRef` is an "SSA" reference to a Rust value, along with
@@ -71,15 +79,9 @@ impl<V: CodegenObject> fmt::Debug for OperandRef<'_, V> {
 }
 impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
-    pub fn new_zst<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
-        bx: &mut Bx,
-        layout: TyAndLayout<'tcx>,
-    ) -> OperandRef<'tcx, V> {
+    pub fn zero_sized(layout: TyAndLayout<'tcx>) -> OperandRef<'tcx, V> {
         assert!(layout.is_zst());
-        OperandRef {
-            val: OperandValue::Immediate(bx.const_poison(bx.immediate_backend_type(layout))),
-            layout,
-        }
+        OperandRef { val: OperandValue::ZeroSized, layout }
     }
     pub fn from_const<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
@@ -97,7 +99,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
         let llval = bx.scalar_to_backend(x, scalar, bx.immediate_backend_type(layout));
         OperandValue::Immediate(llval)
     }
-    ConstValue::ZeroSized => return OperandRef::new_zst(bx, layout),
+    ConstValue::ZeroSized => return OperandRef::zero_sized(layout),
     ConstValue::Slice { data, start, end } => {
         let Abi::ScalarPair(a_scalar, _) = layout.abi else {
             bug!("from_const: invalid ScalarPair layout: {:#?}", layout);
@@ -178,7 +180,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
         );
         OperandRef { val: OperandValue::Pair(a_val, b_val), layout }
     }
-    _ if layout.is_zst() => OperandRef::new_zst(bx, layout),
+    _ if layout.is_zst() => OperandRef::zero_sized(layout),
     _ => {
         // Neither a scalar nor scalar pair. Load from a place
         let init = bx.const_data_from_alloc(alloc);
@@ -216,6 +218,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
         OperandValue::Immediate(llptr) => (llptr, None),
         OperandValue::Pair(llptr, llextra) => (llptr, Some(llextra)),
         OperandValue::Ref(..) => bug!("Deref of by-Ref operand {:?}", self),
+        OperandValue::ZeroSized => bug!("Deref of ZST operand {:?}", self),
     };
     let layout = cx.layout_of(projected_ty);
     PlaceRef { llval: llptr, llextra, layout, align: layout.align.abi }
@@ -273,9 +276,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
     let mut val = match (self.val, self.layout.abi) {
         // If the field is ZST, it has no data.
-        _ if field.is_zst() => {
-            return OperandRef::new_zst(bx, field);
-        }
+        _ if field.is_zst() => OperandValue::ZeroSized,
         // Newtype of a scalar, scalar pair or vector.
         (OperandValue::Immediate(_) | OperandValue::Pair(..), _)
@@ -306,6 +307,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
     };
     match (&mut val, field.abi) {
+        (OperandValue::ZeroSized, _) => {}
         (
             OperandValue::Immediate(llval),
             Abi::Scalar(_) | Abi::ScalarPair(..) | Abi::Vector { .. },
@@ -359,8 +361,8 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
 impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
     /// Returns an `OperandValue` that's generally UB to use in any way.
     ///
-    /// Depending on the `layout`, returns an `Immediate` or `Pair` containing
-    /// poison value(s), or a `Ref` containing a poison pointer.
+    /// Depending on the `layout`, returns `ZeroSized` for ZSTs, an `Immediate` or
+    /// `Pair` containing poison value(s), or a `Ref` containing a poison pointer.
     ///
     /// Supports sized types only.
     pub fn poison<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
@@ -368,7 +370,9 @@ impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
         layout: TyAndLayout<'tcx>,
     ) -> OperandValue<V> {
         assert!(layout.is_sized());
-        if bx.cx().is_backend_immediate(layout) {
+        if layout.is_zst() {
+            OperandValue::ZeroSized
+        } else if bx.cx().is_backend_immediate(layout) {
             let ibty = bx.cx().immediate_backend_type(layout);
             OperandValue::Immediate(bx.const_poison(ibty))
         } else if bx.cx().is_backend_scalar_pair(layout) {
@@ -421,12 +425,11 @@ impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
         flags: MemFlags,
     ) {
         debug!("OperandRef::store: operand={:?}, dest={:?}", self, dest);
-        // Avoid generating stores of zero-sized values, because the only way to have a zero-sized
-        // value is through `undef`, and store itself is useless.
-        if dest.layout.is_zst() {
-            return;
-        }
         match self {
+            OperandValue::ZeroSized => {
+                // Avoid generating stores of zero-sized values, because the only way to have a zero-sized
+                // value is through `undef`/`poison`, and the store itself is useless.
+            }
             OperandValue::Ref(r, None, source_align) => {
                 if flags.contains(MemFlags::NONTEMPORAL) {
                     // HACK(nox): This is inefficient but there is no nontemporal memcpy.
@@ -527,7 +530,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
     // checks in `codegen_consume` and `extract_field`.
     let elem = o.layout.field(bx.cx(), 0);
     if elem.is_zst() {
-        o = OperandRef::new_zst(bx, elem);
+        o = OperandRef::zero_sized(elem);
     } else {
         return None;
     }
@@ -561,7 +564,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
     // ZSTs don't require any actual memory access.
     if layout.is_zst() {
-        return OperandRef::new_zst(bx, layout);
+        return OperandRef::zero_sized(layout);
     }
     if let Some(o) = self.maybe_codegen_consume_direct(bx, place_ref) {

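A consequence of the new variant, following its doc comment above: a consumer that genuinely cannot avoid producing a backend value for a ZST should mint a fresh poison value on the spot. A minimal sketch of such a consumer (`use_value`, `use_pair`, and `load_from` are hypothetical helpers; only `const_poison` and `immediate_backend_type` are real builder methods):

    match op.val {
        OperandValue::Immediate(v) => use_value(v),
        OperandValue::Pair(a, b) => use_pair(a, b),
        OperandValue::Ref(ptr, _extra, align) => load_from(ptr, align),
        OperandValue::ZeroSized => {
            // The value is never inspected: the type has no bytes. This is
            // what the retired `OperandRef::new_zst` constructed eagerly.
            let v = bx.const_poison(bx.immediate_backend_type(op.layout));
            use_value(v)
        }
    }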

@@ -70,6 +70,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         OperandValue::Ref(_, Some(_), _) => {
             bug!("unsized coercion on an unsized rvalue");
         }
+        OperandValue::ZeroSized => {
+            bug!("unsized coercion on a ZST rvalue");
+        }
     }
 }
@@ -165,11 +168,11 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
     }
     match src.val {
-        OperandValue::Ref(..) => {
+        OperandValue::Ref(..) | OperandValue::ZeroSized => {
             span_bug!(
                 self.mir.span,
                 "Operand path should have handled transmute \
-                 from `Ref` {src:?} to place {dst:?}"
+                 from {src:?} to place {dst:?}"
             );
         }
         OperandValue::Immediate(..) | OperandValue::Pair(..) => {
@@ -220,17 +223,22 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         let fake_place = PlaceRef::new_sized_aligned(cast_ptr, cast, align);
         Some(bx.load_operand(fake_place).val)
     }
+    OperandValue::ZeroSized => {
+        let OperandValueKind::ZeroSized = operand_kind else {
+            bug!("Found {operand_kind:?} for operand {operand:?}");
+        };
+        if let OperandValueKind::ZeroSized = cast_kind {
+            Some(OperandValue::ZeroSized)
+        } else {
+            None
+        }
+    }
     OperandValue::Immediate(imm) => {
         let OperandValueKind::Immediate(in_scalar) = operand_kind else {
            bug!("Found {operand_kind:?} for operand {operand:?}");
         };
-        if let OperandValueKind::Immediate(out_scalar) = cast_kind {
-            match (in_scalar, out_scalar) {
-                (ScalarOrZst::Zst, ScalarOrZst::Zst) => {
-                    Some(OperandRef::new_zst(bx, cast).val)
-                }
-                (ScalarOrZst::Scalar(in_scalar), ScalarOrZst::Scalar(out_scalar))
-                    if in_scalar.size(self.cx) == out_scalar.size(self.cx) =>
+        if let OperandValueKind::Immediate(out_scalar) = cast_kind
+            && in_scalar.size(self.cx) == out_scalar.size(self.cx)
         {
             let operand_bty = bx.backend_type(operand.layout);
             let cast_bty = bx.backend_type(cast);
@@ -242,9 +250,6 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             out_scalar,
             cast_bty,
         )))
-    }
-    _ => None,
-    }
     } else {
         None
     }
@@ -457,6 +462,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         OperandValue::Ref(..) => {
             bug!("by-ref operand {:?} in `codegen_rvalue_operand`", operand);
         }
+        OperandValue::ZeroSized => {
+            bug!("zero-sized operand {:?} in `codegen_rvalue_operand`", operand);
+        }
     };
     let (lldata, llextra) =
         base::unsize_ptr(bx, lldata, operand.layout.ty, cast.ty, llextra);
@@ -490,6 +498,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         OperandValue::Ref(_, _, _) => todo!(),
         OperandValue::Immediate(v) => (v, None),
         OperandValue::Pair(v, l) => (v, Some(l)),
+        OperandValue::ZeroSized => bug!("ZST -- which is not PointerLike -- in DynStar"),
     };
     let (lldata, llextra) =
         base::cast_to_dyn_star(bx, lldata, operand.layout, cast.ty, llextra);
@@ -718,7 +727,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
     // According to `rvalue_creates_operand`, only ZST
     // aggregate rvalues are allowed to be operands.
     let ty = rvalue.ty(self.mir, self.cx.tcx());
-    OperandRef::new_zst(bx, self.cx.layout_of(self.monomorphize(ty)))
+    OperandRef::zero_sized(self.cx.layout_of(self.monomorphize(ty)))
 }
 mir::Rvalue::ShallowInitBox(ref operand, content_ty) => {
     let operand = self.codegen_operand(bx, operand);
@@ -936,6 +945,12 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
     // Can always load from a pointer as needed
     (OperandValueKind::Ref, _) => true,
+    // ZST-to-ZST is the easiest thing ever
+    (OperandValueKind::ZeroSized, OperandValueKind::ZeroSized) => true,
+    // But if only one of them is a ZST the sizes can't match
+    (OperandValueKind::ZeroSized, _) | (_, OperandValueKind::ZeroSized) => false,
     // Need to generate an `alloc` to get a pointer from an immediate
     (OperandValueKind::Immediate(..) | OperandValueKind::Pair(..), OperandValueKind::Ref) => false,
@@ -979,12 +994,13 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
     /// Gets which variant of [`OperandValue`] is expected for a particular type.
     fn value_kind(&self, layout: TyAndLayout<'tcx>) -> OperandValueKind {
-        if self.cx.is_backend_immediate(layout) {
+        if layout.is_zst() {
+            OperandValueKind::ZeroSized
+        } else if self.cx.is_backend_immediate(layout) {
             debug_assert!(!self.cx.is_backend_scalar_pair(layout));
             OperandValueKind::Immediate(match layout.abi {
-                abi::Abi::Scalar(s) => ScalarOrZst::Scalar(s),
-                abi::Abi::Vector { element, .. } => ScalarOrZst::Scalar(element),
-                _ if layout.is_zst() => ScalarOrZst::Zst,
+                abi::Abi::Scalar(s) => s,
+                abi::Abi::Vector { element, .. } => element,
                 x => span_bug!(self.mir.span, "Couldn't translate {x:?} as backend immediate"),
             })
         } else if self.cx.is_backend_scalar_pair(layout) {
@@ -1007,21 +1023,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
 #[derive(Debug, Copy, Clone)]
 enum OperandValueKind {
     Ref,
-    Immediate(ScalarOrZst),
+    Immediate(abi::Scalar),
     Pair(abi::Scalar, abi::Scalar),
-}
-#[derive(Debug, Copy, Clone)]
-enum ScalarOrZst {
-    Zst,
-    Scalar(abi::Scalar),
-}
-impl ScalarOrZst {
-    pub fn size(self, cx: &impl abi::HasDataLayout) -> abi::Size {
-        match self {
-            ScalarOrZst::Zst => abi::Size::ZERO,
-            ScalarOrZst::Scalar(s) => s.size(cx),
-        }
-    }
+    ZeroSized,
 }

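The new `rvalue_creates_operand` arms encode a plain size argument: a ZST can transmute only to another ZST, since any non-ZST partner would already fail the size check. In ordinary Rust terms (not from the diff):

    // Fine: both sides have size 0, so the transmute size-checks.
    let _x: [u32; 0] = unsafe { std::mem::transmute(()) };

    // Rejected at compile time: sizes differ (0 bytes vs. 1 byte).
    // let _y: u8 = unsafe { std::mem::transmute(()) };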

@@ -14,10 +14,10 @@ use std::intrinsics::{transmute, transmute_unchecked};
 // Some of these need custom MIR to not get removed by MIR optimizations.
 use std::intrinsics::mir::*;
-enum Never {}
+pub enum ZstNever {}
 #[repr(align(2))]
-pub struct BigNever(Never, u16, Never);
+pub struct BigNever(ZstNever, u16, ZstNever);
 #[repr(align(8))]
 pub struct Scalar64(i64);
@@ -56,11 +56,13 @@ pub unsafe fn check_bigger_array(x: [u32; 3]) -> [u32; 7] {
     transmute_unchecked(x)
 }
-// CHECK-LABEL: @check_to_uninhabited(
+// CHECK-LABEL: @check_to_empty_array(
 #[no_mangle]
 #[custom_mir(dialect = "runtime", phase = "optimized")]
-pub unsafe fn check_to_uninhabited(x: u16) -> BigNever {
+pub unsafe fn check_to_empty_array(x: [u32; 5]) -> [u32; 0] {
+    // CHECK-NOT: trap
     // CHECK: call void @llvm.trap
+    // CHECK-NOT: trap
     mir!{
         {
             RET = CastTransmute(x);
@@ -69,6 +71,37 @@ pub unsafe fn check_to_uninhabited(x: u16) {
         }
     }
+// CHECK-LABEL: @check_from_empty_array(
+#[no_mangle]
+#[custom_mir(dialect = "runtime", phase = "optimized")]
+pub unsafe fn check_from_empty_array(x: [u32; 0]) -> [u32; 5] {
+    // CHECK-NOT: trap
+    // CHECK: call void @llvm.trap
+    // CHECK-NOT: trap
+    mir!{
+        {
+            RET = CastTransmute(x);
+            Return()
+        }
+    }
+}
+
+// CHECK-LABEL: @check_to_uninhabited(
+#[no_mangle]
+#[custom_mir(dialect = "runtime", phase = "optimized")]
+pub unsafe fn check_to_uninhabited(x: u16) {
+    // CHECK-NOT: trap
+    // CHECK: call void @llvm.trap
+    // CHECK-NOT: trap
+    mir!{
+        let temp: BigNever;
+        {
+            temp = CastTransmute(x);
+            Return()
+        }
+    }
+}
 // CHECK-LABEL: @check_from_uninhabited(
 #[no_mangle]
 #[custom_mir(dialect = "runtime", phase = "optimized")]
@@ -366,6 +399,40 @@ pub unsafe fn check_issue_109992(x: ()) -> [(); 1] {
     }
 }
+// CHECK-LABEL: @check_unit_to_never(
+#[no_mangle]
+#[custom_mir(dialect = "runtime", phase = "optimized")]
+pub unsafe fn check_unit_to_never(x: ()) {
+    // This uses custom MIR to avoid MIR optimizations having removed ZST ops.
+    // CHECK-NOT: trap
+    // CHECK: call void @llvm.trap
+    // CHECK-NOT: trap
+    mir!{
+        let temp: ZstNever;
+        {
+            temp = CastTransmute(x);
+            Return()
+        }
+    }
+}
+
+// CHECK-LABEL: @check_unit_from_never(
+#[no_mangle]
+#[custom_mir(dialect = "runtime", phase = "optimized")]
+pub unsafe fn check_unit_from_never(x: ZstNever) -> () {
+    // This uses custom MIR to avoid MIR optimizations having removed ZST ops.
+    // CHECK: start
+    // CHECK-NEXT: ret void
+    mir!{
+        {
+            RET = CastTransmute(x);
+            Return()
+        }
+    }
+}
+
 // CHECK-LABEL: @check_maybe_uninit_pair(i16 %x.0, i64 %x.1)
 #[no_mangle]
 pub unsafe fn check_maybe_uninit_pair(