diff --git a/compiler/rustc_codegen_cranelift/src/constant.rs b/compiler/rustc_codegen_cranelift/src/constant.rs
index 148b66d959e..df1150ec0b8 100644
--- a/compiler/rustc_codegen_cranelift/src/constant.rs
+++ b/compiler/rustc_codegen_cranelift/src/constant.rs
@@ -128,7 +128,7 @@ pub(crate) fn codegen_const_value<'tcx>(
     ty: Ty<'tcx>,
 ) -> CValue<'tcx> {
     let layout = fx.layout_of(ty);
-    assert!(!layout.is_unsized(), "sized const value");
+    assert!(layout.is_sized(), "unsized const value");
 
     if layout.is_zst() {
         return CValue::by_ref(crate::Pointer::dangling(layout.align.pref), layout);
diff --git a/compiler/rustc_codegen_cranelift/src/value_and_place.rs b/compiler/rustc_codegen_cranelift/src/value_and_place.rs
index c3dfbd37279..c5bd574623d 100644
--- a/compiler/rustc_codegen_cranelift/src/value_and_place.rs
+++ b/compiler/rustc_codegen_cranelift/src/value_and_place.rs
@@ -19,7 +19,7 @@ fn codegen_field<'tcx>(
     };
 
     if let Some(extra) = extra {
-        if !field_layout.is_unsized() {
+        if field_layout.is_sized() {
             return simple(fx);
         }
         match field_layout.ty.kind() {
@@ -364,7 +364,7 @@ impl<'tcx> CPlace<'tcx> {
         fx: &mut FunctionCx<'_, '_, 'tcx>,
         layout: TyAndLayout<'tcx>,
     ) -> CPlace<'tcx> {
-        assert!(!layout.is_unsized());
+        assert!(layout.is_sized());
         if layout.size.bytes() == 0 {
             return CPlace {
                 inner: CPlaceInner::Addr(Pointer::dangling(layout.align.pref), None),
@@ -825,7 +825,7 @@ impl<'tcx> CPlace<'tcx> {
         fx: &FunctionCx<'_, '_, 'tcx>,
         variant: VariantIdx,
     ) -> Self {
-        assert!(!self.layout().is_unsized());
+        assert!(self.layout().is_sized());
         let layout = self.layout().for_variant(fx, variant);
         CPlace { inner: self.inner, layout }
     }
diff --git a/compiler/rustc_codegen_gcc/src/type_.rs b/compiler/rustc_codegen_gcc/src/type_.rs
index 862ed62c68b..bdf7318ce48 100644
--- a/compiler/rustc_codegen_gcc/src/type_.rs
+++ b/compiler/rustc_codegen_gcc/src/type_.rs
@@ -277,7 +277,7 @@ pub fn struct_fields<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, layout: TyAndLayout
         offset = target_offset + field.size;
         prev_effective_align = effective_field_align;
     }
-    if !layout.is_unsized() && field_count > 0 {
+    if layout.is_sized() && field_count > 0 {
         if offset > layout.size {
             bug!("layout: {:#?} stride: {:?} offset: {:?}", layout, layout.size, offset);
         }
diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/utils.rs b/compiler/rustc_codegen_llvm/src/debuginfo/utils.rs
index a40cfc8b23f..5cd0e1cb63a 100644
--- a/compiler/rustc_codegen_llvm/src/debuginfo/utils.rs
+++ b/compiler/rustc_codegen_llvm/src/debuginfo/utils.rs
@@ -72,7 +72,7 @@ pub(crate) fn fat_pointer_kind<'ll, 'tcx>(
         layout.is_unsized()
     );
 
-    if !layout.is_unsized() {
+    if layout.is_sized() {
         return None;
     }
 
diff --git a/compiler/rustc_codegen_llvm/src/type_of.rs b/compiler/rustc_codegen_llvm/src/type_of.rs
index dc1165835e7..182adf81785 100644
--- a/compiler/rustc_codegen_llvm/src/type_of.rs
+++ b/compiler/rustc_codegen_llvm/src/type_of.rs
@@ -140,7 +140,7 @@ fn struct_llfields<'a, 'tcx>(
         prev_effective_align = effective_field_align;
     }
     let padding_used = result.len() > field_count;
-    if !layout.is_unsized() && field_count > 0 {
+    if layout.is_sized() && field_count > 0 {
         if offset > layout.size {
             bug!("layout: {:#?} stride: {:?} offset: {:?}", layout, layout.size, offset);
         }
diff --git a/compiler/rustc_codegen_ssa/src/glue.rs b/compiler/rustc_codegen_ssa/src/glue.rs
index e6f402ef19d..6015d48deca 100644
--- a/compiler/rustc_codegen_ssa/src/glue.rs
+++ b/compiler/rustc_codegen_ssa/src/glue.rs
@@ -15,7 +15,7 @@ pub fn size_and_align_of_dst<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
 ) -> (Bx::Value, Bx::Value) {
     let layout = bx.layout_of(t);
     debug!("size_and_align_of_dst(ty={}, info={:?}): layout: {:?}", t, info, layout);
-    if !layout.is_unsized() {
+    if layout.is_sized() {
         let size = bx.const_usize(layout.size.bytes());
         let align = bx.const_usize(layout.align.abi.bytes());
         return (size, align);
diff --git a/compiler/rustc_codegen_ssa/src/mir/place.rs b/compiler/rustc_codegen_ssa/src/mir/place.rs
index 8c72d6d1fbe..90855538589 100644
--- a/compiler/rustc_codegen_ssa/src/mir/place.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/place.rs
@@ -29,7 +29,7 @@ pub struct PlaceRef<'tcx, V> {
 
 impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
     pub fn new_sized(llval: V, layout: TyAndLayout<'tcx>) -> PlaceRef<'tcx, V> {
-        assert!(!layout.is_unsized());
+        assert!(layout.is_sized());
         PlaceRef { llval, llextra: None, layout, align: layout.align.abi }
     }
 
@@ -38,7 +38,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
         layout: TyAndLayout<'tcx>,
         align: Align,
     ) -> PlaceRef<'tcx, V> {
-        assert!(!layout.is_unsized());
+        assert!(layout.is_sized());
         PlaceRef { llval, llextra: None, layout, align }
     }
 
@@ -48,7 +48,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
         bx: &mut Bx,
         layout: TyAndLayout<'tcx>,
     ) -> Self {
-        assert!(!layout.is_unsized(), "tried to statically allocate unsized place");
+        assert!(layout.is_sized(), "tried to statically allocate unsized place");
         let tmp = bx.alloca(bx.cx().backend_type(layout), layout.align.abi);
         Self::new_sized(tmp, layout)
     }
@@ -145,7 +145,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
                 );
                 return simple();
             }
-            _ if !field.is_unsized() => return simple(),
+            _ if field.is_sized() => return simple(),
             ty::Slice(..) | ty::Str | ty::Foreign(..) => return simple(),
             ty::Adt(def, _) => {
                 if def.repr().packed() {
diff --git a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs
index 1b1052fdf47..8f5e503d659 100644
--- a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs
+++ b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs
@@ -46,7 +46,7 @@ fn eval_body_using_ecx<'mir, 'tcx>(
         ecx.tcx.def_kind(cid.instance.def_id())
     );
     let layout = ecx.layout_of(body.bound_return_ty().subst(tcx, cid.instance.substs))?;
-    assert!(!layout.is_unsized());
+    assert!(layout.is_sized());
     let ret = ecx.allocate(layout, MemoryKind::Stack)?;
 
     trace!(
diff --git a/compiler/rustc_const_eval/src/interpret/eval_context.rs b/compiler/rustc_const_eval/src/interpret/eval_context.rs
index f7d64f6d4f4..ab82268dde3 100644
--- a/compiler/rustc_const_eval/src/interpret/eval_context.rs
+++ b/compiler/rustc_const_eval/src/interpret/eval_context.rs
@@ -572,7 +572,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         metadata: &MemPlaceMeta<M::Provenance>,
         layout: &TyAndLayout<'tcx>,
     ) -> InterpResult<'tcx, Option<(Size, Align)>> {
-        if !layout.is_unsized() {
+        if layout.is_sized() {
             return Ok(Some((layout.size, layout.align.abi)));
         }
         match layout.ty.kind() {
diff --git a/compiler/rustc_const_eval/src/interpret/intrinsics.rs b/compiler/rustc_const_eval/src/interpret/intrinsics.rs
index b92a6878847..e68456a1d73 100644
--- a/compiler/rustc_const_eval/src/interpret/intrinsics.rs
+++ b/compiler/rustc_const_eval/src/interpret/intrinsics.rs
@@ -713,7 +713,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         rhs: &OpTy<'tcx, <M as Machine<'mir, 'tcx>>::Provenance>,
     ) -> InterpResult<'tcx, Scalar<M::Provenance>> {
         let layout = self.layout_of(lhs.layout.ty.builtin_deref(true).unwrap().ty)?;
-        assert!(!layout.is_unsized());
+        assert!(layout.is_sized());
 
         let get_bytes = |this: &InterpCx<'mir, 'tcx, M>,
                          op: &OpTy<'tcx, <M as Machine<'mir, 'tcx>>::Provenance>,
diff --git a/compiler/rustc_const_eval/src/interpret/memory.rs b/compiler/rustc_const_eval/src/interpret/memory.rs
index e5e015c1e18..a529972db9d 100644
--- a/compiler/rustc_const_eval/src/interpret/memory.rs
+++ b/compiler/rustc_const_eval/src/interpret/memory.rs
@@ -683,7 +683,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                 // Use size and align of the type.
                 let ty = self.tcx.type_of(def_id);
                 let layout = self.tcx.layout_of(ParamEnv::empty().and(ty)).unwrap();
-                assert!(!layout.is_unsized());
+                assert!(layout.is_sized());
                 (layout.size, layout.align.abi, AllocKind::LiveData)
             }
             Some(GlobalAlloc::Memory(alloc)) => {
diff --git a/compiler/rustc_const_eval/src/interpret/operand.rs b/compiler/rustc_const_eval/src/interpret/operand.rs
index dd00678aa0c..274917f09fe 100644
--- a/compiler/rustc_const_eval/src/interpret/operand.rs
+++ b/compiler/rustc_const_eval/src/interpret/operand.rs
@@ -280,7 +280,7 @@ impl<'tcx, Prov: Provenance> OpTy<'tcx, Prov> {
         layout: TyAndLayout<'tcx>,
         cx: &impl HasDataLayout,
     ) -> InterpResult<'tcx, Self> {
-        assert!(!layout.is_unsized());
+        assert!(layout.is_sized());
         self.offset_with_meta(offset, MemPlaceMeta::None, layout, cx)
     }
 }
diff --git a/compiler/rustc_const_eval/src/interpret/place.rs b/compiler/rustc_const_eval/src/interpret/place.rs
index b0625b5f412..4d0125bf395 100644
--- a/compiler/rustc_const_eval/src/interpret/place.rs
+++ b/compiler/rustc_const_eval/src/interpret/place.rs
@@ -201,7 +201,7 @@ impl<'tcx, Prov: Provenance> MPlaceTy<'tcx, Prov> {
         layout: TyAndLayout<'tcx>,
         cx: &impl HasDataLayout,
     ) -> InterpResult<'tcx, Self> {
-        assert!(!layout.is_unsized());
+        assert!(layout.is_sized());
         self.offset_with_meta(offset, MemPlaceMeta::None, layout, cx)
     }
 
@@ -340,7 +340,7 @@ where
         &self,
         place: &MPlaceTy<'tcx, M::Provenance>,
     ) -> InterpResult<'tcx, Option<AllocRef<'_, 'tcx, M::Provenance, M::AllocExtra>>> {
-        assert!(!place.layout.is_unsized());
+        assert!(place.layout.is_sized());
         assert!(!place.meta.has_meta());
         let size = place.layout.size;
         self.get_ptr_alloc(place.ptr, size, place.align)
@@ -351,7 +351,7 @@ where
         &mut self,
         place: &MPlaceTy<'tcx, M::Provenance>,
     ) -> InterpResult<'tcx, Option<AllocRefMut<'_, 'tcx, M::Provenance, M::AllocExtra>>> {
-        assert!(!place.layout.is_unsized());
+        assert!(place.layout.is_sized());
         assert!(!place.meta.has_meta());
         let size = place.layout.size;
         self.get_ptr_alloc_mut(place.ptr, size, place.align)
@@ -485,7 +485,7 @@ where
         src: Immediate<M::Provenance>,
         dest: &PlaceTy<'tcx, M::Provenance>,
     ) -> InterpResult<'tcx> {
-        assert!(!dest.layout.is_unsized(), "Cannot write unsized data");
+        assert!(dest.layout.is_sized(), "Cannot write unsized data");
         trace!("write_immediate: {:?} <- {:?}: {}", *dest, src, dest.layout.ty);
 
         // See if we can avoid an allocation. This is the counterpart to `read_immediate_raw`,
@@ -746,7 +746,7 @@ where
         layout: TyAndLayout<'tcx>,
         kind: MemoryKind<M::MemoryKind>,
     ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
-        assert!(!layout.is_unsized());
+        assert!(layout.is_sized());
         let ptr = self.allocate_ptr(layout.size, layout.align.abi, kind)?;
         Ok(MPlaceTy::from_aligned_ptr(ptr.into(), layout))
     }
diff --git a/compiler/rustc_const_eval/src/interpret/step.rs b/compiler/rustc_const_eval/src/interpret/step.rs
index c6e04cbfb6b..3c286fa61be 100644
--- a/compiler/rustc_const_eval/src/interpret/step.rs
+++ b/compiler/rustc_const_eval/src/interpret/step.rs
@@ -209,7 +209,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
 
             Repeat(ref operand, _) => {
                 let src = self.eval_operand(operand, None)?;
-                assert!(!src.layout.is_unsized());
+                assert!(src.layout.is_sized());
                 let dest = self.force_allocation(&dest)?;
                 let length = dest.len(self)?;
 
diff --git a/compiler/rustc_const_eval/src/interpret/traits.rs b/compiler/rustc_const_eval/src/interpret/traits.rs
index cab23b7241f..fa15d466ac1 100644
--- a/compiler/rustc_const_eval/src/interpret/traits.rs
+++ b/compiler/rustc_const_eval/src/interpret/traits.rs
@@ -53,7 +53,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
     ) -> InterpResult<'tcx, (Size, Align)> {
         let (ty, _trait_ref) = self.get_ptr_vtable(vtable)?;
         let layout = self.layout_of(ty)?;
-        assert!(!layout.is_unsized(), "there are no vtables for unsized types");
+        assert!(layout.is_sized(), "there are no vtables for unsized types");
         Ok((layout.size, layout.align.abi))
     }
 }
diff --git a/compiler/rustc_middle/src/ty/vtable.rs b/compiler/rustc_middle/src/ty/vtable.rs
index 5ca51c25a9c..6eae94511e4 100644
--- a/compiler/rustc_middle/src/ty/vtable.rs
+++ b/compiler/rustc_middle/src/ty/vtable.rs
@@ -66,7 +66,7 @@ pub(super) fn vtable_allocation_provider<'tcx>(
     let layout = tcx
         .layout_of(ty::ParamEnv::reveal_all().and(ty))
         .expect("failed to build vtable representation");
-    assert!(!layout.is_unsized(), "can't create a vtable for an unsized type");
+    assert!(layout.is_sized(), "can't create a vtable for an unsized type");
     let size = layout.size.bytes();
     let align = layout.align.abi.bytes();
 
diff --git a/compiler/rustc_mir_transform/src/const_prop.rs b/compiler/rustc_mir_transform/src/const_prop.rs
index 4e451588845..4f30e8a0be0 100644
--- a/compiler/rustc_mir_transform/src/const_prop.rs
+++ b/compiler/rustc_mir_transform/src/const_prop.rs
@@ -385,7 +385,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
            // I don't know how return types can seem to be unsized but this happens in the
            // `type/type-unsatisfiable.rs` test.
            .filter(|ret_layout| {
-                !ret_layout.is_unsized() && ret_layout.size < Size::from_bytes(MAX_ALLOC_LIMIT)
+                ret_layout.is_sized() && ret_layout.size < Size::from_bytes(MAX_ALLOC_LIMIT)
            })
            .unwrap_or_else(|| ecx.layout_of(tcx.types.unit).unwrap());
 
diff --git a/compiler/rustc_mir_transform/src/const_prop_lint.rs b/compiler/rustc_mir_transform/src/const_prop_lint.rs
index 479c4e577d4..163446c52e4 100644
--- a/compiler/rustc_mir_transform/src/const_prop_lint.rs
+++ b/compiler/rustc_mir_transform/src/const_prop_lint.rs
@@ -199,7 +199,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
            // I don't know how return types can seem to be unsized but this happens in the
            // `type/type-unsatisfiable.rs` test.
            .filter(|ret_layout| {
-                !ret_layout.is_unsized() && ret_layout.size < Size::from_bytes(MAX_ALLOC_LIMIT)
+                ret_layout.is_sized() && ret_layout.size < Size::from_bytes(MAX_ALLOC_LIMIT)
            })
            .unwrap_or_else(|| ecx.layout_of(tcx.types.unit).unwrap());
 
diff --git a/compiler/rustc_target/src/abi/mod.rs b/compiler/rustc_target/src/abi/mod.rs
index 7171ca7bf89..decbefc2f7c 100644
--- a/compiler/rustc_target/src/abi/mod.rs
+++ b/compiler/rustc_target/src/abi/mod.rs
@@ -1083,6 +1083,11 @@ impl Abi {
         }
     }
 
+    #[inline]
+    pub fn is_sized(&self) -> bool {
+        !self.is_unsized()
+    }
+
     /// Returns `true` if this is a single signed integer scalar
     #[inline]
     pub fn is_signed(&self) -> bool {
@@ -1490,6 +1495,11 @@ impl<'a, Ty> TyAndLayout<'a, Ty> {
         self.abi.is_unsized()
     }
 
+    #[inline]
+    pub fn is_sized(&self) -> bool {
+        self.abi.is_sized()
+    }
+
     /// Returns `true` if the type is a ZST and not unsized.
     pub fn is_zst(&self) -> bool {
         match self.abi {
diff --git a/compiler/rustc_ty_utils/src/layout.rs b/compiler/rustc_ty_utils/src/layout.rs
index 52ba0eee97c..b59be0a0ea7 100644
--- a/compiler/rustc_ty_utils/src/layout.rs
+++ b/compiler/rustc_ty_utils/src/layout.rs
@@ -668,7 +668,7 @@ fn layout_of_uncached<'tcx>(
             let mut abi = Abi::Aggregate { sized: true };
             let index = VariantIdx::new(0);
             for field in &variants[index] {
-                assert!(!field.is_unsized());
+                assert!(field.is_sized());
                 align = align.max(field.align);
 
                 // If all non-ZST fields have the same ABI, forward this ABI
diff --git a/src/tools/miri/src/stacked_borrows/mod.rs b/src/tools/miri/src/stacked_borrows/mod.rs
index 7f18e5dbae0..9ce02a02ec4 100644
--- a/src/tools/miri/src/stacked_borrows/mod.rs
+++ b/src/tools/miri/src/stacked_borrows/mod.rs
@@ -1053,7 +1053,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
             // pointers we need to retag, so we can stop recursion early.
            // This optimization is crucial for ZSTs, because they can contain way more fields
            // than we can ever visit.
-            if !place.layout.is_unsized() && place.layout.size < self.ecx.pointer_size() {
+            if place.layout.is_sized() && place.layout.size < self.ecx.pointer_size() {
                 return Ok(());
             }