Auto merge of #102169 - scottmcm:constify-some-conditions, r=thomcc

Make ZST checks in core/alloc more readable

There's a bunch of these checks because of special handling for ZSTs in various unsafe implementations of stuff.

This lets them be `T::IS_ZST` instead of `mem::size_of::<T>() == 0` every time, making them both more readable and more terse.

*Not* proposed for stabilization.  Would be `pub(crate)` except `alloc` wants to use it too.

(And while it doesn't matter now, if we ever get something like #85836, making it a `const` can help keep codegen simple.)
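
To make the new spelling concrete, here's a minimal, self-contained sketch of the before/after (using a stand-in trait, since the real `SizedTypeProperties` is unstable; `capacity_before`/`capacity_after` are illustrative names, not library functions):

```rust
use std::mem;

// Stand-in mirroring the unstable trait this PR adds to `core::mem`.
trait SizedTypeProperties: Sized {
    const IS_ZST: bool = mem::size_of::<Self>() == 0;
}
impl<T> SizedTypeProperties for T {}

// Old spelling, repeated at every call site:
fn capacity_before<T>(cap: usize) -> usize {
    if mem::size_of::<T>() == 0 { usize::MAX } else { cap }
}

// New spelling reads as a property of the type:
fn capacity_after<T>(cap: usize) -> usize {
    if T::IS_ZST { usize::MAX } else { cap }
}

fn main() {
    assert_eq!(capacity_before::<()>(4), usize::MAX);
    assert_eq!(capacity_after::<()>(4), usize::MAX);
    assert_eq!(capacity_after::<u32>(4), 4);
}
```

The two capacity functions mirror the `RawVec::capacity` change in the diff below.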
bors 2022-09-25 01:20:11 +00:00
commit e58621a4a3
14 changed files with 94 additions and 54 deletions


@@ -12,11 +12,17 @@ use core::fmt;
use core::hash::{Hash, Hasher};
use core::iter::{repeat_with, FromIterator};
use core::marker::PhantomData;
-use core::mem::{self, ManuallyDrop, MaybeUninit};
+use core::mem::{ManuallyDrop, MaybeUninit, SizedTypeProperties};
use core::ops::{Index, IndexMut, Range, RangeBounds};
use core::ptr::{self, NonNull};
use core::slice;
+// This is used in a bunch of intra-doc links.
+// FIXME: For some reason, `#[cfg(doc)]` wasn't sufficient, resulting in
+// failures in linkchecker even though rustdoc built the docs just fine.
+#[allow(unused_imports)]
+use core::mem;
use crate::alloc::{Allocator, Global};
use crate::collections::TryReserveError;
use crate::collections::TryReserveErrorKind;
@@ -177,7 +183,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// Marginally more convenient
#[inline]
fn cap(&self) -> usize {
-if mem::size_of::<T>() == 0 {
+if T::IS_ZST {
// For zero sized types, we are always at maximum capacity
MAXIMUM_ZST_CAPACITY
} else {
@@ -3038,7 +3044,7 @@ impl<T, A: Allocator> From<Vec<T, A>> for VecDeque<T, A> {
/// `Vec<T>` came from `From<VecDeque<T>>` and hasn't been reallocated.
fn from(mut other: Vec<T, A>) -> Self {
let len = other.len();
-if mem::size_of::<T>() == 0 {
+if T::IS_ZST {
// There's no actual allocation for ZSTs to worry about capacity,
// but `VecDeque` can't handle as much length as `Vec`.
assert!(len < MAXIMUM_ZST_CAPACITY, "capacity overflow");
@@ -3124,7 +3130,7 @@ impl<T, const N: usize> From<[T; N]> for VecDeque<T> {
fn from(arr: [T; N]) -> Self {
let mut deq = VecDeque::with_capacity(N);
let arr = ManuallyDrop::new(arr);
-if mem::size_of::<T>() != 0 {
+if !<T>::IS_ZST {
// SAFETY: VecDeque::with_capacity ensures that there is enough capacity.
unsafe {
ptr::copy_nonoverlapping(arr.as_ptr(), deq.ptr(), N);


@@ -139,6 +139,7 @@
#![feature(receiver_trait)]
#![feature(saturating_int_impl)]
#![feature(set_ptr_value)]
+#![feature(sized_type_properties)]
#![feature(slice_from_ptr_range)]
#![feature(slice_group_by)]
#![feature(slice_ptr_get)]


@@ -3,7 +3,7 @@
use core::alloc::LayoutError;
use core::cmp;
use core::intrinsics;
-use core::mem::{self, ManuallyDrop, MaybeUninit};
+use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties};
use core::ops::Drop;
use core::ptr::{self, NonNull, Unique};
use core::slice;
@@ -168,7 +168,7 @@ impl<T, A: Allocator> RawVec<T, A> {
#[cfg(not(no_global_oom_handling))]
fn allocate_in(capacity: usize, init: AllocInit, alloc: A) -> Self {
// Don't allocate here because `Drop` will not deallocate when `capacity` is 0.
-if mem::size_of::<T>() == 0 || capacity == 0 {
+if T::IS_ZST || capacity == 0 {
Self::new_in(alloc)
} else {
// We avoid `unwrap_or_else` here because it bloats the amount of
@@ -229,7 +229,7 @@ impl<T, A: Allocator> RawVec<T, A> {
/// This will always be `usize::MAX` if `T` is zero-sized.
#[inline(always)]
pub fn capacity(&self) -> usize {
-if mem::size_of::<T>() == 0 { usize::MAX } else { self.cap }
+if T::IS_ZST { usize::MAX } else { self.cap }
}
/// Returns a shared reference to the allocator backing this `RawVec`.
@@ -238,7 +238,7 @@ impl<T, A: Allocator> RawVec<T, A> {
}
fn current_memory(&self) -> Option<(NonNull<u8>, Layout)> {
-if mem::size_of::<T>() == 0 || self.cap == 0 {
+if T::IS_ZST || self.cap == 0 {
None
} else {
// We have an allocated chunk of memory, so we can bypass runtime
@@ -380,7 +380,7 @@ impl<T, A: Allocator> RawVec<T, A> {
// This is ensured by the calling contexts.
debug_assert!(additional > 0);
-if mem::size_of::<T>() == 0 {
+if T::IS_ZST {
// Since we return a capacity of `usize::MAX` when `elem_size` is
// 0, getting to here necessarily means the `RawVec` is overfull.
return Err(CapacityOverflow.into());
@@ -406,7 +406,7 @@ impl<T, A: Allocator> RawVec<T, A> {
// `grow_amortized`, but this method is usually instantiated less often so
// it's less critical.
fn grow_exact(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> {
-if mem::size_of::<T>() == 0 {
+if T::IS_ZST {
// Since we return a capacity of `usize::MAX` when the type size is
// 0, getting to here necessarily means the `RawVec` is overfull.
return Err(CapacityOverflow.into());


@@ -16,9 +16,7 @@ use core::borrow::{Borrow, BorrowMut};
#[cfg(not(no_global_oom_handling))]
use core::cmp::Ordering::{self, Less};
#[cfg(not(no_global_oom_handling))]
-use core::mem;
-#[cfg(not(no_global_oom_handling))]
-use core::mem::size_of;
+use core::mem::{self, SizedTypeProperties};
#[cfg(not(no_global_oom_handling))]
use core::ptr;
@@ -1018,7 +1016,7 @@ where
const MIN_RUN: usize = 10;
// Sorting has no meaningful behavior on zero-sized types.
-if size_of::<T>() == 0 {
+if T::IS_ZST {
return;
}


@@ -1,7 +1,7 @@
use crate::alloc::{Allocator, Global};
use core::fmt;
use core::iter::{FusedIterator, TrustedLen};
-use core::mem::{self, ManuallyDrop};
+use core::mem::{self, ManuallyDrop, SizedTypeProperties};
use core::ptr::{self, NonNull};
use core::slice::{self};
@@ -202,7 +202,7 @@ impl<T, A: Allocator> Drop for Drain<'_, T, A> {
let mut vec = self.vec;
-if mem::size_of::<T>() == 0 {
+if T::IS_ZST {
// ZSTs have no identity, so we don't need to move them around, we only need to drop the correct amount.
// this can be achieved by manipulating the Vec length instead of moving values out from `iter`.
unsafe {


@@ -135,7 +135,7 @@
//! vec.truncate(write_idx);
//! ```
use core::iter::{InPlaceIterable, SourceIter, TrustedRandomAccessNoCoerce};
-use core::mem::{self, ManuallyDrop};
+use core::mem::{self, ManuallyDrop, SizedTypeProperties};
use core::ptr::{self};
use super::{InPlaceDrop, SpecFromIter, SpecFromIterNested, Vec};
@@ -154,7 +154,7 @@ where
default fn from_iter(mut iterator: I) -> Self {
// See "Layout constraints" section in the module documentation. We rely on const
// optimization here since these conditions currently cannot be expressed as trait bounds
-if mem::size_of::<T>() == 0
+if T::IS_ZST
|| mem::size_of::<T>()
!= mem::size_of::<<<I as SourceIter>::Source as AsVecIntoIter>::Item>()
|| mem::align_of::<T>()


@@ -8,7 +8,7 @@ use core::iter::{
FusedIterator, InPlaceIterable, SourceIter, TrustedLen, TrustedRandomAccessNoCoerce,
};
use core::marker::PhantomData;
-use core::mem::{self, ManuallyDrop, MaybeUninit};
+use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties};
#[cfg(not(no_global_oom_handling))]
use core::ops::Deref;
use core::ptr::{self, NonNull};
@@ -149,7 +149,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
fn next(&mut self) -> Option<T> {
if self.ptr == self.end {
None
-} else if mem::size_of::<T>() == 0 {
+} else if T::IS_ZST {
// purposefully don't use 'ptr.offset' because for
// vectors with 0-size elements this would return the
// same pointer.
@@ -167,7 +167,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
-let exact = if mem::size_of::<T>() == 0 {
+let exact = if T::IS_ZST {
self.end.addr().wrapping_sub(self.ptr.addr())
} else {
unsafe { self.end.sub_ptr(self.ptr) }
@@ -179,7 +179,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
fn advance_by(&mut self, n: usize) -> Result<(), usize> {
let step_size = self.len().min(n);
let to_drop = ptr::slice_from_raw_parts_mut(self.ptr as *mut T, step_size);
-if mem::size_of::<T>() == 0 {
+if T::IS_ZST {
// SAFETY: due to unchecked casts of unsigned amounts to signed offsets the wraparound
// effectively results in unsigned pointers representing positions 0..usize::MAX,
// which is valid for ZSTs.
@@ -209,7 +209,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
let len = self.len();
-if mem::size_of::<T>() == 0 {
+if T::IS_ZST {
if len < N {
self.forget_remaining_elements();
// Safety: ZSTs can be conjured ex nihilo, only the amount has to be correct
@@ -253,7 +253,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
// that `T: Copy` so reading elements from the buffer doesn't invalidate
// them for `Drop`.
unsafe {
-if mem::size_of::<T>() == 0 { mem::zeroed() } else { ptr::read(self.ptr.add(i)) }
+if T::IS_ZST { mem::zeroed() } else { ptr::read(self.ptr.add(i)) }
}
}
}
@@ -264,7 +264,7 @@ impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
fn next_back(&mut self) -> Option<T> {
if self.end == self.ptr {
None
-} else if mem::size_of::<T>() == 0 {
+} else if T::IS_ZST {
// See above for why 'ptr.offset' isn't used
self.end = self.end.wrapping_byte_sub(1);
@@ -280,7 +280,7 @@ impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
#[inline]
fn advance_back_by(&mut self, n: usize) -> Result<(), usize> {
let step_size = self.len().min(n);
-if mem::size_of::<T>() == 0 {
+if T::IS_ZST {
// SAFETY: same as for advance_by()
self.end = self.end.wrapping_byte_sub(step_size);
} else {


@@ -64,7 +64,7 @@ use core::iter;
#[cfg(not(no_global_oom_handling))]
use core::iter::FromIterator;
use core::marker::PhantomData;
-use core::mem::{self, ManuallyDrop, MaybeUninit};
+use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties};
use core::ops::{self, Index, IndexMut, Range, RangeBounds};
use core::ptr::{self, NonNull};
use core::slice::{self, SliceIndex};
@@ -2347,7 +2347,7 @@ impl<T, A: Allocator, const N: usize> Vec<[T; N], A> {
#[unstable(feature = "slice_flatten", issue = "95629")]
pub fn into_flattened(self) -> Vec<T, A> {
let (ptr, len, cap, alloc) = self.into_raw_parts_with_alloc();
-let (new_len, new_cap) = if mem::size_of::<T>() == 0 {
+let (new_len, new_cap) = if T::IS_ZST {
(len.checked_mul(N).expect("vec len overflow"), usize::MAX)
} else {
// SAFETY:
@@ -2677,7 +2677,7 @@ impl<T, A: Allocator> IntoIterator for Vec<T, A> {
let mut me = ManuallyDrop::new(self);
let alloc = ManuallyDrop::new(ptr::read(me.allocator()));
let begin = me.as_mut_ptr();
-let end = if mem::size_of::<T>() == 0 {
+let end = if T::IS_ZST {
begin.wrapping_byte_add(me.len())
} else {
begin.add(me.len()) as *const T


@@ -1178,3 +1178,44 @@ pub const fn discriminant<T>(v: &T) -> Discriminant<T> {
pub const fn variant_count<T>() -> usize {
intrinsics::variant_count::<T>()
}
+/// Provides associated constants for various useful properties of types,
+/// to give them a canonical form in our code and make them easier to read.
+///
+/// This is here only to simplify all the ZST checks we need in the library.
+/// It's not on a stabilization track right now.
+#[doc(hidden)]
+#[unstable(feature = "sized_type_properties", issue = "none")]
+pub trait SizedTypeProperties: Sized {
+    /// `true` if this type requires no storage.
+    /// `false` if its [size](size_of) is greater than zero.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(sized_type_properties)]
+    /// use core::mem::SizedTypeProperties;
+    ///
+    /// fn do_something_with<T>() {
+    ///     if T::IS_ZST {
+    ///         // ... special approach ...
+    ///     } else {
+    ///         // ... the normal thing ...
+    ///     }
+    /// }
+    ///
+    /// struct MyUnit;
+    /// assert!(MyUnit::IS_ZST);
+    ///
+    /// // For negative checks, consider using UFCS to emphasize the negation
+    /// assert!(!<i32>::IS_ZST);
+    /// // As it can sometimes hide in the type otherwise
+    /// assert!(!String::IS_ZST);
+    /// ```
+    #[doc(hidden)]
+    #[unstable(feature = "sized_type_properties", issue = "none")]
+    const IS_ZST: bool = size_of::<Self>() == 0;
+}
+
+#[doc(hidden)]
+#[unstable(feature = "sized_type_properties", issue = "none")]
+impl<T> SizedTypeProperties for T {}
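
Because the blanket impl above covers every sized type, call sites only need the trait in scope, with no extra bounds. A small nightly-only usage sketch in the same vein as the doc example (`storage_kind` is an illustrative name, not part of the PR):

```rust
#![feature(sized_type_properties)]
use core::mem::SizedTypeProperties;

// No `T: SizedTypeProperties` bound is needed: the blanket impl applies.
fn storage_kind<T>() -> &'static str {
    if T::IS_ZST { "zero-sized" } else { "allocated" }
}

fn main() {
    assert_eq!(storage_kind::<()>(), "zero-sized");
    assert_eq!(storage_kind::<u64>(), "allocated");
    // UFCS keeps the negation visible right next to the type:
    assert!(!<u64>::IS_ZST);
}
```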


@@ -9,7 +9,7 @@ use crate::fmt;
use crate::intrinsics::{assume, exact_div, unchecked_sub};
use crate::iter::{FusedIterator, TrustedLen, TrustedRandomAccess, TrustedRandomAccessNoCoerce};
use crate::marker::{PhantomData, Send, Sized, Sync};
-use crate::mem;
+use crate::mem::{self, SizedTypeProperties};
use crate::num::NonZeroUsize;
use crate::ptr::NonNull;
@@ -91,11 +91,8 @@ impl<'a, T> Iter<'a, T> {
unsafe {
assume(!ptr.is_null());
-let end = if mem::size_of::<T>() == 0 {
-    ptr.wrapping_byte_add(slice.len())
-} else {
-    ptr.add(slice.len())
-};
+let end =
+    if T::IS_ZST { ptr.wrapping_byte_add(slice.len()) } else { ptr.add(slice.len()) };
Self { ptr: NonNull::new_unchecked(ptr as *mut T), end, _marker: PhantomData }
}
@@ -227,11 +224,8 @@ impl<'a, T> IterMut<'a, T> {
unsafe {
assume(!ptr.is_null());
-let end = if mem::size_of::<T>() == 0 {
-    ptr.wrapping_byte_add(slice.len())
-} else {
-    ptr.add(slice.len())
-};
+let end =
+    if T::IS_ZST { ptr.wrapping_byte_add(slice.len()) } else { ptr.add(slice.len()) };
Self { ptr: NonNull::new_unchecked(ptr), end, _marker: PhantomData }
}


@@ -100,7 +100,7 @@ macro_rules! iterator {
// Unsafe because the offset must not exceed `self.len()`.
#[inline(always)]
unsafe fn pre_dec_end(&mut self, offset: usize) -> * $raw_mut T {
-if mem::size_of::<T>() == 0 {
+if T::IS_ZST {
zst_shrink!(self, offset);
self.ptr.as_ptr()
} else {
@@ -140,7 +140,7 @@
// since we check if the iterator is empty first.
unsafe {
assume(!self.ptr.as_ptr().is_null());
-if mem::size_of::<T>() != 0 {
+if !<T>::IS_ZST {
assume(!self.end.is_null());
}
if is_empty!(self) {
@@ -166,7 +166,7 @@
fn nth(&mut self, n: usize) -> Option<$elem> {
if n >= len!(self) {
// This iterator is now empty.
-if mem::size_of::<T>() == 0 {
+if T::IS_ZST {
// We have to do it this way as `ptr` may never be 0, but `end`
// could be (due to wrapping).
self.end = self.ptr.as_ptr();
@@ -355,7 +355,7 @@
// empty first.
unsafe {
assume(!self.ptr.as_ptr().is_null());
-if mem::size_of::<T>() != 0 {
+if !<T>::IS_ZST {
assume(!self.end.is_null());
}
if is_empty!(self) {


@@ -9,7 +9,7 @@
use crate::cmp::Ordering::{self, Greater, Less};
use crate::intrinsics::{assert_unsafe_precondition, exact_div};
use crate::marker::Copy;
-use crate::mem;
+use crate::mem::{self, SizedTypeProperties};
use crate::num::NonZeroUsize;
use crate::ops::{Bound, FnMut, OneSidedRange, Range, RangeBounds};
use crate::option::Option;
@@ -3456,7 +3456,7 @@ impl<T> [T] {
#[must_use]
pub unsafe fn align_to<U>(&self) -> (&[T], &[U], &[T]) {
// Note that most of this function will be constant-evaluated,
-if mem::size_of::<U>() == 0 || mem::size_of::<T>() == 0 {
+if U::IS_ZST || T::IS_ZST {
// handle ZSTs specially, which is don't handle them at all.
return (self, &[], &[]);
}
@@ -3517,7 +3517,7 @@ impl<T> [T] {
#[must_use]
pub unsafe fn align_to_mut<U>(&mut self) -> (&mut [T], &mut [U], &mut [T]) {
// Note that most of this function will be constant-evaluated,
-if mem::size_of::<U>() == 0 || mem::size_of::<T>() == 0 {
+if U::IS_ZST || T::IS_ZST {
// handle ZSTs specially, which is don't handle them at all.
return (self, &mut [], &mut []);
}
@@ -4063,7 +4063,7 @@ impl<T, const N: usize> [[T; N]] {
/// ```
#[unstable(feature = "slice_flatten", issue = "95629")]
pub fn flatten(&self) -> &[T] {
-let len = if crate::mem::size_of::<T>() == 0 {
+let len = if T::IS_ZST {
self.len().checked_mul(N).expect("slice len overflow")
} else {
// SAFETY: `self.len() * N` cannot overflow because `self` is
@@ -4101,7 +4101,7 @@ impl<T, const N: usize> [[T; N]] {
/// ```
#[unstable(feature = "slice_flatten", issue = "95629")]
pub fn flatten_mut(&mut self) -> &mut [T] {
-let len = if crate::mem::size_of::<T>() == 0 {
+let len = if T::IS_ZST {
self.len().checked_mul(N).expect("slice len overflow")
} else {
// SAFETY: `self.len() * N` cannot overflow because `self` is


@@ -1,5 +1,5 @@
use crate::cmp;
-use crate::mem::{self, MaybeUninit};
+use crate::mem::{self, MaybeUninit, SizedTypeProperties};
use crate::ptr;
/// Rotates the range `[mid-left, mid+right)` such that the element at `mid` becomes the first
@@ -63,7 +63,7 @@ use crate::ptr;
/// when `left < right` the swapping happens from the left instead.
pub unsafe fn ptr_rotate<T>(mut left: usize, mut mid: *mut T, mut right: usize) {
type BufType = [usize; 32];
-if mem::size_of::<T>() == 0 {
+if T::IS_ZST {
return;
}
loop {


@@ -7,7 +7,7 @@
//! stable sorting implementation.
use crate::cmp;
-use crate::mem::{self, MaybeUninit};
+use crate::mem::{self, MaybeUninit, SizedTypeProperties};
use crate::ptr;
/// When dropped, copies from `src` into `dest`.
@@ -813,7 +813,7 @@
F: FnMut(&T, &T) -> bool,
{
// Sorting has no meaningful behavior on zero-sized types.
-if mem::size_of::<T>() == 0 {
+if T::IS_ZST {
return;
}
@@ -898,7 +898,7 @@
panic!("partition_at_index index {} greater than length of slice {}", index, v.len());
}
-if mem::size_of::<T>() == 0 {
+if T::IS_ZST {
// Sorting has no meaningful behavior on zero-sized types. Do nothing.
} else if index == v.len() - 1 {
// Find max element and place it in the last position of the array. We're free to use