add more niches to rawvec

The 8472 2023-01-12 22:54:27 +01:00
parent 7e452c123c
commit 502df1b7d4
4 changed files with 66 additions and 24 deletions

View File

@@ -25,6 +25,16 @@ enum AllocInit {
Zeroed,
}
#[repr(transparent)]
#[cfg_attr(target_pointer_width = "16", rustc_layout_scalar_valid_range_end(0x7fff))]
#[cfg_attr(target_pointer_width = "32", rustc_layout_scalar_valid_range_end(0x7fff_ffff))]
#[cfg_attr(target_pointer_width = "64", rustc_layout_scalar_valid_range_end(0x7fff_ffff_ffff_ffff))]
struct Cap(usize);
impl Cap {
const ZERO: Cap = unsafe { Cap(0) };
}
/// A low-level utility for more ergonomically allocating, reallocating, and deallocating
/// a buffer of memory on the heap without having to worry about all the corner cases
/// involved. This type is excellent for building your own data structures like Vec and VecDeque.
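
The `rustc_layout_scalar_valid_range_end` attributes above declare every value greater than `isize::MAX` invalid for `Cap`, handing layout a large niche in the capacity field on top of the existing non-null niche in `ptr`. A minimal stable-Rust sketch of the same mechanism, using `NonZeroUsize` (which carries exactly one niche) as an analogue rather than anything from this diff:

use std::mem::size_of;
use std::num::NonZeroUsize;

fn main() {
    // One forbidden value (0) gives layout exactly one niche to spend,
    // so a single Option layer is free...
    assert_eq!(size_of::<Option<NonZeroUsize>>(), size_of::<usize>());
    // ...but with that niche spent, a second Option layer needs extra space.
    assert!(size_of::<Option<Option<NonZeroUsize>>>() > size_of::<usize>());
    // Cap instead forbids the whole upper half of usize, so several
    // nested Options around RawVec can all be encoded in the capacity field.
}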
@@ -50,7 +60,7 @@ enum AllocInit {
#[allow(missing_debug_implementations)]
pub(crate) struct RawVec<T, A: Allocator = Global> {
ptr: Unique<T>,
cap: usize,
cap: Cap,
alloc: A,
}
@@ -119,7 +129,7 @@ impl<T, A: Allocator> RawVec<T, A> {
/// the returned `RawVec`.
pub const fn new_in(alloc: A) -> Self {
// `cap: 0` means "unallocated". zero-sized types are ignored.
Self { ptr: Unique::dangling(), cap: 0, alloc }
Self { ptr: Unique::dangling(), cap: Cap::ZERO, alloc }
}
/// Like `with_capacity`, but parameterized over the choice of
@@ -194,7 +204,7 @@ impl<T, A: Allocator> RawVec<T, A> {
// here should change to `ptr.len() / mem::size_of::<T>()`.
Self {
ptr: unsafe { Unique::new_unchecked(ptr.cast().as_ptr()) },
cap: capacity,
cap: unsafe { Cap(capacity) },
alloc,
}
}
@@ -207,12 +217,13 @@ impl<T, A: Allocator> RawVec<T, A> {
/// The `ptr` must be allocated (via the given allocator `alloc`), and with the given
/// `capacity`.
/// The `capacity` cannot exceed `isize::MAX` for sized types. (only a concern on 32-bit
/// systems). ZST vectors may have a capacity up to `usize::MAX`.
/// systems). For ZSTs capacity is ignored.
/// If the `ptr` and `capacity` come from a `RawVec` created via `alloc`, then this is
/// guaranteed.
#[inline]
pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, alloc: A) -> Self {
Self { ptr: unsafe { Unique::new_unchecked(ptr) }, cap: capacity, alloc }
let cap = if T::IS_ZST { Cap::ZERO } else { unsafe { Cap(capacity) } };
Self { ptr: unsafe { Unique::new_unchecked(ptr) }, cap, alloc }
}
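
The same contract surfaces in the public API as `Vec::from_raw_parts`. A small sketch of the documented round trip (pointer, length, and capacity must all originate from a `Vec` using the same, here global, allocator):

use std::mem::ManuallyDrop;

fn main() {
    // Disassemble a Vec without dropping it, then rebuild it from the
    // same pointer, length, and capacity, as the safety docs require.
    let v = ManuallyDrop::new(vec![1u32, 2, 3]);
    let (ptr, len, cap) = (v.as_ptr() as *mut u32, v.len(), v.capacity());
    let rebuilt = unsafe { Vec::from_raw_parts(ptr, len, cap) };
    assert_eq!(rebuilt, [1, 2, 3]);
}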
/// Gets a raw pointer to the start of the allocation. Note that this is
@@ -228,7 +239,7 @@ impl<T, A: Allocator> RawVec<T, A> {
/// This will always be `usize::MAX` if `T` is zero-sized.
#[inline(always)]
pub fn capacity(&self) -> usize {
if T::IS_ZST { usize::MAX } else { self.cap }
if T::IS_ZST { usize::MAX } else { self.cap.0 }
}
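
The ZST behaviour documented above is observable through `Vec` itself; a minimal sketch:

fn main() {
    // Zero-sized elements never allocate, so the reported capacity is
    // usize::MAX even though Cap::ZERO is what gets stored internally.
    let v: Vec<()> = Vec::new();
    assert_eq!(v.capacity(), usize::MAX);
}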
/// Returns a shared reference to the allocator backing this `RawVec`.
@@ -237,7 +248,7 @@ impl<T, A: Allocator> RawVec<T, A> {
}
fn current_memory(&self) -> Option<(NonNull<u8>, Layout)> {
if T::IS_ZST || self.cap == 0 {
if T::IS_ZST || self.cap.0 == 0 {
None
} else {
// We could use Layout::array here which ensures the absence of isize and usize overflows
@@ -247,7 +258,7 @@ impl<T, A: Allocator> RawVec<T, A> {
let _: () = const { assert!(mem::size_of::<T>() % mem::align_of::<T>() == 0) };
unsafe {
let align = mem::align_of::<T>();
let size = mem::size_of::<T>().unchecked_mul(self.cap);
let size = mem::size_of::<T>().unchecked_mul(self.cap.0);
let layout = Layout::from_size_align_unchecked(size, align);
Some((self.ptr.cast().into(), layout))
}
@@ -375,12 +386,15 @@ impl<T, A: Allocator> RawVec<T, A> {
additional > self.capacity().wrapping_sub(len)
}
fn set_ptr_and_cap(&mut self, ptr: NonNull<[u8]>, cap: usize) {
/// # Safety:
///
/// `cap` must not exceed `isize::MAX`.
unsafe fn set_ptr_and_cap(&mut self, ptr: NonNull<[u8]>, cap: usize) {
// Allocators currently return a `NonNull<[u8]>` whose length matches
// the size requested. If that ever changes, the capacity here should
// change to `ptr.len() / mem::size_of::<T>()`.
self.ptr = unsafe { Unique::new_unchecked(ptr.cast().as_ptr()) };
self.cap = cap;
self.cap = unsafe { Cap(cap) };
}
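
The new safety contract leans on the fact that any capacity which made it through `Layout::array` describes at most `isize::MAX` bytes, so for a non-zero-sized element type it is also at most `isize::MAX` elements. A quick illustration (the concrete element types are arbitrary):

use std::alloc::Layout;

fn main() {
    // Layout::array rejects total sizes above isize::MAX, so a capacity
    // that survived it is in range for Cap.
    assert!(Layout::array::<u16>(isize::MAX as usize / 2 + 1).is_err());
    assert!(Layout::array::<u8>(4096).is_ok());
}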
// This method is usually instantiated many times. So we want it to be as
@@ -405,14 +419,15 @@ impl<T, A: Allocator> RawVec<T, A> {
// This guarantees exponential growth. The doubling cannot overflow
// because `cap <= isize::MAX` and the type of `cap` is `usize`.
let cap = cmp::max(self.cap * 2, required_cap);
let cap = cmp::max(self.cap.0 * 2, required_cap);
let cap = cmp::max(Self::MIN_NON_ZERO_CAP, cap);
let new_layout = Layout::array::<T>(cap);
// `finish_grow` is non-generic over `T`.
let ptr = finish_grow(new_layout, self.current_memory(), &mut self.alloc)?;
self.set_ptr_and_cap(ptr, cap);
// SAFETY: finish_grow would have resulted in a capacity overflow if we tried to allocate more than isize::MAX items
unsafe { self.set_ptr_and_cap(ptr, cap) };
Ok(())
}
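
The doubling policy is easy to observe from the outside; a sketch (the exact capacity sequence is an implementation detail and may differ):

fn main() {
    let mut v: Vec<u8> = Vec::new();
    let mut caps = vec![v.capacity()];
    for i in 0..100u8 {
        v.push(i);
        // Record every capacity change as the vector grows.
        if v.capacity() != *caps.last().unwrap() {
            caps.push(v.capacity());
        }
    }
    // Each step at least doubles, starting from MIN_NON_ZERO_CAP,
    // e.g. [0, 8, 16, 32, 64, 128] for one-byte elements.
    println!("{caps:?}");
}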
@@ -431,7 +446,10 @@ impl<T, A: Allocator> RawVec<T, A> {
// `finish_grow` is non-generic over `T`.
let ptr = finish_grow(new_layout, self.current_memory(), &mut self.alloc)?;
self.set_ptr_and_cap(ptr, cap);
// SAFETY: finish_grow would have resulted in a capacity overflow if we tried to allocate more than isize::MAX items
unsafe {
self.set_ptr_and_cap(ptr, cap);
}
Ok(())
}
@@ -449,7 +467,7 @@ impl<T, A: Allocator> RawVec<T, A> {
if cap == 0 {
unsafe { self.alloc.deallocate(ptr, layout) };
self.ptr = Unique::dangling();
self.cap = 0;
self.cap = Cap::ZERO;
} else {
let ptr = unsafe {
// `Layout::array` cannot overflow here because it would have
@@ -460,7 +478,10 @@ impl<T, A: Allocator> RawVec<T, A> {
.shrink(ptr, layout, new_layout)
.map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })?
};
self.set_ptr_and_cap(ptr, cap);
// SAFETY: if the allocation is valid, then the capacity is too
unsafe {
self.set_ptr_and_cap(ptr, cap);
}
}
Ok(())
}
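
The `cap == 0` branch corresponds to shrinking an empty vector, which deallocates and returns to the dangling-pointer state; a minimal sketch through the public API:

fn main() {
    let mut v: Vec<u8> = Vec::with_capacity(64);
    // Shrinking an empty Vec frees the buffer; internally the pointer
    // goes back to dangling and the capacity to Cap::ZERO.
    v.shrink_to_fit();
    assert_eq!(v.capacity(), 0);
}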

View File

@@ -1,4 +1,5 @@
use super::*;
use core::mem::size_of;
use std::cell::Cell;
#[test]
@@ -161,3 +162,11 @@ fn zst_reserve_exact_panic() {
v.reserve_exact(101, usize::MAX - 100);
}
#[test]
fn niches() {
let baseline = size_of::<RawVec<u8>>();
assert_eq!(size_of::<Option<RawVec<u8>>>(), baseline);
assert_eq!(size_of::<Option<Option<RawVec<u8>>>>(), baseline);
assert_eq!(size_of::<Option<Option<Option<RawVec<u8>>>>>(), baseline);
}
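
The same niches are visible through the public containers built on `RawVec`; a sketch of what should hold after this change (the nested `Option` case is the new part, the single layer already worked via the pointer niche):

use std::mem::size_of;

fn main() {
    // The non-null pointer niche alone already made one Option layer free.
    assert_eq!(size_of::<Option<Vec<u8>>>(), size_of::<Vec<u8>>());
    assert_eq!(size_of::<Option<String>>(), size_of::<String>());
    // The extra capacity niches keep further layers the same size too.
    assert_eq!(size_of::<Option<Option<Vec<u8>>>>(), size_of::<Vec<u8>>());
}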

View File

@@ -9,9 +9,12 @@
// CHECK-LABEL: define {{(dso_local )?}}void @string_new
#[no_mangle]
pub fn string_new() -> String {
// CHECK: store ptr inttoptr
// CHECK-NOT: load i8
// CHECK: store i{{32|64}}
// CHECK-NEXT: getelementptr
// CHECK-NEXT: call void @llvm.memset
// CHECK-NEXT: store ptr
// CHECK-NEXT: getelementptr
// CHECK-NEXT: store i{{32|64}}
// CHECK-NEXT: ret void
String::new()
}
@@ -19,9 +22,12 @@ pub fn string_new() {
// CHECK-LABEL: define {{(dso_local )?}}void @empty_to_string
#[no_mangle]
pub fn empty_to_string() -> String {
// CHECK: store ptr inttoptr
// CHECK-NOT: load i8
// CHECK: store i{{32|64}}
// CHECK-NEXT: getelementptr
// CHECK-NEXT: call void @llvm.memset
// CHECK-NEXT: store ptr
// CHECK-NEXT: getelementptr
// CHECK-NEXT: store i{{32|64}}
// CHECK-NEXT: ret void
"".to_string()
}
@@ -32,9 +38,12 @@ pub fn empty_to_string() {
// CHECK-LABEL: @empty_vec
#[no_mangle]
pub fn empty_vec() -> Vec<u8> {
// CHECK: store ptr inttoptr
// CHECK: store i{{32|64}}
// CHECK-NOT: load i8
// CHECK-NEXT: getelementptr
// CHECK-NEXT: call void @llvm.memset
// CHECK-NEXT: store ptr
// CHECK-NEXT: getelementptr
// CHECK-NEXT: store i{{32|64}}
// CHECK-NEXT: ret void
vec![]
}
@@ -42,9 +51,12 @@ pub fn empty_vec() {
// CHECK-LABEL: @empty_vec_clone
#[no_mangle]
pub fn empty_vec_clone() -> Vec<u8> {
// CHECK: store ptr inttoptr
// CHECK: store i{{32|64}}
// CHECK-NOT: load i8
// CHECK-NEXT: getelementptr
// CHECK-NEXT: call void @llvm.memset
// CHECK-NEXT: store ptr
// CHECK-NEXT: getelementptr
// CHECK-NEXT: store i{{32|64}}
// CHECK-NEXT: ret void
vec![].clone()
}
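
At the source level these constructors never touch the allocator; a runnable sketch of the behaviour the updated FileCheck patterns pin down at the IR level:

fn main() {
    // String::new, "".to_string and vec![] allocate nothing; the codegen
    // checks above assert the constructors lower to a handful of stores
    // with no loads from or calls into the allocator.
    let s = String::new();
    let t = "".to_string();
    let v: Vec<u8> = vec![];
    assert_eq!((s.capacity(), t.capacity(), v.capacity()), (0, 0, 0));
}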

View File

@@ -1,3 +1,3 @@
thread 'main' panicked at library/alloc/src/raw_vec.rs:545:5:
thread 'main' panicked at library/alloc/src/raw_vec.rs:571:5:
capacity overflow
note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace