mirror of https://github.com/rust-lang/rust.git
synced 2025-02-03 02:23:20 +00:00

Overhaul of the AllocRef trait to match allocator-wg's latest consensus

This commit is contained in:
parent 2fbb07525e
commit 56cbf2f22a
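
In short: `AllocRef::alloc` gains an `AllocInit` parameter that subsumes `alloc_zeroed`, and `realloc` is split into a `grow`/`shrink` pair parameterized by `ReallocPlacement`. A rough sketch of the trait surface after this commit, condensed from the hunks below (nightly-only `allocator_api` at the time; the trait has changed again since, so this is illustrative only):

    // Sketch of the post-commit AllocRef surface (condensed from this diff).
    pub unsafe trait AllocRef {
        // One entry point for both uninitialized and zeroed allocation; on
        // success, returns the pointer plus the usable ("excess") size.
        fn alloc(&mut self, layout: Layout, init: AllocInit)
            -> Result<(NonNull<u8>, usize), AllocErr>;
        unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout);
        // `realloc` is split in two; `placement` says whether the block may move.
        unsafe fn grow(&mut self, ptr: NonNull<u8>, layout: Layout, new_size: usize,
                       placement: ReallocPlacement, init: AllocInit)
            -> Result<(NonNull<u8>, usize), AllocErr>;
        unsafe fn shrink(&mut self, ptr: NonNull<u8>, layout: Layout, new_size: usize,
                         placement: ReallocPlacement)
            -> Result<(NonNull<u8>, usize), AllocErr>;
    }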
@@ -2,7 +2,7 @@
 #![stable(feature = "alloc_module", since = "1.28.0")]
 
-use core::intrinsics::{min_align_of_val, size_of_val};
+use core::intrinsics::{self, min_align_of_val, size_of_val};
 use core::ptr::{NonNull, Unique};
 use core::usize;
@@ -165,11 +165,19 @@ pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
 #[unstable(feature = "allocator_api", issue = "32838")]
 unsafe impl AllocRef for Global {
     #[inline]
-    fn alloc(&mut self, layout: Layout) -> Result<(NonNull<u8>, usize), AllocErr> {
-        if layout.size() == 0 {
+    fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<(NonNull<u8>, usize), AllocErr> {
+        let new_size = layout.size();
+        if new_size == 0 {
             Ok((layout.dangling(), 0))
         } else {
-            unsafe { NonNull::new(alloc(layout)).ok_or(AllocErr).map(|p| (p, layout.size())) }
+            unsafe {
+                let raw_ptr = match init {
+                    AllocInit::Uninitialized => alloc(layout),
+                    AllocInit::Zeroed => alloc_zeroed(layout),
+                };
+                let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?;
+                Ok((ptr, new_size))
+            }
         }
     }
 
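
For callers, the old `alloc`/`alloc_zeroed` pair collapses into a single method whose second argument selects the initialization policy. A usage sketch mirroring the call sites updated later in this diff:

    // Sketch: the new calling convention (see the benches and Rc/Arc hunks below).
    let layout = Layout::from_size_align(1024, 1).unwrap();
    let (ptr, excess) = Global
        .alloc(layout, AllocInit::Zeroed) // was: Global.alloc_zeroed(layout)
        .unwrap_or_else(|_| handle_alloc_error(layout));
    // `excess` is the usable size actually granted, which may exceed 1024.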
@@ -181,33 +189,77 @@ unsafe impl AllocRef for Global {
     }
 
     #[inline]
-    unsafe fn realloc(
+    unsafe fn grow(
         &mut self,
         ptr: NonNull<u8>,
         layout: Layout,
         new_size: usize,
+        placement: ReallocPlacement,
+        init: AllocInit,
     ) -> Result<(NonNull<u8>, usize), AllocErr> {
-        match (layout.size(), new_size) {
-            (0, 0) => Ok((layout.dangling(), 0)),
-            (0, _) => self.alloc(Layout::from_size_align_unchecked(new_size, layout.align())),
-            (_, 0) => {
-                self.dealloc(ptr, layout);
-                Ok((layout.dangling(), 0))
+        let old_size = layout.size();
+        debug_assert!(
+            new_size >= old_size,
+            "`new_size` must be greater than or equal to `layout.size()`"
+        );
+
+        if old_size == new_size {
+            return Ok((ptr, new_size));
+        }
+
+        match placement {
+            ReallocPlacement::MayMove => {
+                if old_size == 0 {
+                    self.alloc(Layout::from_size_align_unchecked(new_size, layout.align()), init)
+                } else {
+                    // `realloc` probably checks for `new_size > old_size` or something similar.
+                    // `new_size` must be greater than or equal to `old_size` due to the safety constraint,
+                    // and `new_size` == `old_size` was caught before
+                    intrinsics::assume(new_size > old_size);
+                    let ptr =
+                        NonNull::new(realloc(ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)?;
+                    let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
+                    init.initialize_offset(ptr, new_layout, old_size);
+                    Ok((ptr, new_size))
+                }
             }
-            (_, _) => NonNull::new(realloc(ptr.as_ptr(), layout, new_size))
-                .ok_or(AllocErr)
-                .map(|p| (p, new_size)),
+            ReallocPlacement::InPlace => Err(AllocErr),
         }
     }
 
     #[inline]
-    fn alloc_zeroed(&mut self, layout: Layout) -> Result<(NonNull<u8>, usize), AllocErr> {
-        if layout.size() == 0 {
-            Ok((layout.dangling(), 0))
-        } else {
-            unsafe {
-                NonNull::new(alloc_zeroed(layout)).ok_or(AllocErr).map(|p| (p, layout.size()))
-            }
-        }
-    }
+    unsafe fn shrink(
+        &mut self,
+        ptr: NonNull<u8>,
+        layout: Layout,
+        new_size: usize,
+        placement: ReallocPlacement,
+    ) -> Result<(NonNull<u8>, usize), AllocErr> {
+        let old_size = layout.size();
+        debug_assert!(
+            new_size <= old_size,
+            "`new_size` must be smaller than or equal to `layout.size()`"
+        );
+
+        if old_size == new_size {
+            return Ok((ptr, new_size));
+        }
+
+        match placement {
+            ReallocPlacement::MayMove => {
+                let ptr = if new_size == 0 {
+                    self.dealloc(ptr, layout);
+                    layout.dangling()
+                } else {
+                    // `realloc` probably checks for `new_size > old_size` or something similar.
+                    // `new_size` must be smaller than or equal to `old_size` due to the safety constraint,
+                    // and `new_size` == `old_size` was caught before
+                    intrinsics::assume(new_size < old_size);
+                    NonNull::new(realloc(ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)?
+                };
+                Ok((ptr, new_size))
+            }
+            ReallocPlacement::InPlace => Err(AllocErr),
        }
    }
 }
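
Note the contract encoded above: `grow` may only be called with `new_size >= layout.size()` (hence the `debug_assert!` and the `intrinsics::assume`), and `ReallocPlacement::InPlace` simply returns `Err(AllocErr)` because the system allocator cannot promise to resize without moving. A caller-side sketch, where `ptr`, `old_layout`, and `new_size` are hypothetical values satisfying that contract:

    // Sketch: growing an existing block under the new contract.
    unsafe {
        let (new_ptr, usable) = Global
            .grow(ptr, old_layout, new_size, ReallocPlacement::MayMove, AllocInit::Uninitialized)
            .unwrap_or_else(|_| {
                handle_alloc_error(Layout::from_size_align_unchecked(new_size, old_layout.align()))
            });
    }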
@@ -218,14 +270,10 @@ unsafe impl AllocRef for Global {
 #[lang = "exchange_malloc"]
 #[inline]
 unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
-    if size == 0 {
-        align as *mut u8
-    } else {
-        let layout = Layout::from_size_align_unchecked(size, align);
-        match Global.alloc(layout) {
-            Ok((ptr, _)) => ptr.as_ptr(),
-            Err(_) => handle_alloc_error(layout),
-        }
+    let layout = Layout::from_size_align_unchecked(size, align);
+    match Global.alloc(layout, AllocInit::Uninitialized) {
+        Ok((ptr, _)) => ptr.as_ptr(),
+        Err(_) => handle_alloc_error(layout),
     }
 }
@@ -239,11 +287,8 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
 pub(crate) unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
     let size = size_of_val(ptr.as_ref());
     let align = min_align_of_val(ptr.as_ref());
-    // We do not allocate for Box<T> when T is ZST, so deallocation is also not necessary.
-    if size != 0 {
-        let layout = Layout::from_size_align_unchecked(size, align);
-        Global.dealloc(ptr.cast().into(), layout);
-    }
+    let layout = Layout::from_size_align_unchecked(size, align);
+    Global.dealloc(ptr.cast().into(), layout)
 }
 
 /// Abort on memory allocation error or failure.
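
Both hunks above lose their zero-size branches because the reworked `Global` methods own that logic: `alloc` returns `(layout.dangling(), 0)` for zero-sized layouts, and pairing that with an unconditional `dealloc` (as `box_free` now does) is only sound if `dealloc` is a no-op for size 0, which the suppressed `libcore/alloc.rs` changes presumably guarantee. A sketch of the assumed invariant:

    // Sketch of the assumed invariant for zero-sized layouts.
    let layout = Layout::new::<()>();
    let (ptr, size) = Global.alloc(layout, AllocInit::Uninitialized).unwrap();
    assert_eq!(size, 0);
    assert_eq!(ptr, layout.dangling()); // just an aligned dangling pointer
    unsafe { Global.dealloc(ptr, layout) } // must be a no-op for this to be sound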
@@ -8,8 +8,9 @@ use test::Bencher;
 fn allocate_zeroed() {
     unsafe {
         let layout = Layout::from_size_align(1024, 1).unwrap();
-        let (ptr, _) =
-            Global.alloc_zeroed(layout.clone()).unwrap_or_else(|_| handle_alloc_error(layout));
+        let (ptr, _) = Global
+            .alloc(layout.clone(), AllocInit::Zeroed)
+            .unwrap_or_else(|_| handle_alloc_error(layout));
 
         let mut i = ptr.cast::<u8>().as_ptr();
         let end = i.add(layout.size());
@@ -146,7 +146,7 @@ use core::ptr::{self, NonNull, Unique};
 use core::slice;
 use core::task::{Context, Poll};
 
-use crate::alloc::{self, AllocRef, Global};
+use crate::alloc::{self, AllocInit, AllocRef, Global};
 use crate::raw_vec::RawVec;
 use crate::str::from_boxed_utf8_unchecked;
 use crate::vec::Vec;
@@ -196,14 +196,12 @@ impl<T> Box<T> {
     #[unstable(feature = "new_uninit", issue = "63291")]
     pub fn new_uninit() -> Box<mem::MaybeUninit<T>> {
         let layout = alloc::Layout::new::<mem::MaybeUninit<T>>();
-        unsafe {
-            let ptr = if layout.size() == 0 {
-                NonNull::dangling()
-            } else {
-                Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).0.cast()
-            };
-            Box::from_raw(ptr.as_ptr())
-        }
+        let ptr = Global
+            .alloc(layout, AllocInit::Uninitialized)
+            .unwrap_or_else(|_| alloc::handle_alloc_error(layout))
+            .0
+            .cast();
+        unsafe { Box::from_raw(ptr.as_ptr()) }
     }
 
     /// Constructs a new `Box` with uninitialized contents, with the memory
@@ -226,11 +224,13 @@ impl<T> Box<T> {
     /// [zeroed]: ../../std/mem/union.MaybeUninit.html#method.zeroed
     #[unstable(feature = "new_uninit", issue = "63291")]
     pub fn new_zeroed() -> Box<mem::MaybeUninit<T>> {
-        unsafe {
-            let mut uninit = Self::new_uninit();
-            ptr::write_bytes::<T>(uninit.as_mut_ptr(), 0, 1);
-            uninit
-        }
+        let layout = alloc::Layout::new::<mem::MaybeUninit<T>>();
+        let ptr = Global
+            .alloc(layout, AllocInit::Zeroed)
+            .unwrap_or_else(|_| alloc::handle_alloc_error(layout))
+            .0
+            .cast();
+        unsafe { Box::from_raw(ptr.as_ptr()) }
     }
 
     /// Constructs a new `Pin<Box<T>>`. If `T` does not implement `Unpin`, then
@@ -266,14 +266,12 @@ impl<T> Box<[T]> {
     #[unstable(feature = "new_uninit", issue = "63291")]
     pub fn new_uninit_slice(len: usize) -> Box<[mem::MaybeUninit<T>]> {
         let layout = alloc::Layout::array::<mem::MaybeUninit<T>>(len).unwrap();
-        unsafe {
-            let ptr = if layout.size() == 0 {
-                NonNull::dangling()
-            } else {
-                Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).0.cast()
-            };
-            Box::from_raw(slice::from_raw_parts_mut(ptr.as_ptr(), len))
-        }
+        let ptr = Global
+            .alloc(layout, AllocInit::Uninitialized)
+            .unwrap_or_else(|_| alloc::handle_alloc_error(layout))
+            .0
+            .cast();
+        unsafe { Box::from_raw(slice::from_raw_parts_mut(ptr.as_ptr(), len)) }
     }
 }
 
@@ -778,7 +776,7 @@ impl<T: Copy> From<&[T]> for Box<[T]> {
         let buf = RawVec::with_capacity(len);
         unsafe {
             ptr::copy_nonoverlapping(slice.as_ptr(), buf.ptr(), len);
-            buf.into_box()
+            buf.into_box().assume_init()
         }
     }
 }
@@ -2,12 +2,17 @@
 #![doc(hidden)]
 
 use core::cmp;
-use core::mem;
+use core::mem::{self, MaybeUninit};
 use core::ops::Drop;
-use core::ptr::{self, NonNull, Unique};
+use core::ptr::Unique;
 use core::slice;
 
-use crate::alloc::{handle_alloc_error, AllocErr, AllocRef, Global, Layout};
+use crate::alloc::{
+    handle_alloc_error, AllocErr,
+    AllocInit::{self, *},
+    AllocRef, Global, Layout,
+    ReallocPlacement::{self, *},
+};
 use crate::boxed::Box;
 use crate::collections::TryReserveError::{self, *};
 
@@ -19,28 +24,22 @@ mod tests;
 /// involved. This type is excellent for building your own data structures like Vec and VecDeque.
 /// In particular:
 ///
 /// * Produces `Unique::empty()` on zero-sized types.
 /// * Produces `Unique::empty()` on zero-length allocations.
 /// * Catches all overflows in capacity computations (promotes them to "capacity overflow" panics).
 /// * Guards against 32-bit systems allocating more than isize::MAX bytes.
 /// * Guards against overflowing your length.
-/// * Aborts on OOM or calls `handle_alloc_error` as applicable.
 /// * Avoids freeing `Unique::empty()`.
+/// * Calls `handle_alloc_error` for fallible allocations.
 /// * Contains a `ptr::Unique` and thus endows the user with all related benefits.
+/// * Uses the excess returned from the allocator to use the largest available capacity.
 ///
 /// This type does not in anyway inspect the memory that it manages. When dropped it *will*
 /// free its memory, but it *won't* try to drop its contents. It is up to the user of `RawVec`
 /// to handle the actual things *stored* inside of a `RawVec`.
 ///
-/// Note that a `RawVec` always forces its capacity to be `usize::MAX` for zero-sized types.
-/// This enables you to use capacity-growing logic catch the overflows in your length
-/// that might occur with zero-sized types.
-///
-/// The above means that you need to be careful when round-tripping this type with a
+/// Note that the excess of a zero-sized types is always infinite, so `capacity()` always returns
+/// `usize::MAX`. This means that you need to be careful when round-tripping this type with a
 /// `Box<[T]>`, since `capacity()` won't yield the length. However, `with_capacity`,
-/// `shrink_to_fit`, and `from_box` will actually set `RawVec`'s private capacity
-/// field. This allows zero-sized types to not be special-cased by consumers of
-/// this type.
+/// `shrink_to_fit`, and `from_box` will actually set `RawVec`'s private capacity field. This allows
+/// zero-sized types to not be special-cased by consumers of this type.
 #[allow(missing_debug_implementations)]
 pub struct RawVec<T, A: AllocRef = Global> {
     ptr: Unique<T>,
@@ -52,49 +51,30 @@ impl<T, A: AllocRef> RawVec<T, A> {
     /// Like `new`, but parameterized over the choice of allocator for
     /// the returned `RawVec`.
     pub const fn new_in(a: A) -> Self {
-        let cap = if mem::size_of::<T>() == 0 { core::usize::MAX } else { 0 };
-
-        // `Unique::empty()` doubles as "unallocated" and "zero-sized allocation".
-        RawVec { ptr: Unique::empty(), cap, a }
+        // `cap: 0` means "unallocated". zero-sized allocations are handled by `AllocRef`
+        Self { ptr: Unique::empty(), cap: 0, a }
     }
 
     /// Like `with_capacity`, but parameterized over the choice of
     /// allocator for the returned `RawVec`.
     #[inline]
     pub fn with_capacity_in(capacity: usize, a: A) -> Self {
-        RawVec::allocate_in(capacity, false, a)
+        Self::allocate_in(capacity, Uninitialized, a)
     }
 
     /// Like `with_capacity_zeroed`, but parameterized over the choice
     /// of allocator for the returned `RawVec`.
     #[inline]
     pub fn with_capacity_zeroed_in(capacity: usize, a: A) -> Self {
-        RawVec::allocate_in(capacity, true, a)
+        Self::allocate_in(capacity, Zeroed, a)
     }
 
-    fn allocate_in(mut capacity: usize, zeroed: bool, mut a: A) -> Self {
-        let elem_size = mem::size_of::<T>();
-
-        let alloc_size = capacity.checked_mul(elem_size).unwrap_or_else(|| capacity_overflow());
-        alloc_guard(alloc_size).unwrap_or_else(|_| capacity_overflow());
-
-        // Handles ZSTs and `capacity == 0` alike.
-        let ptr = if alloc_size == 0 {
-            NonNull::<T>::dangling()
-        } else {
-            let align = mem::align_of::<T>();
-            let layout = Layout::from_size_align(alloc_size, align).unwrap();
-            let result = if zeroed { a.alloc_zeroed(layout) } else { a.alloc(layout) };
-            match result {
-                Ok((ptr, size)) => {
-                    capacity = size / elem_size;
-                    ptr.cast()
-                }
-                Err(_) => handle_alloc_error(layout),
-            }
-        };
-
-        RawVec { ptr: ptr.into(), cap: capacity, a }
+    fn allocate_in(capacity: usize, init: AllocInit, mut a: A) -> Self {
+        let layout = Layout::array::<T>(capacity).unwrap_or_else(|_| capacity_overflow());
+        alloc_guard(layout.size()).unwrap_or_else(|_| capacity_overflow());
+
+        let (ptr, excess) = a.alloc(layout, init).unwrap_or_else(|_| handle_alloc_error(layout));
+        Self { ptr: ptr.cast().into(), cap: Self::capacity_from_bytes(excess), a }
     }
 }
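
`allocate_in` now trusts the allocator's returned usable size (`excess`) instead of the requested size, converting bytes back to an element count via `capacity_from_bytes` (defined later in this file). A worked example with assumed numbers:

    // Assumed numbers, for illustration only: Layout::array::<u32>(10)
    // requests 40 bytes; suppose the allocator grants (ptr, excess = 48).
    // The stored capacity then becomes 48 / size_of::<u32>() = 12 elements,
    // two more than the 10 that were asked for.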
@@ -138,13 +118,13 @@ impl<T> RawVec<T, Global> {
     /// Aborts on OOM.
     #[inline]
     pub fn with_capacity(capacity: usize) -> Self {
-        RawVec::allocate_in(capacity, false, Global)
+        Self::with_capacity_in(capacity, Global)
     }
 
     /// Like `with_capacity`, but guarantees the buffer is zeroed.
     #[inline]
     pub fn with_capacity_zeroed(capacity: usize) -> Self {
-        RawVec::allocate_in(capacity, true, Global)
+        Self::with_capacity_zeroed_in(capacity, Global)
     }
 }
@@ -156,8 +136,9 @@ impl<T, A: AllocRef> RawVec<T, A> {
     /// The `ptr` must be allocated (via the given allocator `a`), and with the given `capacity`.
     /// The `capacity` cannot exceed `isize::MAX` (only a concern on 32-bit systems).
     /// If the `ptr` and `capacity` come from a `RawVec` created via `a`, then this is guaranteed.
+    #[inline]
     pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, a: A) -> Self {
-        RawVec { ptr: Unique::new_unchecked(ptr), cap: capacity, a }
+        Self { ptr: Unique::new_unchecked(ptr), cap: capacity, a }
     }
 }
@@ -169,8 +150,9 @@ impl<T> RawVec<T, Global> {
     /// The `ptr` must be allocated (on the system heap), and with the given `capacity`.
     /// The `capacity` cannot exceed `isize::MAX` (only a concern on 32-bit systems).
     /// If the `ptr` and `capacity` come from a `RawVec`, then this is guaranteed.
+    #[inline]
     pub unsafe fn from_raw_parts(ptr: *mut T, capacity: usize) -> Self {
-        RawVec { ptr: Unique::new_unchecked(ptr), cap: capacity, a: Global }
+        Self::from_raw_parts_in(ptr, capacity, Global)
     }
 
     /// Converts a `Box<[T]>` into a `RawVec<T>`.
@@ -196,7 +178,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
     /// This will always be `usize::MAX` if `T` is zero-sized.
     #[inline(always)]
     pub fn capacity(&self) -> usize {
-        if mem::size_of::<T>() == 0 { !0 } else { self.cap }
+        if mem::size_of::<T>() == 0 { usize::MAX } else { self.cap }
     }
 
     /// Returns a shared reference to the allocator backing this `RawVec`.
@@ -274,50 +256,10 @@ impl<T, A: AllocRef> RawVec<T, A> {
     #[inline(never)]
     #[cold]
     pub fn double(&mut self) {
-        unsafe {
-            let elem_size = mem::size_of::<T>();
-
-            // Since we set the capacity to `usize::MAX` when `elem_size` is
-            // 0, getting to here necessarily means the `RawVec` is overfull.
-            assert!(elem_size != 0, "capacity overflow");
-
-            let (ptr, new_cap) = match self.current_layout() {
-                Some(cur) => {
-                    // Since we guarantee that we never allocate more than
-                    // `isize::MAX` bytes, `elem_size * self.cap <= isize::MAX` as
-                    // a precondition, so this can't overflow. Additionally the
-                    // alignment will never be too large as to "not be
-                    // satisfiable", so `Layout::from_size_align` will always
-                    // return `Some`.
-                    //
-                    // TL;DR, we bypass runtime checks due to dynamic assertions
-                    // in this module, allowing us to use
-                    // `from_size_align_unchecked`.
-                    let new_cap = 2 * self.cap;
-                    let new_size = new_cap * elem_size;
-                    alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
-                    let ptr_res = self.a.realloc(NonNull::from(self.ptr).cast(), cur, new_size);
-                    match ptr_res {
-                        Ok((ptr, new_size)) => (ptr, new_size / elem_size),
-                        Err(_) => handle_alloc_error(Layout::from_size_align_unchecked(
-                            new_size,
-                            cur.align(),
-                        )),
-                    }
-                }
-                None => {
-                    // Skip to 4 because tiny `Vec`'s are dumb; but not if that
-                    // would cause overflow.
-                    let new_cap = if elem_size > (!0) / 8 { 1 } else { 4 };
-                    let layout = Layout::array::<T>(new_cap).unwrap();
-                    match self.a.alloc(layout) {
-                        Ok((ptr, new_size)) => (ptr, new_size / elem_size),
-                        Err(_) => handle_alloc_error(layout),
-                    }
-                }
-            };
-            self.ptr = ptr.cast().into();
-            self.cap = new_cap;
+        match self.grow(Double, MayMove, Uninitialized) {
+            Err(CapacityOverflow) => capacity_overflow(),
+            Err(AllocError { layout, .. }) => handle_alloc_error(layout),
+            Ok(()) => { /* yay */ }
         }
     }
 
@@ -336,99 +278,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
     #[inline(never)]
     #[cold]
     pub fn double_in_place(&mut self) -> bool {
-        unsafe {
-            let elem_size = mem::size_of::<T>();
-            let old_layout = match self.current_layout() {
-                Some(layout) => layout,
-                None => return false, // nothing to double
-            };
-
-            // Since we set the capacity to `usize::MAX` when `elem_size` is
-            // 0, getting to here necessarily means the `RawVec` is overfull.
-            assert!(elem_size != 0, "capacity overflow");
-
-            // Since we guarantee that we never allocate more than `isize::MAX`
-            // bytes, `elem_size * self.cap <= isize::MAX` as a precondition, so
-            // this can't overflow.
-            //
-            // Similarly to with `double` above, we can go straight to
-            // `Layout::from_size_align_unchecked` as we know this won't
-            // overflow and the alignment is sufficiently small.
-            let new_cap = 2 * self.cap;
-            let new_size = new_cap * elem_size;
-            alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
-            match self.a.grow_in_place(NonNull::from(self.ptr).cast(), old_layout, new_size) {
-                Ok(_) => {
-                    // We can't directly divide `size`.
-                    self.cap = new_cap;
-                    true
-                }
-                Err(_) => false,
-            }
-        }
-    }
-
-    /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting.
-    pub fn try_reserve_exact(
-        &mut self,
-        used_capacity: usize,
-        needed_extra_capacity: usize,
-    ) -> Result<(), TryReserveError> {
-        self.reserve_internal(used_capacity, needed_extra_capacity, Fallible, Exact)
-    }
-
-    /// Ensures that the buffer contains at least enough space to hold
-    /// `used_capacity + needed_extra_capacity` elements. If it doesn't already,
-    /// will reallocate the minimum possible amount of memory necessary.
-    /// Generally this will be exactly the amount of memory necessary,
-    /// but in principle the allocator is free to give back more than
-    /// we asked for.
-    ///
-    /// If `used_capacity` exceeds `self.capacity()`, this may fail to actually allocate
-    /// the requested space. This is not really unsafe, but the unsafe
-    /// code *you* write that relies on the behavior of this function may break.
-    ///
-    /// # Panics
-    ///
-    /// * Panics if the requested capacity exceeds `usize::MAX` bytes.
-    /// * Panics on 32-bit platforms if the requested capacity exceeds
-    ///   `isize::MAX` bytes.
-    ///
-    /// # Aborts
-    ///
-    /// Aborts on OOM.
-    pub fn reserve_exact(&mut self, used_capacity: usize, needed_extra_capacity: usize) {
-        match self.reserve_internal(used_capacity, needed_extra_capacity, Infallible, Exact) {
-            Err(CapacityOverflow) => capacity_overflow(),
-            Err(AllocError { .. }) => unreachable!(),
-            Ok(()) => { /* yay */ }
-        }
-    }
-
-    /// Calculates the buffer's new size given that it'll hold `used_capacity +
-    /// needed_extra_capacity` elements. This logic is used in amortized reserve methods.
-    /// Returns `(new_capacity, new_alloc_size)`.
-    fn amortized_new_size(
-        &self,
-        used_capacity: usize,
-        needed_extra_capacity: usize,
-    ) -> Result<usize, TryReserveError> {
-        // Nothing we can really do about these checks, sadly.
-        let required_cap =
-            used_capacity.checked_add(needed_extra_capacity).ok_or(CapacityOverflow)?;
-        // Cannot overflow, because `cap <= isize::MAX`, and type of `cap` is `usize`.
-        let double_cap = self.cap * 2;
-        // `double_cap` guarantees exponential growth.
-        Ok(cmp::max(double_cap, required_cap))
-    }
-
-    /// The same as `reserve`, but returns on errors instead of panicking or aborting.
-    pub fn try_reserve(
-        &mut self,
-        used_capacity: usize,
-        needed_extra_capacity: usize,
-    ) -> Result<(), TryReserveError> {
-        self.reserve_internal(used_capacity, needed_extra_capacity, Fallible, Amortized)
+        self.grow(Double, InPlace, Uninitialized).is_ok()
     }
 
     /// Ensures that the buffer contains at least enough space to hold
@@ -484,12 +334,26 @@ impl<T, A: AllocRef> RawVec<T, A> {
     /// # }
     /// ```
     pub fn reserve(&mut self, used_capacity: usize, needed_extra_capacity: usize) {
-        match self.reserve_internal(used_capacity, needed_extra_capacity, Infallible, Amortized) {
+        match self.try_reserve(used_capacity, needed_extra_capacity) {
             Err(CapacityOverflow) => capacity_overflow(),
-            Err(AllocError { .. }) => unreachable!(),
+            Err(AllocError { layout, .. }) => handle_alloc_error(layout),
             Ok(()) => { /* yay */ }
         }
     }
 
+    /// The same as `reserve`, but returns on errors instead of panicking or aborting.
+    pub fn try_reserve(
+        &mut self,
+        used_capacity: usize,
+        needed_extra_capacity: usize,
+    ) -> Result<(), TryReserveError> {
+        if self.needs_to_grow(used_capacity, needed_extra_capacity) {
+            self.grow(Amortized { used_capacity, needed_extra_capacity }, MayMove, Uninitialized)
+        } else {
+            Ok(())
+        }
+    }
+
     /// Attempts to ensure that the buffer contains at least enough space to hold
     /// `used_capacity + needed_extra_capacity` elements. If it doesn't already have
     /// enough capacity, will reallocate in place enough space plus comfortable slack
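
The reserve family now shares one shape: an inlinable `needs_to_grow` fast path, then a call into the cold `grow` with a `Strategy`. For `Amortized`, the target capacity is `max(2 * cap, used_capacity + needed_extra_capacity)`, keeping growth exponential. A worked example with assumed numbers:

    // cap = 4, used_capacity = 4, needed_extra_capacity = 1 (assumed numbers):
    // required_cap = 4 + 1 = 5
    // double_cap   = 2 * 4 = 8
    // new capacity = max(8, 5) = 8  // the doubling term wins, keeping pushes O(1) amortized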
@@ -508,45 +372,54 @@ impl<T, A: AllocRef> RawVec<T, A> {
     /// * Panics on 32-bit platforms if the requested capacity exceeds
     ///   `isize::MAX` bytes.
     pub fn reserve_in_place(&mut self, used_capacity: usize, needed_extra_capacity: usize) -> bool {
-        unsafe {
-            // NOTE: we don't early branch on ZSTs here because we want this
-            // to actually catch "asking for more than usize::MAX" in that case.
-            // If we make it past the first branch then we are guaranteed to
-            // panic.
-
-            // Don't actually need any more capacity. If the current `cap` is 0, we can't
-            // reallocate in place.
-            // Wrapping in case they give a bad `used_capacity`
-            let old_layout = match self.current_layout() {
-                Some(layout) => layout,
-                None => return false,
-            };
-            if self.capacity().wrapping_sub(used_capacity) >= needed_extra_capacity {
-                return false;
-            }
-
-            let new_cap = self
-                .amortized_new_size(used_capacity, needed_extra_capacity)
-                .unwrap_or_else(|_| capacity_overflow());
-
-            // Here, `cap < used_capacity + needed_extra_capacity <= new_cap`
-            // (regardless of whether `self.cap - used_capacity` wrapped).
-            // Therefore, we can safely call `grow_in_place`.
-
-            let new_layout = Layout::new::<T>().repeat(new_cap).unwrap().0;
-            // FIXME: may crash and burn on over-reserve
-            alloc_guard(new_layout.size()).unwrap_or_else(|_| capacity_overflow());
-            match self.a.grow_in_place(
-                NonNull::from(self.ptr).cast(),
-                old_layout,
-                new_layout.size(),
-            ) {
-                Ok(_) => {
-                    self.cap = new_cap;
-                    true
-                }
-                Err(_) => false,
-            }
-        }
+        // This is more readable than putting this in one line:
+        // `!self.needs_to_grow(...) || self.grow(...).is_ok()`
+        if self.needs_to_grow(used_capacity, needed_extra_capacity) {
+            self.grow(Amortized { used_capacity, needed_extra_capacity }, InPlace, Uninitialized)
+                .is_ok()
+        } else {
+            true
+        }
+    }
+
+    /// Ensures that the buffer contains at least enough space to hold
+    /// `used_capacity + needed_extra_capacity` elements. If it doesn't already,
+    /// will reallocate the minimum possible amount of memory necessary.
+    /// Generally this will be exactly the amount of memory necessary,
+    /// but in principle the allocator is free to give back more than
+    /// we asked for.
+    ///
+    /// If `used_capacity` exceeds `self.capacity()`, this may fail to actually allocate
+    /// the requested space. This is not really unsafe, but the unsafe
+    /// code *you* write that relies on the behavior of this function may break.
+    ///
+    /// # Panics
+    ///
+    /// * Panics if the requested capacity exceeds `usize::MAX` bytes.
+    /// * Panics on 32-bit platforms if the requested capacity exceeds
+    ///   `isize::MAX` bytes.
+    ///
+    /// # Aborts
+    ///
+    /// Aborts on OOM.
+    pub fn reserve_exact(&mut self, used_capacity: usize, needed_extra_capacity: usize) {
+        match self.try_reserve_exact(used_capacity, needed_extra_capacity) {
+            Err(CapacityOverflow) => capacity_overflow(),
+            Err(AllocError { layout, .. }) => handle_alloc_error(layout),
+            Ok(()) => { /* yay */ }
+        }
+    }
+
+    /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting.
+    pub fn try_reserve_exact(
+        &mut self,
+        used_capacity: usize,
+        needed_extra_capacity: usize,
+    ) -> Result<(), TryReserveError> {
+        if self.needs_to_grow(used_capacity, needed_extra_capacity) {
+            self.grow(Exact { used_capacity, needed_extra_capacity }, MayMove, Uninitialized)
+        } else {
+            Ok(())
+        }
     }
@@ -561,126 +434,134 @@ impl<T, A: AllocRef> RawVec<T, A> {
     ///
     /// Aborts on OOM.
     pub fn shrink_to_fit(&mut self, amount: usize) {
-        let elem_size = mem::size_of::<T>();
-
-        // Set the `cap` because they might be about to promote to a `Box<[T]>`
-        if elem_size == 0 {
-            self.cap = amount;
-            return;
-        }
-
-        // This check is my waterloo; it's the only thing `Vec` wouldn't have to do.
-        assert!(self.cap >= amount, "Tried to shrink to a larger capacity");
-
-        if amount == 0 {
-            // We want to create a new zero-length vector within the
-            // same allocator. We use `ptr::write` to avoid an
-            // erroneous attempt to drop the contents, and we use
-            // `ptr::read` to sidestep condition against destructuring
-            // types that implement Drop.
-
-            unsafe {
-                let a = ptr::read(&self.a as *const A);
-                self.dealloc_buffer();
-                ptr::write(self, RawVec::new_in(a));
-            }
-        } else if self.cap != amount {
-            unsafe {
-                // We know here that our `amount` is greater than zero. This
-                // implies, via the assert above, that capacity is also greater
-                // than zero, which means that we've got a current layout that
-                // "fits"
-                //
-                // We also know that `self.cap` is greater than `amount`, and
-                // consequently we don't need runtime checks for creating either
-                // layout.
-                let old_size = elem_size * self.cap;
-                let new_size = elem_size * amount;
-                let align = mem::align_of::<T>();
-                let old_layout = Layout::from_size_align_unchecked(old_size, align);
-                match self.a.realloc(NonNull::from(self.ptr).cast(), old_layout, new_size) {
-                    Ok((ptr, _)) => self.ptr = ptr.cast().into(),
-                    Err(_) => {
-                        handle_alloc_error(Layout::from_size_align_unchecked(new_size, align))
-                    }
-                }
-            }
-            self.cap = amount;
+        match self.shrink(amount, MayMove) {
+            Err(CapacityOverflow) => capacity_overflow(),
+            Err(AllocError { layout, .. }) => handle_alloc_error(layout),
+            Ok(()) => { /* yay */ }
         }
     }
 }
 
-enum Fallibility {
-    Fallible,
-    Infallible,
+#[derive(Copy, Clone)]
+enum Strategy {
+    Double,
+    Amortized { used_capacity: usize, needed_extra_capacity: usize },
+    Exact { used_capacity: usize, needed_extra_capacity: usize },
 }
 
-use Fallibility::*;
-
-enum ReserveStrategy {
-    Exact,
-    Amortized,
-}
-
-use ReserveStrategy::*;
+use Strategy::*;
 
 impl<T, A: AllocRef> RawVec<T, A> {
-    fn reserve_internal(
+    /// Returns if the buffer needs to grow to fulfill the needed extra capacity.
+    /// Mainly used to make inlining reserve-calls possible without inlining `grow`.
+    fn needs_to_grow(&self, used_capacity: usize, needed_extra_capacity: usize) -> bool {
+        needed_extra_capacity > self.capacity().wrapping_sub(used_capacity)
+    }
+
+    fn capacity_from_bytes(excess: usize) -> usize {
+        match mem::size_of::<T>() {
+            0 => usize::MAX,
+            elem_size => excess / elem_size,
+        }
+    }
+
+    /// Single method to handle all possibilities of growing the buffer.
+    fn grow(
         &mut self,
-        used_capacity: usize,
-        needed_extra_capacity: usize,
-        fallibility: Fallibility,
-        strategy: ReserveStrategy,
+        strategy: Strategy,
+        placement: ReallocPlacement,
+        init: AllocInit,
     ) -> Result<(), TryReserveError> {
         let elem_size = mem::size_of::<T>();
-
-        unsafe {
-            // NOTE: we don't early branch on ZSTs here because we want this
-            // to actually catch "asking for more than usize::MAX" in that case.
-            // If we make it past the first branch then we are guaranteed to
-            // panic.
-
-            // Don't actually need any more capacity.
-            // Wrapping in case they gave a bad `used_capacity`.
-            if self.capacity().wrapping_sub(used_capacity) >= needed_extra_capacity {
-                return Ok(());
+        let new_layout = match strategy {
+            Double => unsafe {
+                if elem_size == 0 {
+                    // Since we return a capacity of `usize::MAX` when `elem_size` is
+                    // 0, getting to here necessarily means the `RawVec` is overfull.
+                    return Err(CapacityOverflow);
+                }
+                // Since we guarantee that we never allocate more than `isize::MAX` bytes,
+                // `elem_size * self.cap <= isize::MAX` as a precondition, so this can't overflow.
+                // Additionally the alignment will never be too large as to "not be satisfiable",
+                // so `Layout::from_size_align` will always return `Some`.
+                //
+                // TL;DR, we bypass runtime checks due to dynamic assertions in this module,
+                // allowing us to use `from_size_align_unchecked`.
+                let cap = if self.cap == 0 {
+                    // Skip to 4 because tiny `Vec`'s are dumb; but not if that would cause overflow.
+                    if elem_size > usize::MAX / 8 { 1 } else { 4 }
+                } else {
+                    self.cap * 2
+                };
+                Layout::from_size_align_unchecked(cap * elem_size, mem::align_of::<T>())
+            },
+            Amortized { used_capacity, needed_extra_capacity } => {
+                // Nothing we can really do about these checks, sadly.
+                let required_cap =
+                    used_capacity.checked_add(needed_extra_capacity).ok_or(CapacityOverflow)?;
+                // Cannot overflow, because `cap <= isize::MAX`, and type of `cap` is `usize`.
+                let double_cap = self.cap * 2;
+                // `double_cap` guarantees exponential growth.
+                let cap = cmp::max(double_cap, required_cap);
+                Layout::array::<T>(cap).map_err(|_| CapacityOverflow)?
             }
+            Exact { used_capacity, needed_extra_capacity } => {
+                let cap =
+                    used_capacity.checked_add(needed_extra_capacity).ok_or(CapacityOverflow)?;
+                Layout::array::<T>(cap).map_err(|_| CapacityOverflow)?
+            }
+        };
 
-            // Nothing we can really do about these checks, sadly.
-            let new_cap = match strategy {
-                Exact => {
-                    used_capacity.checked_add(needed_extra_capacity).ok_or(CapacityOverflow)?
-                }
-                Amortized => self.amortized_new_size(used_capacity, needed_extra_capacity)?,
-            };
-            let new_layout = Layout::array::<T>(new_cap).map_err(|_| CapacityOverflow)?;
+        let allocation = if let Some(old_layout) = self.current_layout() {
+            debug_assert!(old_layout.align() == new_layout.align());
+            unsafe {
+                self.a.grow(self.ptr.cast().into(), old_layout, new_layout.size(), placement, init)
+            }
+        } else {
+            match placement {
+                MayMove => self.a.alloc(new_layout, init),
+                InPlace => Err(AllocErr),
+            }
+        };
 
-            alloc_guard(new_layout.size())?;
+        alloc_guard(new_layout.size())?;
+        allocation
+            .map(|(ptr, excess)| {
+                self.ptr = ptr.cast().into();
+                self.cap = Self::capacity_from_bytes(excess);
+            })
+            .map_err(|_| TryReserveError::AllocError { layout: new_layout, non_exhaustive: () })
+    }
 
-            let res = match self.current_layout() {
-                Some(layout) => {
-                    debug_assert!(new_layout.align() == layout.align());
-                    self.a.realloc(NonNull::from(self.ptr).cast(), layout, new_layout.size())
-                }
-                None => self.a.alloc(new_layout),
-            };
+    fn shrink(
+        &mut self,
+        amount: usize,
+        placement: ReallocPlacement,
+    ) -> Result<(), TryReserveError> {
+        assert!(amount <= self.cap, "Tried to shrink to a larger capacity");
 
-            let (ptr, new_cap) = match (res, fallibility) {
-                (Err(AllocErr), Infallible) => handle_alloc_error(new_layout),
-                (Err(AllocErr), Fallible) => {
-                    return Err(TryReserveError::AllocError {
-                        layout: new_layout,
-                        non_exhaustive: (),
-                    });
-                }
-                (Ok((ptr, new_size)), _) => (ptr, new_size / elem_size),
-            };
+        let elem_size = mem::size_of::<T>();
+        let old_layout =
+            if let Some(layout) = self.current_layout() { layout } else { return Ok(()) };
+        let old_ptr = self.ptr.cast().into();
+        let new_size = amount * elem_size;
 
-            self.ptr = ptr.cast().into();
-            self.cap = new_cap;
+        let allocation = unsafe {
+            if amount == 0 && placement == MayMove {
+                self.dealloc_buffer();
+                Ok((old_layout.dangling(), 0))
+            } else {
+                self.a.shrink(old_ptr, old_layout, new_size, placement)
+            }
+        };
 
-            Ok(())
-        }
+        allocation
+            .map(|(ptr, excess)| {
+                self.ptr = ptr.cast().into();
+                self.cap = Self::capacity_from_bytes(excess);
+            })
+            .map_err(|_| TryReserveError::AllocError {
+                layout: unsafe { Layout::from_size_align_unchecked(new_size, old_layout.align()) },
+                non_exhaustive: (),
+            })
     }
 }
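
With `Fallibility` gone, fallibility lives at the call sites: the `try_*` methods return `TryReserveError`, and the panicking/aborting wrappers just match on it. Condensed from the hunks above, the mapping from the public API onto the two private workhorses is:

    // double()            -> grow(Double,            MayMove, Uninitialized), abort on error
    // double_in_place()   -> grow(Double,            InPlace, Uninitialized).is_ok()
    // try_reserve()       -> grow(Amortized { .. },  MayMove, Uninitialized)
    // reserve_in_place()  -> grow(Amortized { .. },  InPlace, Uninitialized).is_ok()
    // try_reserve_exact() -> grow(Exact { .. },      MayMove, Uninitialized)
    // shrink_to_fit(n)    -> shrink(n, MayMove), abort on error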
@@ -689,29 +570,24 @@ impl<T> RawVec<T, Global> {
     ///
     /// Note that this will correctly reconstitute any `cap` changes
     /// that may have been performed. (See description of type for details.)
-    ///
-    /// # Undefined Behavior
-    ///
-    /// All elements of `RawVec<T, Global>` must be initialized. Notice that
-    /// the rules around uninitialized boxed values are not finalized yet,
-    /// but until they are, it is advisable to avoid them.
-    pub unsafe fn into_box(self) -> Box<[T]> {
-        // NOTE: not calling `capacity()` here; actually using the real `cap` field!
-        let slice = slice::from_raw_parts_mut(self.ptr(), self.cap);
-        let output: Box<[T]> = Box::from_raw(slice);
-        mem::forget(self);
-        output
+    pub fn into_box(self) -> Box<[MaybeUninit<T>]> {
+        unsafe {
+            // NOTE: not calling `capacity()` here; actually using the real `cap` field!
+            let slice = slice::from_raw_parts_mut(self.ptr() as *mut MaybeUninit<T>, self.cap);
+            let output = Box::from_raw(slice);
+            mem::forget(self);
+            output
+        }
     }
 }
 
 impl<T, A: AllocRef> RawVec<T, A> {
     /// Frees the memory owned by the `RawVec` *without* trying to drop its contents.
     pub unsafe fn dealloc_buffer(&mut self) {
-        let elem_size = mem::size_of::<T>();
-        if elem_size != 0 {
-            if let Some(layout) = self.current_layout() {
-                self.a.dealloc(NonNull::from(self.ptr).cast(), layout);
-            }
+        if let Some(layout) = self.current_layout() {
+            self.a.dealloc(self.ptr.cast().into(), layout);
+            self.ptr = Unique::empty();
+            self.cap = 0;
         }
     }
 }
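
`into_box` can shed its `unsafe` because it no longer promises initialized contents: it returns `Box<[MaybeUninit<T>]>`, and the obligation moves to the caller's `assume_init`. That is exactly the pattern at the two call sites in this diff (`From<&[T]> for Box<[T]>` above and `Vec::into_boxed_slice` below):

    // Sketch of the call-site pattern (contents known to be fully written).
    let boxed: Box<[MaybeUninit<T>]> = buf.into_box();
    // Safety: every element was initialized before this point.
    let boxed: Box<[T]> = unsafe { boxed.assume_init() };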
@@ -719,9 +595,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
 unsafe impl<#[may_dangle] T, A: AllocRef> Drop for RawVec<T, A> {
     /// Frees the memory owned by the `RawVec` *without* trying to drop its contents.
     fn drop(&mut self) {
-        unsafe {
-            self.dealloc_buffer();
-        }
+        unsafe { self.dealloc_buffer() }
     }
 }
@@ -1,4 +1,5 @@
 use super::*;
+use core::ptr::NonNull;
 
 #[test]
 fn allocator_param() {
@@ -20,12 +21,16 @@ fn allocator_param() {
         fuel: usize,
     }
     unsafe impl AllocRef for BoundedAlloc {
-        fn alloc(&mut self, layout: Layout) -> Result<(NonNull<u8>, usize), AllocErr> {
+        fn alloc(
+            &mut self,
+            layout: Layout,
+            init: AllocInit,
+        ) -> Result<(NonNull<u8>, usize), AllocErr> {
             let size = layout.size();
             if size > self.fuel {
                 return Err(AllocErr);
             }
-            match Global.alloc(layout) {
+            match Global.alloc(layout, init) {
                 ok @ Ok(_) => {
                     self.fuel -= size;
                     ok
@@ -252,7 +252,7 @@ use core::ptr::{self, NonNull};
 use core::slice::{self, from_raw_parts_mut};
 use core::usize;
 
-use crate::alloc::{box_free, handle_alloc_error, AllocRef, Global, Layout};
+use crate::alloc::{box_free, handle_alloc_error, AllocInit, AllocRef, Global, Layout};
 use crate::string::String;
 use crate::vec::Vec;
 
@@ -936,7 +936,9 @@ impl<T: ?Sized> Rc<T> {
         let layout = Layout::new::<RcBox<()>>().extend(value_layout).unwrap().0.pad_to_align();
 
         // Allocate for the layout.
-        let (mem, _) = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
+        let (mem, _) = Global
+            .alloc(layout, AllocInit::Uninitialized)
+            .unwrap_or_else(|_| handle_alloc_error(layout));
 
         // Initialize the RcBox
         let inner = mem_to_rcbox(mem.as_ptr());
@@ -25,7 +25,7 @@ use core::sync::atomic;
 use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
 use core::{isize, usize};
 
-use crate::alloc::{box_free, handle_alloc_error, AllocRef, Global, Layout};
+use crate::alloc::{box_free, handle_alloc_error, AllocInit, AllocRef, Global, Layout};
 use crate::boxed::Box;
 use crate::rc::is_dangling;
 use crate::string::String;
@@ -814,7 +814,9 @@ impl<T: ?Sized> Arc<T> {
         // reference (see #54908).
         let layout = Layout::new::<ArcInner<()>>().extend(value_layout).unwrap().0.pad_to_align();
 
-        let (mem, _) = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
+        let (mem, _) = Global
+            .alloc(layout, AllocInit::Uninitialized)
+            .unwrap_or_else(|_| handle_alloc_error(layout));
 
         // Initialize the ArcInner
         let inner = mem_to_arcinner(mem.as_ptr());
@@ -1,4 +1,4 @@
-use std::alloc::{AllocRef, Global, Layout, System};
+use std::alloc::{AllocInit, AllocRef, Global, Layout, System};
 
 /// Issue #45955 and #62251.
 #[test]
@@ -20,7 +20,13 @@ fn check_overalign_requests<T: AllocRef>(mut allocator: T) {
     unsafe {
         let pointers: Vec<_> = (0..iterations)
             .map(|_| {
-                allocator.alloc(Layout::from_size_align(size, align).unwrap()).unwrap().0
+                allocator
+                    .alloc(
+                        Layout::from_size_align(size, align).unwrap(),
+                        AllocInit::Uninitialized,
+                    )
+                    .unwrap()
+                    .0
             })
             .collect();
         for &ptr in &pointers {
@@ -679,7 +679,7 @@ impl<T> Vec<T> {
             self.shrink_to_fit();
             let buf = ptr::read(&self.buf);
             mem::forget(self);
-            buf.into_box()
+            buf.into_box().assume_init()
         }
     }
 
src/libcore/alloc.rs (1043 changed lines; diff suppressed because it is too large)

src/libcore/alloc/global.rs (new file, 198 lines)
@@ -0,0 +1,198 @@
+use crate::alloc::Layout;
+use crate::cmp;
+use crate::ptr;
+
+/// A memory allocator that can be registered as the standard library’s default
+/// through the `#[global_allocator]` attribute.
+///
+/// Some of the methods require that a memory block be *currently
+/// allocated* via an allocator. This means that:
+///
+/// * the starting address for that memory block was previously
+///   returned by a previous call to an allocation method
+///   such as `alloc`, and
+///
+/// * the memory block has not been subsequently deallocated, where
+///   blocks are deallocated either by being passed to a deallocation
+///   method such as `dealloc` or by being
+///   passed to a reallocation method that returns a non-null pointer.
+///
+///
+/// # Example
+///
+/// ```no_run
+/// use std::alloc::{GlobalAlloc, Layout, alloc};
+/// use std::ptr::null_mut;
+///
+/// struct MyAllocator;
+///
+/// unsafe impl GlobalAlloc for MyAllocator {
+///     unsafe fn alloc(&self, _layout: Layout) -> *mut u8 { null_mut() }
+///     unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {}
+/// }
+///
+/// #[global_allocator]
+/// static A: MyAllocator = MyAllocator;
+///
+/// fn main() {
+///     unsafe {
+///         assert!(alloc(Layout::new::<u32>()).is_null())
+///     }
+/// }
+/// ```
+///
+/// # Safety
+///
+/// The `GlobalAlloc` trait is an `unsafe` trait for a number of reasons, and
+/// implementors must ensure that they adhere to these contracts:
+///
+/// * It's undefined behavior if global allocators unwind. This restriction may
+///   be lifted in the future, but currently a panic from any of these
+///   functions may lead to memory unsafety.
+///
+/// * `Layout` queries and calculations in general must be correct. Callers of
+///   this trait are allowed to rely on the contracts defined on each method,
+///   and implementors must ensure such contracts remain true.
+#[stable(feature = "global_alloc", since = "1.28.0")]
+pub unsafe trait GlobalAlloc {
+    /// Allocate memory as described by the given `layout`.
+    ///
+    /// Returns a pointer to newly-allocated memory,
+    /// or null to indicate allocation failure.
+    ///
+    /// # Safety
+    ///
+    /// This function is unsafe because undefined behavior can result
+    /// if the caller does not ensure that `layout` has non-zero size.
+    ///
+    /// (Extension subtraits might provide more specific bounds on
+    /// behavior, e.g., guarantee a sentinel address or a null pointer
+    /// in response to a zero-size allocation request.)
+    ///
+    /// The allocated block of memory may or may not be initialized.
+    ///
+    /// # Errors
+    ///
+    /// Returning a null pointer indicates that either memory is exhausted
+    /// or `layout` does not meet this allocator's size or alignment constraints.
+    ///
+    /// Implementations are encouraged to return null on memory
+    /// exhaustion rather than aborting, but this is not
+    /// a strict requirement. (Specifically: it is *legal* to
+    /// implement this trait atop an underlying native allocation
+    /// library that aborts on memory exhaustion.)
+    ///
+    /// Clients wishing to abort computation in response to an
+    /// allocation error are encouraged to call the [`handle_alloc_error`] function,
+    /// rather than directly invoking `panic!` or similar.
+    ///
+    /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
+    #[stable(feature = "global_alloc", since = "1.28.0")]
+    unsafe fn alloc(&self, layout: Layout) -> *mut u8;
+
+    /// Deallocate the block of memory at the given `ptr` pointer with the given `layout`.
+    ///
+    /// # Safety
+    ///
+    /// This function is unsafe because undefined behavior can result
+    /// if the caller does not ensure all of the following:
+    ///
+    /// * `ptr` must denote a block of memory currently allocated via
+    ///   this allocator,
+    ///
+    /// * `layout` must be the same layout that was used
+    ///   to allocate that block of memory,
+    #[stable(feature = "global_alloc", since = "1.28.0")]
+    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout);
+
+    /// Behaves like `alloc`, but also ensures that the contents
+    /// are set to zero before being returned.
+    ///
+    /// # Safety
+    ///
+    /// This function is unsafe for the same reasons that `alloc` is.
+    /// However the allocated block of memory is guaranteed to be initialized.
+    ///
+    /// # Errors
+    ///
+    /// Returning a null pointer indicates that either memory is exhausted
+    /// or `layout` does not meet allocator's size or alignment constraints,
+    /// just as in `alloc`.
+    ///
+    /// Clients wishing to abort computation in response to an
+    /// allocation error are encouraged to call the [`handle_alloc_error`] function,
+    /// rather than directly invoking `panic!` or similar.
+    ///
+    /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
+    #[stable(feature = "global_alloc", since = "1.28.0")]
+    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
+        let size = layout.size();
+        let ptr = self.alloc(layout);
+        if !ptr.is_null() {
+            ptr::write_bytes(ptr, 0, size);
+        }
+        ptr
+    }
+
+    /// Shrink or grow a block of memory to the given `new_size`.
+    /// The block is described by the given `ptr` pointer and `layout`.
+    ///
+    /// If this returns a non-null pointer, then ownership of the memory block
+    /// referenced by `ptr` has been transferred to this allocator.
+    /// The memory may or may not have been deallocated,
+    /// and should be considered unusable (unless of course it was
+    /// transferred back to the caller again via the return value of
+    /// this method). The new memory block is allocated with `layout`, but
+    /// with the `size` updated to `new_size`.
+    ///
+    /// If this method returns null, then ownership of the memory
+    /// block has not been transferred to this allocator, and the
+    /// contents of the memory block are unaltered.
+    ///
+    /// # Safety
+    ///
+    /// This function is unsafe because undefined behavior can result
+    /// if the caller does not ensure all of the following:
+    ///
+    /// * `ptr` must be currently allocated via this allocator,
+    ///
+    /// * `layout` must be the same layout that was used
+    ///   to allocate that block of memory,
+    ///
+    /// * `new_size` must be greater than zero.
+    ///
+    /// * `new_size`, when rounded up to the nearest multiple of `layout.align()`,
+    ///   must not overflow (i.e., the rounded value must be less than `usize::MAX`).
+    ///
+    /// (Extension subtraits might provide more specific bounds on
+    /// behavior, e.g., guarantee a sentinel address or a null pointer
+    /// in response to a zero-size allocation request.)
+    ///
+    /// # Errors
+    ///
+    /// Returns null if the new layout does not meet the size
+    /// and alignment constraints of the allocator, or if reallocation
+    /// otherwise fails.
+    ///
+    /// Implementations are encouraged to return null on memory
+    /// exhaustion rather than panicking or aborting, but this is not
+    /// a strict requirement. (Specifically: it is *legal* to
+    /// implement this trait atop an underlying native allocation
+    /// library that aborts on memory exhaustion.)
+    ///
+    /// Clients wishing to abort computation in response to a
+    /// reallocation error are encouraged to call the [`handle_alloc_error`] function,
+    /// rather than directly invoking `panic!` or similar.
+    ///
+    /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
+    #[stable(feature = "global_alloc", since = "1.28.0")]
+    unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
+        let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
+        let new_ptr = self.alloc(new_layout);
+        if !new_ptr.is_null() {
+            ptr::copy_nonoverlapping(ptr, new_ptr, cmp::min(layout.size(), new_size));
+            self.dealloc(ptr, layout);
+        }
+        new_ptr
+    }
+}
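
This new file appears to carry `GlobalAlloc` out of the old monolithic `libcore/alloc.rs` unchanged, keeping its raw, null-on-failure contract. The `Global` impl earlier in the diff is the bridge between the two styles, lifting raw pointers into `Result` values:

    // Condensed from the `Global::alloc` hunk above:
    let raw_ptr = alloc(layout);                      // GlobalAlloc style: null on failure
    let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?; // AllocRef style: checked Result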
src/libcore/alloc/layout.rs (new file, 345 lines)
@ -0,0 +1,345 @@
|
||||
// ignore-tidy-undocumented-unsafe
|
||||
|
||||
use crate::cmp;
|
||||
use crate::fmt;
|
||||
use crate::mem;
|
||||
use crate::num::NonZeroUsize;
|
||||
use crate::ptr::NonNull;
|
||||
|
||||
const fn size_align<T>() -> (usize, usize) {
|
||||
(mem::size_of::<T>(), mem::align_of::<T>())
|
||||
}
|
||||
|
||||
/// Layout of a block of memory.
|
||||
///
|
||||
/// An instance of `Layout` describes a particular layout of memory.
|
||||
/// You build a `Layout` up as an input to give to an allocator.
|
||||
///
|
||||
/// All layouts have an associated size and a power-of-two alignment.
|
||||
///
|
||||
/// (Note that layouts are *not* required to have non-zero size,
|
||||
/// even though `GlobalAlloc` requires that all memory requests
|
||||
/// be non-zero in size. A caller must either ensure that conditions
|
||||
/// like this are met, use specific allocators with looser
|
||||
/// requirements, or use the more lenient `AllocRef` interface.)
|
||||
#[stable(feature = "alloc_layout", since = "1.28.0")]
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
#[lang = "alloc_layout"]
|
||||
pub struct Layout {
|
||||
// size of the requested block of memory, measured in bytes.
|
||||
size_: usize,
|
||||
|
||||
// alignment of the requested block of memory, measured in bytes.
|
||||
// we ensure that this is always a power-of-two, because API's
|
||||
// like `posix_memalign` require it and it is a reasonable
|
||||
// constraint to impose on Layout constructors.
|
||||
//
|
||||
// (However, we do not analogously require `align >= sizeof(void*)`,
|
||||
// even though that is *also* a requirement of `posix_memalign`.)
|
||||
align_: NonZeroUsize,
|
||||
}
|
||||
|
||||
impl Layout {
|
||||
/// Constructs a `Layout` from a given `size` and `align`,
|
||||
/// or returns `LayoutErr` if any of the following conditions
|
||||
/// are not met:
|
||||
///
|
||||
/// * `align` must not be zero,
|
||||
///
|
||||
/// * `align` must be a power of two,
|
||||
///
|
||||
/// * `size`, when rounded up to the nearest multiple of `align`,
|
||||
/// must not overflow (i.e., the rounded value must be less than
|
||||
/// or equal to `usize::MAX`).
|
||||
#[stable(feature = "alloc_layout", since = "1.28.0")]
|
||||
#[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")]
|
||||
#[inline]
|
||||
pub const fn from_size_align(size: usize, align: usize) -> Result<Self, LayoutErr> {
|
||||
if !align.is_power_of_two() {
|
||||
return Err(LayoutErr { private: () });
|
||||
}
|
||||
|
||||
// (power-of-two implies align != 0.)
|
||||
|
||||
// Rounded up size is:
|
||||
// size_rounded_up = (size + align - 1) & !(align - 1);
|
||||
//
|
||||
// We know from above that align != 0. If adding (align - 1)
|
||||
// does not overflow, then rounding up will be fine.
|
||||
//
|
||||
// Conversely, &-masking with !(align - 1) will subtract off
|
||||
// only low-order-bits. Thus if overflow occurs with the sum,
|
||||
// the &-mask cannot subtract enough to undo that overflow.
|
||||
//
|
||||
// Above implies that checking for summation overflow is both
|
||||
// necessary and sufficient.
|
||||
if size > usize::MAX - (align - 1) {
|
||||
return Err(LayoutErr { private: () });
|
||||
}
|
||||
|
||||
unsafe { Ok(Layout::from_size_align_unchecked(size, align)) }
|
||||
}

    /// Creates a layout, bypassing all checks.
    ///
    /// # Safety
    ///
    /// This function is unsafe as it does not verify the preconditions from
    /// [`Layout::from_size_align`](#method.from_size_align).
    #[stable(feature = "alloc_layout", since = "1.28.0")]
    #[rustc_const_stable(feature = "alloc_layout", since = "1.28.0")]
    #[inline]
    pub const unsafe fn from_size_align_unchecked(size: usize, align: usize) -> Self {
        Layout { size_: size, align_: NonZeroUsize::new_unchecked(align) }
    }

    /// The minimum size in bytes for a memory block of this layout.
    #[stable(feature = "alloc_layout", since = "1.28.0")]
    #[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")]
    #[inline]
    pub const fn size(&self) -> usize {
        self.size_
    }

    /// The minimum byte alignment for a memory block of this layout.
    #[stable(feature = "alloc_layout", since = "1.28.0")]
    #[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")]
    #[inline]
    pub const fn align(&self) -> usize {
        self.align_.get()
    }

    /// Constructs a `Layout` suitable for holding a value of type `T`.
    #[stable(feature = "alloc_layout", since = "1.28.0")]
    #[rustc_const_stable(feature = "alloc_layout_const_new", since = "1.42.0")]
    #[inline]
    pub const fn new<T>() -> Self {
        let (size, align) = size_align::<T>();
        // Note that the align is guaranteed by rustc to be a power of two and
        // the size+align combo is guaranteed to fit in our address space. As a
        // result use the unchecked constructor here to avoid inserting code
        // that panics if it isn't optimized well enough.
        unsafe { Layout::from_size_align_unchecked(size, align) }
    }

    /// Produces layout describing a record that could be used to
    /// allocate backing structure for `T` (which could be a trait
    /// or other unsized type like a slice).
    #[stable(feature = "alloc_layout", since = "1.28.0")]
    #[inline]
    pub fn for_value<T: ?Sized>(t: &T) -> Self {
        let (size, align) = (mem::size_of_val(t), mem::align_of_val(t));
        // See rationale in `new` for why this is using an unsafe variant below
        debug_assert!(Layout::from_size_align(size, align).is_ok());
        unsafe { Layout::from_size_align_unchecked(size, align) }
    }

    /// Creates a `NonNull` that is dangling, but well-aligned for this Layout.
    ///
    /// Note that the pointer value may potentially represent a valid pointer,
    /// which means this must not be used as a "not yet initialized"
    /// sentinel value. Types that lazily allocate must track initialization by
    /// some other means.
    #[unstable(feature = "alloc_layout_extra", issue = "55724")]
    pub const fn dangling(&self) -> NonNull<u8> {
        // align is non-zero and a power of two
        unsafe { NonNull::new_unchecked(self.align() as *mut u8) }
    }

    /// Creates a layout describing the record that can hold a value
    /// of the same layout as `self`, but that also is aligned to
    /// alignment `align` (measured in bytes).
    ///
    /// If `self` already meets the prescribed alignment, then returns
    /// `self`.
    ///
    /// Note that this method does not add any padding to the overall
    /// size, regardless of whether the returned layout has a different
    /// alignment. In other words, if `K` has size 16, `K.align_to(32)`
    /// will *still* have size 16.
    ///
    /// Returns an error if the combination of `self.size()` and the given
    /// `align` violates the conditions listed in
    /// [`Layout::from_size_align`](#method.from_size_align).
    #[unstable(feature = "alloc_layout_extra", issue = "55724")]
    #[inline]
    pub fn align_to(&self, align: usize) -> Result<Self, LayoutErr> {
        Layout::from_size_align(self.size(), cmp::max(self.align(), align))
    }
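
    // Illustrative usage (not part of this commit): raising the alignment
    // leaves the size untouched, so the result may still need `pad_to_align`
    // before being used as an array element.
    //
    //     let k = Layout::from_size_align(16, 8).unwrap();
    //     let k32 = k.align_to(32).unwrap();
    //     assert_eq!((k32.size(), k32.align()), (16, 32));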

    /// Returns the amount of padding we must insert after `self`
    /// to ensure that the following address will satisfy `align`
    /// (measured in bytes).
    ///
    /// e.g., if `self.size()` is 9, then `self.padding_needed_for(4)`
    /// returns 3, because that is the minimum number of bytes of
    /// padding required to get a 4-aligned address (assuming that the
    /// corresponding memory block starts at a 4-aligned address).
    ///
    /// The return value of this function has no meaning if `align` is
    /// not a power-of-two.
    ///
    /// Note that the utility of the returned value requires `align`
    /// to be less than or equal to the alignment of the starting
    /// address for the whole allocated block of memory. One way to
    /// satisfy this constraint is to ensure `align <= self.align()`.
    #[unstable(feature = "alloc_layout_extra", issue = "55724")]
    #[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")]
    #[inline]
    pub const fn padding_needed_for(&self, align: usize) -> usize {
        let len = self.size();

        // Rounded up value is:
        //   len_rounded_up = (len + align - 1) & !(align - 1);
        // and then we return the padding difference: `len_rounded_up - len`.
        //
        // We use modular arithmetic throughout:
        //
        // 1. align is guaranteed to be > 0, so align - 1 is always
        //    valid.
        //
        // 2. `len + align - 1` can overflow by at most `align - 1`,
        //    so the &-mask with `!(align - 1)` will ensure that in the
        //    case of overflow, `len_rounded_up` will itself be 0.
        //    Thus the returned padding, when added to `len`, yields 0,
        //    which trivially satisfies the alignment `align`.
        //
        // (Of course, attempts to allocate blocks of memory whose
        // size and padding overflow in the above manner should cause
        // the allocator to yield an error anyway.)

        let len_rounded_up = len.wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1);
        len_rounded_up.wrapping_sub(len)
    }
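
    // Worked example (not part of this commit), matching the doc comment
    // above: with len = 9 and align = 4,
    //
    //     len_rounded_up = (9 + 4 - 1) & !(4 - 1) = 12 & !3 = 12
    //     padding        = 12 - 9 = 3
    //
    // so the next 4-aligned address after a 9-byte record is 3 bytes away.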

    /// Creates a layout by rounding the size of this layout up to a multiple
    /// of the layout's alignment.
    ///
    /// This is equivalent to adding the result of `padding_needed_for`
    /// to the layout's current size.
    #[unstable(feature = "alloc_layout_extra", issue = "55724")]
    #[inline]
    pub fn pad_to_align(&self) -> Layout {
        let pad = self.padding_needed_for(self.align());
        // This cannot overflow. Quoting from the invariant of Layout:
        // > `size`, when rounded up to the nearest multiple of `align`,
        // > must not overflow (i.e., the rounded value must be less than
        // > or equal to `usize::MAX`)
        let new_size = self.size() + pad;

        Layout::from_size_align(new_size, self.align()).unwrap()
    }
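
    // Illustrative usage (not part of this commit): padding a 10-byte,
    // 4-aligned layout up to the next multiple of its alignment.
    //
    //     let l = Layout::from_size_align(10, 4).unwrap();
    //     assert_eq!(l.pad_to_align().size(), 12);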

    /// Creates a layout describing the record for `n` instances of
    /// `self`, with a suitable amount of padding between each to
    /// ensure that each instance is given its requested size and
    /// alignment. On success, returns `(k, offs)` where `k` is the
    /// layout of the array and `offs` is the distance between the start
    /// of each element in the array.
    ///
    /// On arithmetic overflow, returns `LayoutErr`.
    #[unstable(feature = "alloc_layout_extra", issue = "55724")]
    #[inline]
    pub fn repeat(&self, n: usize) -> Result<(Self, usize), LayoutErr> {
        // This cannot overflow. Quoting from the invariant of Layout:
        // > `size`, when rounded up to the nearest multiple of `align`,
        // > must not overflow (i.e., the rounded value must be less than
        // > or equal to `usize::MAX`)
        let padded_size = self.size() + self.padding_needed_for(self.align());
        let alloc_size = padded_size.checked_mul(n).ok_or(LayoutErr { private: () })?;

        unsafe {
            // self.align is already known to be valid and alloc_size has been
            // padded already.
            Ok((Layout::from_size_align_unchecked(alloc_size, self.align()), padded_size))
        }
    }
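
    // Illustrative usage (not part of this commit): five 10-byte, 4-aligned
    // records need 2 bytes of padding each, so the stride is 12 and the
    // array layout is 60 bytes.
    //
    //     let elem = Layout::from_size_align(10, 4).unwrap();
    //     let (arr, stride) = elem.repeat(5).unwrap();
    //     assert_eq!((arr.size(), stride), (60, 12));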

    /// Creates a layout describing the record for `self` followed by
    /// `next`, including any necessary padding to ensure that `next`
    /// will be properly aligned. Note that the resulting layout will
    /// satisfy the alignment properties of both `self` and `next`.
    ///
    /// The resulting layout will be the same as that of a C struct containing
    /// two fields with the layouts of `self` and `next`, in that order.
    ///
    /// Returns `Ok((k, offset))`, where `k` is the layout of the concatenated
    /// record and `offset` is the relative location, in bytes, of the
    /// start of the `next` embedded within the concatenated record
    /// (assuming that the record itself starts at offset 0).
    ///
    /// On arithmetic overflow, returns `LayoutErr`.
    #[unstable(feature = "alloc_layout_extra", issue = "55724")]
    #[inline]
    pub fn extend(&self, next: Self) -> Result<(Self, usize), LayoutErr> {
        let new_align = cmp::max(self.align(), next.align());
        let pad = self.padding_needed_for(next.align());

        let offset = self.size().checked_add(pad).ok_or(LayoutErr { private: () })?;
        let new_size = offset.checked_add(next.size()).ok_or(LayoutErr { private: () })?;

        let layout = Layout::from_size_align(new_size, new_align)?;
        Ok((layout, offset))
    }
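
    // Illustrative usage (not part of this commit): a `u16` followed by a
    // `u32` needs 2 bytes of padding before the second field, exactly as in
    // the equivalent `#[repr(C)]` struct.
    //
    //     let (rec, offset) = Layout::new::<u16>().extend(Layout::new::<u32>()).unwrap();
    //     assert_eq!((rec.size(), rec.align(), offset), (8, 4, 4));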

    /// Creates a layout describing the record for `n` instances of
    /// `self`, with no padding between each instance.
    ///
    /// Note that, unlike `repeat`, `repeat_packed` does not guarantee
    /// that the repeated instances of `self` will be properly
    /// aligned, even if a given instance of `self` is properly
    /// aligned. In other words, if the layout returned by
    /// `repeat_packed` is used to allocate an array, it is not
    /// guaranteed that all elements in the array will be properly
    /// aligned.
    ///
    /// On arithmetic overflow, returns `LayoutErr`.
    #[unstable(feature = "alloc_layout_extra", issue = "55724")]
    #[inline]
    pub fn repeat_packed(&self, n: usize) -> Result<Self, LayoutErr> {
        let size = self.size().checked_mul(n).ok_or(LayoutErr { private: () })?;
        Layout::from_size_align(size, self.align())
    }

    /// Creates a layout describing the record for `self` followed by
    /// `next` with no additional padding between the two. Since no
    /// padding is inserted, the alignment of `next` is irrelevant,
    /// and is not incorporated *at all* into the resulting layout.
    ///
    /// On arithmetic overflow, returns `LayoutErr`.
    #[unstable(feature = "alloc_layout_extra", issue = "55724")]
    #[inline]
    pub fn extend_packed(&self, next: Self) -> Result<Self, LayoutErr> {
        let new_size = self.size().checked_add(next.size()).ok_or(LayoutErr { private: () })?;
        Layout::from_size_align(new_size, self.align())
    }
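
    // Illustrative contrast (not part of this commit): the packed variants
    // drop the padding that `repeat` and `extend` would insert.
    //
    //     let l = Layout::from_size_align(10, 4).unwrap();
    //     assert_eq!(l.repeat_packed(5).unwrap().size(), 50);  // vs. 60 for `repeat`
    //     assert_eq!(l.extend_packed(l).unwrap().size(), 20);  // vs. 22 at offset 12 for `extend`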

    /// Creates a layout describing the record for a `[T; n]`.
    ///
    /// On arithmetic overflow, returns `LayoutErr`.
    #[unstable(feature = "alloc_layout_extra", issue = "55724")]
    #[inline]
    pub fn array<T>(n: usize) -> Result<Self, LayoutErr> {
        Layout::new::<T>().repeat(n).map(|(k, offs)| {
            debug_assert!(offs == mem::size_of::<T>());
            k
        })
    }
}
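
// Illustrative usage (not part of this commit): an array layout is just
// `repeat` with the element stride equal to `size_of::<T>()`.
//
//     let arr = Layout::array::<u32>(3).unwrap();
//     assert_eq!((arr.size(), arr.align()), (12, 4));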

/// The parameters given to `Layout::from_size_align`
/// or some other `Layout` constructor
/// do not satisfy its documented constraints.
#[stable(feature = "alloc_layout", since = "1.28.0")]
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct LayoutErr {
    private: (),
}

// (we need this for downstream impl of trait Error)
#[stable(feature = "alloc_layout", since = "1.28.0")]
impl fmt::Display for LayoutErr {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("invalid parameters to Layout::from_size_align")
    }
}

376 src/libcore/alloc/mod.rs Normal file
@ -0,0 +1,376 @@
//! Memory allocation APIs

#![stable(feature = "alloc_module", since = "1.28.0")]

mod global;
mod layout;

#[stable(feature = "global_alloc", since = "1.28.0")]
pub use self::global::GlobalAlloc;
#[stable(feature = "alloc_layout", since = "1.28.0")]
pub use self::layout::{Layout, LayoutErr};

use crate::fmt;
use crate::ptr::{self, NonNull};

/// The `AllocErr` error indicates an allocation failure
/// that may be due to resource exhaustion or to
/// something wrong when combining the given input arguments with this
/// allocator.
#[unstable(feature = "allocator_api", issue = "32838")]
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct AllocErr;

// (we need this for downstream impl of trait Error)
#[unstable(feature = "allocator_api", issue = "32838")]
impl fmt::Display for AllocErr {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("memory allocation failed")
    }
}

/// A desired initial state for allocated memory.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[unstable(feature = "allocator_api", issue = "32838")]
pub enum AllocInit {
    /// The contents of the new memory are undefined.
    ///
    /// Reading uninitialized memory is Undefined Behavior; it must be initialized before use.
    Uninitialized,
    /// The new memory is guaranteed to be zeroed.
    Zeroed,
}

impl AllocInit {
    /// Initialize the memory block referenced by `ptr` and specified by `layout`.
    ///
    /// This behaves like calling [`AllocInit::initialize_offset(ptr, layout, 0)`][off].
    ///
    /// [off]: AllocInit::initialize_offset
    ///
    /// # Safety
    ///
    /// * `layout` must [*fit*] the block of memory referenced by `ptr`
    ///
    /// [*fit*]: trait.AllocRef.html#memory-fitting
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub unsafe fn initialize(self, ptr: NonNull<u8>, layout: Layout) {
        self.initialize_offset(ptr, layout, 0)
    }

    /// Initialize the memory block referenced by `ptr` and specified by `layout` at the specified
    /// `offset`.
    ///
    /// This is a no-op for [`AllocInit::Uninitialized`] and writes zeroes for [`AllocInit::Zeroed`]
    /// at `ptr + offset` until `ptr + layout.size()`.
    ///
    /// # Safety
    ///
    /// * `layout` must [*fit*] the block of memory referenced by `ptr`
    ///
    /// * `offset` must be smaller than or equal to `layout.size()`
    ///
    /// [*fit*]: trait.AllocRef.html#memory-fitting
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub unsafe fn initialize_offset(self, ptr: NonNull<u8>, layout: Layout, offset: usize) {
        debug_assert!(
            offset <= layout.size(),
            "`offset` must be smaller than or equal to `layout.size()`"
        );
        match self {
            AllocInit::Uninitialized => (),
            AllocInit::Zeroed => {
                let new_ptr = ptr.as_ptr().add(offset);
                let size = layout.size() - offset;
                ptr::write_bytes(new_ptr, 0, size);
            }
        }
    }
}
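
// Illustrative sketch (not part of this commit): after growing a block from
// 16 to 64 bytes, only the newly exposed tail needs zeroing, so `grow` calls
// `initialize_offset` with the old size as the offset:
//
//     AllocInit::Zeroed.initialize_offset(ptr, new_layout, 16);
//     // bytes 0..16 keep their old contents; bytes 16..64 are now zero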

/// A placement constraint when growing or shrinking an existing allocation.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[unstable(feature = "allocator_api", issue = "32838")]
pub enum ReallocPlacement {
    /// The allocator is allowed to move the allocation to a different memory address.
    // FIXME(wg-allocators#46): Add a section to the module documentation "What is a legal
    // allocator" and link it at "valid location".
    ///
    /// If the allocation _does_ move, it's the responsibility of the allocator
    /// to also move the data from the previous location to the new location.
    MayMove,
    /// The address of the new memory must not change.
    ///
    /// If the allocation would have to be moved to a new location to fit, the
    /// reallocation request will fail.
    InPlace,
}

/// An implementation of `AllocRef` can allocate, grow, shrink, and deallocate arbitrary blocks of
/// data described via [`Layout`][].
///
/// `AllocRef` is designed to be implemented on ZSTs, references, or smart pointers because an
/// allocator like `MyAlloc([u8; N])` cannot be moved without updating the pointers to the
/// allocated memory.
///
/// Unlike [`GlobalAlloc`][], zero-sized allocations are allowed in `AllocRef`. If an underlying
/// allocator does not support this (like jemalloc) or returns a null pointer (such as
/// `libc::malloc`), this case must be caught. [`Layout::dangling()`][] can then be used to create
/// an aligned `NonNull<u8>`.
///
/// ### Currently allocated memory
///
/// Some of the methods require that a memory block be *currently allocated* via an allocator. This
/// means that:
///
/// * the starting address for that memory block was previously returned by [`alloc`], [`grow`], or
///   [`shrink`], and
///
/// * the memory block has not been subsequently deallocated, where blocks are either deallocated
///   directly by being passed to [`dealloc`] or were changed by being passed to [`grow`] or
///   [`shrink`] that returns `Ok`. If `grow` or `shrink` have returned `Err`, the passed pointer
///   remains valid.
///
/// [`alloc`]: AllocRef::alloc
/// [`grow`]: AllocRef::grow
/// [`shrink`]: AllocRef::shrink
/// [`dealloc`]: AllocRef::dealloc
///
/// ### Memory fitting
///
/// Some of the methods require that a layout *fit* a memory block. What it means for a layout to
/// "fit" a memory block (or equivalently, for a memory block to "fit" a layout) is that the
/// following conditions must hold:
///
/// * The block must be allocated with the same alignment as [`layout.align()`], and
///
/// * The provided [`layout.size()`] must fall in the range `min ..= max`, where:
///   - `min` is the size of the layout most recently used to allocate the block, and
///   - `max` is the latest actual size returned from [`alloc`], [`grow`], or [`shrink`].
///
/// [`layout.align()`]: Layout::align
/// [`layout.size()`]: Layout::size
///
/// ### Notes
///
/// * if a layout `k` fits a memory block (denoted by `ptr`) currently allocated via an allocator
///   `a`, then it is legal to use that layout to deallocate it, i.e.,
///   [`a.dealloc(ptr, k);`][`dealloc`], and
///
/// * if an allocator does not support overallocating, it is fine to simply return
///   [`layout.size()`] as the actual size.
///
/// # Safety
///
/// * Pointers returned from an allocator must point to valid memory and retain their validity
///   until the instance and all of its clones are dropped,
///
/// * cloning or moving the allocator must not invalidate pointers returned from this allocator.
///   A cloned allocator must behave like the same allocator, and
///
/// * any pointer to a memory block which is [*currently allocated*] may be passed to any other
///   method of the allocator.
///
/// [*currently allocated*]: #currently-allocated-memory
#[unstable(feature = "allocator_api", issue = "32838")]
pub unsafe trait AllocRef {
    /// On success, returns a pointer meeting the size and alignment guarantees of `layout` and the
    /// actual size of the allocated block, which is greater than or equal to `layout.size()`.
    ///
    /// The returned block of storage is initialized as specified by [`init`], all the way up to
    /// the returned `actual_size`.
    ///
    /// [`init`]: AllocInit
    ///
    /// # Errors
    ///
    /// Returning `Err` indicates that either memory is exhausted or `layout` does not meet the
    /// allocator's size or alignment constraints.
    ///
    /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or
    /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement
    /// this trait atop an underlying native allocation library that aborts on memory exhaustion.)
    ///
    /// Clients wishing to abort computation in response to an allocation error are encouraged to
    /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar.
    ///
    /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
    fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<(NonNull<u8>, usize), AllocErr>;
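
    // Illustrative call site (not part of this commit), assuming the
    // `allocator_api` feature and the `Global` allocator in scope:
    //
    //     let layout = Layout::new::<[u8; 32]>();
    //     let (ptr, actual) = Global.alloc(layout, AllocInit::Zeroed)?;
    //     // `actual` may exceed 32; the whole block is zeroed either way.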

    /// Deallocates the memory referenced by `ptr`.
    ///
    /// # Safety
    ///
    /// * `ptr` must denote a block of memory [*currently allocated*] via this allocator,
    ///
    /// * `layout` must [*fit*] that block of memory, and
    ///
    /// * the alignment of the `layout` must match the alignment used to allocate that block of
    ///   memory.
    ///
    /// [*currently allocated*]: #currently-allocated-memory
    /// [*fit*]: #memory-fitting
    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout);

    /// Attempts to extend the allocation referenced by `ptr` to fit `new_size`.
    ///
    /// Returns a pointer and the actual size of the allocated block. The pointer is suitable for
    /// holding data described by a new layout with `layout`’s alignment and a size given by
    /// `new_size`. To accomplish this, the allocator may extend the allocation referenced by `ptr`
    /// to fit the new layout.
    ///
    /// If this returns `Ok`, then ownership of the memory block referenced by `ptr` has been
    /// transferred to this allocator. The memory may or may not have been freed, and should be
    /// considered unusable (unless of course it was transferred back to the caller again via the
    /// return value of this method).
    ///
    /// If this method returns `Err`, then ownership of the memory block has not been transferred to
    /// this allocator, and the contents of the memory block are unaltered.
    ///
    /// The behavior of how the allocator tries to grow the memory is specified by [`placement`].
    /// The first `layout.size()` bytes of memory are preserved or copied as appropriate from `ptr`,
    /// and the remaining bytes, from `layout.size()` to the returned actual size, are initialized
    /// according to [`init`].
    ///
    /// [`placement`]: ReallocPlacement
    /// [`init`]: AllocInit
    ///
    /// # Safety
    ///
    /// * `ptr` must be [*currently allocated*] via this allocator,
    ///
    /// * `layout` must [*fit*] the `ptr`. (The `new_size` argument need not fit it.)
    ///
    // We can't require that `new_size` is strictly greater than `layout.size()` because of ZSTs.
    // An alternative would be
    // * `new_size` must be strictly greater than `layout.size()` or both are zero
    /// * `new_size` must be greater than or equal to `layout.size()`
    ///
    /// * `new_size`, when rounded up to the nearest multiple of `layout.align()`, must not overflow
    ///   (i.e., the rounded value must be less than `usize::MAX`).
    ///
    /// [*currently allocated*]: #currently-allocated-memory
    /// [*fit*]: #memory-fitting
    ///
    /// # Errors
    ///
    /// Returns `Err` if the new layout does not meet the allocator's size and alignment
    /// constraints, or if growing otherwise fails.
    ///
    /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or
    /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement
    /// this trait atop an underlying native allocation library that aborts on memory exhaustion.)
    ///
    /// Clients wishing to abort computation in response to an allocation error are encouraged to
    /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar.
    ///
    /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
    unsafe fn grow(
        &mut self,
        ptr: NonNull<u8>,
        layout: Layout,
        new_size: usize,
        placement: ReallocPlacement,
        init: AllocInit,
    ) -> Result<(NonNull<u8>, usize), AllocErr> {
        let old_size = layout.size();
        debug_assert!(
            new_size >= old_size,
            "`new_size` must be greater than or equal to `layout.size()`"
        );

        if new_size == old_size {
            return Ok((ptr, new_size));
        }

        match placement {
            ReallocPlacement::MayMove => {
                let (new_ptr, alloc_size) =
                    self.alloc(Layout::from_size_align_unchecked(new_size, layout.align()), init)?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), old_size);
                self.dealloc(ptr, layout);
                Ok((new_ptr, alloc_size))
            }
            ReallocPlacement::InPlace => Err(AllocErr),
        }
    }
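
    // Default-impl behavior sketch (not part of this commit), given some
    // allocator `a`: the provided `grow` always allocates a fresh block,
    // copies the old contents, and frees the original, e.g.
    //
    //     let (p, _) = a.alloc(small, AllocInit::Uninitialized)?;
    //     let (p, n) = a.grow(p, small, 128, ReallocPlacement::MayMove,
    //                         AllocInit::Zeroed)?;
    //     // bytes beyond `small.size()` are zeroed, up to the returned size `n`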

    /// Attempts to shrink the allocation referenced by `ptr` to fit `new_size`.
    ///
    /// Returns a pointer and the actual size of the allocated block. The pointer is suitable for
    /// holding data described by a new layout with `layout`’s alignment and a size given by
    /// `new_size`. To accomplish this, the allocator may shrink the allocation referenced by `ptr`
    /// to fit the new layout.
    ///
    /// The behavior of how the allocator tries to shrink the memory can be specified by
    /// [`placement`].
    ///
    /// If this returns `Ok`, then ownership of the memory block referenced by `ptr` has been
    /// transferred to this allocator. The memory may or may not have been freed, and should be
    /// considered unusable unless it was transferred back to the caller again via the
    /// return value of this method.
    ///
    /// If this method returns `Err`, then ownership of the memory block has not been transferred to
    /// this allocator, and the contents of the memory block are unaltered.
    ///
    /// [`placement`]: ReallocPlacement
    ///
    /// # Safety
    ///
    /// * `ptr` must be [*currently allocated*] via this allocator,
    ///
    /// * `layout` must [*fit*] the `ptr`. (The `new_size` argument need not fit it.)
    ///
    // We can't require that `new_size` is strictly smaller than `layout.size()` because of ZSTs.
    // An alternative would be
    // * `new_size` must be strictly smaller than `layout.size()` or both are zero
    /// * `new_size` must be smaller than or equal to `layout.size()`
    ///
    /// [*currently allocated*]: #currently-allocated-memory
    /// [*fit*]: #memory-fitting
    ///
    /// # Errors
    ///
    /// Returns `Err` if the new layout does not meet the allocator's size and alignment
    /// constraints, or if shrinking otherwise fails.
    ///
    /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or
    /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement
    /// this trait atop an underlying native allocation library that aborts on memory exhaustion.)
    ///
    /// Clients wishing to abort computation in response to an allocation error are encouraged to
    /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar.
    ///
    /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
    unsafe fn shrink(
        &mut self,
        ptr: NonNull<u8>,
        layout: Layout,
        new_size: usize,
        placement: ReallocPlacement,
    ) -> Result<(NonNull<u8>, usize), AllocErr> {
        let old_size = layout.size();
        debug_assert!(
            new_size <= old_size,
            "`new_size` must be smaller than or equal to `layout.size()`"
        );

        if new_size == old_size {
            return Ok((ptr, new_size));
        }

        match placement {
            ReallocPlacement::MayMove => {
                let (new_ptr, alloc_size) = self.alloc(
                    Layout::from_size_align_unchecked(new_size, layout.align()),
                    AllocInit::Uninitialized,
                )?;
                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), new_size);
                self.dealloc(ptr, layout);
                Ok((new_ptr, alloc_size))
            }
            ReallocPlacement::InPlace => Err(AllocErr),
        }
    }
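
    // Illustrative call site (not part of this commit), given some allocator
    // `a`: shrinking never takes an `AllocInit`, since only the surviving
    // prefix of the data is kept.
    //
    //     let (p, n) = a.shrink(p, layout, 16, ReallocPlacement::MayMove)?;
    //     // the first 16 bytes are preserved; `n >= 16` is the new actual size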
}
@ -61,6 +61,7 @@

#![stable(feature = "alloc_module", since = "1.28.0")]

use core::intrinsics;
use core::ptr::NonNull;
use core::sync::atomic::{AtomicPtr, Ordering};
use core::{mem, ptr};
@ -133,32 +134,21 @@ pub use alloc_crate::alloc::*;
#[derive(Debug, Default, Copy, Clone)]
pub struct System;

// The AllocRef impl checks the layout size to be non-zero and forwards to the GlobalAlloc impl,
// which is in `std::sys::*::alloc`.
#[unstable(feature = "allocator_api", issue = "32838")]
unsafe impl AllocRef for System {
    #[inline]
    fn alloc(&mut self, layout: Layout) -> Result<(NonNull<u8>, usize), AllocErr> {
        if layout.size() == 0 {
    fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<(NonNull<u8>, usize), AllocErr> {
        let new_size = layout.size();
        if new_size == 0 {
            Ok((layout.dangling(), 0))
        } else {
            unsafe {
                NonNull::new(GlobalAlloc::alloc(self, layout))
                    .ok_or(AllocErr)
                    .map(|p| (p, layout.size()))
            }
        }
    }

    #[inline]
    fn alloc_zeroed(&mut self, layout: Layout) -> Result<(NonNull<u8>, usize), AllocErr> {
        if layout.size() == 0 {
            Ok((layout.dangling(), 0))
        } else {
            unsafe {
                NonNull::new(GlobalAlloc::alloc_zeroed(self, layout))
                    .ok_or(AllocErr)
                    .map(|p| (p, layout.size()))
                let raw_ptr = match init {
                    AllocInit::Uninitialized => GlobalAlloc::alloc(self, layout),
                    AllocInit::Zeroed => GlobalAlloc::alloc_zeroed(self, layout),
                };
                let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?;
                Ok((ptr, new_size))
            }
        }
    }
@ -171,22 +161,79 @@ unsafe impl AllocRef for System {
    }

    #[inline]
    unsafe fn realloc(
    unsafe fn grow(
        &mut self,
        ptr: NonNull<u8>,
        layout: Layout,
        new_size: usize,
        placement: ReallocPlacement,
        init: AllocInit,
    ) -> Result<(NonNull<u8>, usize), AllocErr> {
        match (layout.size(), new_size) {
            (0, 0) => Ok((layout.dangling(), 0)),
            (0, _) => self.alloc(Layout::from_size_align_unchecked(new_size, layout.align())),
            (_, 0) => {
                self.dealloc(ptr, layout);
                Ok((layout.dangling(), 0))
        let old_size = layout.size();
        debug_assert!(
            new_size >= old_size,
            "`new_size` must be greater than or equal to `layout.size()`"
        );

        if old_size == new_size {
            return Ok((ptr, new_size));
        }

        match placement {
            ReallocPlacement::MayMove => {
                if old_size == 0 {
                    self.alloc(Layout::from_size_align_unchecked(new_size, layout.align()), init)
                } else {
                    // `realloc` probably checks for `new_size > old_size` or something similar.
                    // `new_size` must be greater than or equal to `old_size` due to the safety constraint,
                    // and `new_size` == `old_size` was caught before
                    intrinsics::assume(new_size > old_size);
                    let ptr =
                        NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size))
                            .ok_or(AllocErr)?;
                    let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
                    init.initialize_offset(ptr, new_layout, old_size);
                    Ok((ptr, new_size))
                }
            }
            (_, _) => NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size))
                .ok_or(AllocErr)
                .map(|p| (p, new_size)),
            ReallocPlacement::InPlace => Err(AllocErr),
        }
    }

    #[inline]
    unsafe fn shrink(
        &mut self,
        ptr: NonNull<u8>,
        layout: Layout,
        new_size: usize,
        placement: ReallocPlacement,
    ) -> Result<(NonNull<u8>, usize), AllocErr> {
        let old_size = layout.size();
        debug_assert!(
            new_size <= old_size,
            "`new_size` must be smaller than or equal to `layout.size()`"
        );

        if old_size == new_size {
            return Ok((ptr, new_size));
        }

        match placement {
            ReallocPlacement::MayMove => {
                let ptr = if new_size == 0 {
                    self.dealloc(ptr, layout);
                    layout.dangling()
                } else {
                    // `realloc` probably checks for `new_size < old_size` or something similar.
                    // `new_size` must be smaller than or equal to `old_size` due to the safety constraint,
                    // and `new_size` == `old_size` was caught before
                    intrinsics::assume(new_size < old_size);
                    NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size))
                        .ok_or(AllocErr)?
                };
                Ok((ptr, new_size))
            }
            ReallocPlacement::InPlace => Err(AllocErr),
        }
    }
}
@ -238,9 +285,7 @@ pub fn rust_oom(layout: Layout) -> ! {
    let hook: fn(Layout) =
        if hook.is_null() { default_alloc_error_hook } else { unsafe { mem::transmute(hook) } };
    hook(layout);
    unsafe {
        crate::sys::abort_internal();
    }
    unsafe { crate::sys::abort_internal() }
}

#[cfg(not(test))]
@ -15,7 +15,7 @@

use core::array;

use crate::alloc::{AllocErr, CannotReallocInPlace, LayoutErr};
use crate::alloc::{AllocErr, LayoutErr};
use crate::any::TypeId;
use crate::backtrace::Backtrace;
use crate::borrow::Cow;
@ -409,13 +409,6 @@ impl Error for AllocErr {}
)]
impl Error for LayoutErr {}

#[unstable(
    feature = "allocator_api",
    reason = "the precise API and guarantees it provides may be tweaked.",
    issue = "32838"
)]
impl Error for CannotReallocInPlace {}

#[stable(feature = "rust1", since = "1.0.0")]
impl Error for str::ParseBoolError {
    #[allow(deprecated)]
@ -7,7 +7,7 @@

extern crate helper;

use std::alloc::{self, Global, AllocRef, System, Layout};
use std::alloc::{self, AllocInit, AllocRef, Global, Layout, System};
use std::sync::atomic::{AtomicUsize, Ordering};

static HITS: AtomicUsize = AtomicUsize::new(0);
@ -37,7 +37,7 @@ fn main() {
    unsafe {
        let layout = Layout::from_size_align(4, 2).unwrap();

        let (ptr, _) = Global.alloc(layout.clone()).unwrap();
        let (ptr, _) = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap();
        helper::work_with(&ptr);
        assert_eq!(HITS.load(Ordering::SeqCst), n + 1);
        Global.dealloc(ptr, layout.clone());
@ -49,7 +49,7 @@ fn main() {
        drop(s);
        assert_eq!(HITS.load(Ordering::SeqCst), n + 4);

        let (ptr, _) = System.alloc(layout.clone()).unwrap();
        let (ptr, _) = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap();
        assert_eq!(HITS.load(Ordering::SeqCst), n + 4);
        helper::work_with(&ptr);
        System.dealloc(ptr, layout);
@ -9,8 +9,8 @@
extern crate custom;
extern crate helper;

use std::alloc::{Global, AllocRef, System, Layout};
use std::sync::atomic::{Ordering, AtomicUsize};
use std::alloc::{AllocInit, AllocRef, Global, Layout, System};
use std::sync::atomic::{AtomicUsize, Ordering};

#[global_allocator]
static GLOBAL: custom::A = custom::A(AtomicUsize::new(0));
@ -20,13 +20,13 @@ fn main() {
    let n = GLOBAL.0.load(Ordering::SeqCst);
    let layout = Layout::from_size_align(4, 2).unwrap();

    let (ptr, _) = Global.alloc(layout.clone()).unwrap();
    let (ptr, _) = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap();
    helper::work_with(&ptr);
    assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 1);
    Global.dealloc(ptr, layout.clone());
    assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2);

    let (ptr, _) = System.alloc(layout.clone()).unwrap();
    let (ptr, _) = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap();
    assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2);
    helper::work_with(&ptr);
    System.dealloc(ptr, layout);
@ -6,7 +6,7 @@

#![feature(allocator_api)]

use std::alloc::{Global, AllocRef, Layout, handle_alloc_error};
use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout, ReallocPlacement};
use std::ptr::{self, NonNull};

fn main() {
@ -16,17 +16,17 @@ fn main() {
}

unsafe fn test_triangle() -> bool {
    static COUNT : usize = 16;
    static COUNT: usize = 16;
    let mut ascend = vec![ptr::null_mut(); COUNT];
    let ascend = &mut *ascend;
    static ALIGN : usize = 1;
    static ALIGN: usize = 1;

    // Checks that `ascend` forms triangle of ascending size formed
    // from pairs of rows (where each pair of rows is equally sized),
    // and the elements of the triangle match their row-pair index.
    unsafe fn sanity_check(ascend: &[*mut u8]) {
        for i in 0..COUNT / 2 {
            let (p0, p1, size) = (ascend[2*i], ascend[2*i+1], idx_to_size(i));
            let (p0, p1, size) = (ascend[2 * i], ascend[2 * i + 1], idx_to_size(i));
            for j in 0..size {
                assert_eq!(*p0.add(j), i as u8);
                assert_eq!(*p1.add(j), i as u8);
@ -34,14 +34,16 @@ unsafe fn test_triangle() -> bool {
        }
    }

    static PRINT : bool = false;
    static PRINT: bool = false;

    unsafe fn allocate(layout: Layout) -> *mut u8 {
        if PRINT {
            println!("allocate({:?})", layout);
        }

        let (ptr, _) = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
        let (ptr, _) = Global
            .alloc(layout, AllocInit::Uninitialized)
            .unwrap_or_else(|_| handle_alloc_error(layout));

        if PRINT {
            println!("allocate({:?}) = {:?}", layout, ptr);
@ -63,19 +65,33 @@ unsafe fn test_triangle() -> bool {
            println!("reallocate({:?}, old={:?}, new={:?})", ptr, old, new);
        }

        let (ptr, _) = Global.realloc(NonNull::new_unchecked(ptr), old, new.size())
            .unwrap_or_else(|_| handle_alloc_error(
                Layout::from_size_align_unchecked(new.size(), old.align())
            ));
        let allocation = if new.size() > old.size() {
            Global.grow(
                NonNull::new_unchecked(ptr),
                old,
                new.size(),
                ReallocPlacement::MayMove,
                AllocInit::Uninitialized,
            )
        } else if new.size() < old.size() {
            Global.shrink(NonNull::new_unchecked(ptr), old, new.size(), ReallocPlacement::MayMove)
        } else {
            return ptr;
        };

        let (ptr, _) = allocation.unwrap_or_else(|_| {
            handle_alloc_error(Layout::from_size_align_unchecked(new.size(), old.align()))
        });

        if PRINT {
            println!("reallocate({:?}, old={:?}, new={:?}) = {:?}",
                ptr, old, new, ptr);
            println!("reallocate({:?}, old={:?}, new={:?}) = {:?}", ptr, old, new, ptr);
        }
        ptr.cast().as_ptr()
    }

    fn idx_to_size(i: usize) -> usize { (i+1) * 10 }
    fn idx_to_size(i: usize) -> usize {
        (i + 1) * 10
    }

    // Allocate pairs of rows that form a triangle shape. (Hope is
    // that at least two rows will be allocated near each other, so
@ -83,13 +99,13 @@ unsafe fn test_triangle() -> bool {
    // way.)
    for i in 0..COUNT / 2 {
        let size = idx_to_size(i);
        ascend[2*i] = allocate(Layout::from_size_align(size, ALIGN).unwrap());
        ascend[2*i+1] = allocate(Layout::from_size_align(size, ALIGN).unwrap());
        ascend[2 * i] = allocate(Layout::from_size_align(size, ALIGN).unwrap());
        ascend[2 * i + 1] = allocate(Layout::from_size_align(size, ALIGN).unwrap());
    }

    // Initialize each pair of rows to distinct value.
    for i in 0..COUNT / 2 {
        let (p0, p1, size) = (ascend[2*i], ascend[2*i+1], idx_to_size(i));
        let (p0, p1, size) = (ascend[2 * i], ascend[2 * i + 1], idx_to_size(i));
        for j in 0..size {
            *p0.add(j) = i as u8;
            *p1.add(j) = i as u8;
@ -104,8 +120,8 @@ unsafe fn test_triangle() -> bool {

    for i in 0..COUNT / 2 {
        let size = idx_to_size(i);
        deallocate(ascend[2*i], Layout::from_size_align(size, ALIGN).unwrap());
        deallocate(ascend[2*i+1], Layout::from_size_align(size, ALIGN).unwrap());
        deallocate(ascend[2 * i], Layout::from_size_align(size, ALIGN).unwrap());
        deallocate(ascend[2 * i + 1], Layout::from_size_align(size, ALIGN).unwrap());
    }

    return true;
@ -115,68 +131,68 @@ unsafe fn test_triangle() -> bool {
    // realloc'ing each row from top to bottom, and checking all the
    // rows as we go.
    unsafe fn test_1(ascend: &mut [*mut u8]) {
        let new_size = idx_to_size(COUNT-1);
        let new_size = idx_to_size(COUNT - 1);
        let new = Layout::from_size_align(new_size, ALIGN).unwrap();
        for i in 0..COUNT / 2 {
            let (p0, p1, old_size) = (ascend[2*i], ascend[2*i+1], idx_to_size(i));
            let (p0, p1, old_size) = (ascend[2 * i], ascend[2 * i + 1], idx_to_size(i));
            assert!(old_size < new_size);
            let old = Layout::from_size_align(old_size, ALIGN).unwrap();

            ascend[2*i] = reallocate(p0, old.clone(), new.clone());
            ascend[2 * i] = reallocate(p0, old.clone(), new.clone());
            sanity_check(&*ascend);

            ascend[2*i+1] = reallocate(p1, old.clone(), new.clone());
            ascend[2 * i + 1] = reallocate(p1, old.clone(), new.clone());
            sanity_check(&*ascend);
        }
    }

    // Test 2: turn the square back into a triangle, top to bottom.
    unsafe fn test_2(ascend: &mut [*mut u8]) {
        let old_size = idx_to_size(COUNT-1);
        let old_size = idx_to_size(COUNT - 1);
        let old = Layout::from_size_align(old_size, ALIGN).unwrap();
        for i in 0..COUNT / 2 {
            let (p0, p1, new_size) = (ascend[2*i], ascend[2*i+1], idx_to_size(i));
            let (p0, p1, new_size) = (ascend[2 * i], ascend[2 * i + 1], idx_to_size(i));
            assert!(new_size < old_size);
            let new = Layout::from_size_align(new_size, ALIGN).unwrap();

            ascend[2*i] = reallocate(p0, old.clone(), new.clone());
            ascend[2 * i] = reallocate(p0, old.clone(), new.clone());
            sanity_check(&*ascend);

            ascend[2*i+1] = reallocate(p1, old.clone(), new.clone());
            ascend[2 * i + 1] = reallocate(p1, old.clone(), new.clone());
            sanity_check(&*ascend);
        }
    }

    // Test 3: turn triangle into a square, bottom to top.
    unsafe fn test_3(ascend: &mut [*mut u8]) {
        let new_size = idx_to_size(COUNT-1);
        let new_size = idx_to_size(COUNT - 1);
        let new = Layout::from_size_align(new_size, ALIGN).unwrap();
        for i in (0..COUNT / 2).rev() {
            let (p0, p1, old_size) = (ascend[2*i], ascend[2*i+1], idx_to_size(i));
            let (p0, p1, old_size) = (ascend[2 * i], ascend[2 * i + 1], idx_to_size(i));
            assert!(old_size < new_size);
            let old = Layout::from_size_align(old_size, ALIGN).unwrap();

            ascend[2*i+1] = reallocate(p1, old.clone(), new.clone());
            ascend[2 * i + 1] = reallocate(p1, old.clone(), new.clone());
            sanity_check(&*ascend);

            ascend[2*i] = reallocate(p0, old.clone(), new.clone());
            ascend[2 * i] = reallocate(p0, old.clone(), new.clone());
            sanity_check(&*ascend);
        }
    }

    // Test 4: turn the square back into a triangle, bottom to top.
    unsafe fn test_4(ascend: &mut [*mut u8]) {
        let old_size = idx_to_size(COUNT-1);
        let old_size = idx_to_size(COUNT - 1);
        let old = Layout::from_size_align(old_size, ALIGN).unwrap();
        for i in (0..COUNT / 2).rev() {
            let (p0, p1, new_size) = (ascend[2*i], ascend[2*i+1], idx_to_size(i));
            let (p0, p1, new_size) = (ascend[2 * i], ascend[2 * i + 1], idx_to_size(i));
            assert!(new_size < old_size);
            let new = Layout::from_size_align(new_size, ALIGN).unwrap();

            ascend[2*i+1] = reallocate(p1, old.clone(), new.clone());
            ascend[2 * i + 1] = reallocate(p1, old.clone(), new.clone());
            sanity_check(&*ascend);

            ascend[2*i] = reallocate(p0, old.clone(), new.clone());
            ascend[2 * i] = reallocate(p0, old.clone(), new.clone());
            sanity_check(&*ascend);
        }
    }
@ -1,33 +1,33 @@
// run-pass
#![allow(dead_code)]
#![allow(non_camel_case_types)]

// pretty-expanded FIXME #23616

#![feature(allocator_api)]

use std::alloc::{AllocRef, Global, Layout, handle_alloc_error};
use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout};
use std::ptr::NonNull;

struct arena(());

struct Bcx<'a> {
    fcx: &'a Fcx<'a>
    fcx: &'a Fcx<'a>,
}

struct Fcx<'a> {
    arena: &'a arena,
    ccx: &'a Ccx
    ccx: &'a Ccx,
}

struct Ccx {
    x: isize
    x: isize,
}

fn alloc(_bcx: &arena) -> &Bcx<'_> {
    unsafe {
        let layout = Layout::new::<Bcx>();
        let (ptr, _) = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
        let (ptr, _) = Global
            .alloc(layout, AllocInit::Uninitialized)
            .unwrap_or_else(|_| handle_alloc_error(layout));
        &*(ptr.as_ptr() as *const _)
    }
}