Remove alignment from MemoryBlock

Tim Diekmann 2020-03-25 21:12:12 +01:00
parent bfbdb5f06f
commit 03b055b0b4
14 changed files with 211 additions and 202 deletions

View File

@ -4,7 +4,7 @@
use core::intrinsics::{self, min_align_of_val, size_of_val};
use core::ptr::{NonNull, Unique};
use core::{mem, usize};
use core::usize;
#[stable(feature = "alloc_module", since = "1.28.0")]
#[doc(inline)]
@ -167,94 +167,94 @@ unsafe impl AllocRef for Global {
#[inline]
fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<MemoryBlock, AllocErr> {
unsafe {
if layout.size() == 0 {
Ok(MemoryBlock::new(layout.dangling(), layout))
let size = layout.size();
if size == 0 {
Ok(MemoryBlock::new(layout.dangling(), 0))
} else {
let raw_ptr = match init {
AllocInit::Uninitialized => alloc(layout),
AllocInit::Zeroed => alloc_zeroed(layout),
};
let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?;
Ok(MemoryBlock::new(ptr, layout))
Ok(MemoryBlock::new(ptr, size))
}
}
}
#[inline]
unsafe fn dealloc(&mut self, memory: MemoryBlock) {
if memory.size() != 0 {
dealloc(memory.ptr().as_ptr(), memory.layout())
unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
if layout.size() != 0 {
dealloc(ptr.as_ptr(), layout)
}
}
#[inline]
unsafe fn grow(
&mut self,
memory: &mut MemoryBlock,
ptr: NonNull<u8>,
layout: Layout,
new_size: usize,
placement: ReallocPlacement,
init: AllocInit,
) -> Result<(), AllocErr> {
let old_size = memory.size();
) -> Result<MemoryBlock, AllocErr> {
let old_size = layout.size();
debug_assert!(
new_size >= old_size,
"`new_size` must be greater than or equal to `memory.size()`"
);
if old_size == new_size {
return Ok(());
return Ok(MemoryBlock::new(ptr, old_size));
}
let new_layout = Layout::from_size_align_unchecked(new_size, memory.align());
match placement {
ReallocPlacement::InPlace => return Err(AllocErr),
ReallocPlacement::MayMove if memory.size() == 0 => {
*memory = self.alloc(new_layout, init)?
ReallocPlacement::InPlace => Err(AllocErr),
ReallocPlacement::MayMove if layout.size() == 0 => {
let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
self.alloc(new_layout, init)
}
ReallocPlacement::MayMove => {
// `realloc` probably checks for `new_size > old_size` or something similar.
intrinsics::assume(new_size > old_size);
let ptr = realloc(memory.ptr().as_ptr(), memory.layout(), new_size);
*memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_layout);
let ptr = realloc(ptr.as_ptr(), layout, new_size);
let mut memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_size);
memory.init_offset(init, old_size);
Ok(memory)
}
}
Ok(())
}
#[inline]
unsafe fn shrink(
&mut self,
memory: &mut MemoryBlock,
ptr: NonNull<u8>,
layout: Layout,
new_size: usize,
placement: ReallocPlacement,
) -> Result<(), AllocErr> {
let old_size = memory.size();
) -> Result<MemoryBlock, AllocErr> {
let old_size = layout.size();
debug_assert!(
new_size <= old_size,
"`new_size` must be smaller than or equal to `memory.size()`"
);
if old_size == new_size {
return Ok(());
return Ok(MemoryBlock::new(ptr, old_size));
}
let new_layout = Layout::from_size_align_unchecked(new_size, memory.align());
match placement {
ReallocPlacement::InPlace => return Err(AllocErr),
ReallocPlacement::InPlace => Err(AllocErr),
ReallocPlacement::MayMove if new_size == 0 => {
let new_memory = MemoryBlock::new(new_layout.dangling(), new_layout);
let old_memory = mem::replace(memory, new_memory);
self.dealloc(old_memory)
self.dealloc(ptr, layout);
Ok(MemoryBlock::new(layout.dangling(), 0))
}
ReallocPlacement::MayMove => {
// `realloc` probably checks for `new_size < old_size` or something similar.
intrinsics::assume(new_size < old_size);
let ptr = realloc(memory.ptr().as_ptr(), memory.layout(), new_size);
*memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_layout);
let ptr = realloc(ptr.as_ptr(), layout, new_size);
Ok(MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_size))
}
}
Ok(())
}
}
@ -282,7 +282,7 @@ pub(crate) unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
let size = size_of_val(ptr.as_ref());
let align = min_align_of_val(ptr.as_ref());
let layout = Layout::from_size_align_unchecked(size, align);
Global.dealloc(MemoryBlock::new(ptr.cast().into(), layout))
Global.dealloc(ptr.cast().into(), layout)
}
/// Abort on memory allocation error or failure.
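As a rough usage sketch of the reworked `Global` methods above (assuming the unstable `allocator_api` feature exactly as it stands in this commit; `roundtrip` is an illustrative name, not part of the API), the caller, rather than the `MemoryBlock`, is now responsible for remembering the layout used for the allocation:

#![feature(allocator_api)]

use std::alloc::{AllocInit, AllocRef, Global, Layout};

fn roundtrip() {
    let layout = Layout::from_size_align(16, 8).unwrap();
    // `alloc` is safe and yields a `MemoryBlock` carrying the pointer and the actual size.
    let memory = Global.alloc(layout, AllocInit::Zeroed).unwrap();
    assert!(memory.size() >= layout.size());
    unsafe {
        // `dealloc` now takes the pointer plus the layout the block was allocated with.
        Global.dealloc(memory.ptr(), layout);
    }
}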

View File

@ -18,7 +18,7 @@ fn allocate_zeroed() {
assert_eq!(*i, 0);
i = i.offset(1);
}
Global.dealloc(memory);
Global.dealloc(memory.ptr(), layout);
}
}

View File

@ -31,7 +31,6 @@
// - A node of length `n` has `n` keys, `n` values, and (in an internal node) `n + 1` edges.
// This implies that even an empty internal node has at least one edge.
use core::alloc::MemoryBlock;
use core::cmp::Ordering;
use core::marker::PhantomData;
use core::mem::{self, MaybeUninit};
@ -228,10 +227,7 @@ impl<K, V> Root<K, V> {
}
unsafe {
Global.dealloc(MemoryBlock::new(
NonNull::from(top).cast(),
Layout::new::<InternalNode<K, V>>(),
));
Global.dealloc(NonNull::from(top).cast(), Layout::new::<InternalNode<K, V>>());
}
}
}
@ -396,14 +392,14 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
let height = self.height;
let node = self.node;
let ret = self.ascend().ok();
Global.dealloc(MemoryBlock::new(
Global.dealloc(
node.cast(),
if height > 0 {
Layout::new::<InternalNode<K, V>>()
} else {
Layout::new::<LeafNode<K, V>>()
},
));
);
ret
}
}
@ -1167,7 +1163,7 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
} else {
Layout::new::<LeafNode<K, V>>()
};
Global.dealloc(MemoryBlock::new(right_node.node.cast(), layout));
Global.dealloc(right_node.node.cast(), layout);
Handle::new_edge(self.node, self.idx)
}

View File

@ -5,7 +5,7 @@ use core::alloc::MemoryBlock;
use core::cmp;
use core::mem::{self, MaybeUninit};
use core::ops::Drop;
use core::ptr::Unique;
use core::ptr::{NonNull, Unique};
use core::slice;
use crate::alloc::{
@ -197,7 +197,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
&mut self.alloc
}
fn current_memory(&self) -> Option<MemoryBlock> {
fn current_memory(&self) -> Option<(NonNull<u8>, Layout)> {
if mem::size_of::<T>() == 0 || self.cap == 0 {
None
} else {
@ -207,7 +207,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
let align = mem::align_of::<T>();
let size = mem::size_of::<T>() * self.cap;
let layout = Layout::from_size_align_unchecked(size, align);
Some(MemoryBlock::new(self.ptr.cast().into(), layout))
Some((self.ptr.cast().into(), layout))
}
}
}
@ -472,7 +472,6 @@ impl<T, A: AllocRef> RawVec<T, A> {
fn set_memory(&mut self, memory: MemoryBlock) {
self.ptr = memory.ptr().cast().into();
self.cap = Self::capacity_from_bytes(memory.size());
drop(memory);
}
/// Single method to handle all possibilities of growing the buffer.
@ -488,7 +487,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
// 0, getting to here necessarily means the `RawVec` is overfull.
return Err(CapacityOverflow);
}
let layout = match strategy {
let new_layout = match strategy {
Double => unsafe {
// Since we guarantee that we never allocate more than `isize::MAX` bytes,
// `elem_size * self.cap <= isize::MAX` as a precondition, so this can't overflow.
@ -522,22 +521,20 @@ impl<T, A: AllocRef> RawVec<T, A> {
}
};
let memory = if let Some(mut memory) = self.current_memory() {
debug_assert_eq!(memory.align(), layout.align());
let memory = if let Some((ptr, old_layout)) = self.current_memory() {
debug_assert_eq!(old_layout.align(), new_layout.align());
unsafe {
self.alloc
.grow(&mut memory, layout.size(), placement, init)
.map_err(|_| AllocError { layout, non_exhaustive: () })?
};
memory
.grow(ptr, old_layout, new_layout.size(), placement, init)
.map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })?
}
} else {
match placement {
MayMove => self.alloc.alloc(layout, init),
MayMove => self.alloc.alloc(new_layout, init),
InPlace => Err(AllocErr),
}
.map_err(|_| AllocError { layout, non_exhaustive: () })?
.map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })?
};
self.set_memory(memory);
Ok(())
}
@ -549,18 +546,17 @@ impl<T, A: AllocRef> RawVec<T, A> {
) -> Result<(), TryReserveError> {
assert!(amount <= self.capacity(), "Tried to shrink to a larger capacity");
let mut memory = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) };
let (ptr, layout) = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) };
let new_size = amount * mem::size_of::<T>();
unsafe {
self.alloc.shrink(&mut memory, new_size, placement).map_err(|_| {
let memory = unsafe {
self.alloc.shrink(ptr, layout, new_size, placement).map_err(|_| {
TryReserveError::AllocError {
layout: Layout::from_size_align_unchecked(new_size, memory.align()),
layout: Layout::from_size_align_unchecked(new_size, layout.align()),
non_exhaustive: (),
}
})?;
}
})?
};
self.set_memory(memory);
Ok(())
}
@ -593,8 +589,8 @@ impl<T> RawVec<T, Global> {
unsafe impl<#[may_dangle] T, A: AllocRef> Drop for RawVec<T, A> {
/// Frees the memory owned by the `RawVec` *without* trying to drop its contents.
fn drop(&mut self) {
if let Some(memory) = self.current_memory() {
unsafe { self.alloc.dealloc(memory) }
if let Some((ptr, layout)) = self.current_memory() {
unsafe { self.alloc.dealloc(ptr, layout) }
}
}
}
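The `RawVec` changes follow the same shape. A minimal sketch of that pattern with illustrative names (not the real `RawVec` fields): keep only a pointer and a capacity, rebuild the `Layout` on demand, and pass the `(ptr, layout)` pair to `dealloc` in `Drop`.

#![feature(allocator_api)]

use std::alloc::{AllocRef, Global, Layout};
use std::mem;
use std::ptr::NonNull;

struct RawBuf<T> {
    ptr: NonNull<T>,
    cap: usize,
}

impl<T> Drop for RawBuf<T> {
    fn drop(&mut self) {
        if mem::size_of::<T>() != 0 && self.cap != 0 {
            let size = mem::size_of::<T>() * self.cap;
            let align = mem::align_of::<T>();
            unsafe {
                // Rebuild the layout this buffer was allocated with and hand it back.
                let layout = Layout::from_size_align_unchecked(size, align);
                Global.dealloc(self.ptr.cast(), layout);
            }
        }
    }
}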

View File

@ -34,8 +34,8 @@ fn allocator_param() {
err @ Err(_) => err,
}
}
unsafe fn dealloc(&mut self, memory: MemoryBlock) {
Global.dealloc(memory)
unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
Global.dealloc(ptr, layout)
}
}

View File

@ -234,7 +234,6 @@ use crate::boxed::Box;
#[cfg(test)]
use std::boxed::Box;
use core::alloc::MemoryBlock;
use core::any::Any;
use core::array::LengthAtMost32;
use core::borrow;
@ -1032,7 +1031,7 @@ impl<T> Rc<[T]> {
let slice = from_raw_parts_mut(self.elems, self.n_elems);
ptr::drop_in_place(slice);
Global.dealloc(MemoryBlock::new(self.mem, self.layout));
Global.dealloc(self.mem, self.layout);
}
}
}
@ -1132,10 +1131,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
self.dec_weak();
if self.weak() == 0 {
Global.dealloc(MemoryBlock::new(
self.ptr.cast(),
Layout::for_value(self.ptr.as_ref()),
));
Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
}
}
}
@ -1943,10 +1939,7 @@ impl<T: ?Sized> Drop for Weak<T> {
// the strong pointers have disappeared.
if inner.weak() == 0 {
unsafe {
Global.dealloc(MemoryBlock::new(
self.ptr.cast(),
Layout::for_value(self.ptr.as_ref()),
));
Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
}
}
}

View File

@ -6,7 +6,6 @@
//!
//! [arc]: struct.Arc.html
use core::alloc::MemoryBlock;
use core::any::Any;
use core::array::LengthAtMost32;
use core::borrow;
@ -771,7 +770,7 @@ impl<T: ?Sized> Arc<T> {
if self.inner().weak.fetch_sub(1, Release) == 1 {
acquire!(self.inner().weak);
Global.dealloc(MemoryBlock::new(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())))
Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
}
}
@ -910,7 +909,7 @@ impl<T> Arc<[T]> {
let slice = from_raw_parts_mut(self.elems, self.n_elems);
ptr::drop_in_place(slice);
Global.dealloc(MemoryBlock::new(self.mem.cast(), self.layout));
Global.dealloc(self.mem.cast(), self.layout);
}
}
}
@ -1735,12 +1734,7 @@ impl<T: ?Sized> Drop for Weak<T> {
if inner.weak.fetch_sub(1, Release) == 1 {
acquire!(inner.weak);
unsafe {
Global.dealloc(MemoryBlock::new(
self.ptr.cast(),
Layout::for_value(self.ptr.as_ref()),
))
}
unsafe { Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) }
}
}
}

View File

@ -1,4 +1,4 @@
use std::alloc::{AllocInit, AllocRef, Global, Layout, MemoryBlock, System};
use std::alloc::{AllocInit, AllocRef, Global, Layout, System};
/// Issue #45955 and #62251.
#[test]
@ -39,10 +39,7 @@ fn check_overalign_requests<T: AllocRef>(mut allocator: T) {
// Clean up
for &ptr in &pointers {
allocator.dealloc(MemoryBlock::new(
ptr,
Layout::from_size_align(size, align).unwrap(),
))
allocator.dealloc(ptr, Layout::from_size_align(size, align).unwrap())
}
}
}

View File

@ -11,7 +11,6 @@ pub use self::global::GlobalAlloc;
pub use self::layout::{Layout, LayoutErr};
use crate::fmt;
use crate::mem;
use crate::ptr::{self, NonNull};
/// The `AllocErr` error indicates an allocation failure
@ -45,25 +44,17 @@ pub enum AllocInit {
/// Represents a block of allocated memory returned by an allocator.
#[derive(Debug)]
#[unstable(feature = "allocator_api", issue = "32838")]
#[must_use = "`MemoryBlock` should be passed to `AllocRef::dealloc`"]
pub struct MemoryBlock {
ptr: NonNull<u8>,
layout: Layout,
size: usize,
}
impl MemoryBlock {
/// Creates a new `MemoryBlock`.
///
/// # Safety
///
/// * The block must be allocated with the same alignment as [`layout.align()`], and
/// * The provided [`layout.size()`] must fall in the range `min ..= max`, where:
/// - `min` is the size requested when allocating the block, and
/// - `max` is the size of the memory block.
/// Creates a new `MemoryBlock` from the specified `ptr` and `size`.
#[inline]
#[unstable(feature = "allocator_api", issue = "32838")]
pub const unsafe fn new(ptr: NonNull<u8>, layout: Layout) -> Self {
Self { ptr, layout }
pub const fn new(ptr: NonNull<u8>, size: usize) -> Self {
Self { ptr, size }
}
/// Acquires the underlying `NonNull<u8>` pointer.
@ -73,25 +64,11 @@ impl MemoryBlock {
self.ptr
}
/// Returns the layout describing the memory block.
#[inline]
#[unstable(feature = "allocator_api", issue = "32838")]
pub const fn layout(&self) -> Layout {
self.layout
}
/// Returns the size of the memory block.
#[inline]
#[unstable(feature = "allocator_api", issue = "32838")]
pub const fn size(&self) -> usize {
self.layout().size()
}
/// Returns the minimum alignment of the memory block.
#[inline]
#[unstable(feature = "allocator_api", issue = "32838")]
pub const fn align(&self) -> usize {
self.layout().align()
self.size
}
/// Initialize the memory block as specified by `init`.
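For illustration, a small hedged sketch of the slimmed-down block (the `wrap` helper is hypothetical, not part of the API): `MemoryBlock::new` is now a safe `const fn` taking only the pointer and the actual size, so the alignment must travel separately in the `Layout` the caller keeps around for `dealloc`.

#![feature(allocator_api)]

use core::alloc::{Layout, MemoryBlock};
use core::ptr::NonNull;

// Hypothetical helper: wrap an existing allocation described by `layout` in the new block shape.
fn wrap(ptr: NonNull<u8>, layout: Layout) -> MemoryBlock {
    // Only the pointer and the actual size are stored; `layout.align()` is no longer recorded.
    MemoryBlock::new(ptr, layout.size())
}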
@ -160,6 +137,39 @@ pub enum ReallocPlacement {
/// allocator does not support this (like jemalloc) or return a null pointer (such as
/// `libc::malloc`), this case must be caught.
///
/// ### Currently allocated memory
///
/// Some of the methods require that a memory block be *currently allocated* via an allocator. This
/// means that:
///
/// * the starting address for that memory block was previously returned by [`alloc`], [`grow`], or
/// [`shrink`], and
///
/// * the memory block has not been subsequently deallocated, where blocks are either deallocated
/// directly by being passed to [`dealloc`], or by being passed to a call to [`grow`] or
/// [`shrink`] that returns `Ok`. If `grow` or `shrink` returns `Err`, the passed pointer
/// remains valid.
///
/// [`alloc`]: AllocRef::alloc
/// [`grow`]: AllocRef::grow
/// [`shrink`]: AllocRef::shrink
/// [`dealloc`]: AllocRef::dealloc
///
/// ### Memory fitting
///
/// Some of the methods require that a layout *fit* a memory block. What it means for a layout to
/// "fit" a memory block means (or equivalently, for a memory block to "fit" a layout) is that the
/// following conditions must hold:
///
/// * The block must be allocated with the same alignment as [`layout.align()`], and
///
/// * The provided [`layout.size()`] must fall in the range `min ..= max`, where:
/// - `min` is the size of the layout most recently used to allocate the block, and
/// - `max` is the latest actual size returned from [`alloc`], [`grow`], or [`shrink`].
///
/// [`layout.align()`]: Layout::align
/// [`layout.size()`]: Layout::size
///
/// # Safety
///
/// * Memory blocks returned from an allocator must point to valid memory and retain their validity
@ -168,6 +178,9 @@ pub enum ReallocPlacement {
/// * cloning or moving the allocator must not invalidate memory blocks returned from this
/// allocator. A cloned allocator must behave like the same allocator.
///
/// * any pointer to a memory block which is [*currently allocated*] may be passed to any other
/// method of the allocator.
///
/// [*currently allocated*]: #currently-allocated-memory
#[unstable(feature = "allocator_api", issue = "32838")]
pub unsafe trait AllocRef {
@ -198,25 +211,45 @@ pub unsafe trait AllocRef {
/// # Safety
///
/// `memory` must be a memory block returned by this allocator.
unsafe fn dealloc(&mut self, memory: MemoryBlock);
unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout);
/// Attempts to extend the memory block.
///
/// The behavior of how the allocator tries to grow the memory is specified by [`placement`].
/// The first `memory.size()` bytes are preserved or copied as appropriate from `ptr`, and the
/// remaining bytes up to the new `memory.size()` are initialized according to [`init`].
/// Returns a new memory block containing a pointer and the actual size of the allocated
/// block. The pointer is suitable for holding data described by a new layout with `layout`'s
/// alignment and a size given by `new_size`. To accomplish this, the allocator may extend the
/// allocation referenced by `ptr` to fit the new layout. If the [`placement`] is
/// [`InPlace`], the returned pointer is guaranteed to be the same as the passed `ptr`.
///
/// If this returns `Ok`, then ownership of the memory block referenced by `ptr` has been
/// transferred to this allocator. The memory may or may not have been freed, and should be
/// considered unusable (unless of course it was transferred back to the caller again via the
/// return value of this method).
///
/// If this method returns `Err`, then ownership of the memory block has not been transferred to
/// this allocator, and the contents of the memory block are unaltered.
///
/// The behavior of how the allocator tries to grow the memory is specified by [`placement`].
/// After growing a memory block, the new memory can be separated into three regions:
/// 1. `0..layout.size()`. This region is preserved or copied as appropriate from `ptr`.
/// 2. `layout.size()..allocated_size` where `allocated_size` is the size most recently
/// returned by the allocator. The new content is implementation defined. Allocators may
/// initialize it according to [`init`] or leave it as is.
/// 3. `allocated_size..returned_size` is initialized according to [`init`].
///
/// [`InPlace`]: ReallocPlacement::InPlace
/// [`placement`]: ReallocPlacement
/// [`init`]: AllocInit
///
/// # Safety
///
/// * `memory` must be a memory block returned by this allocator.
/// * `ptr` must be [*currently allocated*] via this allocator,
/// * `layout` must [*fit*] the `ptr`. (The `new_size` argument need not fit it.)
// We can't require that `new_size` is strictly greater than `memory.size()` because of ZSTs.
// An alternative would be
// * `new_size` must be strictly greater than `memory.size()` or both are zero
/// * `new_size` must be greater than or equal to `memory.size()`
/// * `new_size`, when rounded up to the nearest multiple of `memory.align()`, must not overflow
/// * `new_size` must be greater than or equal to `layout.size()`
/// * `new_size`, when rounded up to the nearest multiple of `layout.align()`, must not overflow
/// (i.e., the rounded value must be less than `usize::MAX`).
///
/// [*currently allocated*]: #currently-allocated-memory
@ -237,46 +270,59 @@ pub unsafe trait AllocRef {
/// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
unsafe fn grow(
&mut self,
memory: &mut MemoryBlock,
ptr: NonNull<u8>,
layout: Layout,
new_size: usize,
placement: ReallocPlacement,
init: AllocInit,
) -> Result<(), AllocErr> {
) -> Result<MemoryBlock, AllocErr> {
match placement {
ReallocPlacement::InPlace => Err(AllocErr),
ReallocPlacement::MayMove => {
let old_size = memory.size();
let old_size = layout.size();
debug_assert!(
new_size >= old_size,
"`new_size` must be greater than or equal to `memory.size()`"
"`new_size` must be greater than or equal to `layout.size()`"
);
if new_size == old_size {
return Ok(());
return Ok(MemoryBlock::new(ptr, old_size));
}
let new_layout = Layout::from_size_align_unchecked(new_size, memory.align());
let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
let new_memory = self.alloc(new_layout, init)?;
ptr::copy_nonoverlapping(
memory.ptr().as_ptr(),
new_memory.ptr().as_ptr(),
old_size,
);
self.dealloc(mem::replace(memory, new_memory));
Ok(())
ptr::copy_nonoverlapping(ptr.as_ptr(), new_memory.ptr().as_ptr(), old_size);
self.dealloc(ptr, layout);
Ok(new_memory)
}
}
}
/// Attempts to shrink the memory block.
///
/// Returns a new memory block containing a pointer and the actual size of the allocated
/// block. The pointer is suitable for holding data described by a new layout with `layout`'s
/// alignment and a size given by `new_size`. To accomplish this, the allocator may shrink the
/// allocation referenced by `ptr` to fit the new layout. If the [`placement`] is
/// [`InPlace`], the returned pointer is guaranteed to be the same as the passed `ptr`.
///
/// If this returns `Ok`, then ownership of the memory block referenced by `ptr` has been
/// transferred to this allocator. The memory may or may not have been freed, and should be
/// considered unusable unless it was transferred back to the caller again via the
/// return value of this method.
///
/// If this method returns `Err`, then ownership of the memory block has not been transferred to
/// this allocator, and the contents of the memory block are unaltered.
///
/// The behavior of how the allocator tries to shrink the memory is specified by [`placement`].
///
/// [`InPlace`]: ReallocPlacement::InPlace
/// [`placement`]: ReallocPlacement
///
/// # Safety
///
/// * `memory` must be a memory block returned by this allocator.
/// * `ptr` must be [*currently allocated*] via this allocator,
/// * `layout` must [*fit*] the `ptr`. (The `new_size` argument need not fit it.)
// We can't require that `new_size` is strictly smaller than `memory.size()` because of ZSTs.
// An alternative would be
// * `new_size` must be strictly smaller than `memory.size()` or both are zero
@ -300,32 +346,29 @@ pub unsafe trait AllocRef {
/// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
unsafe fn shrink(
&mut self,
memory: &mut MemoryBlock,
ptr: NonNull<u8>,
layout: Layout,
new_size: usize,
placement: ReallocPlacement,
) -> Result<(), AllocErr> {
) -> Result<MemoryBlock, AllocErr> {
match placement {
ReallocPlacement::InPlace => Err(AllocErr),
ReallocPlacement::MayMove => {
let old_size = memory.size();
let old_size = layout.size();
debug_assert!(
new_size <= old_size,
"`new_size` must be smaller than or equal to `layout.size()`"
);
if new_size == old_size {
return Ok(());
return Ok(MemoryBlock::new(ptr, old_size));
}
let new_layout = Layout::from_size_align_unchecked(new_size, memory.align());
let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
let new_memory = self.alloc(new_layout, AllocInit::Uninitialized)?;
ptr::copy_nonoverlapping(
memory.ptr().as_ptr(),
new_memory.ptr().as_ptr(),
new_size,
);
self.dealloc(mem::replace(memory, new_memory));
Ok(())
ptr::copy_nonoverlapping(ptr.as_ptr(), new_memory.ptr().as_ptr(), new_size);
self.dealloc(ptr, layout);
Ok(new_memory)
}
}
}
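Taken together, a hedged caller-side sketch against the new `grow`/`shrink` signatures (the `resize` helper is illustrative, not part of the trait; it mirrors what the realloc test further down does):

#![feature(allocator_api)]

use std::alloc::{AllocErr, AllocInit, AllocRef, Global, Layout, MemoryBlock, ReallocPlacement};
use std::ptr::NonNull;

// Safety: `ptr` must be currently allocated via `Global` and `old` must fit it.
unsafe fn resize(
    ptr: NonNull<u8>,
    old: Layout,
    new_size: usize,
) -> Result<MemoryBlock, AllocErr> {
    if new_size >= old.size() {
        // Growing hands back a fresh `MemoryBlock`; on `Ok` the old pointer must not be reused.
        Global.grow(ptr, old, new_size, ReallocPlacement::MayMove, AllocInit::Uninitialized)
    } else {
        Global.shrink(ptr, old, new_size, ReallocPlacement::MayMove)
    }
}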

View File

@ -141,99 +141,96 @@ unsafe impl AllocRef for System {
#[inline]
fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<MemoryBlock, AllocErr> {
unsafe {
if layout.size() == 0 {
Ok(MemoryBlock::new(layout.dangling(), layout))
let size = layout.size();
if size == 0 {
Ok(MemoryBlock::new(layout.dangling(), 0))
} else {
let raw_ptr = match init {
AllocInit::Uninitialized => GlobalAlloc::alloc(self, layout),
AllocInit::Zeroed => GlobalAlloc::alloc_zeroed(self, layout),
};
let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?;
Ok(MemoryBlock::new(ptr, layout))
Ok(MemoryBlock::new(ptr, size))
}
}
}
#[inline]
unsafe fn dealloc(&mut self, memory: MemoryBlock) {
if memory.size() != 0 {
GlobalAlloc::dealloc(self, memory.ptr().as_ptr(), memory.layout())
unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
if layout.size() != 0 {
GlobalAlloc::dealloc(self, ptr.as_ptr(), layout)
}
}
#[inline]
unsafe fn grow(
&mut self,
memory: &mut MemoryBlock,
ptr: NonNull<u8>,
layout: Layout,
new_size: usize,
placement: ReallocPlacement,
init: AllocInit,
) -> Result<(), AllocErr> {
let old_size = memory.size();
) -> Result<MemoryBlock, AllocErr> {
let old_size = layout.size();
debug_assert!(
new_size >= old_size,
"`new_size` must be greater than or equal to `memory.size()`"
);
if old_size == new_size {
return Ok(());
return Ok(MemoryBlock::new(ptr, old_size));
}
let new_layout = Layout::from_size_align_unchecked(new_size, memory.align());
match placement {
ReallocPlacement::InPlace => return Err(AllocErr),
ReallocPlacement::MayMove if memory.size() == 0 => {
*memory = self.alloc(new_layout, init)?
ReallocPlacement::InPlace => Err(AllocErr),
ReallocPlacement::MayMove if layout.size() == 0 => {
let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
self.alloc(new_layout, init)
}
ReallocPlacement::MayMove => {
// `realloc` probably checks for `new_size > old_size` or something similar.
intrinsics::assume(new_size > old_size);
let ptr =
GlobalAlloc::realloc(self, memory.ptr().as_ptr(), memory.layout(), new_size);
*memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_layout);
let ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size);
let mut memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_size);
memory.init_offset(init, old_size);
Ok(memory)
}
}
Ok(())
}
#[inline]
unsafe fn shrink(
&mut self,
memory: &mut MemoryBlock,
ptr: NonNull<u8>,
layout: Layout,
new_size: usize,
placement: ReallocPlacement,
) -> Result<(), AllocErr> {
let old_size = memory.size();
) -> Result<MemoryBlock, AllocErr> {
let old_size = layout.size();
debug_assert!(
new_size <= old_size,
"`new_size` must be smaller than or equal to `memory.size()`"
);
if old_size == new_size {
return Ok(());
return Ok(MemoryBlock::new(ptr, old_size));
}
let new_layout = Layout::from_size_align_unchecked(new_size, memory.align());
match placement {
ReallocPlacement::InPlace => return Err(AllocErr),
ReallocPlacement::InPlace => Err(AllocErr),
ReallocPlacement::MayMove if new_size == 0 => {
let new_memory = MemoryBlock::new(new_layout.dangling(), new_layout);
let old_memory = mem::replace(memory, new_memory);
self.dealloc(old_memory)
self.dealloc(ptr, layout);
Ok(MemoryBlock::new(layout.dangling(), 0))
}
ReallocPlacement::MayMove => {
// `realloc` probably checks for `new_size < old_size` or something similar.
intrinsics::assume(new_size < old_size);
let ptr =
GlobalAlloc::realloc(self, memory.ptr().as_ptr(), memory.layout(), new_size);
*memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_layout);
let ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size);
Ok(MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_size))
}
}
Ok(())
}
}
static HOOK: AtomicPtr<()> = AtomicPtr::new(ptr::null_mut());
/// Registers a custom allocation error hook, replacing any that was previously registered.

View File

@ -40,7 +40,7 @@ fn main() {
let memory = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap();
helper::work_with(&memory.ptr());
assert_eq!(HITS.load(Ordering::SeqCst), n + 1);
Global.dealloc(memory);
Global.dealloc(memory.ptr(), layout);
assert_eq!(HITS.load(Ordering::SeqCst), n + 2);
let s = String::with_capacity(10);
@ -52,7 +52,7 @@ fn main() {
let memory = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap();
assert_eq!(HITS.load(Ordering::SeqCst), n + 4);
helper::work_with(&memory.ptr());
System.dealloc(memory);
System.dealloc(memory.ptr(), layout);
assert_eq!(HITS.load(Ordering::SeqCst), n + 4);
}
}

View File

@ -23,13 +23,13 @@ fn main() {
let memory = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap();
helper::work_with(&memory.ptr());
assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 1);
Global.dealloc(memory);
Global.dealloc(memory.ptr(), layout);
assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2);
let memory = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap();
assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2);
helper::work_with(&memory.ptr());
System.dealloc(memory);
System.dealloc(memory.ptr(), layout);
assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2);
}
}

View File

@ -6,9 +6,7 @@
#![feature(allocator_api)]
use std::alloc::{
handle_alloc_error, AllocInit, AllocRef, Global, Layout, MemoryBlock, ReallocPlacement,
};
use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout, ReallocPlacement};
use std::ptr::{self, NonNull};
fn main() {
@ -59,7 +57,7 @@ unsafe fn test_triangle() -> bool {
println!("deallocate({:?}, {:?}", ptr, layout);
}
Global.dealloc(MemoryBlock::new(NonNull::new_unchecked(ptr), layout));
Global.dealloc(NonNull::new_unchecked(ptr), layout);
}
unsafe fn reallocate(ptr: *mut u8, old: Layout, new: Layout) -> *mut u8 {
@ -67,21 +65,19 @@ unsafe fn test_triangle() -> bool {
println!("reallocate({:?}, old={:?}, new={:?})", ptr, old, new);
}
let mut memory = MemoryBlock::new(NonNull::new_unchecked(ptr), old);
let result = if new.size() > old.size() {
let memory = if new.size() > old.size() {
Global.grow(
&mut memory,
NonNull::new_unchecked(ptr),
old,
new.size(),
ReallocPlacement::MayMove,
AllocInit::Uninitialized,
)
} else if new.size() < old.size() {
Global.shrink(&mut memory, new.size(), ReallocPlacement::MayMove)
} else {
return ptr;
Global.shrink(NonNull::new_unchecked(ptr), old, new.size(), ReallocPlacement::MayMove)
};
result.unwrap_or_else(|_| {
let memory = memory.unwrap_or_else(|_| {
handle_alloc_error(Layout::from_size_align_unchecked(new.size(), old.align()))
});

View File

@ -4,7 +4,7 @@
// pretty-expanded FIXME #23616
#![feature(allocator_api)]
use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout, MemoryBlock};
use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout};
use std::ptr::NonNull;
struct arena(());
@ -40,10 +40,7 @@ fn g(fcx: &Fcx) {
let bcx = Bcx { fcx };
let bcx2 = h(&bcx);
unsafe {
Global.dealloc(MemoryBlock::new(
NonNull::new_unchecked(bcx2 as *const _ as *mut _),
Layout::new::<Bcx>(),
));
Global.dealloc(NonNull::new_unchecked(bcx2 as *const _ as *mut _), Layout::new::<Bcx>());
}
}