Rename AllocRef to Allocator and (de)alloc to (de)allocate

Tim Diekmann 2020-12-04 14:47:15 +01:00
parent e6225434ff
commit 9274b37d99
27 changed files with 337 additions and 335 deletions
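For readers skimming the rename, here is a minimal sketch (illustrative only, not part of this commit) of the renamed surface on nightly: the trait is now `Allocator` (formerly `AllocRef`), and its methods are `allocate`/`deallocate` (formerly `alloc`/`dealloc`).

```rust
#![feature(allocator_api)]

use std::alloc::{Allocator, Global, Layout};

fn main() {
    let layout = Layout::new::<u64>();
    // `Allocator::allocate` returns `Result<NonNull<[u8]>, AllocError>`.
    let ptr = Global.allocate(layout).expect("allocation failed");
    // SAFETY: `ptr` was just allocated by `Global` with this exact `layout`.
    unsafe { Global.deallocate(ptr.cast(), layout) };
}
```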


@ -38,7 +38,7 @@ extern "Rust" {
/// The global memory allocator.
///
/// This type implements the [`AllocRef`] trait by forwarding calls
/// This type implements the [`Allocator`] trait by forwarding calls
/// to the allocator registered with the `#[global_allocator]` attribute
/// if there is one, or the `std` crate's default.
///
@ -59,7 +59,7 @@ pub use std::alloc::Global;
/// if there is one, or the `std` crate's default.
///
/// This function is expected to be deprecated in favor of the `alloc` method
/// of the [`Global`] type when it and the [`AllocRef`] trait become stable.
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
@ -93,7 +93,7 @@ pub unsafe fn alloc(layout: Layout) -> *mut u8 {
/// if there is one, or the `std` crate's default.
///
/// This function is expected to be deprecated in favor of the `dealloc` method
/// of the [`Global`] type when it and the [`AllocRef`] trait become stable.
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
@ -111,7 +111,7 @@ pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
/// if there is one, or the `std` crate's default.
///
/// This function is expected to be deprecated in favor of the `realloc` method
/// of the [`Global`] type when it and the [`AllocRef`] trait become stable.
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
@ -129,7 +129,7 @@ pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8
/// if there is one, or the `std` crate's default.
///
/// This function is expected to be deprecated in favor of the `alloc_zeroed` method
/// of the [`Global`] type when it and the [`AllocRef`] trait become stable.
/// of the [`Global`] type when it and the [`Allocator`] trait become stable.
///
/// # Safety
///
@ -170,7 +170,7 @@ impl Global {
}
}
// SAFETY: Same as `AllocRef::grow`
// SAFETY: Same as `Allocator::grow`
#[inline]
unsafe fn grow_impl(
&self,
@ -211,7 +211,7 @@ impl Global {
old_size => unsafe {
let new_ptr = self.alloc_impl(new_layout, zeroed)?;
ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size);
self.dealloc(ptr, old_layout);
self.deallocate(ptr, old_layout);
Ok(new_ptr)
},
}
@ -220,19 +220,19 @@ impl Global {
#[unstable(feature = "allocator_api", issue = "32838")]
#[cfg(not(test))]
unsafe impl AllocRef for Global {
unsafe impl Allocator for Global {
#[inline]
fn alloc(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
self.alloc_impl(layout, false)
}
#[inline]
fn alloc_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
self.alloc_impl(layout, true)
}
#[inline]
unsafe fn dealloc(&self, ptr: NonNull<u8>, layout: Layout) {
unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
if layout.size() != 0 {
// SAFETY: `layout` is non-zero in size,
// other conditions must be upheld by the caller
@ -277,7 +277,7 @@ unsafe impl AllocRef for Global {
match new_layout.size() {
// SAFETY: conditions must be upheld by the caller
0 => unsafe {
self.dealloc(ptr, old_layout);
self.deallocate(ptr, old_layout);
Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
},
@ -297,9 +297,9 @@ unsafe impl AllocRef for Global {
// `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
// for `dealloc` must be upheld by the caller.
new_size => unsafe {
let new_ptr = self.alloc(new_layout)?;
let new_ptr = self.allocate(new_layout)?;
ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
self.dealloc(ptr, old_layout);
self.deallocate(ptr, old_layout);
Ok(new_ptr)
},
}
@ -313,7 +313,7 @@ unsafe impl AllocRef for Global {
#[inline]
unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
let layout = unsafe { Layout::from_size_align_unchecked(size, align) };
match Global.alloc(layout) {
match Global.allocate(layout) {
Ok(ptr) => ptr.as_mut_ptr(),
Err(_) => handle_alloc_error(layout),
}
@ -322,16 +322,16 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
#[cfg_attr(not(test), lang = "box_free")]
#[inline]
// This signature has to be the same as `Box`, otherwise an ICE will happen.
// When an additional parameter to `Box` is added (like `A: AllocRef`), this has to be added here as
// When an additional parameter to `Box` is added (like `A: Allocator`), this has to be added here as
// well.
// For example if `Box` is changed to `struct Box<T: ?Sized, A: AllocRef>(Unique<T>, A)`,
// this function has to be changed to `fn box_free<T: ?Sized, A: AllocRef>(Unique<T>, A)` as well.
pub(crate) unsafe fn box_free<T: ?Sized, A: AllocRef>(ptr: Unique<T>, alloc: A) {
// For example if `Box` is changed to `struct Box<T: ?Sized, A: Allocator>(Unique<T>, A)`,
// this function has to be changed to `fn box_free<T: ?Sized, A: Allocator>(Unique<T>, A)` as well.
pub(crate) unsafe fn box_free<T: ?Sized, A: Allocator>(ptr: Unique<T>, alloc: A) {
unsafe {
let size = size_of_val(ptr.as_ref());
let align = min_align_of_val(ptr.as_ref());
let layout = Layout::from_size_align_unchecked(size, align);
alloc.dealloc(ptr.cast().into(), layout)
alloc.deallocate(ptr.cast().into(), layout)
}
}
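
The doc comments above note that the free functions `alloc`, `dealloc`, `realloc`, and `alloc_zeroed` are expected to be deprecated in favor of the corresponding `Global` methods once the `Allocator` trait stabilizes. A hedged sketch of that contrast, assuming nightly `allocator_api`:

```rust
#![feature(allocator_api)]

use std::alloc::{alloc, dealloc, Allocator, Global, Layout};

fn main() {
    let layout = Layout::array::<u8>(16).unwrap();

    // Today's stable path: free functions returning raw pointers
    // (null on failure).
    unsafe {
        let raw = alloc(layout);
        assert!(!raw.is_null());
        dealloc(raw, layout);
    }

    // The intended replacement: fallible `Allocator` methods on `Global`.
    let ptr = Global.allocate(layout).expect("allocation failed");
    unsafe { Global.deallocate(ptr.cast(), layout) };
}
```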


@ -9,7 +9,7 @@ fn allocate_zeroed() {
unsafe {
let layout = Layout::from_size_align(1024, 1).unwrap();
let ptr =
Global.alloc_zeroed(layout.clone()).unwrap_or_else(|_| handle_alloc_error(layout));
Global.allocate_zeroed(layout.clone()).unwrap_or_else(|_| handle_alloc_error(layout));
let mut i = ptr.as_non_null_ptr().as_ptr();
let end = i.add(layout.size());
@ -17,7 +17,7 @@ fn allocate_zeroed() {
assert_eq!(*i, 0);
i = i.offset(1);
}
Global.dealloc(ptr.as_non_null_ptr(), layout);
Global.deallocate(ptr.as_non_null_ptr(), layout);
}
}


@ -153,7 +153,7 @@ use core::pin::Pin;
use core::ptr::{self, Unique};
use core::task::{Context, Poll};
use crate::alloc::{handle_alloc_error, AllocRef, Global, Layout};
use crate::alloc::{handle_alloc_error, Allocator, Global, Layout};
use crate::borrow::Cow;
use crate::raw_vec::RawVec;
use crate::str::from_boxed_utf8_unchecked;
@ -167,7 +167,7 @@ use crate::vec::Vec;
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Box<
T: ?Sized,
#[unstable(feature = "allocator_api", issue = "32838")] A: AllocRef = Global,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
>(Unique<T>, A);
impl<T> Box<T> {
@ -243,7 +243,7 @@ impl<T> Box<T> {
}
}
impl<T, A: AllocRef> Box<T, A> {
impl<T, A: Allocator> Box<T, A> {
/// Allocates memory in the given allocator then places `x` into it.
///
/// This doesn't actually allocate if `T` is zero-sized.
@ -291,7 +291,7 @@ impl<T, A: AllocRef> Box<T, A> {
// #[unstable(feature = "new_uninit", issue = "63291")]
pub fn new_uninit_in(alloc: A) -> Box<mem::MaybeUninit<T>, A> {
let layout = Layout::new::<mem::MaybeUninit<T>>();
let ptr = alloc.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout)).cast();
let ptr = alloc.allocate(layout).unwrap_or_else(|_| handle_alloc_error(layout)).cast();
unsafe { Box::from_raw_in(ptr.as_ptr(), alloc) }
}
@ -319,7 +319,8 @@ impl<T, A: AllocRef> Box<T, A> {
// #[unstable(feature = "new_uninit", issue = "63291")]
pub fn new_zeroed_in(alloc: A) -> Box<mem::MaybeUninit<T>, A> {
let layout = Layout::new::<mem::MaybeUninit<T>>();
let ptr = alloc.alloc_zeroed(layout).unwrap_or_else(|_| handle_alloc_error(layout)).cast();
let ptr =
alloc.allocate_zeroed(layout).unwrap_or_else(|_| handle_alloc_error(layout)).cast();
unsafe { Box::from_raw_in(ptr.as_ptr(), alloc) }
}
@ -339,7 +340,7 @@ impl<T, A: AllocRef> Box<T, A> {
/// This conversion does not allocate on the heap and happens in place.
#[unstable(feature = "box_into_boxed_slice", issue = "71582")]
pub fn into_boxed_slice(boxed: Self) -> Box<[T], A> {
let (raw, alloc) = Box::into_raw_with_alloc(boxed);
let (raw, alloc) = Box::into_raw_with_allocator(boxed);
unsafe { Box::from_raw_in(raw as *mut [T; 1], alloc) }
}
}
@ -394,7 +395,7 @@ impl<T> Box<[T]> {
}
}
impl<T, A: AllocRef> Box<[T], A> {
impl<T, A: Allocator> Box<[T], A> {
/// Constructs a new boxed slice with uninitialized contents in the provided allocator.
///
/// # Examples
@ -450,7 +451,7 @@ impl<T, A: AllocRef> Box<[T], A> {
}
}
impl<T, A: AllocRef> Box<mem::MaybeUninit<T>, A> {
impl<T, A: Allocator> Box<mem::MaybeUninit<T>, A> {
/// Converts to `Box<T, A>`.
///
/// # Safety
@ -482,12 +483,12 @@ impl<T, A: AllocRef> Box<mem::MaybeUninit<T>, A> {
#[unstable(feature = "new_uninit", issue = "63291")]
#[inline]
pub unsafe fn assume_init(self) -> Box<T, A> {
let (raw, alloc) = Box::into_raw_with_alloc(self);
let (raw, alloc) = Box::into_raw_with_allocator(self);
unsafe { Box::from_raw_in(raw as *mut T, alloc) }
}
}
impl<T, A: AllocRef> Box<[mem::MaybeUninit<T>], A> {
impl<T, A: Allocator> Box<[mem::MaybeUninit<T>], A> {
/// Converts to `Box<[T], A>`.
///
/// # Safety
@ -521,7 +522,7 @@ impl<T, A: AllocRef> Box<[mem::MaybeUninit<T>], A> {
#[unstable(feature = "new_uninit", issue = "63291")]
#[inline]
pub unsafe fn assume_init(self) -> Box<[T], A> {
let (raw, alloc) = Box::into_raw_with_alloc(self);
let (raw, alloc) = Box::into_raw_with_allocator(self);
unsafe { Box::from_raw_in(raw as *mut [T], alloc) }
}
}
@ -575,7 +576,7 @@ impl<T: ?Sized> Box<T> {
}
}
impl<T: ?Sized, A: AllocRef> Box<T, A> {
impl<T: ?Sized, A: Allocator> Box<T, A> {
/// Constructs a box from a raw pointer in the given allocator.
///
/// After calling this function, the raw pointer is owned by the
@ -594,24 +595,24 @@ impl<T: ?Sized, A: AllocRef> Box<T, A> {
/// # Examples
///
/// Recreate a `Box` which was previously converted to a raw pointer
/// using [`Box::into_raw_with_alloc`]:
/// using [`Box::into_raw_with_allocator`]:
/// ```
/// #![feature(allocator_api)]
///
/// use std::alloc::System;
///
/// let x = Box::new_in(5, System);
/// let (ptr, alloc) = Box::into_raw_with_alloc(x);
/// let (ptr, alloc) = Box::into_raw_with_allocator(x);
/// let x = unsafe { Box::from_raw_in(ptr, alloc) };
/// ```
/// Manually create a `Box` from scratch by using the system allocator:
/// ```
/// #![feature(allocator_api, slice_ptr_get)]
///
/// use std::alloc::{AllocRef, Layout, System};
/// use std::alloc::{Allocator, Layout, System};
///
/// unsafe {
/// let ptr = System.alloc(Layout::new::<i32>())?.as_mut_ptr();
/// let ptr = System.allocate(Layout::new::<i32>())?.as_mut_ptr();
/// // In general .write is required to avoid attempting to destruct
/// // the (uninitialized) previous contents of `ptr`, though for this
/// // simple example `*ptr = 5` would have worked as well.
@ -671,7 +672,7 @@ impl<T: ?Sized, A: AllocRef> Box<T, A> {
#[stable(feature = "box_raw", since = "1.4.0")]
#[inline]
pub fn into_raw(b: Self) -> *mut T {
Self::into_raw_with_alloc(b).0
Self::into_raw_with_allocator(b).0
}
/// Consumes the `Box`, returning a wrapped raw pointer and the allocator.
@ -687,7 +688,7 @@ impl<T: ?Sized, A: AllocRef> Box<T, A> {
/// the cleanup.
///
/// Note: this is an associated function, which means that you have
/// to call it as `Box::into_raw_with_alloc(b)` instead of `b.into_raw_with_alloc()`. This
/// to call it as `Box::into_raw_with_allocator(b)` instead of `b.into_raw_with_allocator()`. This
/// is so that there is no conflict with a method on the inner type.
///
/// # Examples
@ -699,7 +700,7 @@ impl<T: ?Sized, A: AllocRef> Box<T, A> {
/// use std::alloc::System;
///
/// let x = Box::new_in(String::from("Hello"), System);
/// let (ptr, alloc) = Box::into_raw_with_alloc(x);
/// let (ptr, alloc) = Box::into_raw_with_allocator(x);
/// let x = unsafe { Box::from_raw_in(ptr, alloc) };
/// ```
/// Manual cleanup by explicitly running the destructor and deallocating
@ -707,22 +708,22 @@ impl<T: ?Sized, A: AllocRef> Box<T, A> {
/// ```
/// #![feature(allocator_api)]
///
/// use std::alloc::{AllocRef, Layout, System};
/// use std::alloc::{Allocator, Layout, System};
/// use std::ptr::{self, NonNull};
///
/// let x = Box::new_in(String::from("Hello"), System);
/// let (ptr, alloc) = Box::into_raw_with_alloc(x);
/// let (ptr, alloc) = Box::into_raw_with_allocator(x);
/// unsafe {
/// ptr::drop_in_place(ptr);
/// let non_null = NonNull::new_unchecked(ptr);
/// alloc.dealloc(non_null.cast(), Layout::new::<String>());
/// alloc.deallocate(non_null.cast(), Layout::new::<String>());
/// }
/// ```
///
/// [memory layout]: self#memory-layout
#[unstable(feature = "allocator_api", issue = "32838")]
#[inline]
pub fn into_raw_with_alloc(b: Self) -> (*mut T, A) {
pub fn into_raw_with_allocator(b: Self) -> (*mut T, A) {
let (leaked, alloc) = Box::into_unique(b);
(leaked.as_ptr(), alloc)
}
@ -747,11 +748,11 @@ impl<T: ?Sized, A: AllocRef> Box<T, A> {
/// Returns a reference to the underlying allocator.
///
/// Note: this is an associated function, which means that you have
/// to call it as `Box::alloc_ref(&b)` instead of `b.alloc_ref()`. This
/// to call it as `Box::allocator(&b)` instead of `b.allocator()`. This
/// is so that there is no conflict with a method on the inner type.
#[unstable(feature = "allocator_api", issue = "32838")]
#[inline]
pub fn alloc_ref(b: &Self) -> &A {
pub fn allocator(b: &Self) -> &A {
&b.1
}
@ -817,7 +818,7 @@ impl<T: ?Sized, A: AllocRef> Box<T, A> {
}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T: ?Sized, A: AllocRef> Drop for Box<T, A> {
unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Box<T, A> {
fn drop(&mut self) {
// FIXME: Do nothing, drop is currently performed by compiler.
}
@ -846,7 +847,7 @@ impl Default for Box<str> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Clone, A: AllocRef + Clone> Clone for Box<T, A> {
impl<T: Clone, A: Allocator + Clone> Clone for Box<T, A> {
/// Returns a new box with a `clone()` of this box's contents.
///
/// # Examples
@ -900,7 +901,7 @@ impl Clone for Box<str> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialEq, A: AllocRef> PartialEq for Box<T, A> {
impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for Box<T, A> {
#[inline]
fn eq(&self, other: &Self) -> bool {
PartialEq::eq(&**self, &**other)
@ -911,7 +912,7 @@ impl<T: ?Sized + PartialEq, A: AllocRef> PartialEq for Box<T, A> {
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialOrd, A: AllocRef> PartialOrd for Box<T, A> {
impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for Box<T, A> {
#[inline]
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
PartialOrd::partial_cmp(&**self, &**other)
@ -934,24 +935,24 @@ impl<T: ?Sized + PartialOrd, A: AllocRef> PartialOrd for Box<T, A> {
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Ord, A: AllocRef> Ord for Box<T, A> {
impl<T: ?Sized + Ord, A: Allocator> Ord for Box<T, A> {
#[inline]
fn cmp(&self, other: &Self) -> Ordering {
Ord::cmp(&**self, &**other)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Eq, A: AllocRef> Eq for Box<T, A> {}
impl<T: ?Sized + Eq, A: Allocator> Eq for Box<T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Hash, A: AllocRef> Hash for Box<T, A> {
impl<T: ?Sized + Hash, A: Allocator> Hash for Box<T, A> {
fn hash<H: Hasher>(&self, state: &mut H) {
(**self).hash(state);
}
}
#[stable(feature = "indirect_hasher_impl", since = "1.22.0")]
impl<T: ?Sized + Hasher, A: AllocRef> Hasher for Box<T, A> {
impl<T: ?Sized + Hasher, A: Allocator> Hasher for Box<T, A> {
fn finish(&self) -> u64 {
(**self).finish()
}
@ -1016,7 +1017,7 @@ impl<T> From<T> for Box<T> {
}
#[stable(feature = "pin", since = "1.33.0")]
impl<T: ?Sized, A: AllocRef> From<Box<T, A>> for Pin<Box<T, A>>
impl<T: ?Sized, A: Allocator> From<Box<T, A>> for Pin<Box<T, A>>
where
A: 'static,
{
@ -1094,7 +1095,7 @@ impl From<Cow<'_, str>> for Box<str> {
}
#[stable(feature = "boxed_str_conv", since = "1.19.0")]
impl<A: AllocRef> From<Box<str, A>> for Box<[u8], A> {
impl<A: Allocator> From<Box<str, A>> for Box<[u8], A> {
/// Converts a `Box<str>` into a `Box<[u8]>`
///
/// This conversion does not allocate on the heap and happens in place.
@ -1113,7 +1114,7 @@ impl<A: AllocRef> From<Box<str, A>> for Box<[u8], A> {
/// ```
#[inline]
fn from(s: Box<str, A>) -> Self {
let (raw, alloc) = Box::into_raw_with_alloc(s);
let (raw, alloc) = Box::into_raw_with_allocator(s);
unsafe { Box::from_raw_in(raw as *mut [u8], alloc) }
}
}
@ -1147,7 +1148,7 @@ impl<T, const N: usize> TryFrom<Box<[T]>> for Box<[T; N]> {
}
}
impl<A: AllocRef> Box<dyn Any, A> {
impl<A: Allocator> Box<dyn Any, A> {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
/// Attempt to downcast the box to a concrete type.
@ -1170,7 +1171,7 @@ impl<A: AllocRef> Box<dyn Any, A> {
pub fn downcast<T: Any>(self) -> Result<Box<T, A>, Self> {
if self.is::<T>() {
unsafe {
let (raw, alloc): (*mut dyn Any, _) = Box::into_raw_with_alloc(self);
let (raw, alloc): (*mut dyn Any, _) = Box::into_raw_with_allocator(self);
Ok(Box::from_raw_in(raw as *mut T, alloc))
}
} else {
@ -1179,7 +1180,7 @@ impl<A: AllocRef> Box<dyn Any, A> {
}
}
impl<A: AllocRef> Box<dyn Any + Send, A> {
impl<A: Allocator> Box<dyn Any + Send, A> {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
/// Attempt to downcast the box to a concrete type.
@ -1202,7 +1203,7 @@ impl<A: AllocRef> Box<dyn Any + Send, A> {
pub fn downcast<T: Any>(self) -> Result<Box<T, A>, Self> {
if self.is::<T>() {
unsafe {
let (raw, alloc): (*mut (dyn Any + Send), _) = Box::into_raw_with_alloc(self);
let (raw, alloc): (*mut (dyn Any + Send), _) = Box::into_raw_with_allocator(self);
Ok(Box::from_raw_in(raw as *mut T, alloc))
}
} else {
@ -1212,21 +1213,21 @@ impl<A: AllocRef> Box<dyn Any + Send, A> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: fmt::Display + ?Sized, A: AllocRef> fmt::Display for Box<T, A> {
impl<T: fmt::Display + ?Sized, A: Allocator> fmt::Display for Box<T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&**self, f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: fmt::Debug + ?Sized, A: AllocRef> fmt::Debug for Box<T, A> {
impl<T: fmt::Debug + ?Sized, A: Allocator> fmt::Debug for Box<T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&**self, f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: AllocRef> fmt::Pointer for Box<T, A> {
impl<T: ?Sized, A: Allocator> fmt::Pointer for Box<T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// It's not possible to extract the inner Unique directly from the Box;
// instead we cast it to a *const which aliases the Unique
@ -1236,7 +1237,7 @@ impl<T: ?Sized, A: AllocRef> fmt::Pointer for Box<T, A> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: AllocRef> Deref for Box<T, A> {
impl<T: ?Sized, A: Allocator> Deref for Box<T, A> {
type Target = T;
fn deref(&self) -> &T {
@ -1245,17 +1246,17 @@ impl<T: ?Sized, A: AllocRef> Deref for Box<T, A> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: AllocRef> DerefMut for Box<T, A> {
impl<T: ?Sized, A: Allocator> DerefMut for Box<T, A> {
fn deref_mut(&mut self) -> &mut T {
&mut **self
}
}
#[unstable(feature = "receiver_trait", issue = "none")]
impl<T: ?Sized, A: AllocRef> Receiver for Box<T, A> {}
impl<T: ?Sized, A: Allocator> Receiver for Box<T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: Iterator + ?Sized, A: AllocRef> Iterator for Box<I, A> {
impl<I: Iterator + ?Sized, A: Allocator> Iterator for Box<I, A> {
type Item = I::Item;
fn next(&mut self) -> Option<I::Item> {
(**self).next()
@ -1276,7 +1277,7 @@ trait BoxIter {
fn last(self) -> Option<Self::Item>;
}
impl<I: Iterator + ?Sized, A: AllocRef> BoxIter for Box<I, A> {
impl<I: Iterator + ?Sized, A: Allocator> BoxIter for Box<I, A> {
type Item = I::Item;
default fn last(self) -> Option<I::Item> {
#[inline]
@ -1291,14 +1292,14 @@ impl<I: Iterator + ?Sized, A: AllocRef> BoxIter for Box<I, A> {
/// Specialization for sized `I`s that uses `I`'s implementation of `last()`
/// instead of the default.
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: Iterator, A: AllocRef> BoxIter for Box<I, A> {
impl<I: Iterator, A: Allocator> BoxIter for Box<I, A> {
fn last(self) -> Option<I::Item> {
(*self).last()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: DoubleEndedIterator + ?Sized, A: AllocRef> DoubleEndedIterator for Box<I, A> {
impl<I: DoubleEndedIterator + ?Sized, A: Allocator> DoubleEndedIterator for Box<I, A> {
fn next_back(&mut self) -> Option<I::Item> {
(**self).next_back()
}
@ -1307,7 +1308,7 @@ impl<I: DoubleEndedIterator + ?Sized, A: AllocRef> DoubleEndedIterator for Box<I
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: ExactSizeIterator + ?Sized, A: AllocRef> ExactSizeIterator for Box<I, A> {
impl<I: ExactSizeIterator + ?Sized, A: Allocator> ExactSizeIterator for Box<I, A> {
fn len(&self) -> usize {
(**self).len()
}
@ -1317,10 +1318,10 @@ impl<I: ExactSizeIterator + ?Sized, A: AllocRef> ExactSizeIterator for Box<I, A>
}
#[stable(feature = "fused", since = "1.26.0")]
impl<I: FusedIterator + ?Sized, A: AllocRef> FusedIterator for Box<I, A> {}
impl<I: FusedIterator + ?Sized, A: Allocator> FusedIterator for Box<I, A> {}
#[stable(feature = "boxed_closure_impls", since = "1.35.0")]
impl<Args, F: FnOnce<Args> + ?Sized, A: AllocRef> FnOnce<Args> for Box<F, A> {
impl<Args, F: FnOnce<Args> + ?Sized, A: Allocator> FnOnce<Args> for Box<F, A> {
type Output = <F as FnOnce<Args>>::Output;
extern "rust-call" fn call_once(self, args: Args) -> Self::Output {
@ -1329,21 +1330,21 @@ impl<Args, F: FnOnce<Args> + ?Sized, A: AllocRef> FnOnce<Args> for Box<F, A> {
}
#[stable(feature = "boxed_closure_impls", since = "1.35.0")]
impl<Args, F: FnMut<Args> + ?Sized, A: AllocRef> FnMut<Args> for Box<F, A> {
impl<Args, F: FnMut<Args> + ?Sized, A: Allocator> FnMut<Args> for Box<F, A> {
extern "rust-call" fn call_mut(&mut self, args: Args) -> Self::Output {
<F as FnMut<Args>>::call_mut(self, args)
}
}
#[stable(feature = "boxed_closure_impls", since = "1.35.0")]
impl<Args, F: Fn<Args> + ?Sized, A: AllocRef> Fn<Args> for Box<F, A> {
impl<Args, F: Fn<Args> + ?Sized, A: Allocator> Fn<Args> for Box<F, A> {
extern "rust-call" fn call(&self, args: Args) -> Self::Output {
<F as Fn<Args>>::call(self, args)
}
}
#[unstable(feature = "coerce_unsized", issue = "27732")]
impl<T: ?Sized + Unsize<U>, U: ?Sized, A: AllocRef> CoerceUnsized<Box<U, A>> for Box<T, A> {}
impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Box<U, A>> for Box<T, A> {}
#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Box<U>> for Box<T, Global> {}
@ -1356,9 +1357,9 @@ impl<I> FromIterator<I> for Box<[I]> {
}
#[stable(feature = "box_slice_clone", since = "1.3.0")]
impl<T: Clone, A: AllocRef + Clone> Clone for Box<[T], A> {
impl<T: Clone, A: Allocator + Clone> Clone for Box<[T], A> {
fn clone(&self) -> Self {
let alloc = Box::alloc_ref(self).clone();
let alloc = Box::allocator(self).clone();
self.to_vec_in(alloc).into_boxed_slice()
}
@ -1372,28 +1373,28 @@ impl<T: Clone, A: AllocRef + Clone> Clone for Box<[T], A> {
}
#[stable(feature = "box_borrow", since = "1.1.0")]
impl<T: ?Sized, A: AllocRef> borrow::Borrow<T> for Box<T, A> {
impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for Box<T, A> {
fn borrow(&self) -> &T {
&**self
}
}
#[stable(feature = "box_borrow", since = "1.1.0")]
impl<T: ?Sized, A: AllocRef> borrow::BorrowMut<T> for Box<T, A> {
impl<T: ?Sized, A: Allocator> borrow::BorrowMut<T> for Box<T, A> {
fn borrow_mut(&mut self) -> &mut T {
&mut **self
}
}
#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
impl<T: ?Sized, A: AllocRef> AsRef<T> for Box<T, A> {
impl<T: ?Sized, A: Allocator> AsRef<T> for Box<T, A> {
fn as_ref(&self) -> &T {
&**self
}
}
#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
impl<T: ?Sized, A: AllocRef> AsMut<T> for Box<T, A> {
impl<T: ?Sized, A: Allocator> AsMut<T> for Box<T, A> {
fn as_mut(&mut self) -> &mut T {
&mut **self
}
@ -1422,10 +1423,10 @@ impl<T: ?Sized, A: AllocRef> AsMut<T> for Box<T, A> {
* could have a method to project a Pin<T> from it.
*/
#[stable(feature = "pin", since = "1.33.0")]
impl<T: ?Sized, A: AllocRef> Unpin for Box<T, A> where A: 'static {}
impl<T: ?Sized, A: Allocator> Unpin for Box<T, A> where A: 'static {}
#[unstable(feature = "generator_trait", issue = "43122")]
impl<G: ?Sized + Generator<R> + Unpin, R, A: AllocRef> Generator<R> for Box<G, A>
impl<G: ?Sized + Generator<R> + Unpin, R, A: Allocator> Generator<R> for Box<G, A>
where
A: 'static,
{
@ -1438,7 +1439,7 @@ where
}
#[unstable(feature = "generator_trait", issue = "43122")]
impl<G: ?Sized + Generator<R>, R, A: AllocRef> Generator<R> for Pin<Box<G, A>>
impl<G: ?Sized + Generator<R>, R, A: Allocator> Generator<R> for Pin<Box<G, A>>
where
A: 'static,
{
@ -1451,7 +1452,7 @@ where
}
#[stable(feature = "futures_api", since = "1.36.0")]
impl<F: ?Sized + Future + Unpin, A: AllocRef> Future for Box<F, A>
impl<F: ?Sized + Future + Unpin, A: Allocator> Future for Box<F, A>
where
A: 'static,
{

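This file renames `Box::into_raw_with_alloc` to `Box::into_raw_with_allocator` and `Box::alloc_ref` to `Box::allocator`. A short illustrative sketch (assuming nightly `allocator_api`) combining the renamed APIs in one round trip:

```rust
#![feature(allocator_api)]

use std::alloc::System;

fn main() {
    let b = Box::new_in(5_i32, System);
    // `Box::allocator` (formerly `alloc_ref`) is an associated function so it
    // cannot collide with a method on the boxed value.
    let _alloc: &System = Box::allocator(&b);

    // Round-trip through a raw pointer, carrying the allocator alongside it.
    let (ptr, alloc) = Box::into_raw_with_allocator(b);
    let b = unsafe { Box::from_raw_in(ptr, alloc) };
    assert_eq!(*b, 5);
}
```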

@ -36,7 +36,7 @@ use core::marker::PhantomData;
use core::mem::{self, MaybeUninit};
use core::ptr::{self, NonNull};
use crate::alloc::{AllocRef, Global, Layout};
use crate::alloc::{Allocator, Global, Layout};
use crate::boxed::Box;
const B: usize = 6;
@ -195,7 +195,7 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
self.borrow_mut().clear_parent_link();
unsafe {
Global.dealloc(top.cast(), Layout::new::<InternalNode<K, V>>());
Global.deallocate(top.cast(), Layout::new::<InternalNode<K, V>>());
}
}
}
@ -449,7 +449,7 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
let node = self.node;
let ret = self.ascend().ok();
unsafe {
Global.dealloc(
Global.deallocate(
node.cast(),
if height > 0 {
Layout::new::<InternalNode<K, V>>()
@ -1407,9 +1407,9 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
left_node.correct_childrens_parent_links(left_len + 1..=left_len + 1 + right_len);
Global.dealloc(right_node.node.cast(), Layout::new::<InternalNode<K, V>>());
Global.deallocate(right_node.node.cast(), Layout::new::<InternalNode<K, V>>());
} else {
Global.dealloc(right_node.node.cast(), Layout::new::<LeafNode<K, V>>());
Global.deallocate(right_node.node.cast(), Layout::new::<LeafNode<K, V>>());
}
let new_idx = match track_edge_idx {


@ -9,7 +9,7 @@ use core::ops::Drop;
use core::ptr::{self, NonNull, Unique};
use core::slice;
use crate::alloc::{handle_alloc_error, AllocRef, Global, Layout};
use crate::alloc::{handle_alloc_error, Allocator, Global, Layout};
use crate::boxed::Box;
use crate::collections::TryReserveError::{self, *};
@ -46,7 +46,7 @@ enum AllocInit {
/// `usize::MAX`. This means that you need to be careful when round-tripping this type with a
/// `Box<[T]>`, since `capacity()` won't yield the length.
#[allow(missing_debug_implementations)]
pub struct RawVec<T, A: AllocRef = Global> {
pub struct RawVec<T, A: Allocator = Global> {
ptr: Unique<T>,
cap: usize,
alloc: A,
@ -113,7 +113,7 @@ impl<T> RawVec<T, Global> {
}
}
impl<T, A: AllocRef> RawVec<T, A> {
impl<T, A: Allocator> RawVec<T, A> {
/// Like `new`, but parameterized over the choice of allocator for
/// the returned `RawVec`.
#[rustc_allow_const_fn_unstable(const_fn)]
@ -139,7 +139,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
/// Converts a `Box<[T]>` into a `RawVec<T>`.
pub fn from_box(slice: Box<[T], A>) -> Self {
unsafe {
let (slice, alloc) = Box::into_raw_with_alloc(slice);
let (slice, alloc) = Box::into_raw_with_allocator(slice);
RawVec::from_raw_parts_in(slice.as_mut_ptr(), slice.len(), alloc)
}
}
@ -185,8 +185,8 @@ impl<T, A: AllocRef> RawVec<T, A> {
Err(_) => capacity_overflow(),
}
let result = match init {
AllocInit::Uninitialized => alloc.alloc(layout),
AllocInit::Zeroed => alloc.alloc_zeroed(layout),
AllocInit::Uninitialized => alloc.allocate(layout),
AllocInit::Zeroed => alloc.allocate_zeroed(layout),
};
let ptr = match result {
Ok(ptr) => ptr,
@ -232,7 +232,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
}
/// Returns a shared reference to the allocator backing this `RawVec`.
pub fn alloc_ref(&self) -> &A {
pub fn allocator(&self) -> &A {
&self.alloc
}
@ -359,7 +359,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
}
}
impl<T, A: AllocRef> RawVec<T, A> {
impl<T, A: Allocator> RawVec<T, A> {
/// Returns whether the buffer needs to grow to fulfill the needed extra capacity.
/// Mainly used to make inlining reserve-calls possible without inlining `grow`.
fn needs_to_grow(&self, len: usize, additional: usize) -> bool {
@ -471,7 +471,7 @@ fn finish_grow<A>(
alloc: &mut A,
) -> Result<NonNull<[u8]>, TryReserveError>
where
A: AllocRef,
A: Allocator,
{
// Check for the error here to minimize the size of `RawVec::grow_*`.
let new_layout = new_layout.map_err(|_| CapacityOverflow)?;
@ -486,17 +486,17 @@ where
alloc.grow(ptr, old_layout, new_layout)
}
} else {
alloc.alloc(new_layout)
alloc.allocate(new_layout)
};
memory.map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })
}
unsafe impl<#[may_dangle] T, A: AllocRef> Drop for RawVec<T, A> {
unsafe impl<#[may_dangle] T, A: Allocator> Drop for RawVec<T, A> {
/// Frees the memory owned by the `RawVec` *without* trying to drop its contents.
fn drop(&mut self) {
if let Some((ptr, layout)) = self.current_memory() {
unsafe { self.alloc.dealloc(ptr, layout) }
unsafe { self.alloc.deallocate(ptr, layout) }
}
}
}


@ -20,13 +20,13 @@ fn allocator_param() {
struct BoundedAlloc {
fuel: Cell<usize>,
}
unsafe impl AllocRef for BoundedAlloc {
fn alloc(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
unsafe impl Allocator for BoundedAlloc {
fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
let size = layout.size();
if size > self.fuel.get() {
return Err(AllocError);
}
match Global.alloc(layout) {
match Global.allocate(layout) {
ok @ Ok(_) => {
self.fuel.set(self.fuel.get() - size);
ok
@ -34,8 +34,8 @@ fn allocator_param() {
err @ Err(_) => err,
}
}
unsafe fn dealloc(&self, ptr: NonNull<u8>, layout: Layout) {
unsafe { Global.dealloc(ptr, layout) }
unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
unsafe { Global.deallocate(ptr, layout) }
}
}
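
The continuation of this test is not shown in the hunk; a hypothetical way to exercise `BoundedAlloc` with the renamed accessor (the values below are assumed for illustration, and `#![feature(allocator_api)]` is in effect):

```rust
// Each successful allocation burns `fuel` equal to its size, so the capped
// allocator can be threaded through a collection.
let a = BoundedAlloc { fuel: Cell::new(500) };
let v: Vec<u8, BoundedAlloc> = Vec::with_capacity_in(50, a);
assert_eq!(v.allocator().fuel.get(), 450);
```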


@ -262,7 +262,7 @@ use core::pin::Pin;
use core::ptr::{self, NonNull};
use core::slice::from_raw_parts_mut;
use crate::alloc::{box_free, handle_alloc_error, AllocError, AllocRef, Global, Layout};
use crate::alloc::{box_free, handle_alloc_error, AllocError, Allocator, Global, Layout};
use crate::borrow::{Cow, ToOwned};
use crate::string::String;
use crate::vec::Vec;
@ -416,7 +416,7 @@ impl<T> Rc<T> {
unsafe {
Rc::from_ptr(Rc::allocate_for_layout(
Layout::new::<T>(),
|layout| Global.alloc(layout),
|layout| Global.allocate(layout),
|mem| mem as *mut RcBox<mem::MaybeUninit<T>>,
))
}
@ -447,7 +447,7 @@ impl<T> Rc<T> {
unsafe {
Rc::from_ptr(Rc::allocate_for_layout(
Layout::new::<T>(),
|layout| Global.alloc_zeroed(layout),
|layout| Global.allocate_zeroed(layout),
|mem| mem as *mut RcBox<mem::MaybeUninit<T>>,
))
}
@ -555,7 +555,7 @@ impl<T> Rc<[T]> {
unsafe {
Rc::from_ptr(Rc::allocate_for_layout(
Layout::array::<T>(len).unwrap(),
|layout| Global.alloc_zeroed(layout),
|layout| Global.allocate_zeroed(layout),
|mem| {
ptr::slice_from_raw_parts_mut(mem as *mut T, len)
as *mut RcBox<[mem::MaybeUninit<T>]>
@ -1040,7 +1040,7 @@ impl<T: ?Sized> Rc<T> {
unsafe {
Self::allocate_for_layout(
Layout::for_value(&*ptr),
|layout| Global.alloc(layout),
|layout| Global.allocate(layout),
|mem| set_data_ptr(ptr as *mut T, mem) as *mut RcBox<T>,
)
}
@ -1075,7 +1075,7 @@ impl<T> Rc<[T]> {
unsafe {
Self::allocate_for_layout(
Layout::array::<T>(len).unwrap(),
|layout| Global.alloc(layout),
|layout| Global.allocate(layout),
|mem| ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut RcBox<[T]>,
)
}
@ -1125,7 +1125,7 @@ impl<T> Rc<[T]> {
let slice = from_raw_parts_mut(self.elems, self.n_elems);
ptr::drop_in_place(slice);
Global.dealloc(self.mem, self.layout);
Global.deallocate(self.mem, self.layout);
}
}
}
@ -1225,7 +1225,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
self.inner().dec_weak();
if self.inner().weak() == 0 {
Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
Global.deallocate(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
}
}
}
@ -2040,7 +2040,7 @@ impl<T: ?Sized> Drop for Weak<T> {
// the strong pointers have disappeared.
if inner.weak() == 0 {
unsafe {
Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
Global.deallocate(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
}
}
}
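
The `Rc` hunks route allocation through `Global.allocate` and `Global.allocate_zeroed`. As an illustrative aside (not part of this commit), `Rc::new_zeroed` on nightly is the public entry point to the zeroed path:

```rust
#![feature(new_uninit)]

use std::mem::MaybeUninit;
use std::rc::Rc;

fn main() {
    // `Rc::new_zeroed` allocates via the zeroed path shown above.
    let zeroed: Rc<MaybeUninit<u32>> = Rc::new_zeroed();
    // SAFETY: an all-zero bit pattern is a valid `u32`.
    let x: Rc<u32> = unsafe { zeroed.assume_init() };
    assert_eq!(*x, 0);
}
```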


@ -87,7 +87,7 @@ use core::cmp::Ordering::{self, Less};
use core::mem::{self, size_of};
use core::ptr;
use crate::alloc::{AllocRef, Global};
use crate::alloc::{Allocator, Global};
use crate::borrow::ToOwned;
use crate::boxed::Box;
use crate::vec::Vec;
@ -138,7 +138,7 @@ pub use hack::to_vec;
// `core::slice::SliceExt` - we need to supply these functions for the
// `test_permutations` test
mod hack {
use core::alloc::AllocRef;
use core::alloc::Allocator;
use crate::boxed::Box;
use crate::vec::Vec;
@ -146,33 +146,33 @@ mod hack {
// We shouldn't add an inline attribute to this since it is mostly used in
// the `vec!` macro and inlining it causes a perf regression. See #71204
// for discussion and perf results.
pub fn into_vec<T, A: AllocRef>(b: Box<[T], A>) -> Vec<T, A> {
pub fn into_vec<T, A: Allocator>(b: Box<[T], A>) -> Vec<T, A> {
unsafe {
let len = b.len();
let (b, alloc) = Box::into_raw_with_alloc(b);
let (b, alloc) = Box::into_raw_with_allocator(b);
Vec::from_raw_parts_in(b as *mut T, len, len, alloc)
}
}
#[inline]
pub fn to_vec<T: ConvertVec, A: AllocRef>(s: &[T], alloc: A) -> Vec<T, A> {
pub fn to_vec<T: ConvertVec, A: Allocator>(s: &[T], alloc: A) -> Vec<T, A> {
T::to_vec(s, alloc)
}
pub trait ConvertVec {
fn to_vec<A: AllocRef>(s: &[Self], alloc: A) -> Vec<Self, A>
fn to_vec<A: Allocator>(s: &[Self], alloc: A) -> Vec<Self, A>
where
Self: Sized;
}
impl<T: Clone> ConvertVec for T {
#[inline]
default fn to_vec<A: AllocRef>(s: &[Self], alloc: A) -> Vec<Self, A> {
struct DropGuard<'a, T, A: AllocRef> {
default fn to_vec<A: Allocator>(s: &[Self], alloc: A) -> Vec<Self, A> {
struct DropGuard<'a, T, A: Allocator> {
vec: &'a mut Vec<T, A>,
num_init: usize,
}
impl<'a, T, A: AllocRef> Drop for DropGuard<'a, T, A> {
impl<'a, T, A: Allocator> Drop for DropGuard<'a, T, A> {
#[inline]
fn drop(&mut self) {
// SAFETY:
@ -203,7 +203,7 @@ mod hack {
impl<T: Copy> ConvertVec for T {
#[inline]
fn to_vec<A: AllocRef>(s: &[Self], alloc: A) -> Vec<Self, A> {
fn to_vec<A: Allocator>(s: &[Self], alloc: A) -> Vec<Self, A> {
let mut v = Vec::with_capacity_in(s.len(), alloc);
// SAFETY:
// allocated above with the capacity of `s`, and initialize to `s.len()` in
@ -464,7 +464,7 @@ impl<T> [T] {
/// ```
#[inline]
#[unstable(feature = "allocator_api", issue = "32838")]
pub fn to_vec_in<A: AllocRef>(&self, alloc: A) -> Vec<T, A>
pub fn to_vec_in<A: Allocator>(&self, alloc: A) -> Vec<T, A>
where
T: Clone,
{
@ -488,7 +488,7 @@ impl<T> [T] {
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn into_vec<A: AllocRef>(self: Box<Self, A>) -> Vec<T, A> {
pub fn into_vec<A: Allocator>(self: Box<Self, A>) -> Vec<T, A> {
// N.B., see the `hack` module in this file for more details.
hack::into_vec(self)
}
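
With `into_vec` and `to_vec_in` now bounded on `A: Allocator`, slices and boxed slices round-trip through `Vec` in a caller-chosen allocator. An illustrative sketch, assuming nightly `allocator_api`:

```rust
#![feature(allocator_api)]

use std::alloc::System;

fn main() {
    // Clone a slice into a `Vec` backed by the system allocator...
    let v: Vec<i32, System> = [1, 2, 3].to_vec_in(System);
    // ...then round-trip it through a boxed slice in the same allocator.
    let boxed: Box<[i32], System> = v.into_boxed_slice();
    let round_tripped: Vec<i32, System> = boxed.into_vec();
    assert_eq!(round_tripped, [1, 2, 3]);
}
```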


@ -22,7 +22,7 @@ use core::slice::from_raw_parts_mut;
use core::sync::atomic;
use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
use crate::alloc::{box_free, handle_alloc_error, AllocError, AllocRef, Global, Layout};
use crate::alloc::{box_free, handle_alloc_error, AllocError, Allocator, Global, Layout};
use crate::borrow::{Cow, ToOwned};
use crate::boxed::Box;
use crate::rc::is_dangling;
@ -434,7 +434,7 @@ impl<T> Arc<T> {
unsafe {
Arc::from_ptr(Arc::allocate_for_layout(
Layout::new::<T>(),
|layout| Global.alloc(layout),
|layout| Global.allocate(layout),
|mem| mem as *mut ArcInner<mem::MaybeUninit<T>>,
))
}
@ -465,7 +465,7 @@ impl<T> Arc<T> {
unsafe {
Arc::from_ptr(Arc::allocate_for_layout(
Layout::new::<T>(),
|layout| Global.alloc_zeroed(layout),
|layout| Global.allocate_zeroed(layout),
|mem| mem as *mut ArcInner<mem::MaybeUninit<T>>,
))
}
@ -572,7 +572,7 @@ impl<T> Arc<[T]> {
unsafe {
Arc::from_ptr(Arc::allocate_for_layout(
Layout::array::<T>(len).unwrap(),
|layout| Global.alloc_zeroed(layout),
|layout| Global.allocate_zeroed(layout),
|mem| {
ptr::slice_from_raw_parts_mut(mem as *mut T, len)
as *mut ArcInner<[mem::MaybeUninit<T>]>
@ -1015,7 +1015,7 @@ impl<T: ?Sized> Arc<T> {
unsafe {
Self::allocate_for_layout(
Layout::for_value(&*ptr),
|layout| Global.alloc(layout),
|layout| Global.allocate(layout),
|mem| set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>,
)
}
@ -1050,7 +1050,7 @@ impl<T> Arc<[T]> {
unsafe {
Self::allocate_for_layout(
Layout::array::<T>(len).unwrap(),
|layout| Global.alloc(layout),
|layout| Global.allocate(layout),
|mem| ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[T]>,
)
}
@ -1102,7 +1102,7 @@ impl<T> Arc<[T]> {
let slice = from_raw_parts_mut(self.elems, self.n_elems);
ptr::drop_in_place(slice);
Global.dealloc(self.mem, self.layout);
Global.deallocate(self.mem, self.layout);
}
}
}
@ -1925,7 +1925,7 @@ impl<T: ?Sized> Drop for Weak<T> {
if inner.weak.fetch_sub(1, Release) == 1 {
acquire!(inner.weak);
unsafe { Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) }
unsafe { Global.deallocate(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) }
}
}
}


@ -68,7 +68,7 @@ use core::ops::{self, Index, IndexMut, Range, RangeBounds};
use core::ptr::{self, NonNull};
use core::slice::{self, SliceIndex};
use crate::alloc::{AllocRef, Global};
use crate::alloc::{Allocator, Global};
use crate::borrow::{Cow, ToOwned};
use crate::boxed::Box;
use crate::collections::TryReserveError;
@ -298,7 +298,7 @@ use crate::raw_vec::RawVec;
/// [`&`]: ../../std/primitive.reference.html
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "vec_type")]
pub struct Vec<T, #[unstable(feature = "allocator_api", issue = "32838")] A: AllocRef = Global> {
pub struct Vec<T, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global> {
buf: RawVec<T, A>,
len: usize,
}
@ -433,7 +433,7 @@ impl<T> Vec<T> {
}
}
impl<T, A: AllocRef> Vec<T, A> {
impl<T, A: Allocator> Vec<T, A> {
/// Constructs a new, empty `Vec<T, A>`.
///
/// The vector will not allocate until elements are pushed onto it.
@ -555,7 +555,7 @@ impl<T, A: AllocRef> Vec<T, A> {
/// let p = v.as_mut_ptr();
/// let len = v.len();
/// let cap = v.capacity();
/// let alloc = v.alloc_ref();
/// let alloc = v.allocator();
///
/// unsafe {
/// // Overwrite memory with 4, 5, 6
@ -656,7 +656,7 @@ impl<T, A: AllocRef> Vec<T, A> {
let len = me.len();
let capacity = me.capacity();
let ptr = me.as_mut_ptr();
let alloc = unsafe { ptr::read(me.alloc_ref()) };
let alloc = unsafe { ptr::read(me.allocator()) };
(ptr, len, capacity, alloc)
}
@ -1058,8 +1058,8 @@ impl<T, A: AllocRef> Vec<T, A> {
/// Returns a reference to the underlying allocator.
#[unstable(feature = "allocator_api", issue = "32838")]
#[inline]
pub fn alloc_ref(&self) -> &A {
self.buf.alloc_ref()
pub fn allocator(&self) -> &A {
self.buf.allocator()
}
/// Forces the length of the vector to `new_len`.
@ -1620,12 +1620,12 @@ impl<T, A: AllocRef> Vec<T, A> {
// the new vector can take over the original buffer and avoid the copy
return mem::replace(
self,
Vec::with_capacity_in(self.capacity(), self.alloc_ref().clone()),
Vec::with_capacity_in(self.capacity(), self.allocator().clone()),
);
}
let other_len = self.len - at;
let mut other = Vec::with_capacity_in(other_len, self.alloc_ref().clone());
let mut other = Vec::with_capacity_in(other_len, self.allocator().clone());
// Unsafely `set_len` and copy items to `other`.
unsafe {
@ -1749,7 +1749,7 @@ impl<T, A: AllocRef> Vec<T, A> {
}
}
impl<T: Clone, A: AllocRef> Vec<T, A> {
impl<T: Clone, A: Allocator> Vec<T, A> {
/// Resizes the `Vec` in-place so that `len` is equal to `new_len`.
///
/// If `new_len` is greater than `len`, the `Vec` is extended by the
@ -1844,7 +1844,7 @@ impl<T, F: FnMut() -> T> ExtendWith<T> for ExtendFunc<F> {
}
}
impl<T, A: AllocRef> Vec<T, A> {
impl<T, A: Allocator> Vec<T, A> {
/// Extend the vector by `n` values, using the given generator.
fn extend_with<E: ExtendWith<T>>(&mut self, n: usize, mut value: E) {
self.reserve(n);
@ -1904,7 +1904,7 @@ impl Drop for SetLenOnDrop<'_> {
}
}
impl<T: PartialEq, A: AllocRef> Vec<T, A> {
impl<T: PartialEq, A: Allocator> Vec<T, A> {
/// Removes consecutive repeated elements in the vector according to the
/// [`PartialEq`] trait implementation.
///
@ -1926,7 +1926,7 @@ impl<T: PartialEq, A: AllocRef> Vec<T, A> {
}
}
impl<T, A: AllocRef> Vec<T, A> {
impl<T, A: Allocator> Vec<T, A> {
/// Removes the first instance of `item` from the vector if the item exists.
///
/// This method will be removed soon.
@ -1959,17 +1959,17 @@ pub fn from_elem<T: Clone>(elem: T, n: usize) -> Vec<T> {
#[doc(hidden)]
#[unstable(feature = "allocator_api", issue = "32838")]
pub fn from_elem_in<T: Clone, A: AllocRef>(elem: T, n: usize, alloc: A) -> Vec<T, A> {
pub fn from_elem_in<T: Clone, A: Allocator>(elem: T, n: usize, alloc: A) -> Vec<T, A> {
<T as SpecFromElem>::from_elem(elem, n, alloc)
}
// Specialization trait used for Vec::from_elem
trait SpecFromElem: Sized {
fn from_elem<A: AllocRef>(elem: Self, n: usize, alloc: A) -> Vec<Self, A>;
fn from_elem<A: Allocator>(elem: Self, n: usize, alloc: A) -> Vec<Self, A>;
}
impl<T: Clone> SpecFromElem for T {
default fn from_elem<A: AllocRef>(elem: Self, n: usize, alloc: A) -> Vec<Self, A> {
default fn from_elem<A: Allocator>(elem: Self, n: usize, alloc: A) -> Vec<Self, A> {
let mut v = Vec::with_capacity_in(n, alloc);
v.extend_with(n, ExtendElement(elem));
v
@ -1978,7 +1978,7 @@ impl<T: Clone> SpecFromElem for T {
impl SpecFromElem for i8 {
#[inline]
fn from_elem<A: AllocRef>(elem: i8, n: usize, alloc: A) -> Vec<i8, A> {
fn from_elem<A: Allocator>(elem: i8, n: usize, alloc: A) -> Vec<i8, A> {
if elem == 0 {
return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n };
}
@ -1993,7 +1993,7 @@ impl SpecFromElem for i8 {
impl SpecFromElem for u8 {
#[inline]
fn from_elem<A: AllocRef>(elem: u8, n: usize, alloc: A) -> Vec<u8, A> {
fn from_elem<A: Allocator>(elem: u8, n: usize, alloc: A) -> Vec<u8, A> {
if elem == 0 {
return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n };
}
@ -2008,7 +2008,7 @@ impl SpecFromElem for u8 {
impl<T: Clone + IsZero> SpecFromElem for T {
#[inline]
fn from_elem<A: AllocRef>(elem: T, n: usize, alloc: A) -> Vec<T, A> {
fn from_elem<A: Allocator>(elem: T, n: usize, alloc: A) -> Vec<T, A> {
if elem.is_zero() {
return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n };
}
@ -2093,7 +2093,7 @@ unsafe impl<T: ?Sized> IsZero for Option<Box<T>> {
////////////////////////////////////////////////////////////////////////////////
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: AllocRef> ops::Deref for Vec<T, A> {
impl<T, A: Allocator> ops::Deref for Vec<T, A> {
type Target = [T];
fn deref(&self) -> &[T] {
@ -2102,17 +2102,17 @@ impl<T, A: AllocRef> ops::Deref for Vec<T, A> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: AllocRef> ops::DerefMut for Vec<T, A> {
impl<T, A: Allocator> ops::DerefMut for Vec<T, A> {
fn deref_mut(&mut self) -> &mut [T] {
unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.len) }
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Clone, A: AllocRef + Clone> Clone for Vec<T, A> {
impl<T: Clone, A: Allocator + Clone> Clone for Vec<T, A> {
#[cfg(not(test))]
fn clone(&self) -> Self {
let alloc = self.alloc_ref().clone();
let alloc = self.allocator().clone();
<[T]>::to_vec_in(&**self, alloc)
}
@ -2122,7 +2122,7 @@ impl<T: Clone, A: AllocRef + Clone> Clone for Vec<T, A> {
// NB see the slice::hack module in slice.rs for more information
#[cfg(test)]
fn clone(&self) -> Self {
let alloc = self.alloc_ref().clone();
let alloc = self.allocator().clone();
crate::slice::to_vec(&**self, alloc)
}
@ -2141,7 +2141,7 @@ impl<T: Clone, A: AllocRef + Clone> Clone for Vec<T, A> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Hash, A: AllocRef> Hash for Vec<T, A> {
impl<T: Hash, A: Allocator> Hash for Vec<T, A> {
#[inline]
fn hash<H: Hasher>(&self, state: &mut H) {
Hash::hash(&**self, state)
@ -2153,7 +2153,7 @@ impl<T: Hash, A: AllocRef> Hash for Vec<T, A> {
message = "vector indices are of type `usize` or ranges of `usize`",
label = "vector indices are of type `usize` or ranges of `usize`"
)]
impl<T, I: SliceIndex<[T]>, A: AllocRef> Index<I> for Vec<T, A> {
impl<T, I: SliceIndex<[T]>, A: Allocator> Index<I> for Vec<T, A> {
type Output = I::Output;
#[inline]
@ -2167,7 +2167,7 @@ impl<T, I: SliceIndex<[T]>, A: AllocRef> Index<I> for Vec<T, A> {
message = "vector indices are of type `usize` or ranges of `usize`",
label = "vector indices are of type `usize` or ranges of `usize`"
)]
impl<T, I: SliceIndex<[T]>, A: AllocRef> IndexMut<I> for Vec<T, A> {
impl<T, I: SliceIndex<[T]>, A: Allocator> IndexMut<I> for Vec<T, A> {
#[inline]
fn index_mut(&mut self, index: I) -> &mut Self::Output {
IndexMut::index_mut(&mut **self, index)
@ -2183,7 +2183,7 @@ impl<T> FromIterator<T> for Vec<T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: AllocRef> IntoIterator for Vec<T, A> {
impl<T, A: Allocator> IntoIterator for Vec<T, A> {
type Item = T;
type IntoIter = IntoIter<T, A>;
@ -2204,7 +2204,7 @@ impl<T, A: AllocRef> IntoIterator for Vec<T, A> {
fn into_iter(self) -> IntoIter<T, A> {
unsafe {
let mut me = ManuallyDrop::new(self);
let alloc = ptr::read(me.alloc_ref());
let alloc = ptr::read(me.allocator());
let begin = me.as_mut_ptr();
let end = if mem::size_of::<T>() == 0 {
arith_offset(begin as *const i8, me.len() as isize) as *const T
@ -2225,7 +2225,7 @@ impl<T, A: AllocRef> IntoIterator for Vec<T, A> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, A: AllocRef> IntoIterator for &'a Vec<T, A> {
impl<'a, T, A: Allocator> IntoIterator for &'a Vec<T, A> {
type Item = &'a T;
type IntoIter = slice::Iter<'a, T>;
@ -2235,7 +2235,7 @@ impl<'a, T, A: AllocRef> IntoIterator for &'a Vec<T, A> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, A: AllocRef> IntoIterator for &'a mut Vec<T, A> {
impl<'a, T, A: Allocator> IntoIterator for &'a mut Vec<T, A> {
type Item = &'a mut T;
type IntoIter = slice::IterMut<'a, T>;
@ -2245,7 +2245,7 @@ impl<'a, T, A: AllocRef> IntoIterator for &'a mut Vec<T, A> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: AllocRef> Extend<T> for Vec<T, A> {
impl<T, A: Allocator> Extend<T> for Vec<T, A> {
#[inline]
fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
<Self as SpecExtend<T, I::IntoIter>>::spec_extend(self, iter.into_iter())
@ -2533,7 +2533,7 @@ trait SpecExtend<T, I> {
fn spec_extend(&mut self, iter: I);
}
impl<T, I, A: AllocRef> SpecExtend<T, I> for Vec<T, A>
impl<T, I, A: Allocator> SpecExtend<T, I> for Vec<T, A>
where
I: Iterator<Item = T>,
{
@ -2542,7 +2542,7 @@ where
}
}
impl<T, I, A: AllocRef> SpecExtend<T, I> for Vec<T, A>
impl<T, I, A: Allocator> SpecExtend<T, I> for Vec<T, A>
where
I: TrustedLen<Item = T>,
{
@ -2575,7 +2575,7 @@ where
}
}
impl<T, A: AllocRef> SpecExtend<T, IntoIter<T>> for Vec<T, A> {
impl<T, A: Allocator> SpecExtend<T, IntoIter<T>> for Vec<T, A> {
fn spec_extend(&mut self, mut iterator: IntoIter<T>) {
unsafe {
self.append_elements(iterator.as_slice() as _);
@ -2584,7 +2584,7 @@ impl<T, A: AllocRef> SpecExtend<T, IntoIter<T>> for Vec<T, A> {
}
}
impl<'a, T: 'a, I, A: AllocRef + 'a> SpecExtend<&'a T, I> for Vec<T, A>
impl<'a, T: 'a, I, A: Allocator + 'a> SpecExtend<&'a T, I> for Vec<T, A>
where
I: Iterator<Item = &'a T>,
T: Clone,
@ -2594,7 +2594,7 @@ where
}
}
impl<'a, T: 'a, A: AllocRef + 'a> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec<T, A>
impl<'a, T: 'a, A: Allocator + 'a> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec<T, A>
where
T: Copy,
{
@ -2604,7 +2604,7 @@ where
}
}
impl<T, A: AllocRef> Vec<T, A> {
impl<T, A: Allocator> Vec<T, A> {
// leaf method to which various SpecFrom/SpecExtend implementations delegate when
// they have no further optimizations to apply
fn extend_desugared<I: Iterator<Item = T>>(&mut self, mut iterator: I) {
@ -2739,7 +2739,7 @@ impl<T, A: AllocRef> Vec<T, A> {
///
/// [`copy_from_slice`]: ../../std/primitive.slice.html#method.copy_from_slice
#[stable(feature = "extend_ref", since = "1.2.0")]
impl<'a, T: Copy + 'a, A: AllocRef + 'a> Extend<&'a T> for Vec<T, A> {
impl<'a, T: Copy + 'a, A: Allocator + 'a> Extend<&'a T> for Vec<T, A> {
fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
self.spec_extend(iter.into_iter())
}
@ -2771,18 +2771,18 @@ macro_rules! __impl_slice_eq1 {
}
}
__impl_slice_eq1! { [A: AllocRef] Vec<T, A>, Vec<U, A>, #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [A: AllocRef] Vec<T, A>, &[U], #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [A: AllocRef] Vec<T, A>, &mut [U], #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [A: AllocRef] &[T], Vec<U, A>, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] }
__impl_slice_eq1! { [A: AllocRef] &mut [T], Vec<U, A>, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] }
__impl_slice_eq1! { [A: AllocRef] Vec<T, A>, [U], #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] }
__impl_slice_eq1! { [A: AllocRef] [T], Vec<U, A>, #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] }
__impl_slice_eq1! { [A: AllocRef] Cow<'_, [T]>, Vec<U, A> where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [A: Allocator] Vec<T, A>, Vec<U, A>, #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [A: Allocator] Vec<T, A>, &[U], #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [A: Allocator] Vec<T, A>, &mut [U], #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [A: Allocator] &[T], Vec<U, A>, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] }
__impl_slice_eq1! { [A: Allocator] &mut [T], Vec<U, A>, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] }
__impl_slice_eq1! { [A: Allocator] Vec<T, A>, [U], #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] }
__impl_slice_eq1! { [A: Allocator] [T], Vec<U, A>, #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] }
__impl_slice_eq1! { [A: Allocator] Cow<'_, [T]>, Vec<U, A> where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [] Cow<'_, [T]>, &[U] where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [] Cow<'_, [T]>, &mut [U] where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [A: AllocRef, const N: usize] Vec<T, A>, [U; N], #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [A: AllocRef, const N: usize] Vec<T, A>, &[U; N], #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [A: Allocator, const N: usize] Vec<T, A>, [U; N], #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [A: Allocator, const N: usize] Vec<T, A>, &[U; N], #[stable(feature = "rust1", since = "1.0.0")] }
// NOTE: some less important impls are omitted to reduce code bloat
// FIXME(Centril): Reconsider this?
@ -2796,7 +2796,7 @@ __impl_slice_eq1! { [A: AllocRef, const N: usize] Vec<T, A>, &[U; N], #[stable(f
/// Implements comparison of vectors, [lexicographically](core::cmp::Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PartialOrd, A: AllocRef> PartialOrd for Vec<T, A> {
impl<T: PartialOrd, A: Allocator> PartialOrd for Vec<T, A> {
#[inline]
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
PartialOrd::partial_cmp(&**self, &**other)
@ -2804,11 +2804,11 @@ impl<T: PartialOrd, A: AllocRef> PartialOrd for Vec<T, A> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Eq, A: AllocRef> Eq for Vec<T, A> {}
impl<T: Eq, A: Allocator> Eq for Vec<T, A> {}
/// Implements ordering of vectors, [lexicographically](core::cmp::Ord#lexicographical-comparison).
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord, A: AllocRef> Ord for Vec<T, A> {
impl<T: Ord, A: Allocator> Ord for Vec<T, A> {
#[inline]
fn cmp(&self, other: &Self) -> Ordering {
Ord::cmp(&**self, &**other)
@ -2816,7 +2816,7 @@ impl<T: Ord, A: AllocRef> Ord for Vec<T, A> {
}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T, A: AllocRef> Drop for Vec<T, A> {
unsafe impl<#[may_dangle] T, A: Allocator> Drop for Vec<T, A> {
fn drop(&mut self) {
unsafe {
// use drop for [T]
@ -2837,35 +2837,35 @@ impl<T> Default for Vec<T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: fmt::Debug, A: AllocRef> fmt::Debug for Vec<T, A> {
impl<T: fmt::Debug, A: Allocator> fmt::Debug for Vec<T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&**self, f)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: AllocRef> AsRef<Vec<T, A>> for Vec<T, A> {
impl<T, A: Allocator> AsRef<Vec<T, A>> for Vec<T, A> {
fn as_ref(&self) -> &Vec<T, A> {
self
}
}
#[stable(feature = "vec_as_mut", since = "1.5.0")]
impl<T, A: AllocRef> AsMut<Vec<T, A>> for Vec<T, A> {
impl<T, A: Allocator> AsMut<Vec<T, A>> for Vec<T, A> {
fn as_mut(&mut self) -> &mut Vec<T, A> {
self
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: AllocRef> AsRef<[T]> for Vec<T, A> {
impl<T, A: Allocator> AsRef<[T]> for Vec<T, A> {
fn as_ref(&self) -> &[T] {
self
}
}
#[stable(feature = "vec_as_mut", since = "1.5.0")]
impl<T, A: AllocRef> AsMut<[T]> for Vec<T, A> {
impl<T, A: Allocator> AsMut<[T]> for Vec<T, A> {
fn as_mut(&mut self) -> &mut [T] {
self
}
@ -2920,7 +2920,7 @@ where
// note: test pulls in libstd, which causes errors here
#[cfg(not(test))]
#[stable(feature = "vec_from_box", since = "1.18.0")]
impl<T, A: AllocRef> From<Box<[T], A>> for Vec<T, A> {
impl<T, A: Allocator> From<Box<[T], A>> for Vec<T, A> {
fn from(s: Box<[T], A>) -> Self {
let len = s.len();
Self { buf: RawVec::from_box(s), len }
@ -2930,7 +2930,7 @@ impl<T, A: AllocRef> From<Box<[T], A>> for Vec<T, A> {
// note: test pulls in libstd, which causes errors here
#[cfg(not(test))]
#[stable(feature = "box_from_vec", since = "1.20.0")]
impl<T, A: AllocRef> From<Vec<T, A>> for Box<[T], A> {
impl<T, A: Allocator> From<Vec<T, A>> for Box<[T], A> {
fn from(v: Vec<T, A>) -> Self {
v.into_boxed_slice()
}
@ -2944,7 +2944,7 @@ impl From<&str> for Vec<u8> {
}
#[stable(feature = "array_try_from_vec", since = "1.48.0")]
impl<T, A: AllocRef, const N: usize> TryFrom<Vec<T, A>> for [T; N] {
impl<T, A: Allocator, const N: usize> TryFrom<Vec<T, A>> for [T; N] {
type Error = Vec<T, A>;
/// Gets the entire contents of the `Vec<T>` as an array,
@ -3045,8 +3045,10 @@ where
/// let iter: std::vec::IntoIter<_> = v.into_iter();
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<T, #[unstable(feature = "allocator_api", issue = "32838")] A: AllocRef = Global>
{
pub struct IntoIter<
T,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
buf: NonNull<T>,
phantom: PhantomData<T>,
cap: usize,
@ -3056,13 +3058,13 @@ pub struct IntoIter<T, #[unstable(feature = "allocator_api", issue = "32838")] A
}
#[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
impl<T: fmt::Debug, A: AllocRef> fmt::Debug for IntoIter<T, A> {
impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
}
}
impl<T, A: AllocRef> IntoIter<T, A> {
impl<T, A: Allocator> IntoIter<T, A> {
/// Returns the remaining items of this iterator as a slice.
///
/// # Examples
@ -3100,7 +3102,7 @@ impl<T, A: AllocRef> IntoIter<T, A> {
/// Returns a reference to the underlying allocator.
#[unstable(feature = "allocator_api", issue = "32838")]
#[inline]
pub fn alloc_ref(&self) -> &A {
pub fn allocator(&self) -> &A {
&self.alloc
}
@ -3126,19 +3128,19 @@ impl<T, A: AllocRef> IntoIter<T, A> {
}
#[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")]
impl<T, A: AllocRef> AsRef<[T]> for IntoIter<T, A> {
impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
fn as_ref(&self) -> &[T] {
self.as_slice()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Send, A: AllocRef + Send> Send for IntoIter<T, A> {}
unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync, A: AllocRef> Sync for IntoIter<T, A> {}
unsafe impl<T: Sync, A: Allocator> Sync for IntoIter<T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: AllocRef> Iterator for IntoIter<T, A> {
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
type Item = T;
#[inline]
@@ -3195,7 +3197,7 @@ impl<T, A: AllocRef> Iterator for IntoIter<T, A> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: AllocRef> DoubleEndedIterator for IntoIter<T, A> {
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
#[inline]
fn next_back(&mut self) -> Option<T> {
if self.end == self.ptr {
@@ -3215,23 +3217,23 @@ impl<T, A: AllocRef> DoubleEndedIterator for IntoIter<T, A> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: AllocRef> ExactSizeIterator for IntoIter<T, A> {
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
fn is_empty(&self) -> bool {
self.ptr == self.end
}
}
#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: AllocRef> FusedIterator for IntoIter<T, A> {}
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T, A: AllocRef> TrustedLen for IntoIter<T, A> {}
unsafe impl<T, A: Allocator> TrustedLen for IntoIter<T, A> {}
#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
// T: Copy as approximation for !Drop since get_unchecked does not advance self.ptr
// and thus we can't implement drop-handling
unsafe impl<T, A: AllocRef> TrustedRandomAccess for IntoIter<T, A>
unsafe impl<T, A: Allocator> TrustedRandomAccess for IntoIter<T, A>
where
T: Copy,
{
@@ -3241,7 +3243,7 @@ where
}
#[stable(feature = "vec_into_iter_clone", since = "1.8.0")]
impl<T: Clone, A: AllocRef + Clone> Clone for IntoIter<T, A> {
impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
#[cfg(not(test))]
fn clone(&self) -> Self {
self.as_slice().to_vec_in(self.alloc.clone()).into_iter()
@@ -3253,11 +3255,11 @@ impl<T: Clone, A: AllocRef + Clone> Clone for IntoIter<T, A> {
}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T, A: AllocRef> Drop for IntoIter<T, A> {
unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter<T, A> {
fn drop(&mut self) {
struct DropGuard<'a, T, A: AllocRef>(&'a mut IntoIter<T, A>);
struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>);
impl<T, A: AllocRef> Drop for DropGuard<'_, T, A> {
impl<T, A: Allocator> Drop for DropGuard<'_, T, A> {
fn drop(&mut self) {
unsafe {
// `IntoIter::alloc` is not used anymore after this
@@ -3278,10 +3280,10 @@ unsafe impl<#[may_dangle] T, A: AllocRef> Drop for IntoIter<T, A> {
}
#[unstable(issue = "none", feature = "inplace_iteration")]
unsafe impl<T, A: AllocRef> InPlaceIterable for IntoIter<T, A> {}
unsafe impl<T, A: Allocator> InPlaceIterable for IntoIter<T, A> {}
#[unstable(issue = "none", feature = "inplace_iteration")]
unsafe impl<T, A: AllocRef> SourceIter for IntoIter<T, A> {
unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> {
type Source = Self;
#[inline]
@@ -3320,7 +3322,7 @@ impl<T> AsIntoIter for IntoIter<T> {
pub struct Drain<
'a,
T: 'a,
#[unstable(feature = "allocator_api", issue = "32838")] A: AllocRef + 'a = Global,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + 'a = Global,
> {
/// Index of tail to preserve
tail_start: usize,
@@ -3332,13 +3334,13 @@ pub struct Drain<
}
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug, A: AllocRef> fmt::Debug for Drain<'_, T, A> {
impl<T: fmt::Debug, A: Allocator> fmt::Debug for Drain<'_, T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("Drain").field(&self.iter.as_slice()).finish()
}
}
impl<'a, T, A: AllocRef> Drain<'a, T, A> {
impl<'a, T, A: Allocator> Drain<'a, T, A> {
/// Returns the remaining items of this iterator as a slice.
///
/// # Examples
@@ -3358,25 +3360,25 @@ impl<'a, T, A: AllocRef> Drain<'a, T, A> {
/// Returns a reference to the underlying allocator.
#[unstable(feature = "allocator_api", issue = "32838")]
#[inline]
pub fn alloc_ref(&self) -> &A {
unsafe { self.vec.as_ref().alloc_ref() }
pub fn allocator(&self) -> &A {
unsafe { self.vec.as_ref().allocator() }
}
}
#[stable(feature = "vec_drain_as_slice", since = "1.46.0")]
impl<'a, T, A: AllocRef> AsRef<[T]> for Drain<'a, T, A> {
impl<'a, T, A: Allocator> AsRef<[T]> for Drain<'a, T, A> {
fn as_ref(&self) -> &[T] {
self.as_slice()
}
}
#[stable(feature = "drain", since = "1.6.0")]
unsafe impl<T: Sync, A: Sync + AllocRef> Sync for Drain<'_, T, A> {}
unsafe impl<T: Sync, A: Sync + Allocator> Sync for Drain<'_, T, A> {}
#[stable(feature = "drain", since = "1.6.0")]
unsafe impl<T: Send, A: Send + AllocRef> Send for Drain<'_, T, A> {}
unsafe impl<T: Send, A: Send + Allocator> Send for Drain<'_, T, A> {}
#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: AllocRef> Iterator for Drain<'_, T, A> {
impl<T, A: Allocator> Iterator for Drain<'_, T, A> {
type Item = T;
#[inline]
@@ -3390,7 +3392,7 @@ impl<T, A: AllocRef> Iterator for Drain<'_, T, A> {
}
#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: AllocRef> DoubleEndedIterator for Drain<'_, T, A> {
impl<T, A: Allocator> DoubleEndedIterator for Drain<'_, T, A> {
#[inline]
fn next_back(&mut self) -> Option<T> {
self.iter.next_back().map(|elt| unsafe { ptr::read(elt as *const _) })
@@ -3398,13 +3400,13 @@ impl<T, A: AllocRef> DoubleEndedIterator for Drain<'_, T, A> {
}
#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: AllocRef> Drop for Drain<'_, T, A> {
impl<T, A: Allocator> Drop for Drain<'_, T, A> {
fn drop(&mut self) {
/// Continues dropping the remaining elements in the `Drain`, then moves back the
/// un-`Drain`ed elements to restore the original `Vec`.
struct DropGuard<'r, 'a, T, A: AllocRef>(&'r mut Drain<'a, T, A>);
struct DropGuard<'r, 'a, T, A: Allocator>(&'r mut Drain<'a, T, A>);
impl<'r, 'a, T, A: AllocRef> Drop for DropGuard<'r, 'a, T, A> {
impl<'r, 'a, T, A: Allocator> Drop for DropGuard<'r, 'a, T, A> {
fn drop(&mut self) {
// Continue the same loop we have below. If the loop already finished, this does
// nothing.
@@ -3440,17 +3442,17 @@ impl<T, A: AllocRef> Drop for Drain<'_, T, A> {
}
#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: AllocRef> ExactSizeIterator for Drain<'_, T, A> {
impl<T, A: Allocator> ExactSizeIterator for Drain<'_, T, A> {
fn is_empty(&self) -> bool {
self.iter.is_empty()
}
}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T, A: AllocRef> TrustedLen for Drain<'_, T, A> {}
unsafe impl<T, A: Allocator> TrustedLen for Drain<'_, T, A> {}
#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: AllocRef> FusedIterator for Drain<'_, T, A> {}
impl<T, A: Allocator> FusedIterator for Drain<'_, T, A> {}
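For context, a stable-Rust sketch of the `Drain` iterator whose impls are renamed above (values are illustrative):

```rust
fn main() {
    let mut v = vec![1, 2, 3, 4, 5];
    // `drain` removes the selected range from the vector and yields it by value.
    let drained: Vec<i32> = v.drain(1..4).collect();
    assert_eq!(drained, [2, 3, 4]);
    assert_eq!(v, [1, 5]);
}
```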
/// A splicing iterator for `Vec`.
///
@@ -3469,14 +3471,14 @@ impl<T, A: AllocRef> FusedIterator for Drain<'_, T, A> {}
pub struct Splice<
'a,
I: Iterator + 'a,
#[unstable(feature = "allocator_api", issue = "32838")] A: AllocRef + 'a = Global,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + 'a = Global,
> {
drain: Drain<'a, I::Item, A>,
replace_with: I,
}
#[stable(feature = "vec_splice", since = "1.21.0")]
impl<I: Iterator, A: AllocRef> Iterator for Splice<'_, I, A> {
impl<I: Iterator, A: Allocator> Iterator for Splice<'_, I, A> {
type Item = I::Item;
fn next(&mut self) -> Option<Self::Item> {
@@ -3489,17 +3491,17 @@ impl<I: Iterator, A: AllocRef> Iterator for Splice<'_, I, A> {
}
#[stable(feature = "vec_splice", since = "1.21.0")]
impl<I: Iterator, A: AllocRef> DoubleEndedIterator for Splice<'_, I, A> {
impl<I: Iterator, A: Allocator> DoubleEndedIterator for Splice<'_, I, A> {
fn next_back(&mut self) -> Option<Self::Item> {
self.drain.next_back()
}
}
#[stable(feature = "vec_splice", since = "1.21.0")]
impl<I: Iterator, A: AllocRef> ExactSizeIterator for Splice<'_, I, A> {}
impl<I: Iterator, A: Allocator> ExactSizeIterator for Splice<'_, I, A> {}
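Likewise, a stable-Rust sketch of `Splice` in use (values are illustrative):

```rust
fn main() {
    let mut v = vec![1, 2, 3, 4];
    // `splice` replaces the range with the new items and yields the old ones.
    let removed: Vec<i32> = v.splice(1..3, [7, 8, 9].iter().copied()).collect();
    assert_eq!(removed, [2, 3]);
    assert_eq!(v, [1, 7, 8, 9, 4]);
}
```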
#[stable(feature = "vec_splice", since = "1.21.0")]
impl<I: Iterator, A: AllocRef> Drop for Splice<'_, I, A> {
impl<I: Iterator, A: Allocator> Drop for Splice<'_, I, A> {
fn drop(&mut self) {
self.drain.by_ref().for_each(drop);
@@ -3540,7 +3542,7 @@ impl<I: Iterator, A: AllocRef> Drop for Splice<'_, I, A> {
}
/// Private helper methods for `Splice::drop`
impl<T, A: AllocRef> Drain<'_, T, A> {
impl<T, A: Allocator> Drain<'_, T, A> {
/// The range from `self.vec.len` to `self.tail_start` contains elements
/// that have been moved out.
/// Fill that range as much as possible with new elements from the `replace_with` iterator.
@@ -3599,7 +3601,7 @@ pub struct DrainFilter<
'a,
T,
F,
#[unstable(feature = "allocator_api", issue = "32838")] A: AllocRef = Global,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> where
F: FnMut(&mut T) -> bool,
{
@@ -3620,20 +3622,20 @@ pub struct DrainFilter<
panic_flag: bool,
}
impl<T, F, A: AllocRef> DrainFilter<'_, T, F, A>
impl<T, F, A: Allocator> DrainFilter<'_, T, F, A>
where
F: FnMut(&mut T) -> bool,
{
/// Returns a reference to the underlying allocator.
#[unstable(feature = "allocator_api", issue = "32838")]
#[inline]
pub fn alloc_ref(&self) -> &A {
self.vec.alloc_ref()
pub fn allocator(&self) -> &A {
self.vec.allocator()
}
}
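A sketch of `DrainFilter` from the caller's side; this assumes a nightly toolchain with the unstable `drain_filter` feature (tracked in #43244):

```rust
#![feature(drain_filter)]

fn main() {
    let mut v = vec![1, 2, 3, 4, 5, 6];
    // Elements for which the predicate returns true are removed and yielded.
    let evens: Vec<i32> = v.drain_filter(|x| *x % 2 == 0).collect();
    assert_eq!(evens, [2, 4, 6]);
    assert_eq!(v, [1, 3, 5]);
}
```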
#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
impl<T, F, A: AllocRef> Iterator for DrainFilter<'_, T, F, A>
impl<T, F, A: Allocator> Iterator for DrainFilter<'_, T, F, A>
where
F: FnMut(&mut T) -> bool,
{
@@ -3671,19 +3673,19 @@ where
}
#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
impl<T, F, A: AllocRef> Drop for DrainFilter<'_, T, F, A>
impl<T, F, A: Allocator> Drop for DrainFilter<'_, T, F, A>
where
F: FnMut(&mut T) -> bool,
{
fn drop(&mut self) {
struct BackshiftOnDrop<'a, 'b, T, F, A: AllocRef>
struct BackshiftOnDrop<'a, 'b, T, F, A: Allocator>
where
F: FnMut(&mut T) -> bool,
{
drain: &'b mut DrainFilter<'a, T, F, A>,
}
impl<'a, 'b, T, F, A: AllocRef> Drop for BackshiftOnDrop<'a, 'b, T, F, A>
impl<'a, 'b, T, F, A: Allocator> Drop for BackshiftOnDrop<'a, 'b, T, F, A>
where
F: FnMut(&mut T) -> bool,
{
View File
@@ -1,4 +1,4 @@
use std::alloc::{AllocRef, Global, Layout, System};
use std::alloc::{Allocator, Global, Layout, System};
/// Issue #45955 and #62251.
#[test]
@@ -11,7 +11,7 @@ fn std_heap_overaligned_request() {
check_overalign_requests(Global)
}
fn check_overalign_requests<T: AllocRef>(allocator: T) {
fn check_overalign_requests<T: Allocator>(allocator: T) {
for &align in &[4, 8, 16, 32] {
// less than and bigger than `MIN_ALIGN`
for &size in &[align / 2, align - 1] {
@@ -20,7 +20,7 @@ fn check_overalign_requests<T: AllocRef>(allocator: T) {
unsafe {
let pointers: Vec<_> = (0..iterations)
.map(|_| {
allocator.alloc(Layout::from_size_align(size, align).unwrap()).unwrap()
allocator.allocate(Layout::from_size_align(size, align).unwrap()).unwrap()
})
.collect();
for &ptr in &pointers {
@@ -33,7 +33,7 @@ fn check_overalign_requests<T: AllocRef>(allocator: T) {
// Clean up
for &ptr in &pointers {
allocator.dealloc(
allocator.deallocate(
ptr.as_non_null_ptr(),
Layout::from_size_align(size, align).unwrap(),
)
View File
@@ -19,7 +19,7 @@ const fn size_align<T>() -> (usize, usize) {
/// even though `GlobalAlloc` requires that all memory requests
/// be non-zero in size. A caller must either ensure that conditions
/// like this are met, use specific allocators with looser
/// requirements, or use the more lenient `AllocRef` interface.)
/// requirements, or use the more lenient `Allocator` interface.)
#[stable(feature = "alloc_layout", since = "1.28.0")]
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[lang = "alloc_layout"]
View File
@@ -40,14 +40,14 @@ impl fmt::Display for AllocError {
}
}
/// An implementation of `AllocRef` can allocate, grow, shrink, and deallocate arbitrary blocks of
/// An implementation of `Allocator` can allocate, grow, shrink, and deallocate arbitrary blocks of
/// data described via [`Layout`][].
///
/// `AllocRef` is designed to be implemented on ZSTs, references, or smart pointers because having
/// `Allocator` is designed to be implemented on ZSTs, references, or smart pointers because having
/// an allocator like `MyAlloc([u8; N])` cannot be moved without updating the pointers to the
/// allocated memory.
///
/// Unlike [`GlobalAlloc`][], zero-sized allocations are allowed in `AllocRef`. If an underlying
/// Unlike [`GlobalAlloc`][], zero-sized allocations are allowed in `Allocator`. If an underlying
/// allocator does not support this (like jemalloc) or returns a null pointer (such as
/// `libc::malloc`), this must be caught by the implementation.
///
@@ -56,18 +56,18 @@ impl fmt::Display for AllocError {
/// Some of the methods require that a memory block be *currently allocated* via an allocator. This
/// means that:
///
/// * the starting address for that memory block was previously returned by [`alloc`], [`grow`], or
/// * the starting address for that memory block was previously returned by [`allocate`], [`grow`], or
/// [`shrink`], and
///
/// * the memory block has not been subsequently deallocated, where blocks are either deallocated
/// directly by being passed to [`dealloc`] or were changed by being passed to [`grow`] or
/// directly by being passed to [`deallocate`] or were changed by being passed to [`grow`] or
/// [`shrink`] that returns `Ok`. If `grow` or `shrink` have returned `Err`, the passed pointer
/// remains valid.
///
/// [`alloc`]: AllocRef::alloc
/// [`grow`]: AllocRef::grow
/// [`shrink`]: AllocRef::shrink
/// [`dealloc`]: AllocRef::dealloc
/// [`allocate`]: Allocator::allocate
/// [`grow`]: Allocator::grow
/// [`shrink`]: Allocator::shrink
/// [`deallocate`]: Allocator::deallocate
///
/// ### Memory fitting
///
@@ -79,7 +79,7 @@ impl fmt::Display for AllocError {
///
/// * The provided [`layout.size()`] must fall in the range `min ..= max`, where:
/// - `min` is the size of the layout most recently used to allocate the block, and
/// - `max` is the latest actual size returned from [`alloc`], [`grow`], or [`shrink`].
/// - `max` is the latest actual size returned from [`allocate`], [`grow`], or [`shrink`].
///
/// [`layout.align()`]: Layout::align
/// [`layout.size()`]: Layout::size
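A sketch of what the fit rules above permit in practice; this assumes a nightly toolchain with the unstable `allocator_api`, `slice_ptr_get`, and `slice_ptr_len` features, and the layout is an arbitrary example:

```rust
#![feature(allocator_api, slice_ptr_get, slice_ptr_len)]

use std::alloc::{Allocator, Global, Layout};

fn main() {
    let layout = Layout::array::<u8>(10).unwrap();
    let block = Global.allocate(layout).unwrap();
    // The allocator may hand back more bytes than requested ...
    assert!(block.len() >= layout.size());
    // ... and any size in `layout.size() ..= block.len()` fits, so the block
    // may be freed with the layout originally used to allocate it.
    unsafe { Global.deallocate(block.as_non_null_ptr(), layout) };
}
```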
@@ -97,7 +97,7 @@ impl fmt::Display for AllocError {
///
/// [*currently allocated*]: #currently-allocated-memory
#[unstable(feature = "allocator_api", issue = "32838")]
pub unsafe trait AllocRef {
pub unsafe trait Allocator {
/// Attempts to allocate a block of memory.
///
/// On success, returns a [`NonNull<[u8]>`][NonNull] meeting the size and alignment guarantees of `layout`.
@@ -118,9 +118,9 @@ pub unsafe trait AllocRef {
/// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar.
///
/// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
fn alloc(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError>;
fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError>;
/// Behaves like `alloc`, but also ensures that the returned memory is zero-initialized.
/// Behaves like `allocate`, but also ensures that the returned memory is zero-initialized.
///
/// # Errors
///
@@ -135,8 +135,8 @@ pub unsafe trait AllocRef {
/// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar.
///
/// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
fn alloc_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
let ptr = self.alloc(layout)?;
fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
let ptr = self.allocate(layout)?;
// SAFETY: `alloc` returns a valid memory block
unsafe { ptr.as_non_null_ptr().as_ptr().write_bytes(0, ptr.len()) }
Ok(ptr)
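A usage sketch of the renamed `allocate_zeroed`, again assuming nightly with the unstable `allocator_api` and `slice_ptr_get` features:

```rust
#![feature(allocator_api, slice_ptr_get)]

use std::alloc::{Allocator, Global, Layout};

fn main() {
    let layout = Layout::array::<u8>(16).unwrap();
    let block = Global.allocate_zeroed(layout).unwrap();
    unsafe {
        // Every byte handed back must read as zero.
        assert!(block.as_ref().iter().all(|&b| b == 0));
        Global.deallocate(block.as_non_null_ptr(), layout);
    }
}
```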
@@ -151,7 +151,7 @@ pub unsafe trait AllocRef {
///
/// [*currently allocated*]: #currently-allocated-memory
/// [*fit*]: #memory-fitting
unsafe fn dealloc(&self, ptr: NonNull<u8>, layout: Layout);
unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout);
/// Attempts to extend the memory block.
///
@@ -200,7 +200,7 @@ pub unsafe trait AllocRef {
"`new_layout.size()` must be greater than or equal to `old_layout.size()`"
);
let new_ptr = self.alloc(new_layout)?;
let new_ptr = self.allocate(new_layout)?;
// SAFETY: because `new_layout.size()` must be greater than or equal to
// `old_layout.size()`, both the old and new memory allocation are valid for reads and
@ -209,7 +209,7 @@ pub unsafe trait AllocRef {
// safe. The safety contract for `dealloc` must be upheld by the caller.
unsafe {
ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_layout.size());
self.dealloc(ptr, old_layout);
self.deallocate(ptr, old_layout);
}
Ok(new_ptr)
@@ -261,7 +261,7 @@ pub unsafe trait AllocRef {
"`new_layout.size()` must be greater than or equal to `old_layout.size()`"
);
let new_ptr = self.alloc_zeroed(new_layout)?;
let new_ptr = self.allocate_zeroed(new_layout)?;
// SAFETY: because `new_layout.size()` must be greater than or equal to
// `old_layout.size()`, both the old and new memory allocation are valid for reads and
@ -270,7 +270,7 @@ pub unsafe trait AllocRef {
// safe. The safety contract for `dealloc` must be upheld by the caller.
unsafe {
ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_layout.size());
self.dealloc(ptr, old_layout);
self.deallocate(ptr, old_layout);
}
Ok(new_ptr)
@@ -323,7 +323,7 @@ pub unsafe trait AllocRef {
"`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
);
let new_ptr = self.alloc(new_layout)?;
let new_ptr = self.allocate(new_layout)?;
// SAFETY: because `new_layout.size()` must be lower than or equal to
// `old_layout.size()`, both the old and new memory allocation are valid for reads and
@ -332,15 +332,15 @@ pub unsafe trait AllocRef {
// safe. The safety contract for `dealloc` must be upheld by the caller.
unsafe {
ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_layout.size());
self.dealloc(ptr, old_layout);
self.deallocate(ptr, old_layout);
}
Ok(new_ptr)
}
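From the caller's side, the renamed `grow`/`shrink` entry points round-trip roughly like this sketch (nightly; assumes the unstable `allocator_api` and `slice_ptr_get` features; the layouts are illustrative):

```rust
#![feature(allocator_api, slice_ptr_get)]

use std::alloc::{Allocator, Global, Layout};

fn main() {
    let small = Layout::array::<u8>(8).unwrap();
    let large = Layout::array::<u8>(64).unwrap();

    let block = Global.allocate(small).unwrap();
    // SAFETY: `block` is currently allocated via `Global` with layout `small`,
    // and `large.size() >= small.size()` as `grow` requires.
    let grown = unsafe { Global.grow(block.as_non_null_ptr(), small, large) }.unwrap();
    // SAFETY: the block now has layout `large`; shrinking back is symmetric.
    let shrunk = unsafe { Global.shrink(grown.as_non_null_ptr(), large, small) }.unwrap();
    unsafe { Global.deallocate(shrunk.as_non_null_ptr(), small) };
}
```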
/// Creates a "by reference" adaptor for this instance of `AllocRef`.
/// Creates a "by reference" adaptor for this instance of `Allocator`.
///
/// The returned adaptor also implements `AllocRef` and will simply borrow this.
/// The returned adaptor also implements `Allocator` and will simply borrow this.
#[inline(always)]
fn by_ref(&self) -> &Self {
self
@@ -348,24 +348,24 @@ pub unsafe trait AllocRef {
}
#[unstable(feature = "allocator_api", issue = "32838")]
unsafe impl<A> AllocRef for &A
unsafe impl<A> Allocator for &A
where
A: AllocRef + ?Sized,
A: Allocator + ?Sized,
{
#[inline]
fn alloc(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
(**self).alloc(layout)
fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
(**self).allocate(layout)
}
#[inline]
fn alloc_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
(**self).alloc_zeroed(layout)
fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
(**self).allocate_zeroed(layout)
}
#[inline]
unsafe fn dealloc(&self, ptr: NonNull<u8>, layout: Layout) {
unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
// SAFETY: the safety contract must be upheld by the caller
unsafe { (**self).dealloc(ptr, layout) }
unsafe { (**self).deallocate(ptr, layout) }
}
#[inline]
View File
@@ -439,11 +439,11 @@ impl<T> NonNull<[T]> {
/// ```rust
/// #![feature(allocator_api, ptr_as_uninit)]
///
/// use std::alloc::{AllocRef, Layout, Global};
/// use std::alloc::{Allocator, Layout, Global};
/// use std::mem::MaybeUninit;
/// use std::ptr::NonNull;
///
/// let memory: NonNull<[u8]> = Global.alloc(Layout::new::<[u8; 32]>())?;
/// let memory: NonNull<[u8]> = Global.allocate(Layout::new::<[u8; 32]>())?;
/// // This is safe as `memory` is valid for reads and writes for `memory.len()` many bytes.
/// // Note that calling `memory.as_mut()` is not allowed here as the content may be uninitialized.
/// # #[allow(unused_variables)]
View File
@@ -149,7 +149,7 @@ impl System {
}
}
// SAFETY: Same as `AllocRef::grow`
// SAFETY: Same as `Allocator::grow`
#[inline]
unsafe fn grow_impl(
&self,
@@ -190,29 +190,29 @@ impl System {
old_size => unsafe {
let new_ptr = self.alloc_impl(new_layout, zeroed)?;
ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size);
AllocRef::dealloc(&self, ptr, old_layout);
Allocator::deallocate(&self, ptr, old_layout);
Ok(new_ptr)
},
}
}
}
// The AllocRef impl checks the layout size to be non-zero and forwards to the GlobalAlloc impl,
// The Allocator impl checks the layout size to be non-zero and forwards to the GlobalAlloc impl,
// which is in `std::sys::*::alloc`.
#[unstable(feature = "allocator_api", issue = "32838")]
unsafe impl AllocRef for System {
unsafe impl Allocator for System {
#[inline]
fn alloc(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
self.alloc_impl(layout, false)
}
#[inline]
fn alloc_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
self.alloc_impl(layout, true)
}
#[inline]
unsafe fn dealloc(&self, ptr: NonNull<u8>, layout: Layout) {
unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
if layout.size() != 0 {
// SAFETY: `layout` is non-zero in size,
// other conditions must be upheld by the caller
@@ -257,7 +257,7 @@ unsafe impl AllocRef for System {
match new_layout.size() {
// SAFETY: conditions must be upheld by the caller
0 => unsafe {
AllocRef::dealloc(&self, ptr, old_layout);
Allocator::deallocate(&self, ptr, old_layout);
Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
},
@@ -277,9 +277,9 @@ unsafe impl AllocRef for System {
// `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
// for `dealloc` must be upheld by the caller.
new_size => unsafe {
let new_ptr = AllocRef::alloc(&self, new_layout)?;
let new_ptr = Allocator::allocate(&self, new_layout)?;
ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
AllocRef::dealloc(&self, ptr, old_layout);
Allocator::deallocate(&self, ptr, old_layout);
Ok(new_ptr)
},
}
View File
@@ -8,9 +8,8 @@
extern crate helper;
use std::alloc::{self, AllocRef, Global, Layout, System};
use std::alloc::{self, Allocator, Global, Layout, System};
use std::sync::atomic::{AtomicUsize, Ordering};
use std::ptr::NonNull;
static HITS: AtomicUsize = AtomicUsize::new(0);
@@ -24,7 +23,7 @@ unsafe impl alloc::GlobalAlloc for A {
unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
HITS.fetch_add(1, Ordering::SeqCst);
AllocRef::dealloc(&System, NonNull::new(ptr).unwrap(), layout)
alloc::GlobalAlloc::dealloc(&System, ptr, layout)
}
}
@@ -39,10 +38,10 @@ fn main() {
unsafe {
let layout = Layout::from_size_align(4, 2).unwrap();
let memory = Global.alloc(layout.clone()).unwrap();
let memory = Global.allocate(layout.clone()).unwrap();
helper::work_with(&memory);
assert_eq!(HITS.load(Ordering::SeqCst), n + 1);
Global.dealloc(memory.as_non_null_ptr(), layout);
Global.deallocate(memory.as_non_null_ptr(), layout);
assert_eq!(HITS.load(Ordering::SeqCst), n + 2);
let s = String::with_capacity(10);
@@ -51,10 +50,10 @@ fn main() {
drop(s);
assert_eq!(HITS.load(Ordering::SeqCst), n + 4);
let memory = System.alloc(layout.clone()).unwrap();
assert_eq!(HITS.load(Ordering::SeqCst), n + 4);
let memory = System.allocate(layout.clone()).unwrap();
helper::work_with(&memory);
System.dealloc(memory.as_non_null_ptr(), layout);
assert_eq!(HITS.load(Ordering::SeqCst), n + 4);
System.deallocate(memory.as_non_null_ptr(), layout);
assert_eq!(HITS.load(Ordering::SeqCst), n + 4);
}
}
View File
@@ -10,7 +10,7 @@
extern crate custom;
extern crate helper;
use std::alloc::{AllocRef, Global, Layout, System};
use std::alloc::{Allocator, Global, Layout, System};
use std::sync::atomic::{AtomicUsize, Ordering};
#[global_allocator]
@@ -21,16 +21,16 @@ fn main() {
let n = GLOBAL.0.load(Ordering::SeqCst);
let layout = Layout::from_size_align(4, 2).unwrap();
let memory = Global.alloc(layout.clone()).unwrap();
let memory = Global.allocate(layout.clone()).unwrap();
helper::work_with(&memory);
assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 1);
Global.dealloc(memory.as_non_null_ptr(), layout);
Global.deallocate(memory.as_non_null_ptr(), layout);
assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2);
let memory = System.alloc(layout.clone()).unwrap();
let memory = System.allocate(layout.clone()).unwrap();
assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2);
helper::work_with(&memory);
System.dealloc(memory.as_non_null_ptr(), layout);
System.deallocate(memory.as_non_null_ptr(), layout);
assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2);
}
}
View File
@@ -6,7 +6,7 @@ LL | type Ty = Vec<[u8]>;
|
::: $SRC_DIR/alloc/src/vec.rs:LL:COL
|
LL | pub struct Vec<T, #[unstable(feature = "allocator_api", issue = "32838")] A: AllocRef = Global> {
LL | pub struct Vec<T, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global> {
| - required by this bound in `Vec`
|
= help: the trait `Sized` is not implemented for `[u8]`
View File
@@ -17,7 +17,7 @@ LL | let x: Vec<dyn Trait + Sized> = Vec::new();
|
::: $SRC_DIR/alloc/src/vec.rs:LL:COL
|
LL | pub struct Vec<T, #[unstable(feature = "allocator_api", issue = "32838")] A: AllocRef = Global> {
LL | pub struct Vec<T, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global> {
| - required by this bound in `Vec`
|
= help: the trait `Sized` is not implemented for `dyn Trait`
View File
@@ -1,26 +1,26 @@
#![feature(allocator_api)]
use std::alloc::{AllocError, AllocRef, Layout, System};
use std::alloc::{AllocError, Allocator, Layout, System};
use std::ptr::NonNull;
use std::boxed::Box;
struct Allocator {}
struct Alloc {}
unsafe impl AllocRef for Allocator {
fn alloc(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
System.alloc(layout)
unsafe impl Allocator for Alloc {
fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
System.allocate(layout)
}
unsafe fn dealloc(&self, ptr: NonNull<u8>, layout: Layout) {
System.dealloc(ptr, layout)
unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
System.deallocate(ptr, layout)
}
}
fn use_value(_: u32) {}
fn main() {
let alloc = Allocator {};
let alloc = Alloc {};
let boxed = Box::new_in(10, alloc.by_ref());
let theref = Box::leak(boxed);
drop(alloc);
View File
@@ -6,7 +6,7 @@ LL | impl AsRef<Q> for Box<Q> {
|
= note: conflicting implementation in crate `alloc`:
- impl<T, A> AsRef<T> for Box<T, A>
where A: AllocRef, T: ?Sized;
where A: Allocator, T: ?Sized;
error[E0119]: conflicting implementations of trait `std::convert::From<S>` for type `S`:
--> $DIR/conflict-with-std.rs:12:1
View File
@@ -6,7 +6,7 @@ LL | fn iceman(c: Vec<[i32]>) {}
|
::: $SRC_DIR/alloc/src/vec.rs:LL:COL
|
LL | pub struct Vec<T, #[unstable(feature = "allocator_api", issue = "32838")] A: AllocRef = Global> {
LL | pub struct Vec<T, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global> {
| - required by this bound in `Vec`
|
= help: the trait `Sized` is not implemented for `[i32]`
View File
@@ -6,7 +6,7 @@ LL | impl<T> Drop for T where T: A {
|
= note: conflicting implementation in crate `alloc`:
- impl<T, A> Drop for Box<T, A>
where A: AllocRef, T: ?Sized;
where A: Allocator, T: ?Sized;
= note: downstream crates may implement trait `A` for type `std::boxed::Box<_, _>`
error[E0120]: the `Drop` trait may only be implemented for structs, enums, and unions
View File
@@ -7,7 +7,7 @@
#![feature(allocator_api)]
#![feature(slice_ptr_get)]
use std::alloc::{handle_alloc_error, AllocRef, Global, Layout};
use std::alloc::{handle_alloc_error, Allocator, Global, Layout};
use std::ptr::{self, NonNull};
fn main() {
@@ -42,7 +42,7 @@ unsafe fn test_triangle() -> bool {
println!("allocate({:?})", layout);
}
let ptr = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
let ptr = Global.allocate(layout).unwrap_or_else(|_| handle_alloc_error(layout));
if PRINT {
println!("allocate({:?}) = {:?}", layout, ptr);
@@ -56,7 +56,7 @@ unsafe fn test_triangle() -> bool {
println!("deallocate({:?}, {:?}", ptr, layout);
}
Global.dealloc(NonNull::new_unchecked(ptr), layout);
Global.deallocate(NonNull::new_unchecked(ptr), layout);
}
unsafe fn reallocate(ptr: *mut u8, old: Layout, new: Layout) -> *mut u8 {
View File
@@ -4,7 +4,7 @@
// pretty-expanded FIXME #23616
#![feature(allocator_api)]
use std::alloc::{handle_alloc_error, AllocRef, Global, Layout};
use std::alloc::{handle_alloc_error, Allocator, Global, Layout};
use std::ptr::NonNull;
struct arena(());
@@ -22,23 +22,23 @@ struct Ccx {
x: isize,
}
fn alloc(_bcx: &arena) -> &Bcx<'_> {
fn allocate(_bcx: &arena) -> &Bcx<'_> {
unsafe {
let layout = Layout::new::<Bcx>();
let ptr = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
let ptr = Global.allocate(layout).unwrap_or_else(|_| handle_alloc_error(layout));
&*(ptr.as_ptr() as *const _)
}
}
fn h<'a>(bcx: &'a Bcx<'a>) -> &'a Bcx<'a> {
return alloc(bcx.fcx.arena);
return allocate(bcx.fcx.arena);
}
fn g(fcx: &Fcx) {
let bcx = Bcx { fcx };
let bcx2 = h(&bcx);
unsafe {
Global.dealloc(NonNull::new_unchecked(bcx2 as *const _ as *mut _), Layout::new::<Bcx>());
Global.deallocate(NonNull::new_unchecked(bcx2 as *const _ as *mut _), Layout::new::<Bcx>());
}
}
View File
@@ -22,7 +22,7 @@ LL | fn clone(&self) -> Self;
|
LL | / pub struct Box<
LL | | T: ?Sized,
LL | | #[unstable(feature = "allocator_api", issue = "32838")] A: AllocRef = Global,
LL | | #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
LL | | >(Unique<T>, A);
| |________________- doesn't satisfy `Box<dyn Foo>: Clone`
|
View File
@@ -19,7 +19,7 @@ LL | fn clone(&self) -> Self;
|
LL | / pub struct Box<
LL | | T: ?Sized,
LL | | #[unstable(feature = "allocator_api", issue = "32838")] A: AllocRef = Global,
LL | | #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
LL | | >(Unique<T>, A);
| |________________- doesn't satisfy `Box<R>: Clone`
|