Mirror of https://github.com/rust-lang/rust.git (synced 2025-04-28 02:57:37 +00:00)
parent fd242ee64c
commit fddf51ee0b
@@ -1 +1 @@
-Subproject commit 6a8f0a27e9a58c55c89d07bc43a176fdae5e051c
+Subproject commit 498ac2997420f7b25f7cd0a3f8202950d8ad93ec
@@ -30,6 +30,7 @@ looks like:
 #![feature(global_allocator, allocator_api, heap_api)]
 
 use std::alloc::{GlobalAlloc, System, Layout, Void};
+use std::ptr::NonNull;
 
 struct MyAllocator;
 
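Note, not part of the diff: the hunk above shows only the head of the updated documentation example. Below is a minimal sketch of how a custom global allocator could look against the API at this commit. The `GlobalAlloc` signatures (`*mut Void` in and out, null meaning failure) are inferred from the old lines visible elsewhere in this diff, and `Void` was renamed in later nightlies, so this targets this revision only; the `NonNull` import added above serves the `Alloc`-trait half of the example, which this hunk does not show.

    // Sketch under the allocator API of this commit; `Void` and these
    // GlobalAlloc signatures are assumptions drawn from this diff,
    // not current Rust.
    #![feature(global_allocator, allocator_api, heap_api)]

    use std::alloc::{GlobalAlloc, System, Layout, Void};

    struct MyAllocator;

    unsafe impl GlobalAlloc for MyAllocator {
        unsafe fn alloc(&self, layout: Layout) -> *mut Void {
            // Forward to the system allocator; a null return signals failure.
            System.alloc(layout)
        }
        unsafe fn dealloc(&self, ptr: *mut Void, layout: Layout) {
            System.dealloc(ptr, layout)
        }
    }

    #[global_allocator]
    static GLOBAL: MyAllocator = MyAllocator;

    fn main() {
        // Heap allocations in this program now route through MyAllocator.
        let v = vec![1u8, 2, 3];
        drop(v);
    }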
@@ -16,6 +16,7 @@
             issue = "32838")]
 
 use core::intrinsics::{min_align_of_val, size_of_val};
+use core::ptr::NonNull;
 use core::usize;
 
 #[doc(inline)]
@@ -120,27 +121,27 @@ unsafe impl GlobalAlloc for Global {
 
 unsafe impl Alloc for Global {
     #[inline]
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
         GlobalAlloc::alloc(self, layout).into()
     }
 
     #[inline]
-    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
-        GlobalAlloc::dealloc(self, ptr as *mut Void, layout)
+    unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout) {
+        GlobalAlloc::dealloc(self, ptr.as_ptr(), layout)
     }
 
     #[inline]
     unsafe fn realloc(&mut self,
-                      ptr: *mut u8,
+                      ptr: NonNull<Void>,
                       layout: Layout,
                       new_size: usize)
-                      -> Result<*mut u8, AllocErr>
+                      -> Result<NonNull<Void>, AllocErr>
     {
-        GlobalAlloc::realloc(self, ptr as *mut Void, layout, new_size).into()
+        GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size).into()
     }
 
     #[inline]
-    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
         GlobalAlloc::alloc_zeroed(self, layout).into()
     }
 
@@ -195,8 +196,8 @@ mod tests {
             let ptr = Global.alloc_zeroed(layout.clone())
                             .unwrap_or_else(|_| Global.oom());
 
-            let end = ptr.offset(layout.size() as isize);
-            let mut i = ptr;
+            let mut i = ptr.cast::<u8>().as_ptr();
+            let end = i.offset(layout.size() as isize);
             while i < end {
                 assert_eq!(*i, 0);
                 i = i.offset(1);
@@ -512,15 +512,13 @@ impl<T: ?Sized> Arc<T> {
     // Non-inlined part of `drop`.
     #[inline(never)]
     unsafe fn drop_slow(&mut self) {
-        let ptr = self.ptr.as_ptr();
-
         // Destroy the data at this time, even though we may not free the box
         // allocation itself (there may still be weak pointers lying around).
         ptr::drop_in_place(&mut self.ptr.as_mut().data);
 
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
-            Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
+            Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref()))
         }
     }
 
@@ -558,7 +556,7 @@ impl<T: ?Sized> Arc<T> {
             .unwrap_or_else(|_| Global.oom());
 
         // Initialize the real ArcInner
-        let inner = set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>;
+        let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut ArcInner<T>;
 
         ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
         ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));
@@ -625,7 +623,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
         // In the event of a panic, elements that have been written
         // into the new ArcInner will be dropped, then the memory freed.
         struct Guard<T> {
-            mem: *mut u8,
+            mem: NonNull<u8>,
             elems: *mut T,
             layout: Layout,
             n_elems: usize,
@@ -639,7 +637,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
                     let slice = from_raw_parts_mut(self.elems, self.n_elems);
                     ptr::drop_in_place(slice);
 
-                    Global.dealloc(self.mem, self.layout.clone());
+                    Global.dealloc(self.mem.as_void(), self.layout.clone());
                 }
             }
         }
@@ -655,7 +653,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
             let elems = &mut (*ptr).data as *mut [T] as *mut T;
 
             let mut guard = Guard{
-                mem: mem,
+                mem: NonNull::new_unchecked(mem),
                 elems: elems,
                 layout: layout,
                 n_elems: 0,
@@ -1147,8 +1145,6 @@ impl<T: ?Sized> Drop for Weak<T> {
    /// assert!(other_weak_foo.upgrade().is_none());
    /// ```
    fn drop(&mut self) {
-        let ptr = self.ptr.as_ptr();
-
        // If we find out that we were the last weak pointer, then its time to
        // deallocate the data entirely. See the discussion in Arc::drop() about
        // the memory orderings
@@ -1160,7 +1156,7 @@ impl<T: ?Sized> Drop for Weak<T> {
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
             unsafe {
-                Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
+                Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref()))
             }
         }
     }
@@ -236,7 +236,7 @@ impl<K, V> Root<K, V> {
     pub fn pop_level(&mut self) {
         debug_assert!(self.height > 0);
 
-        let top = self.node.ptr.as_ptr() as *mut u8;
+        let top = self.node.ptr;
 
         self.node = unsafe {
             BoxedNode::from_ptr(self.as_mut()
@@ -249,7 +249,7 @@ impl<K, V> Root<K, V> {
         self.as_mut().as_leaf_mut().parent = ptr::null();
 
         unsafe {
-            Global.dealloc(top, Layout::new::<InternalNode<K, V>>());
+            Global.dealloc(NonNull::from(top).as_void(), Layout::new::<InternalNode<K, V>>());
         }
     }
 }
@@ -433,9 +433,9 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
             marker::Edge
         >
     > {
-        let ptr = self.as_leaf() as *const LeafNode<K, V> as *const u8 as *mut u8;
+        let node = self.node;
         let ret = self.ascend().ok();
-        Global.dealloc(ptr, Layout::new::<LeafNode<K, V>>());
+        Global.dealloc(node.as_void(), Layout::new::<LeafNode<K, V>>());
         ret
     }
 }
@@ -454,9 +454,9 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
             marker::Edge
         >
     > {
-        let ptr = self.as_internal() as *const InternalNode<K, V> as *const u8 as *mut u8;
+        let node = self.node;
         let ret = self.ascend().ok();
-        Global.dealloc(ptr, Layout::new::<InternalNode<K, V>>());
+        Global.dealloc(node.as_void(), Layout::new::<InternalNode<K, V>>());
         ret
     }
 }
@@ -1239,12 +1239,12 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
             }
 
             Global.dealloc(
-                right_node.node.as_ptr() as *mut u8,
+                right_node.node.as_void(),
                 Layout::new::<InternalNode<K, V>>(),
             );
         } else {
             Global.dealloc(
-                right_node.node.as_ptr() as *mut u8,
+                right_node.node.as_void(),
                 Layout::new::<LeafNode<K, V>>(),
             );
         }
@@ -8,14 +8,20 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-pub use alloc::{Excess, Layout, AllocErr, CannotReallocInPlace};
+#![allow(deprecated)]
+
+pub use alloc::{Layout, AllocErr, CannotReallocInPlace, Void};
 use core::alloc::Alloc as CoreAlloc;
+use core::ptr::NonNull;
 
 #[doc(hidden)]
 pub mod __core {
     pub use core::*;
 }
 
+#[derive(Debug)]
+pub struct Excess(pub *mut u8, pub usize);
+
 /// Compatibility with older versions of #[global_allocator] during bootstrap
 pub unsafe trait Alloc {
     unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr>;
@@ -42,13 +48,13 @@ pub unsafe trait Alloc {
                       new_layout: Layout) -> Result<(), CannotReallocInPlace>;
 }
 
-#[allow(deprecated)]
 unsafe impl<T> Alloc for T where T: CoreAlloc {
     unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
-        CoreAlloc::alloc(self, layout)
+        CoreAlloc::alloc(self, layout).map(|ptr| ptr.cast().as_ptr())
     }
 
     unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+        let ptr = NonNull::new_unchecked(ptr as *mut Void);
         CoreAlloc::dealloc(self, ptr, layout)
     }
 
@@ -64,28 +70,33 @@ unsafe impl<T> Alloc for T where T: CoreAlloc {
                       ptr: *mut u8,
                       layout: Layout,
                       new_layout: Layout) -> Result<*mut u8, AllocErr> {
-        CoreAlloc::realloc(self, ptr, layout, new_layout.size())
+        let ptr = NonNull::new_unchecked(ptr as *mut Void);
+        CoreAlloc::realloc(self, ptr, layout, new_layout.size()).map(|ptr| ptr.cast().as_ptr())
     }
 
     unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
-        CoreAlloc::alloc_zeroed(self, layout)
+        CoreAlloc::alloc_zeroed(self, layout).map(|ptr| ptr.cast().as_ptr())
     }
 
     unsafe fn alloc_excess(&mut self, layout: Layout) -> Result<Excess, AllocErr> {
         CoreAlloc::alloc_excess(self, layout)
+            .map(|e| Excess(e.0 .cast().as_ptr(), e.1))
     }
 
     unsafe fn realloc_excess(&mut self,
                              ptr: *mut u8,
                              layout: Layout,
                              new_layout: Layout) -> Result<Excess, AllocErr> {
+        let ptr = NonNull::new_unchecked(ptr as *mut Void);
         CoreAlloc::realloc_excess(self, ptr, layout, new_layout.size())
+            .map(|e| Excess(e.0 .cast().as_ptr(), e.1))
     }
 
     unsafe fn grow_in_place(&mut self,
                             ptr: *mut u8,
                             layout: Layout,
                             new_layout: Layout) -> Result<(), CannotReallocInPlace> {
+        let ptr = NonNull::new_unchecked(ptr as *mut Void);
         CoreAlloc::grow_in_place(self, ptr, layout, new_layout.size())
     }
 
@@ -93,6 +104,7 @@ unsafe impl<T> Alloc for T where T: CoreAlloc {
                               ptr: *mut u8,
                               layout: Layout,
                               new_layout: Layout) -> Result<(), CannotReallocInPlace> {
+        let ptr = NonNull::new_unchecked(ptr as *mut Void);
         CoreAlloc::shrink_in_place(self, ptr, layout, new_layout.size())
     }
 }
@@ -99,6 +99,7 @@
 #![feature(lang_items)]
 #![feature(libc)]
 #![feature(needs_allocator)]
+#![feature(nonnull_cast)]
 #![feature(nonzero)]
 #![feature(optin_builtin_traits)]
 #![feature(pattern)]
@@ -12,7 +12,7 @@ use alloc::{Alloc, Layout, Global};
 use core::cmp;
 use core::mem;
 use core::ops::Drop;
-use core::ptr::{self, Unique};
+use core::ptr::{self, NonNull, Unique};
 use core::slice;
 use super::boxed::Box;
 use super::allocator::CollectionAllocErr;
@@ -90,7 +90,7 @@ impl<T, A: Alloc> RawVec<T, A> {
 
         // handles ZSTs and `cap = 0` alike
         let ptr = if alloc_size == 0 {
-            mem::align_of::<T>() as *mut u8
+            NonNull::<T>::dangling().as_void()
         } else {
             let align = mem::align_of::<T>();
             let result = if zeroed {
@@ -105,7 +105,7 @@ impl<T, A: Alloc> RawVec<T, A> {
         };
 
         RawVec {
-            ptr: Unique::new_unchecked(ptr as *mut _),
+            ptr: ptr.cast().into(),
             cap,
             a,
         }
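Note on the new idiom in this hunk and those below: `Alloc::alloc` now yields `NonNull<Void>`, which RawVec casts to `NonNull<T>` with `.cast()` and then converts into its internal `Unique<T>` field via `.into()`. A small runnable model of the cast step, using only stable `NonNull` (the `Unique` conversion is internal to the standard library, so the sketch stops at `NonNull`):

    use std::ptr::NonNull;

    fn main() {
        let mut x = 5u32;
        // An untyped pointer, standing in for the NonNull<Void> an allocator returns.
        let untyped: NonNull<u8> = NonNull::from(&mut x).cast();
        // Cast back to the element type, as `res?.cast().into()` does in the diff.
        let typed: NonNull<u32> = untyped.cast();
        assert_eq!(unsafe { *typed.as_ref() }, 5);
    }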
@@ -310,11 +310,11 @@ impl<T, A: Alloc> RawVec<T, A> {
                     let new_cap = 2 * self.cap;
                     let new_size = new_cap * elem_size;
                     alloc_guard(new_size).expect("capacity overflow");
-                    let ptr_res = self.a.realloc(self.ptr.as_ptr() as *mut u8,
+                    let ptr_res = self.a.realloc(NonNull::from(self.ptr).as_void(),
                                                  cur,
                                                  new_size);
                     match ptr_res {
-                        Ok(ptr) => (new_cap, Unique::new_unchecked(ptr as *mut T)),
+                        Ok(ptr) => (new_cap, ptr.cast().into()),
                         Err(_) => self.a.oom(),
                     }
                 }
@@ -369,8 +369,7 @@ impl<T, A: Alloc> RawVec<T, A> {
         let new_cap = 2 * self.cap;
         let new_size = new_cap * elem_size;
         alloc_guard(new_size).expect("capacity overflow");
-        let ptr = self.ptr() as *mut _;
-        match self.a.grow_in_place(ptr, old_layout, new_size) {
+        match self.a.grow_in_place(NonNull::from(self.ptr).as_void(), old_layout, new_size) {
             Ok(_) => {
                 // We can't directly divide `size`.
                 self.cap = new_cap;
@@ -427,13 +426,12 @@ impl<T, A: Alloc> RawVec<T, A> {
             let res = match self.current_layout() {
                 Some(layout) => {
                     debug_assert!(new_layout.align() == layout.align());
-                    let old_ptr = self.ptr.as_ptr() as *mut u8;
-                    self.a.realloc(old_ptr, layout, new_layout.size())
+                    self.a.realloc(NonNull::from(self.ptr).as_void(), layout, new_layout.size())
                 }
                 None => self.a.alloc(new_layout),
             };
 
-            self.ptr = Unique::new_unchecked(res? as *mut T);
+            self.ptr = res?.cast().into();
             self.cap = new_cap;
 
             Ok(())
@@ -537,13 +535,12 @@ impl<T, A: Alloc> RawVec<T, A> {
             let res = match self.current_layout() {
                 Some(layout) => {
                     debug_assert!(new_layout.align() == layout.align());
-                    let old_ptr = self.ptr.as_ptr() as *mut u8;
-                    self.a.realloc(old_ptr, layout, new_layout.size())
+                    self.a.realloc(NonNull::from(self.ptr).as_void(), layout, new_layout.size())
                 }
                 None => self.a.alloc(new_layout),
             };
 
-            self.ptr = Unique::new_unchecked(res? as *mut T);
+            self.ptr = res?.cast().into();
             self.cap = new_cap;
 
             Ok(())
@@ -600,11 +597,12 @@ impl<T, A: Alloc> RawVec<T, A> {
             // (regardless of whether `self.cap - used_cap` wrapped).
             // Therefore we can safely call grow_in_place.
 
-            let ptr = self.ptr() as *mut _;
             let new_layout = Layout::new::<T>().repeat(new_cap).unwrap().0;
             // FIXME: may crash and burn on over-reserve
             alloc_guard(new_layout.size()).expect("capacity overflow");
-            match self.a.grow_in_place(ptr, old_layout, new_layout.size()) {
+            match self.a.grow_in_place(
+                NonNull::from(self.ptr).as_void(), old_layout, new_layout.size(),
+            ) {
                 Ok(_) => {
                     self.cap = new_cap;
                     true
@@ -664,10 +662,10 @@ impl<T, A: Alloc> RawVec<T, A> {
                 let new_size = elem_size * amount;
                 let align = mem::align_of::<T>();
                 let old_layout = Layout::from_size_align_unchecked(old_size, align);
-                match self.a.realloc(self.ptr.as_ptr() as *mut u8,
+                match self.a.realloc(NonNull::from(self.ptr).as_void(),
                                      old_layout,
                                      new_size) {
-                    Ok(p) => self.ptr = Unique::new_unchecked(p as *mut T),
+                    Ok(p) => self.ptr = p.cast().into(),
                     Err(_) => self.a.oom(),
                 }
             }
@@ -700,8 +698,7 @@ impl<T, A: Alloc> RawVec<T, A> {
         let elem_size = mem::size_of::<T>();
         if elem_size != 0 {
             if let Some(layout) = self.current_layout() {
-                let ptr = self.ptr() as *mut u8;
-                self.a.dealloc(ptr, layout);
+                self.a.dealloc(NonNull::from(self.ptr).as_void(), layout);
             }
         }
     }
@@ -737,6 +734,7 @@ fn alloc_guard(alloc_size: usize) -> Result<(), CollectionAllocErr> {
 #[cfg(test)]
 mod tests {
     use super::*;
+    use alloc::Void;
 
     #[test]
     fn allocator_param() {
@@ -756,7 +754,7 @@ mod tests {
         // before allocation attempts start failing.
         struct BoundedAlloc { fuel: usize }
         unsafe impl Alloc for BoundedAlloc {
-            unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+            unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
                 let size = layout.size();
                 if size > self.fuel {
                     return Err(AllocErr);
@@ -766,7 +764,7 @@ mod tests {
                     err @ Err(_) => err,
                 }
             }
-            unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+            unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout) {
                 Global.dealloc(ptr, layout)
             }
         }
@@ -259,7 +259,7 @@ use core::ops::CoerceUnsized;
 use core::ptr::{self, NonNull};
 use core::convert::From;
 
-use alloc::{Global, Alloc, Layout, box_free};
+use alloc::{Global, Alloc, Layout, Void, box_free};
 use string::String;
 use vec::Vec;
 
@@ -671,7 +671,7 @@ impl<T: ?Sized> Rc<T> {
             .unwrap_or_else(|_| Global.oom());
 
         // Initialize the real RcBox
-        let inner = set_data_ptr(ptr as *mut T, mem) as *mut RcBox<T>;
+        let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut RcBox<T>;
 
         ptr::write(&mut (*inner).strong, Cell::new(1));
         ptr::write(&mut (*inner).weak, Cell::new(1));
@@ -737,7 +737,7 @@ impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
         // In the event of a panic, elements that have been written
         // into the new RcBox will be dropped, then the memory freed.
         struct Guard<T> {
-            mem: *mut u8,
+            mem: NonNull<Void>,
             elems: *mut T,
             layout: Layout,
             n_elems: usize,
@@ -760,14 +760,14 @@ impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
             let v_ptr = v as *const [T];
             let ptr = Self::allocate_for_ptr(v_ptr);
 
-            let mem = ptr as *mut _ as *mut u8;
+            let mem = ptr as *mut _ as *mut Void;
             let layout = Layout::for_value(&*ptr);
 
             // Pointer to first element
             let elems = &mut (*ptr).value as *mut [T] as *mut T;
 
             let mut guard = Guard{
-                mem: mem,
+                mem: NonNull::new_unchecked(mem),
                 elems: elems,
                 layout: layout,
                 n_elems: 0,
@@ -834,8 +834,6 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
    /// ```
    fn drop(&mut self) {
        unsafe {
-            let ptr = self.ptr.as_ptr();
-
            self.dec_strong();
            if self.strong() == 0 {
                // destroy the contained object
@@ -846,7 +844,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
                 self.dec_weak();
 
                 if self.weak() == 0 {
-                    Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
+                    Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref()));
                 }
             }
         }
@@ -1266,13 +1264,11 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            let ptr = self.ptr.as_ptr();
-
             self.dec_weak();
             // the weak count starts at 1, and will only go to zero if all
             // the strong pointers have disappeared.
             if self.weak() == 0 {
-                Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
+                Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref()));
             }
         }
     }
@@ -34,7 +34,8 @@ fn check_overalign_requests<T: Alloc>(mut allocator: T) {
             allocator.alloc(Layout::from_size_align(size, align).unwrap()).unwrap()
         }).collect();
         for &ptr in &pointers {
-            assert_eq!((ptr as usize) % align, 0, "Got a pointer less aligned than requested")
+            assert_eq!((ptr.as_ptr() as usize) % align, 0,
+                       "Got a pointer less aligned than requested")
         }
 
         // Clean up
@@ -42,6 +42,7 @@ const MIN_ALIGN: usize = 8;
 const MIN_ALIGN: usize = 16;
 
 use core::alloc::{Alloc, GlobalAlloc, AllocErr, Layout, Void};
+use core::ptr::NonNull;
 
 #[unstable(feature = "allocator_api", issue = "32838")]
 pub struct System;
@@ -49,26 +50,26 @@ pub struct System;
 #[unstable(feature = "allocator_api", issue = "32838")]
 unsafe impl Alloc for System {
     #[inline]
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
         GlobalAlloc::alloc(self, layout).into()
     }
 
     #[inline]
-    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
         GlobalAlloc::alloc_zeroed(self, layout).into()
     }
 
     #[inline]
-    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
-        GlobalAlloc::dealloc(self, ptr as *mut Void, layout)
+    unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout) {
+        GlobalAlloc::dealloc(self, ptr.as_ptr(), layout)
     }
 
     #[inline]
     unsafe fn realloc(&mut self,
-                      ptr: *mut u8,
+                      ptr: NonNull<Void>,
                       old_layout: Layout,
-                      new_size: usize) -> Result<*mut u8, AllocErr> {
-        GlobalAlloc::realloc(self, ptr as *mut Void, old_layout, new_size).into()
+                      new_size: usize) -> Result<NonNull<Void>, AllocErr> {
+        GlobalAlloc::realloc(self, ptr.as_ptr(), old_layout, new_size).into()
     }
 
     #[inline]
@@ -81,26 +82,26 @@ unsafe impl Alloc for System {
 #[unstable(feature = "allocator_api", issue = "32838")]
 unsafe impl<'a> Alloc for &'a System {
     #[inline]
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
         GlobalAlloc::alloc(*self, layout).into()
     }
 
     #[inline]
-    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
         GlobalAlloc::alloc_zeroed(*self, layout).into()
     }
 
     #[inline]
-    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
-        GlobalAlloc::dealloc(*self, ptr as *mut Void, layout)
+    unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout) {
+        GlobalAlloc::dealloc(*self, ptr.as_ptr(), layout)
     }
 
     #[inline]
     unsafe fn realloc(&mut self,
-                      ptr: *mut u8,
+                      ptr: NonNull<Void>,
                       old_layout: Layout,
-                      new_size: usize) -> Result<*mut u8, AllocErr> {
-        GlobalAlloc::realloc(*self, ptr as *mut Void, old_layout, new_size).into()
+                      new_size: usize) -> Result<NonNull<Void>, AllocErr> {
+        GlobalAlloc::realloc(*self, ptr.as_ptr(), old_layout, new_size).into()
     }
 
     #[inline]
@@ -42,21 +42,17 @@ impl Void {
 }
 
 /// Convert from a return value of GlobalAlloc::alloc to that of Alloc::alloc
-impl From<*mut Void> for Result<*mut u8, AllocErr> {
+impl From<*mut Void> for Result<NonNull<Void>, AllocErr> {
     fn from(ptr: *mut Void) -> Self {
-        if !ptr.is_null() {
-            Ok(ptr as *mut u8)
-        } else {
-            Err(AllocErr)
-        }
+        NonNull::new(ptr).ok_or(AllocErr)
     }
 }
 
 /// Convert from a return value of Alloc::alloc to that of GlobalAlloc::alloc
-impl From<Result<*mut u8, AllocErr>> for *mut Void {
-    fn from(result: Result<*mut u8, AllocErr>) -> Self {
+impl From<Result<NonNull<Void>, AllocErr>> for *mut Void {
+    fn from(result: Result<NonNull<Void>, AllocErr>) -> Self {
         match result {
-            Ok(ptr) => ptr as *mut Void,
+            Ok(ptr) => ptr.as_ptr(),
             Err(_) => Void::null_mut(),
         }
     }
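These two `From` impls are what the `.into()` calls in the `Alloc for Global` and `Alloc for System` impls above lean on: `GlobalAlloc` speaks `*mut Void` with null meaning failure, while `Alloc` now speaks `Result<NonNull<Void>, AllocErr>`. A standalone, runnable model of the pair of conversions (the `Void` and `AllocErr` here are local stand-ins, not the libcore types):

    use std::ptr::NonNull;

    #[derive(Debug, PartialEq)]
    struct AllocErr;

    struct Void(u8); // local stand-in for the opaque allocation type

    // GlobalAlloc-style return -> Alloc-style return: null becomes Err.
    fn global_to_alloc(ptr: *mut Void) -> Result<NonNull<Void>, AllocErr> {
        NonNull::new(ptr).ok_or(AllocErr)
    }

    // Alloc-style return -> GlobalAlloc-style return: Err becomes null.
    fn alloc_to_global(result: Result<NonNull<Void>, AllocErr>) -> *mut Void {
        match result {
            Ok(ptr) => ptr.as_ptr(),
            Err(_) => std::ptr::null_mut(),
        }
    }

    fn main() {
        assert!(global_to_alloc(std::ptr::null_mut()).is_err());
        assert_eq!(alloc_to_global(Err(AllocErr)), std::ptr::null_mut());
    }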
@@ -65,7 +61,7 @@ impl From<Result<*mut u8, AllocErr>> for *mut Void {
 /// Represents the combination of a starting address and
 /// a total capacity of the returned block.
 #[derive(Debug)]
-pub struct Excess(pub *mut u8, pub usize);
+pub struct Excess(pub NonNull<Void>, pub usize);
 
 fn size_align<T>() -> (usize, usize) {
     (mem::size_of::<T>(), mem::align_of::<T>())
@@ -575,7 +571,7 @@ pub unsafe trait Alloc {
     /// Clients wishing to abort computation in response to an
     /// allocation error are encouraged to call the allocator's `oom`
     /// method, rather than directly invoking `panic!` or similar.
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr>;
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr>;
 
     /// Deallocate the memory referenced by `ptr`.
     ///
@@ -592,7 +588,7 @@ pub unsafe trait Alloc {
     /// * In addition to fitting the block of memory `layout`, the
     ///   alignment of the `layout` must match the alignment used
     ///   to allocate that block of memory.
-    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout);
+    unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout);
 
     /// Allocator-specific method for signaling an out-of-memory
     /// condition.
@@ -710,9 +706,9 @@ pub unsafe trait Alloc {
     /// reallocation error are encouraged to call the allocator's `oom`
     /// method, rather than directly invoking `panic!` or similar.
     unsafe fn realloc(&mut self,
-                      ptr: *mut u8,
+                      ptr: NonNull<Void>,
                       layout: Layout,
-                      new_size: usize) -> Result<*mut u8, AllocErr> {
+                      new_size: usize) -> Result<NonNull<Void>, AllocErr> {
         let old_size = layout.size();
 
         if new_size >= old_size {
@@ -729,7 +725,9 @@ pub unsafe trait Alloc {
         let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
         let result = self.alloc(new_layout);
         if let Ok(new_ptr) = result {
-            ptr::copy_nonoverlapping(ptr as *const u8, new_ptr, cmp::min(old_size, new_size));
+            ptr::copy_nonoverlapping(ptr.as_ptr() as *const u8,
+                                     new_ptr.as_ptr() as *mut u8,
+                                     cmp::min(old_size, new_size));
             self.dealloc(ptr, layout);
         }
         result
@@ -751,11 +749,11 @@ pub unsafe trait Alloc {
     /// Clients wishing to abort computation in response to an
     /// allocation error are encouraged to call the allocator's `oom`
     /// method, rather than directly invoking `panic!` or similar.
-    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
         let size = layout.size();
         let p = self.alloc(layout);
         if let Ok(p) = p {
-            ptr::write_bytes(p, 0, size);
+            ptr::write_bytes(p.as_ptr() as *mut u8, 0, size);
         }
         p
     }
@@ -800,7 +798,7 @@ pub unsafe trait Alloc {
     /// reallocation error are encouraged to call the allocator's `oom`
     /// method, rather than directly invoking `panic!` or similar.
     unsafe fn realloc_excess(&mut self,
-                             ptr: *mut u8,
+                             ptr: NonNull<Void>,
                              layout: Layout,
                              new_size: usize) -> Result<Excess, AllocErr> {
         let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
@@ -845,7 +843,7 @@ pub unsafe trait Alloc {
     /// `grow_in_place` failures without aborting, or to fall back on
     /// another reallocation method before resorting to an abort.
     unsafe fn grow_in_place(&mut self,
-                            ptr: *mut u8,
+                            ptr: NonNull<Void>,
                             layout: Layout,
                             new_size: usize) -> Result<(), CannotReallocInPlace> {
         let _ = ptr; // this default implementation doesn't care about the actual address.
@@ -900,7 +898,7 @@ pub unsafe trait Alloc {
     /// `shrink_in_place` failures without aborting, or to fall back
     /// on another reallocation method before resorting to an abort.
     unsafe fn shrink_in_place(&mut self,
-                              ptr: *mut u8,
+                              ptr: NonNull<Void>,
                               layout: Layout,
                               new_size: usize) -> Result<(), CannotReallocInPlace> {
         let _ = ptr; // this default implementation doesn't care about the actual address.
@@ -951,7 +949,7 @@ pub unsafe trait Alloc {
     {
         let k = Layout::new::<T>();
         if k.size() > 0 {
-            unsafe { self.alloc(k).map(|p| NonNull::new_unchecked(p as *mut T)) }
+            unsafe { self.alloc(k).map(|p| p.cast()) }
         } else {
             Err(AllocErr)
         }
@@ -977,10 +975,9 @@ pub unsafe trait Alloc {
     unsafe fn dealloc_one<T>(&mut self, ptr: NonNull<T>)
         where Self: Sized
     {
-        let raw_ptr = ptr.as_ptr() as *mut u8;
         let k = Layout::new::<T>();
         if k.size() > 0 {
-            self.dealloc(raw_ptr, k);
+            self.dealloc(ptr.as_void(), k);
         }
     }
 
@@ -1020,10 +1017,7 @@ pub unsafe trait Alloc {
         match Layout::array::<T>(n) {
             Ok(ref layout) if layout.size() > 0 => {
                 unsafe {
-                    self.alloc(layout.clone())
-                        .map(|p| {
-                            NonNull::new_unchecked(p as *mut T)
-                        })
+                    self.alloc(layout.clone()).map(|p| p.cast())
                 }
             }
             _ => Err(AllocErr),
@@ -1068,11 +1062,10 @@ pub unsafe trait Alloc {
                                n_new: usize) -> Result<NonNull<T>, AllocErr>
         where Self: Sized
     {
-        match (Layout::array::<T>(n_old), Layout::array::<T>(n_new), ptr.as_ptr()) {
-            (Ok(ref k_old), Ok(ref k_new), ptr) if k_old.size() > 0 && k_new.size() > 0 => {
+        match (Layout::array::<T>(n_old), Layout::array::<T>(n_new)) {
+            (Ok(ref k_old), Ok(ref k_new)) if k_old.size() > 0 && k_new.size() > 0 => {
                 debug_assert!(k_old.align() == k_new.align());
-                self.realloc(ptr as *mut u8, k_old.clone(), k_new.size())
-                    .map(|p| NonNull::new_unchecked(p as *mut T))
+                self.realloc(ptr.as_void(), k_old.clone(), k_new.size()).map(NonNull::cast)
             }
             _ => {
                 Err(AllocErr)
@@ -1103,10 +1096,9 @@ pub unsafe trait Alloc {
     unsafe fn dealloc_array<T>(&mut self, ptr: NonNull<T>, n: usize) -> Result<(), AllocErr>
         where Self: Sized
     {
-        let raw_ptr = ptr.as_ptr() as *mut u8;
         match Layout::array::<T>(n) {
             Ok(ref k) if k.size() > 0 => {
-                Ok(self.dealloc(raw_ptr, k.clone()))
+                Ok(self.dealloc(ptr.as_void(), k.clone()))
             }
             _ => {
                 Err(AllocErr)
|
@ -2750,6 +2750,14 @@ impl<T: ?Sized> NonNull<T> {
|
||||
NonNull::new_unchecked(self.as_ptr() as *mut U)
|
||||
}
|
||||
}
|
||||
|
||||
/// Cast to a `Void` pointer
|
||||
#[unstable(feature = "allocator_api", issue = "32838")]
|
||||
pub fn as_void(self) -> NonNull<::alloc::Void> {
|
||||
unsafe {
|
||||
NonNull::new_unchecked(self.as_ptr() as _)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[stable(feature = "nonnull", since = "1.25.0")]
|
||||
|
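`as_void` is the helper the rest of this diff calls on `NonNull<T>` (and on pointers derived from `Unique<T>`); it is nothing more than an infallible pointer cast. A runnable model with a local stand-in for `Void`:

    use std::ptr::NonNull;

    struct Void(u8); // stand-in for the allocator's opaque type

    // Mirrors the new method above: any non-null T pointer casts to an
    // untyped non-null pointer, with no failure path to handle.
    fn as_void<T>(ptr: NonNull<T>) -> NonNull<Void> {
        ptr.cast()
    }

    fn main() {
        let mut x = 7i64;
        let v = as_void(NonNull::from(&mut x));
        assert_eq!(v.as_ptr() as usize, &mut x as *mut i64 as usize);
    }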
@@ -757,12 +757,10 @@ impl<K, V> RawTable<K, V> {
         let buffer = Global.alloc(Layout::from_size_align(size, alignment)
             .map_err(|_| CollectionAllocErr::CapacityOverflow)?)?;
 
-        let hashes = buffer as *mut HashUint;
-
         Ok(RawTable {
             capacity_mask: capacity.wrapping_sub(1),
             size: 0,
-            hashes: TaggedHashUintPtr::new(hashes),
+            hashes: TaggedHashUintPtr::new(buffer.cast().as_ptr()),
             marker: marker::PhantomData,
         })
     }
@@ -1185,7 +1183,7 @@ unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for RawTable<K, V> {
         debug_assert!(!oflo, "should be impossible");
 
         unsafe {
-            Global.dealloc(self.hashes.ptr() as *mut u8,
+            Global.dealloc(NonNull::new_unchecked(self.hashes.ptr()).as_void(),
                            Layout::from_size_align(size, align).unwrap());
             // Remember how everything was allocated out of one buffer
             // during initialization? We only need one call to free here.
@@ -275,6 +275,7 @@
 #![feature(macro_reexport)]
 #![feature(macro_vis_matcher)]
 #![feature(needs_panic_runtime)]
+#![feature(nonnull_cast)]
 #![feature(exhaustive_patterns)]
 #![feature(nonzero)]
 #![feature(num_bits_bytes)]
@@ -30,7 +30,7 @@ fn main() {
         let layout = Layout::from_size_align(4, 2).unwrap();
 
         // Global allocator routes to the `custom_as_global` global
-        let ptr = Global.alloc(layout.clone()).unwrap();
+        let ptr = Global.alloc(layout.clone());
         helper::work_with(&ptr);
         assert_eq!(custom_as_global::get(), n + 1);
         Global.dealloc(ptr, layout.clone());
@@ -13,10 +13,10 @@
 // Ideally this would be revised to use no_std, but for now it serves
 // well enough to reproduce (and illustrate) the bug from #16687.
 
-#![feature(heap_api, allocator_api)]
+#![feature(heap_api, allocator_api, nonnull_cast)]
 
-use std::heap::{Heap, Alloc, Layout};
-use std::ptr;
+use std::alloc::{Global, Alloc, Layout};
+use std::ptr::{self, NonNull};
 
 fn main() {
     unsafe {
@@ -50,13 +50,13 @@ unsafe fn test_triangle() -> bool {
             println!("allocate({:?})", layout);
         }
 
-        let ret = Heap.alloc(layout.clone()).unwrap_or_else(|_| Heap.oom());
+        let ret = Global.alloc(layout.clone()).unwrap_or_else(|_| Global.oom());
 
         if PRINT {
             println!("allocate({:?}) = {:?}", layout, ret);
         }
 
-        ret
+        ret.cast().as_ptr()
     }
 
     unsafe fn deallocate(ptr: *mut u8, layout: Layout) {
@@ -64,7 +64,7 @@ unsafe fn test_triangle() -> bool {
             println!("deallocate({:?}, {:?}", ptr, layout);
         }
 
-        Heap.dealloc(ptr, layout);
+        Global.dealloc(NonNull::new_unchecked(ptr).as_void(), layout);
     }
 
     unsafe fn reallocate(ptr: *mut u8, old: Layout, new: Layout) -> *mut u8 {
@@ -72,14 +72,14 @@ unsafe fn test_triangle() -> bool {
             println!("reallocate({:?}, old={:?}, new={:?})", ptr, old, new);
         }
 
-        let ret = Heap.realloc(ptr, old.clone(), new.clone())
-            .unwrap_or_else(|_| Heap.oom());
+        let ret = Global.realloc(NonNull::new_unchecked(ptr).as_void(), old.clone(), new.size())
+            .unwrap_or_else(|_| Global.oom());
 
         if PRINT {
             println!("reallocate({:?}, old={:?}, new={:?}) = {:?}",
                      ptr, old, new, ret);
        }
-        ret
+        ret.cast().as_ptr()
     }
 
     fn idx_to_size(i: usize) -> usize { (i+1) * 10 }
@@ -13,6 +13,7 @@
 #![feature(allocator_api)]
 
 use std::heap::{Alloc, Heap, Layout};
+use std::ptr::NonNull;
 
 struct arena(());
 
@@ -33,7 +34,7 @@ fn alloc<'a>(_bcx : &'a arena) -> &'a Bcx<'a> {
     unsafe {
         let ptr = Heap.alloc(Layout::new::<Bcx>())
             .unwrap_or_else(|_| Heap.oom());
-        &*(ptr as *const _)
+        &*(ptr.as_ptr() as *const _)
     }
 }
 
@@ -45,7 +46,7 @@ fn g(fcx : &Fcx) {
     let bcx = Bcx { fcx: fcx };
     let bcx2 = h(&bcx);
     unsafe {
-        Heap.dealloc(bcx2 as *const _ as *mut _, Layout::new::<Bcx>());
+        Heap.dealloc(NonNull::new_unchecked(bcx2 as *const _ as *mut _), Layout::new::<Bcx>());
     }
 }
 