Auto merge of #50357 - seanmonstar:arc-weak-null, r=KodrAus

Arc: remove unused allocation from Weak::new()

It is not obvious that calling `Weak::new()` actually allocates space for the entire size of `T`, even though you can **never** access that data through a weak pointer constructed this way. Besides that, if someone were to create many `Weak::new()`s, they could unknowingly be wasting a significant amount of memory.

This change makes `Weak::new()` allocate no memory at all. Instead, the `Weak` is created holding a sentinel pointer value. Since the only operations on a `Weak` are upgrading, cloning, and dropping, there are very few places where the code actually needs to check for the sentinel.
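A quick sketch (mine, not part of the commit) of the behavior observable through the public API; the 1 KiB payload type is arbitrary:

```rust
use std::sync::Weak;

fn main() {
    // With this change, no space for the 1 KiB payload is allocated here.
    let empty: Weak<[u8; 1024]> = Weak::new();

    // The only thing such a Weak can observably do: fail to upgrade.
    assert!(empty.upgrade().is_none());

    // Clones share the sentinel pointer, so they allocate nothing either.
    let also_empty = empty.clone();
    assert!(also_empty.upgrade().is_none());
}
```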
Merged by bors on 2018-06-29 04:09:02 +00:00 as commit 3b50455c61.

```diff
@@ -23,7 +23,7 @@ use core::borrow;
 use core::fmt;
 use core::cmp::Ordering;
 use core::intrinsics::abort;
-use core::mem::{self, align_of_val, size_of_val, uninitialized};
+use core::mem::{self, align_of_val, size_of_val};
 use core::ops::Deref;
 use core::ops::CoerceUnsized;
 use core::ptr::{self, NonNull};
```
```diff
@@ -43,6 +43,9 @@ use vec::Vec;
 /// necessarily) at _exactly_ `MAX_REFCOUNT + 1` references.
 const MAX_REFCOUNT: usize = (isize::MAX) as usize;
 
+/// A sentinel value that is used for the pointer of `Weak::new()`.
+const WEAK_EMPTY: usize = 1;
+
 /// A thread-safe reference-counting pointer. 'Arc' stands for 'Atomically
 /// Reference Counted'.
 ///
```
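Why the odd address `1` works as a sentinel (my reading, not spelled out in the commit): `NonNull` only rules out `0`, and a real `ArcInner<T>` starts with atomic counters, so any genuine allocation is at least word-aligned and can never sit at address `1`. A self-contained check with a stand-in type:

```rust
use std::mem::align_of;
use std::sync::atomic::AtomicUsize;

// Stand-in mirroring the leading fields of liballoc's ArcInner<T>.
#[allow(dead_code)]
struct ArcInnerLike {
    strong: AtomicUsize,
    weak: AtomicUsize,
}

fn main() {
    // Word alignment means no live allocation can ever equal WEAK_EMPTY (1).
    assert!(align_of::<ArcInnerLike>() > 1);
}
```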
```diff
@@ -235,6 +238,10 @@ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
 /// [`None`]: ../../std/option/enum.Option.html#variant.None
 #[stable(feature = "arc_weak", since = "1.4.0")]
 pub struct Weak<T: ?Sized> {
+    // This is a `NonNull` to allow optimizing the size of this type in enums,
+    // but it is actually not truly "non-null". A `Weak::new()` will set this
+    // to a sentinel value, instead of needing to allocate some space in the
+    // heap.
     ptr: NonNull<ArcInner<T>>,
 }
 
```
```diff
@@ -1011,8 +1018,8 @@ impl Arc<Any + Send + Sync> {
 }
 
 impl<T> Weak<T> {
-    /// Constructs a new `Weak<T>`, allocating memory for `T` without initializing
-    /// it. Calling [`upgrade`] on the return value always gives [`None`].
+    /// Constructs a new `Weak<T>`, without allocating any memory.
+    /// Calling [`upgrade`] on the return value always gives [`None`].
     ///
     /// [`upgrade`]: struct.Weak.html#method.upgrade
     /// [`None`]: ../../std/option/enum.Option.html#variant.None
```
```diff
@@ -1029,11 +1036,7 @@
     pub fn new() -> Weak<T> {
         unsafe {
             Weak {
-                ptr: Box::into_raw_non_null(box ArcInner {
-                    strong: atomic::AtomicUsize::new(0),
-                    weak: atomic::AtomicUsize::new(1),
-                    data: uninitialized(),
-                }),
+                ptr: NonNull::new_unchecked(WEAK_EMPTY as *mut _),
             }
         }
     }
```
```diff
@@ -1070,7 +1073,11 @@
     pub fn upgrade(&self) -> Option<Arc<T>> {
         // We use a CAS loop to increment the strong count instead of a
         // fetch_add because once the count hits 0 it must never be above 0.
-        let inner = self.inner();
+        let inner = if self.ptr.as_ptr() as *const u8 as usize == WEAK_EMPTY {
+            return None;
+        } else {
+            unsafe { self.ptr.as_ref() }
+        };
 
         // Relaxed load because any write of 0 that we can observe
         // leaves the field in a permanently zero state (so a
```
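The same sentinel check recurs in `clone` and `drop` below. A minimal, self-contained model of the pattern (the names `Inner` and `WeakModel` are illustrative only, not from the commit):

```rust
use std::ptr::NonNull;
use std::sync::atomic::AtomicUsize;

const WEAK_EMPTY: usize = 1; // same sentinel address as in the diff

#[allow(dead_code)]
struct Inner {
    weak: AtomicUsize,
}

struct WeakModel {
    ptr: NonNull<Inner>,
}

impl WeakModel {
    fn new() -> WeakModel {
        // No allocation: store the sentinel address inside the NonNull.
        WeakModel {
            ptr: unsafe { NonNull::new_unchecked(WEAK_EMPTY as *mut Inner) },
        }
    }

    // The shared pattern: branch on the sentinel before dereferencing.
    fn inner(&self) -> Option<&Inner> {
        if self.ptr.as_ptr() as usize == WEAK_EMPTY {
            None
        } else {
            // In the real code this is sound because a non-sentinel pointer
            // always refers to a live ArcInner allocation.
            Some(unsafe { self.ptr.as_ref() })
        }
    }
}

fn main() {
    let w = WeakModel::new();
    assert!(w.inner().is_none()); // the sentinel is never dereferenced
}
```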
```diff
@@ -1092,17 +1099,15 @@
             // Relaxed is valid for the same reason it is on Arc's Clone impl
             match inner.strong.compare_exchange_weak(n, n + 1, Relaxed, Relaxed) {
-                Ok(_) => return Some(Arc { ptr: self.ptr, phantom: PhantomData }),
+                Ok(_) => return Some(Arc {
+                    // null checked above
+                    ptr: self.ptr,
+                    phantom: PhantomData,
+                }),
                 Err(old) => n = old,
             }
         }
     }
-
-    #[inline]
-    fn inner(&self) -> &ArcInner<T> {
-        // See comments above for why this is "safe"
-        unsafe { self.ptr.as_ref() }
-    }
 }
 
 #[stable(feature = "arc_weak", since = "1.4.0")]
 impl<T: ?Sized> Clone for Weak<T> {
```
```diff
@@ -1120,11 +1125,16 @@ impl<T: ?Sized> Clone for Weak<T> {
     /// ```
     #[inline]
     fn clone(&self) -> Weak<T> {
+        let inner = if self.ptr.as_ptr() as *const u8 as usize == WEAK_EMPTY {
+            return Weak { ptr: self.ptr };
+        } else {
+            unsafe { self.ptr.as_ref() }
+        };
         // See comments in Arc::clone() for why this is relaxed. This can use a
         // fetch_add (ignoring the lock) because the weak count is only locked
         // where are *no other* weak pointers in existence. (So we can't be
         // running this code in that case).
-        let old_size = self.inner().weak.fetch_add(1, Relaxed);
+        let old_size = inner.weak.fetch_add(1, Relaxed);
 
         // See comments in Arc::clone() for why we do this (for mem::forget).
         if old_size > MAX_REFCOUNT {
```
```diff
@@ -1139,8 +1149,8 @@
 
 #[stable(feature = "downgraded_weak", since = "1.10.0")]
 impl<T> Default for Weak<T> {
-    /// Constructs a new `Weak<T>`, allocating memory for `T` without initializing
-    /// it. Calling [`upgrade`] on the return value always gives [`None`].
+    /// Constructs a new `Weak<T>`, without allocating memory.
+    /// Calling [`upgrade`] on the return value always gives [`None`].
     ///
     /// [`upgrade`]: struct.Weak.html#method.upgrade
     /// [`None`]: ../../std/option/enum.Option.html#variant.None
```
```diff
@@ -1193,7 +1203,13 @@ impl<T: ?Sized> Drop for Weak<T> {
         // weak count can only be locked if there was precisely one weak ref,
         // meaning that drop could only subsequently run ON that remaining weak
         // ref, which can only happen after the lock is released.
-        if self.inner().weak.fetch_sub(1, Release) == 1 {
+        let inner = if self.ptr.as_ptr() as *const u8 as usize == WEAK_EMPTY {
+            return;
+        } else {
+            unsafe { self.ptr.as_ref() }
+        };
+
+        if inner.weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
             unsafe {
                 Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
```
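Dropping an empty `Weak` therefore never reaches the deallocation path; a small sketch against the public API (mine, not part of the commit):

```rust
use std::sync::Weak;

fn main() {
    let empty: Weak<String> = Weak::new();
    // The Drop impl returns early on the sentinel: nothing was allocated,
    // so nothing is freed here.
    drop(empty);
}
```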