diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index b3305b8ca6d..b3ec830a7d7 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -313,13 +313,17 @@ fn rcbox_layout_for_value_layout(layout: Layout) -> Layout { #[cfg_attr(not(test), rustc_diagnostic_item = "Rc")] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_insignificant_dtor] -pub struct Rc { +pub struct Rc< + T: ?Sized, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, +> { ptr: NonNull>, phantom: PhantomData>, + alloc: A, } #[stable(feature = "rust1", since = "1.0.0")] -impl !Send for Rc {} +impl !Send for Rc {} // Note that this negative impl isn't strictly necessary for correctness, // as `Rc` transitively contains a `Cell`, which is itself `!Sync`. @@ -327,20 +331,32 @@ impl !Send for Rc {} // having an explicit negative impl is nice for documentation purposes // and results in nicer error messages. #[stable(feature = "rust1", since = "1.0.0")] -impl !Sync for Rc {} +impl !Sync for Rc {} #[stable(feature = "catch_unwind", since = "1.9.0")] -impl UnwindSafe for Rc {} +impl UnwindSafe for Rc {} #[stable(feature = "rc_ref_unwind_safe", since = "1.58.0")] -impl RefUnwindSafe for Rc {} +impl RefUnwindSafe for Rc {} #[unstable(feature = "coerce_unsized", issue = "18598")] -impl, U: ?Sized> CoerceUnsized> for Rc {} +impl, U: ?Sized, A: Allocator> CoerceUnsized> for Rc {} #[unstable(feature = "dispatch_from_dyn", issue = "none")] impl, U: ?Sized> DispatchFromDyn> for Rc {} impl Rc { + #[inline] + unsafe fn from_inner(ptr: NonNull>) -> Self { + unsafe { Self::from_inner_in(ptr, Global) } + } + + #[inline] + unsafe fn from_ptr(ptr: *mut RcBox) -> Self { + unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) } + } +} + +impl Rc { #[inline(always)] fn inner(&self) -> &RcBox { // This unsafety is ok because while this Rc is alive we're guaranteed @@ -348,12 +364,14 @@ impl Rc { unsafe { self.ptr.as_ref() } } - unsafe fn from_inner(ptr: NonNull>) -> Self { - Self { ptr, phantom: PhantomData } + #[inline] + unsafe fn from_inner_in(ptr: NonNull>, alloc: A) -> Self { + Self { ptr, phantom: PhantomData, alloc } } - unsafe fn from_ptr(ptr: *mut RcBox) -> Self { - unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) } + #[inline] + unsafe fn from_ptr_in(ptr: *mut RcBox, alloc: A) -> Self { + unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) } } } @@ -450,7 +468,7 @@ impl Rc { let init_ptr: NonNull> = uninit_ptr.cast(); - let weak = Weak { ptr: init_ptr }; + let weak = Weak { ptr: init_ptr, alloc: Global }; // It's important we don't give up ownership of the weak pointer, or // else the memory might be freed by the time `data_fn` returns. 
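The key design point in this first hunk is that the new allocator parameter is defaulted, so `Rc<T>` stays spelling-compatible with `Rc<T, Global>` and existing callers compile unchanged. A minimal sketch of what that buys, assuming a nightly toolchain where this diff has landed (the `allocator_api` gate is needed just to name `A` explicitly):

```rust
#![feature(allocator_api)]

use std::alloc::Global;
use std::rc::Rc;

fn main() {
    // Existing code keeps compiling: `Rc<i32>` is `Rc<i32, Global>`.
    let a: Rc<i32> = Rc::new(5);
    let b: Rc<i32, Global> = a.clone();
    assert_eq!(*b, 5);
}
```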
If @@ -504,7 +522,7 @@ impl Rc { Rc::from_ptr(Rc::allocate_for_layout( Layout::new::(), |layout| Global.allocate(layout), - |mem| mem as *mut RcBox>, + <*mut u8>::cast, )) } } @@ -537,7 +555,7 @@ impl Rc { Rc::from_ptr(Rc::allocate_for_layout( Layout::new::(), |layout| Global.allocate_zeroed(layout), - |mem| mem as *mut RcBox>, + <*mut u8>::cast, )) } } @@ -594,7 +612,7 @@ impl Rc { Ok(Rc::from_ptr(Rc::try_allocate_for_layout( Layout::new::(), |layout| Global.allocate(layout), - |mem| mem as *mut RcBox>, + <*mut u8>::cast, )?)) } } @@ -627,7 +645,7 @@ impl Rc { Ok(Rc::from_ptr(Rc::try_allocate_for_layout( Layout::new::(), |layout| Global.allocate_zeroed(layout), - |mem| mem as *mut RcBox>, + <*mut u8>::cast, )?)) } } @@ -639,6 +657,231 @@ impl Rc { pub fn pin(value: T) -> Pin> { unsafe { Pin::new_unchecked(Rc::new(value)) } } +} + +impl Rc { + /// Returns a reference to the underlying allocator. + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn allocator(&self) -> &A { + &self.alloc + } + /// Constructs a new `Rc` in the provided allocator. + /// + /// # Examples + /// + /// ``` + /// #![feature(allocator_api)] + /// use std::rc::Rc; + /// use std::alloc::System; + /// + /// let five = Rc::new_in(5, System); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "allocator_api", issue = "32838")] + #[inline] + pub fn new_in(value: T, alloc: A) -> Rc { + // NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable. + // That would make code size bigger. + match Self::try_new_in(value, alloc) { + Ok(m) => m, + Err(_) => handle_alloc_error(Layout::new::>()), + } + } + + /// Constructs a new `Rc` with uninitialized contents in the provided allocator. + /// + /// # Examples + /// + /// ``` + /// #![feature(new_uninit)] + /// #![feature(get_mut_unchecked)] + /// #![feature(allocator_api)] + /// + /// use std::rc::Rc; + /// use std::alloc::System; + /// + /// let mut five = Rc::::new_uninit_in(System); + /// + /// let five = unsafe { + /// // Deferred initialization: + /// Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5); + /// + /// five.assume_init() + /// }; + /// + /// assert_eq!(*five, 5) + /// ``` + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "allocator_api", issue = "32838")] + // #[unstable(feature = "new_uninit", issue = "63291")] + #[inline] + pub fn new_uninit_in(alloc: A) -> Rc, A> { + unsafe { + Rc::from_ptr_in( + Rc::allocate_for_layout( + Layout::new::(), + |layout| alloc.allocate(layout), + <*mut u8>::cast, + ), + alloc, + ) + } + } + + /// Constructs a new `Rc` with uninitialized contents, with the memory + /// being filled with `0` bytes, in the provided allocator. + /// + /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and + /// incorrect usage of this method. 
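Two things in this chunk are worth seeing together: the `|mem| mem as *mut RcBox<...>` closures collapsing to `<*mut u8>::cast` are pure eta-reductions (inference picks the target type), and `new_in` plus the new `allocator` accessor form the basic entry point of the API. A small sketch on nightly, using `System` as the stand-in allocator just as the doc examples do:

```rust
#![feature(allocator_api)]

use std::alloc::System;
use std::rc::Rc;

fn main() {
    // The RcBox is carved out of the System allocator, not Global.
    let five = Rc::new_in(5, System);
    assert_eq!(*five, 5);

    // The handle carries its allocator by value; borrow it back out.
    let _alloc: &System = five.allocator();
}
```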
+ /// + /// # Examples + /// + /// ``` + /// #![feature(new_uninit)] + /// #![feature(allocator_api)] + /// + /// use std::rc::Rc; + /// use std::alloc::System; + /// + /// let zero = Rc::::new_zeroed_in(System); + /// let zero = unsafe { zero.assume_init() }; + /// + /// assert_eq!(*zero, 0) + /// ``` + /// + /// [zeroed]: mem::MaybeUninit::zeroed + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "allocator_api", issue = "32838")] + // #[unstable(feature = "new_uninit", issue = "63291")] + #[inline] + pub fn new_zeroed_in(alloc: A) -> Rc, A> { + unsafe { + Rc::from_ptr_in( + Rc::allocate_for_layout( + Layout::new::(), + |layout| alloc.allocate_zeroed(layout), + <*mut u8>::cast, + ), + alloc, + ) + } + } + + /// Constructs a new `Rc` in the provided allocator, returning an error if the allocation + /// fails + /// + /// # Examples + /// + /// ``` + /// #![feature(allocator_api)] + /// use std::rc::Rc; + /// use std::alloc::System; + /// + /// let five = Rc::try_new_in(5, System); + /// # Ok::<(), std::alloc::AllocError>(()) + /// ``` + #[unstable(feature = "allocator_api", issue = "32838")] + #[inline] + pub fn try_new_in(value: T, alloc: A) -> Result { + // There is an implicit weak pointer owned by all the strong + // pointers, which ensures that the weak destructor never frees + // the allocation while the strong destructor is running, even + // if the weak pointer is stored inside the strong one. + let (ptr, alloc) = Box::into_unique(Box::try_new_in( + RcBox { strong: Cell::new(1), weak: Cell::new(1), value }, + alloc, + )?); + Ok(unsafe { Self::from_inner_in(ptr.into(), alloc) }) + } + + /// Constructs a new `Rc` with uninitialized contents, in the provided allocator, returning an + /// error if the allocation fails + /// + /// # Examples + /// + /// ``` + /// #![feature(allocator_api, new_uninit)] + /// #![feature(get_mut_unchecked)] + /// + /// use std::rc::Rc; + /// use std::alloc::System; + /// + /// let mut five = Rc::::try_new_uninit_in(System)?; + /// + /// let five = unsafe { + /// // Deferred initialization: + /// Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5); + /// + /// five.assume_init() + /// }; + /// + /// assert_eq!(*five, 5); + /// # Ok::<(), std::alloc::AllocError>(()) + /// ``` + #[unstable(feature = "allocator_api", issue = "32838")] + // #[unstable(feature = "new_uninit", issue = "63291")] + #[inline] + pub fn try_new_uninit_in(alloc: A) -> Result, A>, AllocError> { + unsafe { + Ok(Rc::from_ptr_in( + Rc::try_allocate_for_layout( + Layout::new::(), + |layout| alloc.allocate(layout), + <*mut u8>::cast, + )?, + alloc, + )) + } + } + + /// Constructs a new `Rc` with uninitialized contents, with the memory + /// being filled with `0` bytes, in the provided allocator, returning an error if the allocation + /// fails + /// + /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and + /// incorrect usage of this method. 
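The fallible constructors above are easiest to exercise with an allocator that always refuses; `Never` in this sketch is a hypothetical helper, not part of the diff:

```rust
#![feature(allocator_api)]

use std::alloc::{AllocError, Allocator, Layout, System};
use std::ptr::NonNull;
use std::rc::Rc;

// Hypothetical allocator that always fails, to exercise the Err path.
struct Never;

unsafe impl Allocator for Never {
    fn allocate(&self, _layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        Err(AllocError)
    }
    unsafe fn deallocate(&self, _ptr: NonNull<u8>, _layout: Layout) {}
}

fn main() {
    // `try_new_in` surfaces allocation failure instead of aborting.
    assert!(Rc::try_new_in(5, Never).is_err());
    assert!(Rc::try_new_in(5, System).is_ok());
}
```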
+ /// + /// # Examples + /// + /// ``` + /// #![feature(allocator_api, new_uninit)] + /// + /// use std::rc::Rc; + /// use std::alloc::System; + /// + /// let zero = Rc::::try_new_zeroed_in(System)?; + /// let zero = unsafe { zero.assume_init() }; + /// + /// assert_eq!(*zero, 0); + /// # Ok::<(), std::alloc::AllocError>(()) + /// ``` + /// + /// [zeroed]: mem::MaybeUninit::zeroed + #[unstable(feature = "allocator_api", issue = "32838")] + //#[unstable(feature = "new_uninit", issue = "63291")] + #[inline] + pub fn try_new_zeroed_in(alloc: A) -> Result, A>, AllocError> { + unsafe { + Ok(Rc::from_ptr_in( + Rc::try_allocate_for_layout( + Layout::new::(), + |layout| alloc.allocate_zeroed(layout), + <*mut u8>::cast, + )?, + alloc, + )) + } + } + + /// Constructs a new `Pin>` in the provided allocator. If `T` does not implement `Unpin`, then + /// `value` will be pinned in memory and unable to be moved. + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "allocator_api", issue = "32838")] + #[inline] + pub fn pin_in(value: T, alloc: A) -> Pin { + unsafe { Pin::new_unchecked(Rc::new_in(value, alloc)) } + } /// Returns the inner value, if the `Rc` has exactly one strong reference. /// @@ -665,13 +908,14 @@ impl Rc { if Rc::strong_count(&this) == 1 { unsafe { let val = ptr::read(&*this); // copy the contained object + let alloc = ptr::read(&this.alloc); // copy the allocator // Indicate to Weaks that they can't be promoted by decrementing // the strong count, and then remove the implicit "strong weak" // pointer while also handling drop logic by just crafting a // fake Weak. this.inner().dec_strong(); - let _weak = Weak { ptr: this.ptr }; + let _weak = Weak { ptr: this.ptr, alloc }; forget(this); Ok(val) } @@ -758,7 +1002,7 @@ impl Rc<[T]> { Layout::array::(len).unwrap(), |layout| Global.allocate_zeroed(layout), |mem| { - ptr::slice_from_raw_parts_mut(mem as *mut T, len) + ptr::slice_from_raw_parts_mut(mem.cast::(), len) as *mut RcBox<[mem::MaybeUninit]> }, )) @@ -766,7 +1010,84 @@ impl Rc<[T]> { } } -impl Rc> { +impl Rc<[T], A> { + /// Constructs a new reference-counted slice with uninitialized contents. + /// + /// # Examples + /// + /// ``` + /// #![feature(new_uninit)] + /// #![feature(get_mut_unchecked)] + /// #![feature(allocator_api)] + /// + /// use std::rc::Rc; + /// use std::alloc::System; + /// + /// let mut values = Rc::<[u32], _>::new_uninit_slice_in(3, System); + /// + /// let values = unsafe { + /// // Deferred initialization: + /// Rc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1); + /// Rc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2); + /// Rc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3); + /// + /// values.assume_init() + /// }; + /// + /// assert_eq!(*values, [1, 2, 3]) + /// ``` + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "allocator_api", issue = "32838")] + // #[unstable(feature = "new_uninit", issue = "63291")] + #[inline] + pub fn new_uninit_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit], A> { + unsafe { Rc::from_ptr_in(Rc::allocate_for_slice_in(len, &alloc), alloc) } + } + + /// Constructs a new reference-counted slice with uninitialized contents, with the memory being + /// filled with `0` bytes. + /// + /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and + /// incorrect usage of this method. 
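The `try_unwrap` change earlier in this chunk has to move the allocator out alongside the value (the `ptr::read(&this.alloc)` line) before crafting the cleanup `Weak`, but its observable contract is unchanged. A sketch:

```rust
#![feature(allocator_api)]

use std::alloc::System;
use std::rc::Rc;

fn main() {
    // Sole strong reference: the value (and its allocator) move out.
    let x = Rc::new_in(3, System);
    assert_eq!(Rc::try_unwrap(x), Ok(3));

    // A second strong reference makes unwrapping fail, returning the Rc.
    let x = Rc::new_in(4, System);
    let _y = Rc::clone(&x);
    assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);
}
```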
+ /// + /// # Examples + /// + /// ``` + /// #![feature(new_uninit)] + /// #![feature(allocator_api)] + /// + /// use std::rc::Rc; + /// use std::alloc::System; + /// + /// let values = Rc::<[u32], _>::new_zeroed_slice_in(3, System); + /// let values = unsafe { values.assume_init() }; + /// + /// assert_eq!(*values, [0, 0, 0]) + /// ``` + /// + /// [zeroed]: mem::MaybeUninit::zeroed + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "allocator_api", issue = "32838")] + // #[unstable(feature = "new_uninit", issue = "63291")] + #[inline] + pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit], A> { + unsafe { + Rc::from_ptr_in( + Rc::allocate_for_layout( + Layout::array::(len).unwrap(), + |layout| alloc.allocate_zeroed(layout), + |mem| { + ptr::slice_from_raw_parts_mut(mem.cast::(), len) + as *mut RcBox<[mem::MaybeUninit]> + }, + ), + alloc, + ) + } + } +} + +impl Rc, A> { /// Converts to `Rc`. /// /// # Safety @@ -798,12 +1119,16 @@ impl Rc> { /// ``` #[unstable(feature = "new_uninit", issue = "63291")] #[inline] - pub unsafe fn assume_init(self) -> Rc { - unsafe { Rc::from_inner(mem::ManuallyDrop::new(self).ptr.cast()) } + pub unsafe fn assume_init(self) -> Rc + where + A: Clone, + { + let md_self = mem::ManuallyDrop::new(self); + unsafe { Rc::from_inner_in(md_self.ptr.cast(), md_self.alloc.clone()) } } } -impl Rc<[mem::MaybeUninit]> { +impl Rc<[mem::MaybeUninit], A> { /// Converts to `Rc<[T]>`. /// /// # Safety @@ -838,12 +1163,128 @@ impl Rc<[mem::MaybeUninit]> { /// ``` #[unstable(feature = "new_uninit", issue = "63291")] #[inline] - pub unsafe fn assume_init(self) -> Rc<[T]> { - unsafe { Rc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) } + pub unsafe fn assume_init(self) -> Rc<[T], A> + where + A: Clone, + { + let md_self = mem::ManuallyDrop::new(self); + unsafe { Rc::from_ptr_in(md_self.ptr.as_ptr() as _, md_self.alloc.clone()) } } } impl Rc { + /// Constructs an `Rc` from a raw pointer. + /// + /// The raw pointer must have been previously returned by a call to + /// [`Rc::into_raw`][into_raw] where `U` must have the same size + /// and alignment as `T`. This is trivially true if `U` is `T`. + /// Note that if `U` is not `T` but has the same size and alignment, this is + /// basically like transmuting references of different types. See + /// [`mem::transmute`][transmute] for more information on what + /// restrictions apply in this case. + /// + /// The raw pointer must point to a block of memory allocated by the global allocator + /// + /// The user of `from_raw` has to make sure a specific value of `T` is only + /// dropped once. + /// + /// This function is unsafe because improper use may lead to memory unsafety, + /// even if the returned `Rc` is never accessed. + /// + /// [into_raw]: Rc::into_raw + /// [transmute]: core::mem::transmute + /// + /// # Examples + /// + /// ``` + /// use std::rc::Rc; + /// + /// let x = Rc::new("hello".to_owned()); + /// let x_ptr = Rc::into_raw(x); + /// + /// unsafe { + /// // Convert back to an `Rc` to prevent leak. + /// let x = Rc::from_raw(x_ptr); + /// assert_eq!(&*x, "hello"); + /// + /// // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe. + /// } + /// + /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling! 
+ /// ``` + #[inline] + #[stable(feature = "rc_raw", since = "1.17.0")] + pub unsafe fn from_raw(ptr: *const T) -> Self { + unsafe { Self::from_raw_in(ptr, Global) } + } + + /// Increments the strong reference count on the `Rc` associated with the + /// provided pointer by one. + /// + /// # Safety + /// + /// The pointer must have been obtained through `Rc::into_raw`, the + /// associated `Rc` instance must be valid (i.e. the strong count must be at + /// least 1) for the duration of this method, and `ptr` must point to a block of memory + /// allocated by the global allocator. + /// + /// # Examples + /// + /// ``` + /// use std::rc::Rc; + /// + /// let five = Rc::new(5); + /// + /// unsafe { + /// let ptr = Rc::into_raw(five); + /// Rc::increment_strong_count(ptr); + /// + /// let five = Rc::from_raw(ptr); + /// assert_eq!(2, Rc::strong_count(&five)); + /// } + /// ``` + #[inline] + #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")] + pub unsafe fn increment_strong_count(ptr: *const T) { + unsafe { Self::increment_strong_count_in(ptr, Global) } + } + + /// Decrements the strong reference count on the `Rc` associated with the + /// provided pointer by one. + /// + /// # Safety + /// + /// The pointer must have been obtained through `Rc::into_raw`, the + /// associated `Rc` instance must be valid (i.e. the strong count must be at + /// least 1) when invoking this method, and `ptr` must point to a block of memory + /// allocated by the global allocator. This method can be used to release the final `Rc` and + /// backing storage, but **should not** be called after the final `Rc` has been released. + /// + /// # Examples + /// + /// ``` + /// use std::rc::Rc; + /// + /// let five = Rc::new(5); + /// + /// unsafe { + /// let ptr = Rc::into_raw(five); + /// Rc::increment_strong_count(ptr); + /// + /// let five = Rc::from_raw(ptr); + /// assert_eq!(2, Rc::strong_count(&five)); + /// Rc::decrement_strong_count(ptr); + /// assert_eq!(1, Rc::strong_count(&five)); + /// } + /// ``` + #[inline] + #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")] + pub unsafe fn decrement_strong_count(ptr: *const T) { + unsafe { Self::decrement_strong_count_in(ptr, Global) } + } +} + +impl Rc { /// Consumes the `Rc`, returning the wrapped pointer. /// /// To avoid a memory leak the pointer must be converted back to an `Rc` using @@ -891,16 +1332,18 @@ impl Rc { unsafe { ptr::addr_of_mut!((*ptr).value) } } - /// Constructs an `Rc` from a raw pointer. + /// Constructs an `Rc` from a raw pointer in the provided allocator. /// /// The raw pointer must have been previously returned by a call to - /// [`Rc::into_raw`][into_raw] where `U` must have the same size + /// [`Rc::into_raw`][into_raw] where `U` must have the same size /// and alignment as `T`. This is trivially true if `U` is `T`. /// Note that if `U` is not `T` but has the same size and alignment, this is /// basically like transmuting references of different types. See /// [`mem::transmute`] for more information on what /// restrictions apply in this case. /// + /// The raw pointer must point to a block of memory allocated by `alloc` + /// /// The user of `from_raw` has to make sure a specific value of `T` is only /// dropped once. 
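The `Global`-pinned wrappers above simply forward to the new `_in` variants. The intended usage pattern, which works on stable, is balancing manual count edits against `from_raw` calls when raw pointers cross an FFI-style boundary:

```rust
use std::rc::Rc;

// Pretend some foreign code stashes another reference to the value:
// each raw pointer handed out must own its own strong count.
fn share(ptr: *const i32) -> *const i32 {
    unsafe { Rc::increment_strong_count(ptr) };
    ptr
}

fn main() {
    let ptr = Rc::into_raw(Rc::new(5));
    let alias = share(ptr);

    // Reclaim both raw pointers; each `from_raw` consumes one strong ref.
    let a = unsafe { Rc::from_raw(ptr) };
    let b = unsafe { Rc::from_raw(alias) };
    assert_eq!(Rc::strong_count(&a), 2);
    drop(a);
    assert_eq!(*b, 5);
}
```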
/// @@ -912,14 +1355,17 @@ impl Rc { /// # Examples /// /// ``` - /// use std::rc::Rc; + /// #![feature(allocator_api)] /// - /// let x = Rc::new("hello".to_owned()); + /// use std::rc::Rc; + /// use std::alloc::System; + /// + /// let x = Rc::new_in("hello".to_owned(), System); /// let x_ptr = Rc::into_raw(x); /// /// unsafe { /// // Convert back to an `Rc` to prevent leak. - /// let x = Rc::from_raw(x_ptr); + /// let x = Rc::from_raw_in(x_ptr, System); /// assert_eq!(&*x, "hello"); /// /// // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe. @@ -927,14 +1373,14 @@ impl Rc { /// /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling! /// ``` - #[stable(feature = "rc_raw", since = "1.17.0")] - pub unsafe fn from_raw(ptr: *const T) -> Self { + #[unstable(feature = "allocator_api", issue = "32838")] + pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self { let offset = unsafe { data_offset(ptr) }; // Reverse the offset to find the original RcBox. let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcBox }; - unsafe { Self::from_ptr(rc_ptr) } + unsafe { Self::from_ptr_in(rc_ptr, alloc) } } /// Creates a new [`Weak`] pointer to this allocation. @@ -951,11 +1397,14 @@ impl Rc { #[must_use = "this returns a new `Weak` pointer, \ without modifying the original `Rc`"] #[stable(feature = "rc_weak", since = "1.4.0")] - pub fn downgrade(this: &Self) -> Weak { + pub fn downgrade(this: &Self) -> Weak + where + A: Clone, + { this.inner().inc_weak(); // Make sure we do not create a dangling Weak debug_assert!(!is_dangling(this.ptr.as_ptr())); - Weak { ptr: this.ptr } + Weak { ptr: this.ptr, alloc: this.alloc.clone() } } /// Gets the number of [`Weak`] pointers to this allocation. @@ -999,30 +1448,37 @@ impl Rc { /// /// # Safety /// - /// The pointer must have been obtained through `Rc::into_raw`, and the + /// The pointer must have been obtained through `Rc::into_raw`, the /// associated `Rc` instance must be valid (i.e. the strong count must be at - /// least 1) for the duration of this method. + /// least 1) for the duration of this method, and `ptr` must point to a block of memory + /// allocated by `alloc` /// /// # Examples /// /// ``` - /// use std::rc::Rc; + /// #![feature(allocator_api)] /// - /// let five = Rc::new(5); + /// use std::rc::Rc; + /// use std::alloc::System; + /// + /// let five = Rc::new_in(5, System); /// /// unsafe { /// let ptr = Rc::into_raw(five); - /// Rc::increment_strong_count(ptr); + /// Rc::increment_strong_count_in(ptr, System); /// - /// let five = Rc::from_raw(ptr); + /// let five = Rc::from_raw_in(ptr, System); /// assert_eq!(2, Rc::strong_count(&five)); /// } /// ``` #[inline] - #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")] - pub unsafe fn increment_strong_count(ptr: *const T) { + #[unstable(feature = "allocator_api", issue = "32838")] + pub unsafe fn increment_strong_count_in(ptr: *const T, alloc: A) + where + A: Clone, + { // Retain Rc, but don't touch refcount by wrapping in ManuallyDrop - let rc = unsafe { mem::ManuallyDrop::new(Rc::::from_raw(ptr)) }; + let rc = unsafe { mem::ManuallyDrop::new(Rc::::from_raw_in(ptr, alloc)) }; // Now increase refcount, but don't drop new refcount either let _rc_clone: mem::ManuallyDrop<_> = rc.clone(); } @@ -1032,33 +1488,36 @@ impl Rc { /// /// # Safety /// - /// The pointer must have been obtained through `Rc::into_raw`, and the + /// The pointer must have been obtained through `Rc::into_raw`, the /// associated `Rc` instance must be valid (i.e. 
the strong count must be at - /// least 1) when invoking this method. This method can be used to release - /// the final `Rc` and backing storage, but **should not** be called after - /// the final `Rc` has been released. + /// least 1) when invoking this method, and `ptr` must point to a block of memory + /// allocated by `alloc`. This method can be used to release the final `Rc` and backing storage, + /// but **should not** be called after the final `Rc` has been released. /// /// # Examples /// /// ``` - /// use std::rc::Rc; + /// #![feature(allocator_api)] /// - /// let five = Rc::new(5); + /// use std::rc::Rc; + /// use std::alloc::System; + /// + /// let five = Rc::new_in(5, System); /// /// unsafe { /// let ptr = Rc::into_raw(five); - /// Rc::increment_strong_count(ptr); + /// Rc::increment_strong_count_in(ptr, System); /// - /// let five = Rc::from_raw(ptr); + /// let five = Rc::from_raw_in(ptr, System); /// assert_eq!(2, Rc::strong_count(&five)); - /// Rc::decrement_strong_count(ptr); + /// Rc::decrement_strong_count_in(ptr, System); /// assert_eq!(1, Rc::strong_count(&five)); /// } /// ``` #[inline] - #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")] - pub unsafe fn decrement_strong_count(ptr: *const T) { - unsafe { drop(Rc::from_raw(ptr)) }; + #[unstable(feature = "allocator_api", issue = "32838")] + pub unsafe fn decrement_strong_count_in(ptr: *const T, alloc: A) { + unsafe { drop(Rc::from_raw_in(ptr, alloc)) }; } /// Returns `true` if there are no other `Rc` or [`Weak`] pointers to @@ -1188,7 +1647,7 @@ impl Rc { } } -impl Rc { +impl Rc { /// Makes a mutable reference into the given `Rc`. /// /// If there are other `Rc` pointers to the same allocation, then `make_mut` will @@ -1246,7 +1705,7 @@ impl Rc { if Rc::strong_count(this) != 1 { // Gotta clone the data, there are other Rcs. // Pre-allocate memory to allow writing the cloned value directly. - let mut rc = Self::new_uninit(); + let mut rc = Self::new_uninit_in(this.alloc.clone()); unsafe { let data = Rc::get_mut_unchecked(&mut rc); (**this).write_clone_into_raw(data.as_mut_ptr()); @@ -1254,7 +1713,7 @@ impl Rc { } } else if Rc::weak_count(this) != 0 { // Can just steal the data, all that's left is Weaks - let mut rc = Self::new_uninit(); + let mut rc = Self::new_uninit_in(this.alloc.clone()); unsafe { let data = Rc::get_mut_unchecked(&mut rc); data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1); @@ -1310,7 +1769,7 @@ impl Rc { } } -impl Rc { +impl Rc { /// Attempt to downcast the `Rc` to a concrete type. 
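The `make_mut` change above threads the allocator through both the clone-on-write path and the steal-from-Weaks path via `new_uninit_in(this.alloc.clone())`, which is where the new `A: Clone` bound comes from. From the caller's side the behavior is unchanged; a sketch with `System`:

```rust
#![feature(allocator_api)]

use std::alloc::System;
use std::rc::Rc;

fn main() {
    let mut data = Rc::new_in(5, System);
    let other = Rc::clone(&data);

    // Shared: make_mut clones the value into a fresh System allocation.
    *Rc::make_mut(&mut data) += 1;

    assert_eq!(*data, 6);
    assert_eq!(*other, 5);
    assert!(!Rc::ptr_eq(&data, &other));
}
```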
/// /// # Examples @@ -1331,12 +1790,13 @@ impl Rc { /// ``` #[inline] #[stable(feature = "rc_downcast", since = "1.29.0")] - pub fn downcast(self) -> Result, Rc> { + pub fn downcast(self) -> Result, Self> { if (*self).is::() { unsafe { let ptr = self.ptr.cast::>(); + let alloc = self.alloc.clone(); forget(self); - Ok(Rc::from_inner(ptr)) + Ok(Rc::from_inner_in(ptr, alloc)) } } else { Err(self) @@ -1371,11 +1831,12 @@ impl Rc { /// [`downcast`]: Self::downcast #[inline] #[unstable(feature = "downcast_unchecked", issue = "90850")] - pub unsafe fn downcast_unchecked(self) -> Rc { + pub unsafe fn downcast_unchecked(self) -> Rc { unsafe { let ptr = self.ptr.cast::>(); + let alloc = self.alloc.clone(); mem::forget(self); - Rc::from_inner(ptr) + Rc::from_inner_in(ptr, alloc) } } } @@ -1427,25 +1888,27 @@ impl Rc { Ok(inner) } +} +impl Rc { /// Allocates an `RcBox` with sufficient space for an unsized inner value #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox { + unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> *mut RcBox { // Allocate for the `RcBox` using the given value. unsafe { - Self::allocate_for_layout( + Rc::::allocate_for_layout( Layout::for_value(&*ptr), - |layout| Global.allocate(layout), + |layout| alloc.allocate(layout), |mem| mem.with_metadata_of(ptr as *const RcBox), ) } } #[cfg(not(no_global_oom_handling))] - fn from_box(src: Box) -> Rc { + fn from_box_in(src: Box) -> Rc { unsafe { let value_size = size_of_val(&*src); - let ptr = Self::allocate_for_ptr(&*src); + let ptr = Self::allocate_for_ptr_in(&*src, Box::allocator(&src)); // Copy value as bytes ptr::copy_nonoverlapping( @@ -1455,10 +1918,11 @@ impl Rc { ); // Free the allocation without dropping its contents - let src = Box::from_raw(Box::into_raw(src) as *mut mem::ManuallyDrop); + let (bptr, alloc) = Box::into_raw_with_allocator(src); + let src = Box::from_raw(bptr as *mut mem::ManuallyDrop); drop(src); - Self::from_ptr(ptr) + Self::from_ptr_in(ptr, alloc) } } } @@ -1471,7 +1935,7 @@ impl Rc<[T]> { Self::allocate_for_layout( Layout::array::(len).unwrap(), |layout| Global.allocate(layout), - |mem| ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut RcBox<[T]>, + |mem| ptr::slice_from_raw_parts_mut(mem.cast::(), len) as *mut RcBox<[T]>, ) } } @@ -1538,6 +2002,21 @@ impl Rc<[T]> { } } +impl Rc<[T], A> { + /// Allocates an `RcBox<[T]>` with the given length. + #[inline] + #[cfg(not(no_global_oom_handling))] + unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> *mut RcBox<[T]> { + unsafe { + Rc::<[T]>::allocate_for_layout( + Layout::array::(len).unwrap(), + |layout| alloc.allocate(layout), + |mem| ptr::slice_from_raw_parts_mut(mem.cast::(), len) as *mut RcBox<[T]>, + ) + } + } +} + /// Specialization trait used for `From<&[T]>`. trait RcFromSlice { fn from_slice(slice: &[T]) -> Self; @@ -1560,7 +2039,7 @@ impl RcFromSlice for Rc<[T]> { } #[stable(feature = "rust1", since = "1.0.0")] -impl Deref for Rc { +impl Deref for Rc { type Target = T; #[inline(always)] @@ -1573,7 +2052,7 @@ impl Deref for Rc { impl Receiver for Rc {} #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc { +unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Rc { /// Drops the `Rc`. /// /// This will decrement the strong reference count. 
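For the `downcast`/`downcast_unchecked` changes earlier in this hunk: only the pointer metadata and (with a non-default `A`) a clone of the allocator change; the allocation itself is reused. With the default allocator this is observable on stable:

```rust
use std::any::Any;
use std::rc::Rc;

fn main() {
    let any: Rc<dyn Any> = Rc::new(5_i32);

    // Downcasting re-types the same allocation; nothing is copied.
    match any.downcast::<i32>() {
        Ok(five) => assert_eq!(*five, 5),
        Err(_) => unreachable!("the erased value really is an i32"),
    }
}
```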
If the strong reference @@ -1611,7 +2090,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc { self.inner().dec_weak(); if self.inner().weak() == 0 { - Global.deallocate(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())); + self.alloc.deallocate(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())); } } } @@ -1619,7 +2098,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc { } #[stable(feature = "rust1", since = "1.0.0")] -impl Clone for Rc { +impl Clone for Rc { /// Makes a clone of the `Rc` pointer. /// /// This creates another pointer to the same allocation, increasing the @@ -1635,10 +2114,10 @@ impl Clone for Rc { /// let _ = Rc::clone(&five); /// ``` #[inline] - fn clone(&self) -> Rc { + fn clone(&self) -> Self { unsafe { self.inner().inc_strong(); - Self::from_inner(self.ptr) + Self::from_inner_in(self.ptr, self.alloc.clone()) } } } @@ -1663,20 +2142,20 @@ impl Default for Rc { } #[stable(feature = "rust1", since = "1.0.0")] -trait RcEqIdent { - fn eq(&self, other: &Rc) -> bool; - fn ne(&self, other: &Rc) -> bool; +trait RcEqIdent { + fn eq(&self, other: &Rc) -> bool; + fn ne(&self, other: &Rc) -> bool; } #[stable(feature = "rust1", since = "1.0.0")] -impl RcEqIdent for Rc { +impl RcEqIdent for Rc { #[inline] - default fn eq(&self, other: &Rc) -> bool { + default fn eq(&self, other: &Rc) -> bool { **self == **other } #[inline] - default fn ne(&self, other: &Rc) -> bool { + default fn ne(&self, other: &Rc) -> bool { **self != **other } } @@ -1695,20 +2174,20 @@ impl MarkerEq for T {} /// /// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive. #[stable(feature = "rust1", since = "1.0.0")] -impl RcEqIdent for Rc { +impl RcEqIdent for Rc { #[inline] - fn eq(&self, other: &Rc) -> bool { + fn eq(&self, other: &Rc) -> bool { Rc::ptr_eq(self, other) || **self == **other } #[inline] - fn ne(&self, other: &Rc) -> bool { + fn ne(&self, other: &Rc) -> bool { !Rc::ptr_eq(self, other) && **self != **other } } #[stable(feature = "rust1", since = "1.0.0")] -impl PartialEq for Rc { +impl PartialEq for Rc { /// Equality for two `Rc`s. /// /// Two `Rc`s are equal if their inner values are equal, even if they are @@ -1728,7 +2207,7 @@ impl PartialEq for Rc { /// assert!(five == Rc::new(5)); /// ``` #[inline] - fn eq(&self, other: &Rc) -> bool { + fn eq(&self, other: &Rc) -> bool { RcEqIdent::eq(self, other) } @@ -1750,16 +2229,16 @@ impl PartialEq for Rc { /// assert!(five != Rc::new(6)); /// ``` #[inline] - fn ne(&self, other: &Rc) -> bool { + fn ne(&self, other: &Rc) -> bool { RcEqIdent::ne(self, other) } } #[stable(feature = "rust1", since = "1.0.0")] -impl Eq for Rc {} +impl Eq for Rc {} #[stable(feature = "rust1", since = "1.0.0")] -impl PartialOrd for Rc { +impl PartialOrd for Rc { /// Partial comparison for two `Rc`s. /// /// The two are compared by calling `partial_cmp()` on their inner values. 
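`Drop` now returns memory through `self.alloc` instead of `Global`, and `Clone` duplicates the allocator handle alongside the refcount bump. A hypothetical instrumented allocator (not part of this diff) makes both visible:

```rust
#![feature(allocator_api)]

use std::alloc::{AllocError, Allocator, Global, Layout};
use std::cell::Cell;
use std::ptr::NonNull;
use std::rc::Rc;

// Counts live allocations so the deallocation path through `self.alloc`
// shows up; defers the real work to Global.
#[derive(Clone)]
struct Counting<'a>(&'a Cell<isize>);

unsafe impl Allocator for Counting<'_> {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.0.set(self.0.get() + 1);
        Global.allocate(layout)
    }
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        self.0.set(self.0.get() - 1);
        unsafe { Global.deallocate(ptr, layout) }
    }
}

fn main() {
    let live = Cell::new(0);
    let rc = Rc::new_in(5, Counting(&live));
    let rc2 = rc.clone(); // bumps the refcount, clones the allocator handle
    assert_eq!(live.get(), 1);

    drop(rc);
    drop(rc2); // last strong (and implicit weak) gone: freed via `self.alloc`
    assert_eq!(live.get(), 0);
}
```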
@@ -1775,7 +2254,7 @@ impl PartialOrd for Rc { /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Rc::new(6))); /// ``` #[inline(always)] - fn partial_cmp(&self, other: &Rc) -> Option { + fn partial_cmp(&self, other: &Rc) -> Option { (**self).partial_cmp(&**other) } @@ -1793,7 +2272,7 @@ impl PartialOrd for Rc { /// assert!(five < Rc::new(6)); /// ``` #[inline(always)] - fn lt(&self, other: &Rc) -> bool { + fn lt(&self, other: &Rc) -> bool { **self < **other } @@ -1811,7 +2290,7 @@ impl PartialOrd for Rc { /// assert!(five <= Rc::new(5)); /// ``` #[inline(always)] - fn le(&self, other: &Rc) -> bool { + fn le(&self, other: &Rc) -> bool { **self <= **other } @@ -1829,7 +2308,7 @@ impl PartialOrd for Rc { /// assert!(five > Rc::new(4)); /// ``` #[inline(always)] - fn gt(&self, other: &Rc) -> bool { + fn gt(&self, other: &Rc) -> bool { **self > **other } @@ -1847,13 +2326,13 @@ impl PartialOrd for Rc { /// assert!(five >= Rc::new(5)); /// ``` #[inline(always)] - fn ge(&self, other: &Rc) -> bool { + fn ge(&self, other: &Rc) -> bool { **self >= **other } } #[stable(feature = "rust1", since = "1.0.0")] -impl Ord for Rc { +impl Ord for Rc { /// Comparison for two `Rc`s. /// /// The two are compared by calling `cmp()` on their inner values. @@ -1869,34 +2348,34 @@ impl Ord for Rc { /// assert_eq!(Ordering::Less, five.cmp(&Rc::new(6))); /// ``` #[inline] - fn cmp(&self, other: &Rc) -> Ordering { + fn cmp(&self, other: &Rc) -> Ordering { (**self).cmp(&**other) } } #[stable(feature = "rust1", since = "1.0.0")] -impl Hash for Rc { +impl Hash for Rc { fn hash(&self, state: &mut H) { (**self).hash(state); } } #[stable(feature = "rust1", since = "1.0.0")] -impl fmt::Display for Rc { +impl fmt::Display for Rc { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(&**self, f) } } #[stable(feature = "rust1", since = "1.0.0")] -impl fmt::Debug for Rc { +impl fmt::Debug for Rc { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(&**self, f) } } #[stable(feature = "rust1", since = "1.0.0")] -impl fmt::Pointer for Rc { +impl fmt::Pointer for Rc { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Pointer::fmt(&(&**self as *const T), f) } @@ -1982,7 +2461,7 @@ impl From for Rc { #[cfg(not(no_global_oom_handling))] #[stable(feature = "shared_from_slice", since = "1.21.0")] -impl From> for Rc { +impl From> for Rc { /// Move a boxed object to a new, reference counted, allocation. /// /// # Example @@ -1994,14 +2473,14 @@ impl From> for Rc { /// assert_eq!(1, *shared); /// ``` #[inline] - fn from(v: Box) -> Rc { - Rc::from_box(v) + fn from(v: Box) -> Rc { + Rc::from_box_in(v) } } #[cfg(not(no_global_oom_handling))] #[stable(feature = "shared_from_slice", since = "1.21.0")] -impl From> for Rc<[T]> { +impl From> for Rc<[T], A> { /// Allocate a reference-counted slice and move `v`'s items into it. 
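`from_box_in` above is the plumbing behind this `From<Box<T, A>>` impl: `Box::into_raw_with_allocator` splits the box so the same `A` can allocate the new RcBox and then free the old buffer. In use, on nightly:

```rust
#![feature(allocator_api)]

use std::alloc::System;
use std::rc::Rc;

fn main() {
    let boxed = Box::new_in(1, System);

    // The value moves into a fresh RcBox from the Box's own allocator,
    // and the old buffer is returned to that same allocator.
    let shared: Rc<i32, System> = Rc::from(boxed);
    assert_eq!(1, *shared);
}
```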
/// /// # Example @@ -2013,12 +2492,18 @@ impl From> for Rc<[T]> { /// assert_eq!(vec![1, 2, 3], *shared); /// ``` #[inline] - fn from(mut v: Vec) -> Rc<[T]> { + fn from(v: Vec) -> Rc<[T], A> { unsafe { - let rc = Rc::copy_from_slice(&v); - // Allow the Vec to free its memory, but not destroy its contents - v.set_len(0); - rc + let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc(); + + let rc_ptr = Self::allocate_for_slice_in(len, &alloc); + ptr::copy_nonoverlapping(vec_ptr, &mut (*rc_ptr).value as *mut [T] as *mut T, len); + + // Create a `Vec` with length 0, to deallocate the buffer + // without dropping its contents or the allocator + let _ = Vec::from_raw_parts_in(vec_ptr, 0, cap, &alloc); + + Self::from_ptr_in(rc_ptr, alloc) } } } @@ -2189,7 +2674,10 @@ impl> ToRcSlice for I { /// /// [`upgrade`]: Weak::upgrade #[stable(feature = "rc_weak", since = "1.4.0")] -pub struct Weak { +pub struct Weak< + T: ?Sized, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, +> { // This is a `NonNull` to allow optimizing the size of this type in enums, // but it is not necessarily a valid pointer. // `Weak::new` sets this to `usize::MAX` so that it doesn’t need @@ -2197,15 +2685,16 @@ pub struct Weak { // will ever have because RcBox has alignment at least 2. // This is only possible when `T: Sized`; unsized `T` never dangle. ptr: NonNull>, + alloc: A, } #[stable(feature = "rc_weak", since = "1.4.0")] -impl !Send for Weak {} +impl !Send for Weak {} #[stable(feature = "rc_weak", since = "1.4.0")] -impl !Sync for Weak {} +impl !Sync for Weak {} #[unstable(feature = "coerce_unsized", issue = "18598")] -impl, U: ?Sized> CoerceUnsized> for Weak {} +impl, U: ?Sized, A: Allocator> CoerceUnsized> for Weak {} #[unstable(feature = "dispatch_from_dyn", issue = "none")] impl, U: ?Sized> DispatchFromDyn> for Weak {} @@ -2224,16 +2713,45 @@ impl Weak { /// let empty: Weak = Weak::new(); /// assert!(empty.upgrade().is_none()); /// ``` + #[inline] #[stable(feature = "downgraded_weak", since = "1.10.0")] #[rustc_const_unstable(feature = "const_weak_new", issue = "95091", reason = "recently added")] #[must_use] pub const fn new() -> Weak { - Weak { ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::>(usize::MAX)) } } + Weak { + ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::>(usize::MAX)) }, + alloc: Global, + } + } +} + +impl Weak { + /// Constructs a new `Weak`, without allocating any memory, technically in the provided + /// allocator. + /// Calling [`upgrade`] on the return value always gives [`None`]. + /// + /// [`upgrade`]: Weak::upgrade + /// + /// # Examples + /// + /// ``` + /// use std::rc::Weak; + /// + /// let empty: Weak = Weak::new(); + /// assert!(empty.upgrade().is_none()); + /// ``` + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn new_in(alloc: A) -> Weak { + Weak { + ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::>(usize::MAX)) }, + alloc, + } } } pub(crate) fn is_dangling(ptr: *mut T) -> bool { - (ptr as *mut ()).addr() == usize::MAX + (ptr.cast::<()>()).addr() == usize::MAX } /// Helper type to allow accessing the reference counts without @@ -2244,6 +2762,56 @@ struct WeakInner<'a> { } impl Weak { + /// Converts a raw pointer previously created by [`into_raw`] back into `Weak`. + /// + /// This can be used to safely get a strong reference (by calling [`upgrade`] + /// later) or to deallocate the weak count by dropping the `Weak`. 
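The rewritten `From<Vec<T, A>>` earlier in this chunk does the same allocator handoff for slices: `into_raw_parts_with_alloc` splits the vector, the elements are copied into a fresh `RcBox<[T]>` from the same allocator, and a zero-length `Vec` is reassembled purely to free the old buffer. A sketch:

```rust
#![feature(allocator_api)]

use std::alloc::System;
use std::rc::Rc;

fn main() {
    let mut v = Vec::new_in(System);
    v.extend([1, 2, 3]);

    // Elements move into one Rc<[T]> allocation from the same allocator;
    // the Vec's buffer is returned to it without dropping any contents.
    let shared: Rc<[i32], System> = Rc::from(v);
    assert_eq!(*shared, [1, 2, 3]);
}
```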
+ /// + /// It takes ownership of one weak reference (with the exception of pointers created by [`new`], + /// as these don't own anything; the method still works on them). + /// + /// # Safety + /// + /// The pointer must have originated from the [`into_raw`] and must still own its potential + /// weak reference, and `ptr` must point to a block of memory allocated by the global allocator. + /// + /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this + /// takes ownership of one weak reference currently represented as a raw pointer (the weak + /// count is not modified by this operation) and therefore it must be paired with a previous + /// call to [`into_raw`]. + /// + /// # Examples + /// + /// ``` + /// use std::rc::{Rc, Weak}; + /// + /// let strong = Rc::new("hello".to_owned()); + /// + /// let raw_1 = Rc::downgrade(&strong).into_raw(); + /// let raw_2 = Rc::downgrade(&strong).into_raw(); + /// + /// assert_eq!(2, Rc::weak_count(&strong)); + /// + /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap()); + /// assert_eq!(1, Rc::weak_count(&strong)); + /// + /// drop(strong); + /// + /// // Decrement the last weak count. + /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none()); + /// ``` + /// + /// [`into_raw`]: Weak::into_raw + /// [`upgrade`]: Weak::upgrade + /// [`new`]: Weak::new + #[inline] + #[stable(feature = "weak_into_raw", since = "1.45.0")] + pub unsafe fn from_raw(ptr: *const T) -> Self { + unsafe { Self::from_raw_in(ptr, Global) } + } +} + +impl Weak { /// Returns a raw pointer to the object `T` pointed to by this `Weak`. /// /// The pointer is valid only if there are some strong references. The pointer may be dangling, @@ -2321,6 +2889,45 @@ impl Weak { result } + /// Consumes the `Weak` and turns it into a raw pointer. + /// + /// This converts the weak pointer into a raw pointer, while still preserving the ownership of + /// one weak reference (the weak count is not modified by this operation). It can be turned + /// back into the `Weak` with [`from_raw`]. + /// + /// The same restrictions of accessing the target of the pointer as with + /// [`as_ptr`] apply. + /// + /// # Examples + /// + /// ``` + /// use std::rc::{Rc, Weak}; + /// + /// let strong = Rc::new("hello".to_owned()); + /// let weak = Rc::downgrade(&strong); + /// let raw = weak.into_raw(); + /// + /// assert_eq!(1, Rc::weak_count(&strong)); + /// assert_eq!("hello", unsafe { &*raw }); + /// + /// drop(unsafe { Weak::from_raw(raw) }); + /// assert_eq!(0, Rc::weak_count(&strong)); + /// ``` + /// + /// [`from_raw`]: Weak::from_raw + /// [`as_ptr`]: Weak::as_ptr + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn into_raw_and_alloc(self) -> (*const T, A) + where + A: Clone, + { + let result = self.as_ptr(); + let alloc = self.alloc.clone(); + mem::forget(self); + (result, alloc) + } + /// Converts a raw pointer previously created by [`into_raw`] back into `Weak`. /// /// This can be used to safely get a strong reference (by calling [`upgrade`] @@ -2332,7 +2939,7 @@ impl Weak { /// # Safety /// /// The pointer must have originated from the [`into_raw`] and must still own its potential - /// weak reference. + /// weak reference, and `ptr` must point to a block of memory allocated by `alloc`. /// /// It is allowed for the strong count to be 0 at the time of calling this. 
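`into_raw_and_alloc` above is the Weak-side splitting primitive this section adds; note that it clones `A` rather than moving it out. A sketch round-tripping it through `from_raw_in`, which is defined just below:

```rust
#![feature(allocator_api)]

use std::alloc::System;
use std::rc::{Rc, Weak};

fn main() {
    let strong = Rc::new_in("hello".to_owned(), System);
    let weak = Rc::downgrade(&strong);

    // Split the Weak into raw pointer + allocator; the raw pointer
    // still owns its one weak reference.
    let (raw, alloc) = weak.into_raw_and_alloc();
    assert_eq!(1, Rc::weak_count(&strong));

    // Reassemble and upgrade as usual.
    let weak: Weak<String, System> = unsafe { Weak::from_raw_in(raw, alloc) };
    assert_eq!("hello", *weak.upgrade().unwrap());
}
```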
Nevertheless, this /// takes ownership of one weak reference currently represented as a raw pointer (the weak @@ -2363,8 +2970,9 @@ impl Weak { /// [`into_raw`]: Weak::into_raw /// [`upgrade`]: Weak::upgrade /// [`new`]: Weak::new - #[stable(feature = "weak_into_raw", since = "1.45.0")] - pub unsafe fn from_raw(ptr: *const T) -> Self { + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self { // See Weak::as_ptr for context on how the input pointer is derived. let ptr = if is_dangling(ptr as *mut T) { @@ -2380,7 +2988,7 @@ impl Weak { }; // SAFETY: we now have recovered the original Weak pointer, so can create the Weak. - Weak { ptr: unsafe { NonNull::new_unchecked(ptr) } } + Weak { ptr: unsafe { NonNull::new_unchecked(ptr) }, alloc } } /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying @@ -2409,7 +3017,10 @@ impl Weak { #[must_use = "this returns a new `Rc`, \ without modifying the original weak pointer"] #[stable(feature = "rc_weak", since = "1.4.0")] - pub fn upgrade(&self) -> Option> { + pub fn upgrade(&self) -> Option> + where + A: Clone, + { let inner = self.inner()?; if inner.strong() == 0 { @@ -2417,7 +3028,7 @@ impl Weak { } else { unsafe { inner.inc_strong(); - Some(Rc::from_inner(self.ptr)) + Some(Rc::from_inner_in(self.ptr, self.alloc.clone())) } } } @@ -2437,15 +3048,15 @@ impl Weak { #[must_use] #[stable(feature = "weak_counts", since = "1.41.0")] pub fn weak_count(&self) -> usize { - self.inner() - .map(|inner| { - if inner.strong() > 0 { - inner.weak() - 1 // subtract the implicit weak ptr - } else { - 0 - } - }) - .unwrap_or(0) + if let Some(inner) = self.inner() { + if inner.strong() > 0 { + inner.weak() - 1 // subtract the implicit weak ptr + } else { + 0 + } + } else { + 0 + } } /// Returns `None` when the pointer is dangling and there is no allocated `RcBox`, @@ -2513,7 +3124,7 @@ impl Weak { } #[stable(feature = "rc_weak", since = "1.4.0")] -unsafe impl<#[may_dangle] T: ?Sized> Drop for Weak { +unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak { /// Drops the `Weak` pointer. /// /// # Examples @@ -2546,14 +3157,14 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Weak { // the strong pointers have disappeared. if inner.weak() == 0 { unsafe { - Global.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())); + self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())); } } } } #[stable(feature = "rc_weak", since = "1.4.0")] -impl Clone for Weak { +impl Clone for Weak { /// Makes a clone of the `Weak` pointer that points to the same allocation. 
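The `weak_count` rewrite above, from combinators to `if let`, is behavior-preserving; the contract is still: subtract the implicit strong-weak while any strong pointer lives, report 0 afterwards. Observable on stable:

```rust
use std::rc::Rc;

fn main() {
    let strong = Rc::new(5);
    let weak = Rc::downgrade(&strong);

    // One explicit Weak; the implicit "strong weak" is not counted.
    assert_eq!(weak.weak_count(), 1);

    drop(strong);

    // With the strong count at zero, weak_count reports 0 by contract.
    assert_eq!(weak.weak_count(), 0);
    assert!(weak.upgrade().is_none());
}
```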
/// /// # Examples @@ -2566,16 +3177,16 @@ impl Clone for Weak { /// let _ = Weak::clone(&weak_five); /// ``` #[inline] - fn clone(&self) -> Weak { + fn clone(&self) -> Weak { if let Some(inner) = self.inner() { inner.inc_weak() } - Weak { ptr: self.ptr } + Weak { ptr: self.ptr, alloc: self.alloc.clone() } } } #[stable(feature = "rc_weak", since = "1.4.0")] -impl fmt::Debug for Weak { +impl fmt::Debug for Weak { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "(Weak)") } @@ -2707,21 +3318,21 @@ impl<'a> RcInnerPtr for WeakInner<'a> { } #[stable(feature = "rust1", since = "1.0.0")] -impl borrow::Borrow for Rc { +impl borrow::Borrow for Rc { fn borrow(&self) -> &T { &**self } } #[stable(since = "1.5.0", feature = "smart_ptr_as_ref")] -impl AsRef for Rc { +impl AsRef for Rc { fn as_ref(&self) -> &T { &**self } } #[stable(feature = "pin", since = "1.33.0")] -impl Unpin for Rc {} +impl Unpin for Rc {} /// Get the offset within an `RcBox` for the payload behind a pointer. /// @@ -2822,7 +3433,7 @@ impl UniqueRc { unsafe { this.ptr.as_ref().inc_weak(); } - Weak { ptr: this.ptr } + Weak { ptr: this.ptr, alloc: Global } } /// Converts the `UniqueRc` into a regular [`Rc`] diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index 5bb1a93aeaf..e00850eb5d8 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -246,32 +246,48 @@ macro_rules! acquire { /// [rc_examples]: crate::rc#examples #[cfg_attr(not(test), rustc_diagnostic_item = "Arc")] #[stable(feature = "rust1", since = "1.0.0")] -pub struct Arc { +pub struct Arc< + T: ?Sized, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, +> { ptr: NonNull>, phantom: PhantomData>, + alloc: A, } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl Send for Arc {} +unsafe impl Send for Arc {} #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl Sync for Arc {} +unsafe impl Sync for Arc {} #[stable(feature = "catch_unwind", since = "1.9.0")] -impl UnwindSafe for Arc {} +impl UnwindSafe for Arc {} #[unstable(feature = "coerce_unsized", issue = "18598")] -impl, U: ?Sized> CoerceUnsized> for Arc {} +impl, U: ?Sized, A: Allocator> CoerceUnsized> for Arc {} #[unstable(feature = "dispatch_from_dyn", issue = "none")] impl, U: ?Sized> DispatchFromDyn> for Arc {} impl Arc { unsafe fn from_inner(ptr: NonNull>) -> Self { - Self { ptr, phantom: PhantomData } + unsafe { Self::from_inner_in(ptr, Global) } } unsafe fn from_ptr(ptr: *mut ArcInner) -> Self { - unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) } + unsafe { Self::from_ptr_in(ptr, Global) } + } +} + +impl Arc { + #[inline] + unsafe fn from_inner_in(ptr: NonNull>, alloc: A) -> Self { + Self { ptr, phantom: PhantomData, alloc } + } + + #[inline] + unsafe fn from_ptr_in(ptr: *mut ArcInner, alloc: A) -> Self { + unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) } } } @@ -296,7 +312,10 @@ impl Arc { /// /// [`upgrade`]: Weak::upgrade #[stable(feature = "arc_weak", since = "1.4.0")] -pub struct Weak { +pub struct Weak< + T: ?Sized, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, +> { // This is a `NonNull` to allow optimizing the size of this type in enums, // but it is not necessarily a valid pointer. // `Weak::new` sets this to `usize::MAX` so that it doesn’t need @@ -304,15 +323,16 @@ pub struct Weak { // will ever have because RcBox has alignment at least 2. // This is only possible when `T: Sized`; unsized `T` never dangle. 
ptr: NonNull>, + alloc: A, } #[stable(feature = "arc_weak", since = "1.4.0")] -unsafe impl Send for Weak {} +unsafe impl Send for Weak {} #[stable(feature = "arc_weak", since = "1.4.0")] -unsafe impl Sync for Weak {} +unsafe impl Sync for Weak {} #[unstable(feature = "coerce_unsized", issue = "18598")] -impl, U: ?Sized> CoerceUnsized> for Weak {} +impl, U: ?Sized, A: Allocator> CoerceUnsized> for Weak {} #[unstable(feature = "dispatch_from_dyn", issue = "none")] impl, U: ?Sized> DispatchFromDyn> for Weak {} @@ -442,7 +462,7 @@ impl Arc { .into(); let init_ptr: NonNull> = uninit_ptr.cast(); - let weak = Weak { ptr: init_ptr }; + let weak = Weak { ptr: init_ptr, alloc: Global }; // It's important we don't give up ownership of the weak pointer, or // else the memory might be freed by the time `data_fn` returns. If @@ -510,7 +530,7 @@ impl Arc { Arc::from_ptr(Arc::allocate_for_layout( Layout::new::(), |layout| Global.allocate(layout), - |mem| mem as *mut ArcInner>, + <*mut u8>::cast, )) } } @@ -544,7 +564,7 @@ impl Arc { Arc::from_ptr(Arc::allocate_for_layout( Layout::new::(), |layout| Global.allocate_zeroed(layout), - |mem| mem as *mut ArcInner>, + <*mut u8>::cast, )) } } @@ -617,7 +637,7 @@ impl Arc { Ok(Arc::from_ptr(Arc::try_allocate_for_layout( Layout::new::(), |layout| Global.allocate(layout), - |mem| mem as *mut ArcInner>, + <*mut u8>::cast, )?)) } } @@ -650,10 +670,254 @@ impl Arc { Ok(Arc::from_ptr(Arc::try_allocate_for_layout( Layout::new::(), |layout| Global.allocate_zeroed(layout), - |mem| mem as *mut ArcInner>, + <*mut u8>::cast, )?)) } } +} + +impl Arc { + /// Returns a reference to the underlying allocator. + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn allocator(&self) -> &A { + &self.alloc + } + /// Constructs a new `Arc` in the provided allocator. + /// + /// # Examples + /// + /// ``` + /// #![feature(allocator_api)] + /// + /// use std::sync::Arc; + /// use std::alloc::System; + /// + /// let five = Arc::new_in(5, System); + /// ``` + #[inline] + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn new_in(data: T, alloc: A) -> Arc { + // Start the weak pointer count as 1 which is the weak pointer that's + // held by all the strong pointers (kinda), see std/rc.rs for more info + let x = Box::new_in( + ArcInner { + strong: atomic::AtomicUsize::new(1), + weak: atomic::AtomicUsize::new(1), + data, + }, + alloc, + ); + let (ptr, alloc) = Box::into_unique(x); + unsafe { Self::from_inner_in(ptr.into(), alloc) } + } + + /// Constructs a new `Arc` with uninitialized contents in the provided allocator. 
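On the `Arc` side the same construction pattern holds, with the extra wrinkle that the `Send`/`Sync` impls above now also condition on the allocator parameter. With `System`, which is both, cross-thread sharing works exactly as before; a nightly sketch:

```rust
#![feature(allocator_api)]

use std::alloc::System;
use std::sync::Arc;
use std::thread;

fn main() {
    let five = Arc::new_in(5, System);

    let handles: Vec<_> = (0..4)
        .map(|_| {
            let five = Arc::clone(&five);
            thread::spawn(move || assert_eq!(*five, 5))
        })
        .collect();
    for handle in handles {
        handle.join().unwrap();
    }
    assert_eq!(Arc::strong_count(&five), 1);
}
```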
+ /// + /// # Examples + /// + /// ``` + /// #![feature(new_uninit)] + /// #![feature(get_mut_unchecked)] + /// #![feature(allocator_api)] + /// + /// use std::sync::Arc; + /// use std::alloc::System; + /// + /// let mut five = Arc::::new_uninit_in(System); + /// + /// let five = unsafe { + /// // Deferred initialization: + /// Arc::get_mut_unchecked(&mut five).as_mut_ptr().write(5); + /// + /// five.assume_init() + /// }; + /// + /// assert_eq!(*five, 5) + /// ``` + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "allocator_api", issue = "32838")] + // #[unstable(feature = "new_uninit", issue = "63291")] + #[inline] + pub fn new_uninit_in(alloc: A) -> Arc, A> { + unsafe { + Arc::from_ptr_in( + Arc::allocate_for_layout( + Layout::new::(), + |layout| alloc.allocate(layout), + <*mut u8>::cast, + ), + alloc, + ) + } + } + + /// Constructs a new `Arc` with uninitialized contents, with the memory + /// being filled with `0` bytes, in the provided allocator. + /// + /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage + /// of this method. + /// + /// # Examples + /// + /// ``` + /// #![feature(new_uninit)] + /// #![feature(allocator_api)] + /// + /// use std::sync::Arc; + /// use std::alloc::System; + /// + /// let zero = Arc::::new_zeroed_in(System); + /// let zero = unsafe { zero.assume_init() }; + /// + /// assert_eq!(*zero, 0) + /// ``` + /// + /// [zeroed]: mem::MaybeUninit::zeroed + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "allocator_api", issue = "32838")] + // #[unstable(feature = "new_uninit", issue = "63291")] + #[inline] + pub fn new_zeroed_in(alloc: A) -> Arc, A> { + unsafe { + Arc::from_ptr_in( + Arc::allocate_for_layout( + Layout::new::(), + |layout| alloc.allocate_zeroed(layout), + <*mut u8>::cast, + ), + alloc, + ) + } + } + + /// Constructs a new `Pin>` in the provided allocator. If `T` does not implement `Unpin`, + /// then `data` will be pinned in memory and unable to be moved. + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "allocator_api", issue = "32838")] + #[inline] + pub fn pin_in(data: T, alloc: A) -> Pin> { + unsafe { Pin::new_unchecked(Arc::new_in(data, alloc)) } + } + + /// Constructs a new `Pin>` in the provided allocator, return an error if allocation + /// fails. + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn try_pin_in(data: T, alloc: A) -> Result>, AllocError> { + unsafe { Ok(Pin::new_unchecked(Arc::try_new_in(data, alloc)?)) } + } + + /// Constructs a new `Arc` in the provided allocator, returning an error if allocation fails. + /// + /// # Examples + /// + /// ``` + /// #![feature(allocator_api)] + /// + /// use std::sync::Arc; + /// use std::alloc::System; + /// + /// let five = Arc::try_new_in(5, System)?; + /// # Ok::<(), std::alloc::AllocError>(()) + /// ``` + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + #[inline] + pub fn try_new_in(data: T, alloc: A) -> Result, AllocError> { + // Start the weak pointer count as 1 which is the weak pointer that's + // held by all the strong pointers (kinda), see std/rc.rs for more info + let x = Box::try_new_in( + ArcInner { + strong: atomic::AtomicUsize::new(1), + weak: atomic::AtomicUsize::new(1), + data, + }, + alloc, + )?; + let (ptr, alloc) = Box::into_unique(x); + Ok(unsafe { Self::from_inner_in(ptr.into(), alloc) }) + } + + /// Constructs a new `Arc` with uninitialized contents, in the provided allocator, returning an + /// error if allocation fails. 
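`try_pin_in` in this chunk ships without a doc example; a minimal sketch of its error-propagating shape (nightly, `allocator_api`):

```rust
#![feature(allocator_api)]

use std::alloc::{AllocError, System};
use std::pin::Pin;
use std::sync::Arc;

fn main() -> Result<(), AllocError> {
    // Pin the value into System-allocated memory, or propagate AllocError.
    let five: Pin<Arc<i32, System>> = Arc::try_pin_in(5, System)?;
    assert_eq!(*five, 5);
    Ok(())
}
```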
+ /// + /// # Examples + /// + /// ``` + /// #![feature(new_uninit, allocator_api)] + /// #![feature(get_mut_unchecked)] + /// + /// use std::sync::Arc; + /// use std::alloc::System; + /// + /// let mut five = Arc::::try_new_uninit_in(System)?; + /// + /// let five = unsafe { + /// // Deferred initialization: + /// Arc::get_mut_unchecked(&mut five).as_mut_ptr().write(5); + /// + /// five.assume_init() + /// }; + /// + /// assert_eq!(*five, 5); + /// # Ok::<(), std::alloc::AllocError>(()) + /// ``` + #[unstable(feature = "allocator_api", issue = "32838")] + // #[unstable(feature = "new_uninit", issue = "63291")] + #[inline] + pub fn try_new_uninit_in(alloc: A) -> Result, A>, AllocError> { + unsafe { + Ok(Arc::from_ptr_in( + Arc::try_allocate_for_layout( + Layout::new::(), + |layout| alloc.allocate(layout), + <*mut u8>::cast, + )?, + alloc, + )) + } + } + + /// Constructs a new `Arc` with uninitialized contents, with the memory + /// being filled with `0` bytes, in the provided allocator, returning an error if allocation + /// fails. + /// + /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage + /// of this method. + /// + /// # Examples + /// + /// ``` + /// #![feature(new_uninit, allocator_api)] + /// + /// use std::sync::Arc; + /// use std::alloc::System; + /// + /// let zero = Arc::::try_new_zeroed_in(System)?; + /// let zero = unsafe { zero.assume_init() }; + /// + /// assert_eq!(*zero, 0); + /// # Ok::<(), std::alloc::AllocError>(()) + /// ``` + /// + /// [zeroed]: mem::MaybeUninit::zeroed + #[unstable(feature = "allocator_api", issue = "32838")] + // #[unstable(feature = "new_uninit", issue = "63291")] + #[inline] + pub fn try_new_zeroed_in(alloc: A) -> Result, A>, AllocError> { + unsafe { + Ok(Arc::from_ptr_in( + Arc::try_allocate_for_layout( + Layout::new::(), + |layout| alloc.allocate_zeroed(layout), + <*mut u8>::cast, + )?, + alloc, + )) + } + } /// Returns the inner value, if the `Arc` has exactly one strong reference. /// /// Otherwise, an [`Err`] is returned with the same `Arc` that was @@ -695,9 +959,10 @@ impl Arc { unsafe { let elem = ptr::read(&this.ptr.as_ref().data); + let alloc = ptr::read(&this.alloc); // copy the allocator // Make a weak pointer to clean up the implicit strong-weak reference - let _weak = Weak { ptr: this.ptr }; + let _weak = Weak { ptr: this.ptr, alloc }; mem::forget(this); Ok(elem) @@ -814,9 +1079,11 @@ impl Arc { // in `drop_slow`. Instead of dropping the value behind the pointer, // it is read and eventually returned; `ptr::read` has the same // safety conditions as `ptr::drop_in_place`. - let inner = unsafe { ptr::read(Self::get_mut_unchecked(&mut this)) }; - drop(Weak { ptr: this.ptr }); + let inner = unsafe { ptr::read(Self::get_mut_unchecked(&mut this)) }; + let alloc = unsafe { ptr::read(&this.alloc) }; + + drop(Weak { ptr: this.ptr, alloc }); Some(inner) } @@ -891,7 +1158,83 @@ impl Arc<[T]> { } } -impl Arc> { +impl Arc<[T], A> { + /// Constructs a new atomically reference-counted slice with uninitialized contents in the + /// provided allocator. 
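Both unwrapping paths touched above now move the allocator out with `ptr::read` before crafting the cleanup `Weak`; the external semantics are untouched. Assuming the second hunk is `Arc::into_inner` (stable since 1.70 for the default allocator), a sketch:

```rust
#![feature(allocator_api)]

use std::alloc::System;
use std::sync::Arc;

fn main() {
    let x = Arc::new_in("hello".to_owned(), System);
    let y = Arc::clone(&x);

    drop(y);
    // Now the sole owner: the value (and the allocator) move out.
    assert_eq!(Arc::into_inner(x), Some("hello".to_owned()));
}
```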
+ /// + /// # Examples + /// + /// ``` + /// #![feature(new_uninit)] + /// #![feature(get_mut_unchecked)] + /// #![feature(allocator_api)] + /// + /// use std::sync::Arc; + /// use std::alloc::System; + /// + /// let mut values = Arc::<[u32], _>::new_uninit_slice_in(3, System); + /// + /// let values = unsafe { + /// // Deferred initialization: + /// Arc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1); + /// Arc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2); + /// Arc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3); + /// + /// values.assume_init() + /// }; + /// + /// assert_eq!(*values, [1, 2, 3]) + /// ``` + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "new_uninit", issue = "63291")] + #[inline] + pub fn new_uninit_slice_in(len: usize, alloc: A) -> Arc<[mem::MaybeUninit], A> { + unsafe { Arc::from_ptr_in(Arc::allocate_for_slice_in(len, &alloc), alloc) } + } + + /// Constructs a new atomically reference-counted slice with uninitialized contents, with the memory being + /// filled with `0` bytes, in the provided allocator. + /// + /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and + /// incorrect usage of this method. + /// + /// # Examples + /// + /// ``` + /// #![feature(new_uninit)] + /// #![feature(allocator_api)] + /// + /// use std::sync::Arc; + /// use std::alloc::System; + /// + /// let values = Arc::<[u32], _>::new_zeroed_slice_in(3, System); + /// let values = unsafe { values.assume_init() }; + /// + /// assert_eq!(*values, [0, 0, 0]) + /// ``` + /// + /// [zeroed]: mem::MaybeUninit::zeroed + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "new_uninit", issue = "63291")] + #[inline] + pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Arc<[mem::MaybeUninit], A> { + unsafe { + Arc::from_ptr_in( + Arc::allocate_for_layout( + Layout::array::(len).unwrap(), + |layout| alloc.allocate_zeroed(layout), + |mem| { + ptr::slice_from_raw_parts_mut(mem.cast::(), len) + as *mut ArcInner<[mem::MaybeUninit]> + }, + ), + alloc, + ) + } + } +} + +impl Arc, A> { /// Converts to `Arc`. /// /// # Safety @@ -924,12 +1267,16 @@ impl Arc> { #[unstable(feature = "new_uninit", issue = "63291")] #[must_use = "`self` will be dropped if the result is not used"] #[inline] - pub unsafe fn assume_init(self) -> Arc { - unsafe { Arc::from_inner(mem::ManuallyDrop::new(self).ptr.cast()) } + pub unsafe fn assume_init(self) -> Arc + where + A: Clone, + { + let md_self = mem::ManuallyDrop::new(self); + unsafe { Arc::from_inner_in(md_self.ptr.cast(), md_self.alloc.clone()) } } } -impl Arc<[mem::MaybeUninit]> { +impl Arc<[mem::MaybeUninit], A> { /// Converts to `Arc<[T]>`. /// /// # Safety @@ -965,12 +1312,129 @@ impl Arc<[mem::MaybeUninit]> { #[unstable(feature = "new_uninit", issue = "63291")] #[must_use = "`self` will be dropped if the result is not used"] #[inline] - pub unsafe fn assume_init(self) -> Arc<[T]> { - unsafe { Arc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) } + pub unsafe fn assume_init(self) -> Arc<[T], A> + where + A: Clone, + { + let md_self = mem::ManuallyDrop::new(self); + unsafe { Arc::from_ptr_in(md_self.ptr.as_ptr() as _, md_self.alloc.clone()) } } } impl Arc { + /// Constructs an `Arc` from a raw pointer. + /// + /// The raw pointer must have been previously returned by a call to + /// [`Arc::into_raw`][into_raw] where `U` must have the same size and + /// alignment as `T`. This is trivially true if `U` is `T`. 
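One use the zeroed-slice constructor above is well suited to: all-zero bytes are a valid `AtomicU32`, so zeroed allocation plus `assume_init` cheaply yields a shared counter bank. A sketch (nightly, `allocator_api` + `new_uninit`):

```rust
#![feature(allocator_api, new_uninit)]

use std::alloc::System;
use std::sync::Arc;
use std::sync::atomic::{AtomicU32, Ordering};

fn main() {
    // All-zero bytes are a valid AtomicU32, so assume_init is sound here.
    let counters = Arc::<[AtomicU32], _>::new_zeroed_slice_in(4, System);
    let counters = unsafe { counters.assume_init() };

    counters[2].fetch_add(7, Ordering::Relaxed);
    assert_eq!(counters[2].load(Ordering::Relaxed), 7);
}
```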
+ /// Note that if `U` is not `T` but has the same size and alignment, this is + /// basically like transmuting references of different types. See + /// [`mem::transmute`][transmute] for more information on what + /// restrictions apply in this case. + /// + /// The user of `from_raw` has to make sure a specific value of `T` is only + /// dropped once. + /// + /// This function is unsafe because improper use may lead to memory unsafety, + /// even if the returned `Arc` is never accessed. + /// + /// [into_raw]: Arc::into_raw + /// [transmute]: core::mem::transmute + /// + /// # Examples + /// + /// ``` + /// use std::sync::Arc; + /// + /// let x = Arc::new("hello".to_owned()); + /// let x_ptr = Arc::into_raw(x); + /// + /// unsafe { + /// // Convert back to an `Arc` to prevent leak. + /// let x = Arc::from_raw(x_ptr); + /// assert_eq!(&*x, "hello"); + /// + /// // Further calls to `Arc::from_raw(x_ptr)` would be memory-unsafe. + /// } + /// + /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling! + /// ``` + #[inline] + #[stable(feature = "rc_raw", since = "1.17.0")] + pub unsafe fn from_raw(ptr: *const T) -> Self { + unsafe { Arc::from_raw_in(ptr, Global) } + } + + /// Increments the strong reference count on the `Arc` associated with the + /// provided pointer by one. + /// + /// # Safety + /// + /// The pointer must have been obtained through `Arc::into_raw`, and the + /// associated `Arc` instance must be valid (i.e. the strong count must be at + /// least 1) for the duration of this method. + /// + /// # Examples + /// + /// ``` + /// use std::sync::Arc; + /// + /// let five = Arc::new(5); + /// + /// unsafe { + /// let ptr = Arc::into_raw(five); + /// Arc::increment_strong_count(ptr); + /// + /// // This assertion is deterministic because we haven't shared + /// // the `Arc` between threads. + /// let five = Arc::from_raw(ptr); + /// assert_eq!(2, Arc::strong_count(&five)); + /// } + /// ``` + #[inline] + #[stable(feature = "arc_mutate_strong_count", since = "1.51.0")] + pub unsafe fn increment_strong_count(ptr: *const T) { + unsafe { Arc::increment_strong_count_in(ptr, Global) } + } + + /// Decrements the strong reference count on the `Arc` associated with the + /// provided pointer by one. + /// + /// # Safety + /// + /// The pointer must have been obtained through `Arc::into_raw`, and the + /// associated `Arc` instance must be valid (i.e. the strong count must be at + /// least 1) when invoking this method. This method can be used to release the final + /// `Arc` and backing storage, but **should not** be called after the final `Arc` has been + /// released. + /// + /// # Examples + /// + /// ``` + /// use std::sync::Arc; + /// + /// let five = Arc::new(5); + /// + /// unsafe { + /// let ptr = Arc::into_raw(five); + /// Arc::increment_strong_count(ptr); + /// + /// // Those assertions are deterministic because we haven't shared + /// // the `Arc` between threads. + /// let five = Arc::from_raw(ptr); + /// assert_eq!(2, Arc::strong_count(&five)); + /// Arc::decrement_strong_count(ptr); + /// assert_eq!(1, Arc::strong_count(&five)); + /// } + /// ``` + #[inline] + #[stable(feature = "arc_mutate_strong_count", since = "1.51.0")] + pub unsafe fn decrement_strong_count(ptr: *const T) { + unsafe { Arc::decrement_strong_count_in(ptr, Global) } + } +} + +impl Arc { /// Consumes the `Arc`, returning the wrapped pointer. 
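A usage sketch of the C-style "handle" pattern that the stable raw-pointer functions above enable: each outstanding raw handle owns exactly one strong count, minted with `increment_strong_count` and released with `decrement_strong_count` or `from_raw`.

```
use std::sync::Arc;

fn main() {
    let arc = Arc::new(42u32);
    let raw = Arc::into_raw(Arc::clone(&arc)); // handle #1 owns one count

    unsafe {
        Arc::increment_strong_count(raw); // mint handle #2
        assert_eq!(Arc::strong_count(&arc), 3);

        Arc::decrement_strong_count(raw); // release handle #2
        let handle = Arc::from_raw(raw); // reclaim handle #1 as an `Arc`
        assert_eq!(Arc::strong_count(&handle), 2); // `arc` + `handle`
    }
    assert_eq!(Arc::strong_count(&arc), 1);
}
```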
 ///
 /// To avoid a memory leak the pointer must be converted back to an `Arc` using
@@ -1020,16 +1484,18 @@ impl<T: ?Sized> Arc<T> {
         unsafe { ptr::addr_of_mut!((*ptr).data) }
     }
 
-    /// Constructs an `Arc<T>` from a raw pointer.
+    /// Constructs an `Arc<T, A>` from a raw pointer.
     ///
     /// The raw pointer must have been previously returned by a call to
-    /// [`Arc<U>::into_raw`][into_raw] where `U` must have the same size and
+    /// [`Arc<U, A>::into_raw`][into_raw] where `U` must have the same size and
     /// alignment as `T`. This is trivially true if `U` is `T`.
     /// Note that if `U` is not `T` but has the same size and alignment, this is
     /// basically like transmuting references of different types. See
     /// [`mem::transmute`][transmute] for more information on what
     /// restrictions apply in this case.
     ///
+    /// The raw pointer must point to a block of memory allocated by `alloc`.
+    ///
     /// The user of `from_raw` has to make sure a specific value of `T` is only
     /// dropped once.
     ///
@@ -1042,14 +1508,17 @@ impl<T: ?Sized> Arc<T> {
     /// # Examples
     ///
     /// ```
-    /// use std::sync::Arc;
+    /// #![feature(allocator_api)]
     ///
-    /// let x = Arc::new("hello".to_owned());
+    /// use std::sync::Arc;
+    /// use std::alloc::System;
+    ///
+    /// let x = Arc::new_in("hello".to_owned(), System);
     /// let x_ptr = Arc::into_raw(x);
     ///
     /// unsafe {
     ///     // Convert back to an `Arc` to prevent leak.
-    ///     let x = Arc::from_raw(x_ptr);
+    ///     let x = Arc::from_raw_in(x_ptr, System);
     ///     assert_eq!(&*x, "hello");
     ///
     ///     // Further calls to `Arc::from_raw(x_ptr)` would be memory-unsafe.
@@ -1057,15 +1526,16 @@ impl<T: ?Sized> Arc<T> {
     ///
     /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
     /// ```
-    #[stable(feature = "rc_raw", since = "1.17.0")]
-    pub unsafe fn from_raw(ptr: *const T) -> Self {
+    #[inline]
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
         unsafe {
             let offset = data_offset(ptr);
 
             // Reverse the offset to find the original ArcInner.
             let arc_ptr = ptr.byte_sub(offset) as *mut ArcInner<T>;
 
-            Self::from_ptr(arc_ptr)
+            Self::from_ptr_in(arc_ptr, alloc)
         }
     }
 
@@ -1083,7 +1553,10 @@ impl<T: ?Sized> Arc<T> {
     #[must_use = "this returns a new `Weak` pointer, \
                   without modifying the original `Arc`"]
     #[stable(feature = "arc_weak", since = "1.4.0")]
-    pub fn downgrade(this: &Self) -> Weak<T> {
+    pub fn downgrade(this: &Self) -> Weak<T, A>
+    where
+        A: Clone,
+    {
         // This Relaxed is OK because we're checking the value in the CAS
         // below.
         let mut cur = this.inner().weak.load(Relaxed);
@@ -1110,7 +1583,7 @@ impl<T: ?Sized> Arc<T> {
             Ok(_) => {
                 // Make sure we do not create a dangling Weak
                 debug_assert!(!is_dangling(this.ptr.as_ptr()));
-                return Weak { ptr: this.ptr };
+                return Weak { ptr: this.ptr, alloc: this.alloc.clone() };
             }
             Err(old) => cur = old,
         }
@@ -1181,30 +1654,37 @@ impl<T: ?Sized> Arc<T> {
     ///
     /// The pointer must have been obtained through `Arc::into_raw`, and the
     /// associated `Arc` instance must be valid (i.e. the strong count must be at
-    /// least 1) for the duration of this method.
+    /// least 1) for the duration of this method, and `ptr` must point to a block of memory
+    /// allocated by `alloc`.
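`from_raw_in` above undoes `into_raw` by subtracting `data_offset` from the user-facing pointer to recover the header pointer. A stable-Rust illustration of that arithmetic; `Inner` is a hypothetical stand-in for the private `ArcInner`.

```
// `Inner` mimics the private `ArcInner` layout for a sized payload.
#[repr(C)]
struct Inner {
    strong: usize,
    weak: usize,
    data: u64,
}

fn main() {
    let block = Box::new(Inner { strong: 1, weak: 1, data: 7 });
    let block_ptr: *const Inner = &*block;
    // `into_raw` hands out a pointer to the `data` field...
    let data_ptr: *const u64 = unsafe { std::ptr::addr_of!((*block_ptr).data) };

    // ...so `from_raw_in` must subtract the field offset to get back to
    // the start of the allocation.
    let offset = data_ptr as usize - block_ptr as usize;
    let recovered = (data_ptr as usize - offset) as *const Inner;
    assert_eq!(recovered, block_ptr);
}
```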
/// /// # Examples /// /// ``` - /// use std::sync::Arc; + /// #![feature(allocator_api)] /// - /// let five = Arc::new(5); + /// use std::sync::Arc; + /// use std::alloc::System; + /// + /// let five = Arc::new_in(5, System); /// /// unsafe { /// let ptr = Arc::into_raw(five); - /// Arc::increment_strong_count(ptr); + /// Arc::increment_strong_count_in(ptr, System); /// /// // This assertion is deterministic because we haven't shared /// // the `Arc` between threads. - /// let five = Arc::from_raw(ptr); + /// let five = Arc::from_raw_in(ptr, System); /// assert_eq!(2, Arc::strong_count(&five)); /// } /// ``` #[inline] - #[stable(feature = "arc_mutate_strong_count", since = "1.51.0")] - pub unsafe fn increment_strong_count(ptr: *const T) { + #[unstable(feature = "allocator_api", issue = "32838")] + pub unsafe fn increment_strong_count_in(ptr: *const T, alloc: A) + where + A: Clone, + { // Retain Arc, but don't touch refcount by wrapping in ManuallyDrop - let arc = unsafe { mem::ManuallyDrop::new(Arc::::from_raw(ptr)) }; + let arc = unsafe { mem::ManuallyDrop::new(Arc::from_raw_in(ptr, alloc)) }; // Now increase refcount, but don't drop new refcount either let _arc_clone: mem::ManuallyDrop<_> = arc.clone(); } @@ -1214,35 +1694,39 @@ impl Arc { /// /// # Safety /// - /// The pointer must have been obtained through `Arc::into_raw`, and the + /// The pointer must have been obtained through `Arc::into_raw`, the /// associated `Arc` instance must be valid (i.e. the strong count must be at - /// least 1) when invoking this method. This method can be used to release the final + /// least 1) when invoking this method, and `ptr` must point to a block of memory + /// allocated by `alloc`. This method can be used to release the final /// `Arc` and backing storage, but **should not** be called after the final `Arc` has been /// released. /// /// # Examples /// /// ``` - /// use std::sync::Arc; + /// #![feature(allocator_api)] /// - /// let five = Arc::new(5); + /// use std::sync::Arc; + /// use std::alloc::System; + /// + /// let five = Arc::new_in(5, System); /// /// unsafe { /// let ptr = Arc::into_raw(five); - /// Arc::increment_strong_count(ptr); + /// Arc::increment_strong_count_in(ptr, System); /// /// // Those assertions are deterministic because we haven't shared /// // the `Arc` between threads. - /// let five = Arc::from_raw(ptr); + /// let five = Arc::from_raw_in(ptr, System); /// assert_eq!(2, Arc::strong_count(&five)); - /// Arc::decrement_strong_count(ptr); + /// Arc::decrement_strong_count_in(ptr, System); /// assert_eq!(1, Arc::strong_count(&five)); /// } /// ``` #[inline] - #[stable(feature = "arc_mutate_strong_count", since = "1.51.0")] - pub unsafe fn decrement_strong_count(ptr: *const T) { - unsafe { drop(Arc::from_raw(ptr)) }; + #[unstable(feature = "allocator_api", issue = "32838")] + pub unsafe fn decrement_strong_count_in(ptr: *const T, alloc: A) { + unsafe { drop(Arc::from_raw_in(ptr, alloc)) }; } #[inline] @@ -1263,7 +1747,10 @@ impl Arc { unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)) }; // Drop the weak ref collectively held by all strong references - drop(Weak { ptr: self.ptr }); + // Take a reference to `self.alloc` instead of cloning because 1. it'll + // last long enough, and 2. 
you should be able to drop `Arc`s with + // unclonable allocators + drop(Weak { ptr: self.ptr, alloc: &self.alloc }); } /// Returns `true` if the two `Arc`s point to the same allocation in a vein similar to @@ -1345,25 +1832,28 @@ impl Arc { inner } +} +impl Arc { /// Allocates an `ArcInner` with sufficient space for an unsized inner value. + #[inline] #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner { + unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> *mut ArcInner { // Allocate for the `ArcInner` using the given value. unsafe { - Self::allocate_for_layout( + Arc::allocate_for_layout( Layout::for_value(&*ptr), - |layout| Global.allocate(layout), + |layout| alloc.allocate(layout), |mem| mem.with_metadata_of(ptr as *const ArcInner), ) } } #[cfg(not(no_global_oom_handling))] - fn from_box(src: Box) -> Arc { + fn from_box_in(src: Box) -> Arc { unsafe { let value_size = size_of_val(&*src); - let ptr = Self::allocate_for_ptr(&*src); + let ptr = Self::allocate_for_ptr_in(&*src, Box::allocator(&src)); // Copy value as bytes ptr::copy_nonoverlapping( @@ -1373,10 +1863,11 @@ impl Arc { ); // Free the allocation without dropping its contents - let src = Box::from_raw(Box::into_raw(src) as *mut mem::ManuallyDrop); + let (bptr, alloc) = Box::into_raw_with_allocator(src); + let src = Box::from_raw(bptr as *mut mem::ManuallyDrop); drop(src); - Self::from_ptr(ptr) + Self::from_ptr_in(ptr, alloc) } } } @@ -1389,7 +1880,7 @@ impl Arc<[T]> { Self::allocate_for_layout( Layout::array::(len).unwrap(), |layout| Global.allocate(layout), - |mem| ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[T]>, + |mem| ptr::slice_from_raw_parts_mut(mem.cast::(), len) as *mut ArcInner<[T]>, ) } } @@ -1458,6 +1949,21 @@ impl Arc<[T]> { } } +impl Arc<[T], A> { + /// Allocates an `ArcInner<[T]>` with the given length. + #[inline] + #[cfg(not(no_global_oom_handling))] + unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> *mut ArcInner<[T]> { + unsafe { + Arc::allocate_for_layout( + Layout::array::(len).unwrap(), + |layout| alloc.allocate(layout), + |mem| ptr::slice_from_raw_parts_mut(mem.cast::(), len) as *mut ArcInner<[T]>, + ) + } + } +} + /// Specialization trait used for `From<&[T]>`. #[cfg(not(no_global_oom_handling))] trait ArcFromSlice { @@ -1481,7 +1987,7 @@ impl ArcFromSlice for Arc<[T]> { } #[stable(feature = "rust1", since = "1.0.0")] -impl Clone for Arc { +impl Clone for Arc { /// Makes a clone of the `Arc` pointer. /// /// This creates another pointer to the same allocation, increasing the @@ -1497,7 +2003,7 @@ impl Clone for Arc { /// let _ = Arc::clone(&five); /// ``` #[inline] - fn clone(&self) -> Arc { + fn clone(&self) -> Arc { // Using a relaxed ordering is alright here, as knowledge of the // original reference prevents other threads from erroneously deleting // the object. @@ -1530,12 +2036,12 @@ impl Clone for Arc { abort(); } - unsafe { Self::from_inner(self.ptr) } + unsafe { Self::from_inner_in(self.ptr, self.alloc.clone()) } } } #[stable(feature = "rust1", since = "1.0.0")] -impl Deref for Arc { +impl Deref for Arc { type Target = T; #[inline] @@ -1547,7 +2053,7 @@ impl Deref for Arc { #[unstable(feature = "receiver_trait", issue = "none")] impl Receiver for Arc {} -impl Arc { +impl Arc { /// Makes a mutable reference into the given `Arc`. 
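The `drop_slow` hunk above can store `&self.alloc` in the temporary `Weak` because allocators compose by reference: the standard library provides a blanket `impl Allocator for &A where A: Allocator`. A minimal nightly sketch of that fact (`boxed_in` is a hypothetical helper):

```
#![feature(allocator_api)]

use std::alloc::{Allocator, System};

// Works for any allocator, owned or borrowed.
fn boxed_in<A: Allocator>(alloc: A) -> Box<u8, A> {
    Box::new_in(42, alloc)
}

fn main() {
    let alloc = System;
    let by_ref = boxed_in(&alloc); // `&System` is itself an allocator
    let by_val = boxed_in(alloc); // and so is the owned value
    assert_eq!(*by_ref, *by_val);
}
```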
/// /// If there are other `Arc` pointers to the same allocation, then `make_mut` will @@ -1613,7 +2119,7 @@ impl Arc { if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() { // Another strong pointer exists, so we must clone. // Pre-allocate memory to allow writing the cloned value directly. - let mut arc = Self::new_uninit(); + let mut arc = Self::new_uninit_in(this.alloc.clone()); unsafe { let data = Arc::get_mut_unchecked(&mut arc); (**this).write_clone_into_raw(data.as_mut_ptr()); @@ -1634,10 +2140,10 @@ impl Arc { // Materialize our own implicit weak pointer, so that it can clean // up the ArcInner as needed. - let _weak = Weak { ptr: this.ptr }; + let _weak = Weak { ptr: this.ptr, alloc: this.alloc.clone() }; // Can just steal the data, all that's left is Weaks - let mut arc = Self::new_uninit(); + let mut arc = Self::new_uninit_in(this.alloc.clone()); unsafe { let data = Arc::get_mut_unchecked(&mut arc); data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1); @@ -1690,7 +2196,7 @@ impl Arc { } } -impl Arc { +impl Arc { /// Returns a mutable reference into the given `Arc`, if there are /// no other `Arc` or [`Weak`] pointers to the same allocation. /// @@ -1828,7 +2334,7 @@ impl Arc { } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc { +unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Arc { /// Drops the `Arc`. /// /// This will decrement the strong reference count. If the strong reference @@ -1899,7 +2405,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc { } } -impl Arc { +impl Arc { /// Attempt to downcast the `Arc` to a concrete type. /// /// # Examples @@ -1920,15 +2426,16 @@ impl Arc { /// ``` #[inline] #[stable(feature = "rc_downcast", since = "1.29.0")] - pub fn downcast(self) -> Result, Self> + pub fn downcast(self) -> Result, Self> where T: Any + Send + Sync, { if (*self).is::() { unsafe { let ptr = self.ptr.cast::>(); + let alloc = self.alloc.clone(); mem::forget(self); - Ok(Arc::from_inner(ptr)) + Ok(Arc::from_inner_in(ptr, alloc)) } } else { Err(self) @@ -1963,14 +2470,15 @@ impl Arc { /// [`downcast`]: Self::downcast #[inline] #[unstable(feature = "downcast_unchecked", issue = "90850")] - pub unsafe fn downcast_unchecked(self) -> Arc + pub unsafe fn downcast_unchecked(self) -> Arc where T: Any + Send + Sync, { unsafe { let ptr = self.ptr.cast::>(); + let alloc = self.alloc.clone(); mem::forget(self); - Arc::from_inner(ptr) + Arc::from_inner_in(ptr, alloc) } } } @@ -1989,11 +2497,43 @@ impl Weak { /// let empty: Weak = Weak::new(); /// assert!(empty.upgrade().is_none()); /// ``` + #[inline] #[stable(feature = "downgraded_weak", since = "1.10.0")] #[rustc_const_unstable(feature = "const_weak_new", issue = "95091", reason = "recently added")] #[must_use] pub const fn new() -> Weak { - Weak { ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::>(usize::MAX)) } } + Weak { + ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::>(usize::MAX)) }, + alloc: Global, + } + } +} + +impl Weak { + /// Constructs a new `Weak`, without allocating any memory, technically in the provided + /// allocator. + /// Calling [`upgrade`] on the return value always gives [`None`]. 
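Stepping back to the `make_mut` hunk above: a behavior sketch of its clone-on-write contract, using only the stable `Global` API. A unique `Arc` is mutated in place; a shared one is cloned into a fresh allocation first.

```
use std::sync::Arc;

fn main() {
    let mut data = Arc::new(5u32);
    *Arc::make_mut(&mut data) += 1; // unique: mutates in place

    let other = Arc::clone(&data);
    *Arc::make_mut(&mut data) += 1; // shared: clones `data` first

    assert_eq!(*data, 7);
    assert_eq!(*other, 6);
    assert!(!Arc::ptr_eq(&data, &other)); // now two separate allocations
}
```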
+ /// + /// [`upgrade`]: Weak::upgrade + /// + /// # Examples + /// + /// ``` + /// #![feature(allocator_api)] + /// + /// use std::sync::Weak; + /// use std::alloc::System; + /// + /// let empty: Weak = Weak::new_in(System); + /// assert!(empty.upgrade().is_none()); + /// ``` + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn new_in(alloc: A) -> Weak { + Weak { + ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::>(usize::MAX)) }, + alloc, + } } } @@ -2005,6 +2545,55 @@ struct WeakInner<'a> { } impl Weak { + /// Converts a raw pointer previously created by [`into_raw`] back into `Weak`. + /// + /// This can be used to safely get a strong reference (by calling [`upgrade`] + /// later) or to deallocate the weak count by dropping the `Weak`. + /// + /// It takes ownership of one weak reference (with the exception of pointers created by [`new`], + /// as these don't own anything; the method still works on them). + /// + /// # Safety + /// + /// The pointer must have originated from the [`into_raw`] and must still own its potential + /// weak reference. + /// + /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this + /// takes ownership of one weak reference currently represented as a raw pointer (the weak + /// count is not modified by this operation) and therefore it must be paired with a previous + /// call to [`into_raw`]. + /// # Examples + /// + /// ``` + /// use std::sync::{Arc, Weak}; + /// + /// let strong = Arc::new("hello".to_owned()); + /// + /// let raw_1 = Arc::downgrade(&strong).into_raw(); + /// let raw_2 = Arc::downgrade(&strong).into_raw(); + /// + /// assert_eq!(2, Arc::weak_count(&strong)); + /// + /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap()); + /// assert_eq!(1, Arc::weak_count(&strong)); + /// + /// drop(strong); + /// + /// // Decrement the last weak count. + /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none()); + /// ``` + /// + /// [`new`]: Weak::new + /// [`into_raw`]: Weak::into_raw + /// [`upgrade`]: Weak::upgrade + #[inline] + #[stable(feature = "weak_into_raw", since = "1.45.0")] + pub unsafe fn from_raw(ptr: *const T) -> Self { + unsafe { Weak::from_raw_in(ptr, Global) } + } +} + +impl Weak { /// Returns a raw pointer to the object `T` pointed to by this `Weak`. /// /// The pointer is valid only if there are some strong references. The pointer may be dangling, @@ -2082,7 +2671,8 @@ impl Weak { result } - /// Converts a raw pointer previously created by [`into_raw`] back into `Weak`. + /// Converts a raw pointer previously created by [`into_raw`] back into `Weak` in the provided + /// allocator. /// /// This can be used to safely get a strong reference (by calling [`upgrade`] /// later) or to deallocate the weak count by dropping the `Weak`. @@ -2093,7 +2683,7 @@ impl Weak { /// # Safety /// /// The pointer must have originated from the [`into_raw`] and must still own its potential - /// weak reference. + /// weak reference, and must point to a block of memory allocated by `alloc`. /// /// It is allowed for the strong count to be 0 at the time of calling this. 
Nevertheless, this /// takes ownership of one weak reference currently represented as a raw pointer (the weak @@ -2123,8 +2713,9 @@ impl Weak { /// [`new`]: Weak::new /// [`into_raw`]: Weak::into_raw /// [`upgrade`]: Weak::upgrade - #[stable(feature = "weak_into_raw", since = "1.45.0")] - pub unsafe fn from_raw(ptr: *const T) -> Self { + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self { // See Weak::as_ptr for context on how the input pointer is derived. let ptr = if is_dangling(ptr as *mut T) { @@ -2140,11 +2731,11 @@ impl Weak { }; // SAFETY: we now have recovered the original Weak pointer, so can create the Weak. - Weak { ptr: unsafe { NonNull::new_unchecked(ptr) } } + Weak { ptr: unsafe { NonNull::new_unchecked(ptr) }, alloc } } } -impl Weak { +impl Weak { /// Attempts to upgrade the `Weak` pointer to an [`Arc`], delaying /// dropping of the inner value if successful. /// @@ -2171,28 +2762,35 @@ impl Weak { #[must_use = "this returns a new `Arc`, \ without modifying the original weak pointer"] #[stable(feature = "arc_weak", since = "1.4.0")] - pub fn upgrade(&self) -> Option> { + pub fn upgrade(&self) -> Option> + where + A: Clone, + { + #[inline] + fn checked_increment(n: usize) -> Option { + // Any write of 0 we can observe leaves the field in permanently zero state. + if n == 0 { + return None; + } + // See comments in `Arc::clone` for why we do this (for `mem::forget`). + assert!(n <= MAX_REFCOUNT, "{}", INTERNAL_OVERFLOW_ERROR); + Some(n + 1) + } + // We use a CAS loop to increment the strong count instead of a // fetch_add as this function should never take the reference count // from zero to one. - self.inner()? - .strong - // Relaxed is fine for the failure case because we don't have any expectations about the new state. - // Acquire is necessary for the success case to synchronise with `Arc::new_cyclic`, when the inner - // value can be initialized after `Weak` references have already been created. In that case, we - // expect to observe the fully initialized value. - .fetch_update(Acquire, Relaxed, |n| { - // Any write of 0 we can observe leaves the field in permanently zero state. - if n == 0 { - return None; - } - // See comments in `Arc::clone` for why we do this (for `mem::forget`). - assert!(n <= MAX_REFCOUNT, "{}", INTERNAL_OVERFLOW_ERROR); - Some(n + 1) - }) - .ok() - // null checked above - .map(|_| unsafe { Arc::from_inner(self.ptr) }) + // + // Relaxed is fine for the failure case because we don't have any expectations about the new state. + // Acquire is necessary for the success case to synchronise with `Arc::new_cyclic`, when the inner + // value can be initialized after `Weak` references have already been created. In that case, we + // expect to observe the fully initialized value. + if self.inner()?.strong.fetch_update(Acquire, Relaxed, checked_increment).is_ok() { + // SAFETY: pointer is not null, verified in checked_increment + unsafe { Some(Arc::from_inner_in(self.ptr, self.alloc.clone())) } + } else { + None + } } /// Gets the number of strong (`Arc`) pointers pointing to this allocation. 
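The `checked_increment` closure factored out of `upgrade` above is the heart of weak-to-strong promotion: a CAS loop that refuses to move the strong count off zero, so a dead allocation can never be resurrected. A standalone illustration (the real code also asserts a `MAX_REFCOUNT` upper bound; `checked_add` stands in for that here):

```
use std::sync::atomic::{AtomicUsize, Ordering::{Acquire, Relaxed}};

fn try_retain(strong: &AtomicUsize) -> bool {
    strong
        .fetch_update(Acquire, Relaxed, |n| if n == 0 { None } else { n.checked_add(1) })
        .is_ok()
}

fn main() {
    let alive = AtomicUsize::new(2);
    assert!(try_retain(&alive));
    assert_eq!(alive.load(Relaxed), 3);

    let dead = AtomicUsize::new(0);
    assert!(!try_retain(&dead)); // once zero, always zero
    assert_eq!(dead.load(Relaxed), 0);
}
```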
@@ -2218,22 +2816,22 @@ impl Weak { #[must_use] #[stable(feature = "weak_counts", since = "1.41.0")] pub fn weak_count(&self) -> usize { - self.inner() - .map(|inner| { - let weak = inner.weak.load(Acquire); - let strong = inner.strong.load(Acquire); - if strong == 0 { - 0 - } else { - // Since we observed that there was at least one strong pointer - // after reading the weak count, we know that the implicit weak - // reference (present whenever any strong references are alive) - // was still around when we observed the weak count, and can - // therefore safely subtract it. - weak - 1 - } - }) - .unwrap_or(0) + if let Some(inner) = self.inner() { + let weak = inner.weak.load(Acquire); + let strong = inner.strong.load(Acquire); + if strong == 0 { + 0 + } else { + // Since we observed that there was at least one strong pointer + // after reading the weak count, we know that the implicit weak + // reference (present whenever any strong references are alive) + // was still around when we observed the weak count, and can + // therefore safely subtract it. + weak - 1 + } + } else { + 0 + } } /// Returns `None` when the pointer is dangling and there is no allocated `ArcInner`, @@ -2303,7 +2901,7 @@ impl Weak { } #[stable(feature = "arc_weak", since = "1.4.0")] -impl Clone for Weak { +impl Clone for Weak { /// Makes a clone of the `Weak` pointer that points to the same allocation. /// /// # Examples @@ -2316,11 +2914,11 @@ impl Clone for Weak { /// let _ = Weak::clone(&weak_five); /// ``` #[inline] - fn clone(&self) -> Weak { + fn clone(&self) -> Weak { let inner = if let Some(inner) = self.inner() { inner } else { - return Weak { ptr: self.ptr }; + return Weak { ptr: self.ptr, alloc: self.alloc.clone() }; }; // See comments in Arc::clone() for why this is relaxed. This can use a // fetch_add (ignoring the lock) because the weak count is only locked @@ -2333,7 +2931,7 @@ impl Clone for Weak { abort(); } - Weak { ptr: self.ptr } + Weak { ptr: self.ptr, alloc: self.alloc.clone() } } } @@ -2359,7 +2957,7 @@ impl Default for Weak { } #[stable(feature = "arc_weak", since = "1.4.0")] -unsafe impl<#[may_dangle] T: ?Sized> Drop for Weak { +unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak { /// Drops the `Weak` pointer. /// /// # Examples @@ -2397,25 +2995,27 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Weak { if inner.weak.fetch_sub(1, Release) == 1 { acquire!(inner.weak); - unsafe { Global.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())) } + unsafe { + self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())) + } } } } #[stable(feature = "rust1", since = "1.0.0")] -trait ArcEqIdent { - fn eq(&self, other: &Arc) -> bool; - fn ne(&self, other: &Arc) -> bool; +trait ArcEqIdent { + fn eq(&self, other: &Arc) -> bool; + fn ne(&self, other: &Arc) -> bool; } #[stable(feature = "rust1", since = "1.0.0")] -impl ArcEqIdent for Arc { +impl ArcEqIdent for Arc { #[inline] - default fn eq(&self, other: &Arc) -> bool { + default fn eq(&self, other: &Arc) -> bool { **self == **other } #[inline] - default fn ne(&self, other: &Arc) -> bool { + default fn ne(&self, other: &Arc) -> bool { **self != **other } } @@ -2428,20 +3028,20 @@ impl ArcEqIdent for Arc { /// /// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive. 
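A behavior sketch of why the `ptr_eq` shortcut in the specialization below is only sound for `T: Eq`: with a merely `PartialEq` type such as `f64`, equality can be irreflexive, so pointer identity must not short-circuit the value comparison.

```
use std::sync::Arc;

fn main() {
    let a = Arc::new(vec![1, 2, 3]);
    let b = Arc::clone(&a);
    // `Vec<i32>: Eq`, so this comparison may be answered by pointer
    // identity alone, without touching the elements.
    assert!(a == b);

    let x = Arc::new(f64::NAN);
    let y = Arc::clone(&x);
    // Same allocation, yet NaN != NaN: the values must be compared.
    assert!(x != y);
}
```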
#[stable(feature = "rust1", since = "1.0.0")] -impl ArcEqIdent for Arc { +impl ArcEqIdent for Arc { #[inline] - fn eq(&self, other: &Arc) -> bool { + fn eq(&self, other: &Arc) -> bool { Arc::ptr_eq(self, other) || **self == **other } #[inline] - fn ne(&self, other: &Arc) -> bool { + fn ne(&self, other: &Arc) -> bool { !Arc::ptr_eq(self, other) && **self != **other } } #[stable(feature = "rust1", since = "1.0.0")] -impl PartialEq for Arc { +impl PartialEq for Arc { /// Equality for two `Arc`s. /// /// Two `Arc`s are equal if their inner values are equal, even if they are @@ -2460,7 +3060,7 @@ impl PartialEq for Arc { /// assert!(five == Arc::new(5)); /// ``` #[inline] - fn eq(&self, other: &Arc) -> bool { + fn eq(&self, other: &Arc) -> bool { ArcEqIdent::eq(self, other) } @@ -2481,13 +3081,13 @@ impl PartialEq for Arc { /// assert!(five != Arc::new(6)); /// ``` #[inline] - fn ne(&self, other: &Arc) -> bool { + fn ne(&self, other: &Arc) -> bool { ArcEqIdent::ne(self, other) } } #[stable(feature = "rust1", since = "1.0.0")] -impl PartialOrd for Arc { +impl PartialOrd for Arc { /// Partial comparison for two `Arc`s. /// /// The two are compared by calling `partial_cmp()` on their inner values. @@ -2502,7 +3102,7 @@ impl PartialOrd for Arc { /// /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Arc::new(6))); /// ``` - fn partial_cmp(&self, other: &Arc) -> Option { + fn partial_cmp(&self, other: &Arc) -> Option { (**self).partial_cmp(&**other) } @@ -2519,7 +3119,7 @@ impl PartialOrd for Arc { /// /// assert!(five < Arc::new(6)); /// ``` - fn lt(&self, other: &Arc) -> bool { + fn lt(&self, other: &Arc) -> bool { *(*self) < *(*other) } @@ -2536,7 +3136,7 @@ impl PartialOrd for Arc { /// /// assert!(five <= Arc::new(5)); /// ``` - fn le(&self, other: &Arc) -> bool { + fn le(&self, other: &Arc) -> bool { *(*self) <= *(*other) } @@ -2553,7 +3153,7 @@ impl PartialOrd for Arc { /// /// assert!(five > Arc::new(4)); /// ``` - fn gt(&self, other: &Arc) -> bool { + fn gt(&self, other: &Arc) -> bool { *(*self) > *(*other) } @@ -2570,12 +3170,12 @@ impl PartialOrd for Arc { /// /// assert!(five >= Arc::new(5)); /// ``` - fn ge(&self, other: &Arc) -> bool { + fn ge(&self, other: &Arc) -> bool { *(*self) >= *(*other) } } #[stable(feature = "rust1", since = "1.0.0")] -impl Ord for Arc { +impl Ord for Arc { /// Comparison for two `Arc`s. /// /// The two are compared by calling `cmp()` on their inner values. 
@@ -2590,29 +3190,29 @@ impl Ord for Arc { /// /// assert_eq!(Ordering::Less, five.cmp(&Arc::new(6))); /// ``` - fn cmp(&self, other: &Arc) -> Ordering { + fn cmp(&self, other: &Arc) -> Ordering { (**self).cmp(&**other) } } #[stable(feature = "rust1", since = "1.0.0")] -impl Eq for Arc {} +impl Eq for Arc {} #[stable(feature = "rust1", since = "1.0.0")] -impl fmt::Display for Arc { +impl fmt::Display for Arc { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(&**self, f) } } #[stable(feature = "rust1", since = "1.0.0")] -impl fmt::Debug for Arc { +impl fmt::Debug for Arc { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(&**self, f) } } #[stable(feature = "rust1", since = "1.0.0")] -impl fmt::Pointer for Arc { +impl fmt::Pointer for Arc { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Pointer::fmt(&(&**self as *const T), f) } @@ -2637,7 +3237,7 @@ impl Default for Arc { } #[stable(feature = "rust1", since = "1.0.0")] -impl Hash for Arc { +impl Hash for Arc { fn hash(&self, state: &mut H) { (**self).hash(state) } @@ -2724,7 +3324,7 @@ impl From for Arc { #[cfg(not(no_global_oom_handling))] #[stable(feature = "shared_from_slice", since = "1.21.0")] -impl From> for Arc { +impl From> for Arc { /// Move a boxed object to a new, reference-counted allocation. /// /// # Example @@ -2736,14 +3336,14 @@ impl From> for Arc { /// assert_eq!("eggplant", &shared[..]); /// ``` #[inline] - fn from(v: Box) -> Arc { - Arc::from_box(v) + fn from(v: Box) -> Arc { + Arc::from_box_in(v) } } #[cfg(not(no_global_oom_handling))] #[stable(feature = "shared_from_slice", since = "1.21.0")] -impl From> for Arc<[T]> { +impl From> for Arc<[T], A> { /// Allocate a reference-counted slice and move `v`'s items into it. 
     ///
     /// # Example
@@ -2755,12 +3355,18 @@ impl<T> From<Vec<T>> for Arc<[T]> {
     /// assert_eq!(&[1, 2, 3], &shared[..]);
     /// ```
     #[inline]
-    fn from(mut v: Vec<T>) -> Arc<[T]> {
+    fn from(v: Vec<T, A>) -> Arc<[T], A> {
         unsafe {
-            let rc = Arc::copy_from_slice(&v);
-            // Allow the Vec to free its memory, but not destroy its contents
-            v.set_len(0);
-            rc
+            let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc();
+
+            let rc_ptr = Self::allocate_for_slice_in(len, &alloc);
+            ptr::copy_nonoverlapping(vec_ptr, &mut (*rc_ptr).data as *mut [T] as *mut T, len);
+
+            // Create a `Vec` with length 0, to deallocate the buffer
+            // without dropping its contents or the allocator
+            let _ = Vec::from_raw_parts_in(vec_ptr, 0, cap, &alloc);
+
+            Self::from_ptr_in(rc_ptr, alloc)
         }
     }
 }
@@ -2812,12 +3418,13 @@ impl From<Arc<str>> for Arc<[u8]> {
 }
 
 #[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
-impl<T, const N: usize> TryFrom<Arc<[T]>> for Arc<[T; N]> {
-    type Error = Arc<[T]>;
+impl<T, A: Allocator + Clone, const N: usize> TryFrom<Arc<[T], A>> for Arc<[T; N], A> {
+    type Error = Arc<[T], A>;
 
-    fn try_from(boxed_slice: Arc<[T]>) -> Result<Self, Self::Error> {
+    fn try_from(boxed_slice: Arc<[T], A>) -> Result<Self, Self::Error> {
         if boxed_slice.len() == N {
-            Ok(unsafe { Arc::from_raw(Arc::into_raw(boxed_slice) as *mut [T; N]) })
+            let alloc = boxed_slice.alloc.clone();
+            Ok(unsafe { Arc::from_raw_in(Arc::into_raw(boxed_slice) as *mut [T; N], alloc) })
         } else {
             Err(boxed_slice)
         }
@@ -2910,21 +3517,21 @@ impl<T, I: iter::TrustedLen<Item = T>> ToArcSlice<T> for I {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized> borrow::Borrow<T> for Arc<T> {
+impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for Arc<T, A> {
     fn borrow(&self) -> &T {
         &**self
     }
 }
 
 #[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
-impl<T: ?Sized> AsRef<T> for Arc<T> {
+impl<T: ?Sized, A: Allocator> AsRef<T> for Arc<T, A> {
     fn as_ref(&self) -> &T {
         &**self
     }
 }
 
 #[stable(feature = "pin", since = "1.33.0")]
-impl<T: ?Sized> Unpin for Arc<T> {}
+impl<T: ?Sized, A: Allocator> Unpin for Arc<T, A> {}
 
 /// Get the offset within an `ArcInner` for the payload behind a pointer.
 ///
diff --git a/src/etc/natvis/liballoc.natvis b/src/etc/natvis/liballoc.natvis
index c4ad98ec1d3..00c17d83322 100644
--- a/src/etc/natvis/liballoc.natvis
+++ b/src/etc/natvis/liballoc.natvis
@@ -66,7 +66,10 @@
      dyn pointees.
 
      Rc<[T]> and Arc<[T]> are handled separately altogether so we can actually show
-     the slice values.
+     the slice values. These visualizers have a second wildcard `foo<slice2$<*>, *>`,
+     which accounts for the allocator parameter. The other visualizers don't need it:
+     their single inner `*` already swallows every type parameter, allocator included,
+     whereas the slice visualizers spell out part of the parameter list and so need the
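A usage sketch for the `TryFrom` impl above (stable since 1.43 in the `Global` case): a slice `Arc` converts to a fixed-size array `Arc` exactly when the lengths match, and the original is handed back in the error case.

```
use std::sync::Arc;

fn main() {
    let slice: Arc<[u8]> = Arc::from(vec![1, 2, 3, 4]);

    let arr: Result<Arc<[u8; 4]>, _> = Arc::clone(&slice).try_into();
    assert!(arr.is_ok()); // lengths match: no copy, same allocation

    let wrong: Result<Arc<[u8; 3]>, _> = slice.try_into();
    assert!(wrong.is_err()); // length 4 != 3; the slice `Arc` is returned
}
```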
--> @@ -84,7 +87,7 @@ - + {{ len={ptr.pointer.length} }} ptr.pointer.length @@ -114,7 +117,7 @@ - + {{ len={ptr.pointer.length} }} ptr.pointer.length @@ -143,7 +146,7 @@ - + {{ len={ptr.pointer.length} }} ptr.pointer.length @@ -172,7 +175,7 @@ - + {{ len={ptr.pointer.length} }} ptr.pointer.length diff --git a/tests/debuginfo/rc_arc.rs b/tests/debuginfo/rc_arc.rs index 5d5492d7217..1d07bd76eab 100644 --- a/tests/debuginfo/rc_arc.rs +++ b/tests/debuginfo/rc_arc.rs @@ -27,37 +27,37 @@ // cdb-command:g // cdb-command:dx rc,d -// cdb-check:rc,d : 111 [Type: alloc::rc::Rc] +// cdb-check:rc,d : 111 [Type: alloc::rc::Rc] // cdb-check: [Reference count] : 11 [Type: core::cell::Cell] // cdb-check: [Weak reference count] : 2 [Type: core::cell::Cell] // cdb-command:dx weak_rc,d -// cdb-check:weak_rc,d : 111 [Type: alloc::rc::Weak] +// cdb-check:weak_rc,d : 111 [Type: alloc::rc::Weak] // cdb-check: [Reference count] : 11 [Type: core::cell::Cell] // cdb-check: [Weak reference count] : 2 [Type: core::cell::Cell] // cdb-command:dx arc,d -// cdb-check:arc,d : 222 [Type: alloc::sync::Arc] +// cdb-check:arc,d : 222 [Type: alloc::sync::Arc] // cdb-check: [Reference count] : 21 [Type: core::sync::atomic::AtomicUsize] // cdb-check: [Weak reference count] : 2 [Type: core::sync::atomic::AtomicUsize] // cdb-command:dx weak_arc,d -// cdb-check:weak_arc,d : 222 [Type: alloc::sync::Weak] +// cdb-check:weak_arc,d : 222 [Type: alloc::sync::Weak] // cdb-check: [Reference count] : 21 [Type: core::sync::atomic::AtomicUsize] // cdb-check: [Weak reference count] : 2 [Type: core::sync::atomic::AtomicUsize] // cdb-command:dx dyn_rc,d -// cdb-check:dyn_rc,d [Type: alloc::rc::Rc >] +// cdb-check:dyn_rc,d [Type: alloc::rc::Rc,alloc::alloc::Global>] // cdb-check: [Reference count] : 31 [Type: core::cell::Cell] // cdb-check: [Weak reference count] : 2 [Type: core::cell::Cell] // cdb-command:dx dyn_rc_weak,d -// cdb-check:dyn_rc_weak,d [Type: alloc::rc::Weak >] +// cdb-check:dyn_rc_weak,d [Type: alloc::rc::Weak,alloc::alloc::Global>] // cdb-check: [Reference count] : 31 [Type: core::cell::Cell] // cdb-check: [Weak reference count] : 2 [Type: core::cell::Cell] // cdb-command:dx slice_rc,d -// cdb-check:slice_rc,d : { len=3 } [Type: alloc::rc::Rc >] +// cdb-check:slice_rc,d : { len=3 } [Type: alloc::rc::Rc,alloc::alloc::Global>] // cdb-check: [Length] : 3 [Type: [...]] // cdb-check: [Reference count] : 41 [Type: core::cell::Cell] // cdb-check: [Weak reference count] : 2 [Type: core::cell::Cell] @@ -66,7 +66,7 @@ // cdb-check: [2] : 3 [Type: u32] // cdb-command:dx slice_rc_weak,d -// cdb-check:slice_rc_weak,d : { len=3 } [Type: alloc::rc::Weak >] +// cdb-check:slice_rc_weak,d : { len=3 } [Type: alloc::rc::Weak,alloc::alloc::Global>] // cdb-check: [Length] : 3 [Type: [...]] // cdb-check: [Reference count] : 41 [Type: core::cell::Cell] // cdb-check: [Weak reference count] : 2 [Type: core::cell::Cell] @@ -75,17 +75,17 @@ // cdb-check: [2] : 3 [Type: u32] // cdb-command:dx dyn_arc,d -// cdb-check:dyn_arc,d [Type: alloc::sync::Arc >] +// cdb-check:dyn_arc,d [Type: alloc::sync::Arc,alloc::alloc::Global>] // cdb-check: [Reference count] : 51 [Type: core::sync::atomic::AtomicUsize] // cdb-check: [Weak reference count] : 2 [Type: core::sync::atomic::AtomicUsize] // cdb-command:dx dyn_arc_weak,d -// cdb-check:dyn_arc_weak,d [Type: alloc::sync::Weak >] +// cdb-check:dyn_arc_weak,d [Type: alloc::sync::Weak,alloc::alloc::Global>] // cdb-check: [Reference count] : 51 [Type: core::sync::atomic::AtomicUsize] // cdb-check: [Weak reference count] : 2 
[Type: core::sync::atomic::AtomicUsize] // cdb-command:dx slice_arc,d -// cdb-check:slice_arc,d : { len=3 } [Type: alloc::sync::Arc >] +// cdb-check:slice_arc,d : { len=3 } [Type: alloc::sync::Arc,alloc::alloc::Global>] // cdb-check: [Length] : 3 [Type: [...]] // cdb-check: [Reference count] : 61 [Type: core::sync::atomic::AtomicUsize] // cdb-check: [Weak reference count] : 2 [Type: core::sync::atomic::AtomicUsize] @@ -94,7 +94,7 @@ // cdb-check: [2] : 6 [Type: u32] // cdb-command:dx slice_arc_weak,d -// cdb-check:slice_arc_weak,d : { len=3 } [Type: alloc::sync::Weak >] +// cdb-check:slice_arc_weak,d : { len=3 } [Type: alloc::sync::Weak,alloc::alloc::Global>] // cdb-check: [Length] : 3 [Type: [...]] // cdb-check: [Reference count] : 61 [Type: core::sync::atomic::AtomicUsize] // cdb-check: [Weak reference count] : 2 [Type: core::sync::atomic::AtomicUsize] diff --git a/tests/debuginfo/thread.rs b/tests/debuginfo/thread.rs index e7e83c7aacd..5516f4fec3e 100644 --- a/tests/debuginfo/thread.rs +++ b/tests/debuginfo/thread.rs @@ -14,7 +14,7 @@ // // cdb-command:dx t,d // cdb-check:t,d : [...] [Type: std::thread::Thread *] -// cdb-check:[...] inner [...][Type: core::pin::Pin >] +// cdb-check:[...] inner [...][Type: core::pin::Pin >] use std::thread;
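Taken together, a minimal nightly sketch of the surface this patch adds, assuming the `Rc` side mirrors the `Arc` bounds shown here. These are the kinds of values the cdb checks above display, now with the allocator parameter as part of the type name (e.g. `alloc::sync::Arc<u32,std::alloc::System>`).

```
#![feature(allocator_api)]

use std::alloc::System;
use std::rc::Rc;
use std::sync::Arc;

fn main() {
    // Rc in a custom allocator: downgrade/upgrade work as with Global
    // (`downgrade`/`upgrade` need `A: Clone`, which `System` satisfies).
    let rc = Rc::new_in(111u32, System);
    let weak = Rc::downgrade(&rc);
    assert_eq!(*weak.upgrade().unwrap(), 111);

    // Arc in a custom allocator: raw-pointer round trip via `from_raw_in`.
    let arc = Arc::new_in(222u32, System);
    let raw = Arc::into_raw(arc);
    // SAFETY: `raw` came from `into_raw` on an `Arc` allocated by `System`.
    let arc = unsafe { Arc::from_raw_in(raw, System) };
    assert_eq!(*arc, 222);
}
```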