Mirror of https://github.com/rust-lang/rust.git, synced 2024-11-25 08:13:41 +00:00
Generalize {Rc,Arc}::make_mut() to unsized types.
This requires introducing a new internal type `RcUninit` (and `ArcUninit`), which can own an `RcBox<T>` without requiring it to be initialized, sized, or a slice. This is similar to `UniqueRc`, but `UniqueRc` doesn't support the allocator parameter, and there is no `UniqueArc`.
parent a9a4830d25
commit 88c3db57e4
library/alloc/src/rc.rs
@@ -1749,7 +1749,8 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
     }
 }
 
-impl<T: Clone, A: Allocator + Clone> Rc<T, A> {
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Rc<T, A> {
     /// Makes a mutable reference into the given `Rc`.
     ///
     /// If there are other `Rc` pointers to the same allocation, then `make_mut` will
@@ -1800,31 +1801,52 @@ impl<T: Clone, A: Allocator + Clone> Rc<T, A> {
     /// assert!(76 == *data);
     /// assert!(weak.upgrade().is_none());
     /// ```
-    #[cfg(not(no_global_oom_handling))]
     #[inline]
     #[stable(feature = "rc_unique", since = "1.4.0")]
     pub fn make_mut(this: &mut Self) -> &mut T {
+        let size_of_val = size_of_val::<T>(&**this);
+
         if Rc::strong_count(this) != 1 {
             // Gotta clone the data, there are other Rcs.
-            // Pre-allocate memory to allow writing the cloned value directly.
-            let mut rc = Self::new_uninit_in(this.alloc.clone());
-            unsafe {
-                let data = Rc::get_mut_unchecked(&mut rc);
-                (**this).clone_to_uninit(data.as_mut_ptr());
-                *this = rc.assume_init();
-            }
+
+            let this_data_ref: &T = &**this;
+            // `in_progress` drops the allocation if we panic before finishing initializing it.
+            let mut in_progress: UniqueRcUninit<T, A> =
+                UniqueRcUninit::new(this_data_ref, this.alloc.clone());
+
+            // Initialize with clone of this.
+            let initialized_clone = unsafe {
+                // Clone. If the clone panics, `in_progress` will be dropped and clean up.
+                this_data_ref.clone_to_uninit(in_progress.data_ptr());
+                // Cast type of pointer, now that it is initialized.
+                in_progress.into_rc()
+            };
+
+            // Replace `this` with newly constructed Rc.
+            *this = initialized_clone;
         } else if Rc::weak_count(this) != 0 {
             // Can just steal the data, all that's left is Weaks
-            let mut rc = Self::new_uninit_in(this.alloc.clone());
+
+            // We don't need panic-protection like the above branch does, but we might as well
+            // use the same mechanism.
+            let mut in_progress: UniqueRcUninit<T, A> =
+                UniqueRcUninit::new(&**this, this.alloc.clone());
             unsafe {
-                let data = Rc::get_mut_unchecked(&mut rc);
-                data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);
+                // Initialize `in_progress` with move of **this.
+                // We have to express this in terms of bytes because `T: ?Sized`; there is no
+                // operation that just copies a value based on its `size_of_val()`.
+                ptr::copy_nonoverlapping(
+                    ptr::from_ref(&**this).cast::<u8>(),
+                    in_progress.data_ptr().cast::<u8>(),
+                    size_of_val,
+                );
+
                 this.inner().dec_strong();
                 // Remove implicit strong-weak ref (no need to craft a fake
                 // Weak here -- we know other Weaks can clean up for us)
                 this.inner().dec_weak();
-                ptr::write(this, rc.assume_init());
+                // Replace `this` with newly constructed Rc that has the moved data.
+                ptr::write(this, in_progress.into_rc());
             }
         }
         // This unsafety is ok because we're guaranteed that the pointer
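The key to the new clone branch is panic safety: `UniqueRcUninit` owns the uninitialized `RcBox` and deallocates it, without dropping any value, if `clone_to_uninit` panics (its definition is in the `@@ -3686` hunk below). A self-contained sketch of the same drop-guard idea; the names here are illustrative, not the std internals:

```rust
use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};

/// Owns a raw, *uninitialized* allocation; frees it on drop unless defused.
struct UninitGuard {
    ptr: *mut u8,
    layout: Layout,
}

impl UninitGuard {
    fn new(layout: Layout) -> Self {
        let ptr = unsafe { alloc(layout) };
        if ptr.is_null() {
            handle_alloc_error(layout);
        }
        UninitGuard { ptr, layout }
    }

    /// Called once the memory is initialized; ownership passes to the caller.
    fn defuse(self) -> *mut u8 {
        let ptr = self.ptr;
        std::mem::forget(self); // skip Drop: the allocation now has a real owner
        ptr
    }
}

impl Drop for UninitGuard {
    fn drop(&mut self) {
        // Reached only on a panic before `defuse`: deallocate without running
        // any destructor, because no value was ever written.
        unsafe { dealloc(self.ptr, self.layout) };
    }
}

fn main() {
    let guard = UninitGuard::new(Layout::new::<u64>());
    unsafe {
        // Fallible initialization runs while the guard is still armed; a
        // panic here would hit the guard's Drop and free the memory.
        guard.ptr.cast::<u64>().write(42);
    }
    let ptr = guard.defuse().cast::<u64>();
    unsafe {
        assert_eq!(ptr.read(), 42);
        dealloc(ptr.cast(), Layout::new::<u64>());
    }
}
```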
@@ -3686,3 +3708,67 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for UniqueRc<T, A> {
         }
     }
 }
+
+/// A unique owning pointer to a [`RcBox`] **that does not imply the contents are initialized,**
+/// but will deallocate it (without dropping the value) when dropped.
+///
+/// This is a helper for [`Rc::make_mut()`] to ensure correct cleanup on panic.
+/// It is nearly a duplicate of `UniqueRc<MaybeUninit<T>, A>` except that it allows `T: !Sized`,
+/// which `MaybeUninit` does not.
+#[cfg(not(no_global_oom_handling))]
+struct UniqueRcUninit<T: ?Sized, A: Allocator> {
+    ptr: NonNull<RcBox<T>>,
+    layout_for_value: Layout,
+    alloc: Option<A>,
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized, A: Allocator> UniqueRcUninit<T, A> {
+    /// Allocate a RcBox with layout suitable to contain `for_value` or a clone of it.
+    fn new(for_value: &T, alloc: A) -> UniqueRcUninit<T, A> {
+        let layout = Layout::for_value(for_value);
+        let ptr = unsafe {
+            Rc::allocate_for_layout(
+                layout,
+                |layout_for_rcbox| alloc.allocate(layout_for_rcbox),
+                |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const RcBox<T>),
+            )
+        };
+        Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) }
+    }
+
+    /// Returns the pointer to be written into to initialize the [`Rc`].
+    fn data_ptr(&mut self) -> *mut T {
+        let offset = data_offset_align(self.layout_for_value.align());
+        unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T }
+    }
+
+    /// Upgrade this into a normal [`Rc`].
+    ///
+    /// # Safety
+    ///
+    /// The data must have been initialized (by writing to [`Self::data_ptr()`]).
+    unsafe fn into_rc(mut self) -> Rc<T, A> {
+        let ptr = self.ptr;
+        let alloc = self.alloc.take().unwrap();
+        mem::forget(self);
+        // SAFETY: The pointer is valid as per `UniqueRcUninit::new`, and the caller is responsible
+        // for having initialized the data.
+        unsafe { Rc::from_ptr_in(ptr.as_ptr(), alloc) }
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized, A: Allocator> Drop for UniqueRcUninit<T, A> {
+    fn drop(&mut self) {
+        // SAFETY:
+        // * new() produced a pointer safe to deallocate.
+        // * We own the pointer unless into_rc() was called, which forgets us.
+        unsafe {
+            self.alloc
+                .take()
+                .unwrap()
+                .deallocate(self.ptr.cast(), rcbox_layout_for_value_layout(self.layout_for_value));
+        }
+    }
+}
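`data_ptr()` above computes where the value starts inside the `RcBox`: the header size plus whatever padding the value's alignment demands (`data_offset_align`'s body is visible as context in the sync.rs hunk further down). A sketch of the same arithmetic using only the stable `Layout::extend`; the two-`usize` header mirroring `RcBox`'s strong/weak counts is an assumption, and the numbers are for a 64-bit target:

```rust
use std::alloc::Layout;

fn main() {
    // RcBox is roughly { strong: Cell<usize>, weak: Cell<usize>, value: T }.
    let header = Layout::new::<(usize, usize)>();

    // An 8-byte-aligned value starts right after the two counters (offset 16).
    let (_, offset) = header.extend(Layout::new::<u64>()).unwrap();
    assert_eq!(offset, 16);

    // A 32-byte-aligned value forces padding after the counters (offset 32).
    let (_, offset) = header.extend(Layout::from_size_align(32, 32).unwrap()).unwrap();
    assert_eq!(offset, 32);
}
```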
library/alloc/tests/rc.rs
@@ -316,6 +316,24 @@ fn test_cowrc_clone_weak() {
     assert!(cow1_weak.upgrade().is_none());
 }
 
+/// This is similar to the doc-test for `Rc::make_mut()`, but on an unsized type (slice).
+#[test]
+fn test_cowrc_unsized() {
+    use std::rc::Rc;
+
+    let mut data: Rc<[i32]> = Rc::new([10, 20, 30]);
+
+    Rc::make_mut(&mut data)[0] += 1; // Won't clone anything
+    let mut other_data = Rc::clone(&data); // Won't clone inner data
+    Rc::make_mut(&mut data)[1] += 1; // Clones inner data
+    Rc::make_mut(&mut data)[2] += 1; // Won't clone anything
+    Rc::make_mut(&mut other_data)[0] *= 10; // Won't clone anything
+
+    // Now `data` and `other_data` point to different allocations.
+    assert_eq!(*data, [11, 21, 31]);
+    assert_eq!(*other_data, [110, 20, 30]);
+}
+
 #[test]
 fn test_show() {
     let foo = Rc::new(75);
library/alloc/src/sync.rs
@@ -2150,7 +2150,8 @@ unsafe impl<T: ?Sized, A: Allocator> DerefPure for Arc<T, A> {}
 #[unstable(feature = "receiver_trait", issue = "none")]
 impl<T: ?Sized> Receiver for Arc<T> {}
 
-impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Arc<T, A> {
     /// Makes a mutable reference into the given `Arc`.
     ///
     /// If there are other `Arc` pointers to the same allocation, then `make_mut` will
@@ -2201,10 +2202,11 @@ impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
     /// assert!(76 == *data);
     /// assert!(weak.upgrade().is_none());
     /// ```
-    #[cfg(not(no_global_oom_handling))]
     #[inline]
     #[stable(feature = "arc_unique", since = "1.4.0")]
     pub fn make_mut(this: &mut Self) -> &mut T {
+        let size_of_val = mem::size_of_val::<T>(&**this);
+
         // Note that we hold both a strong reference and a weak reference.
         // Thus, releasing our strong reference only will not, by itself, cause
         // the memory to be deallocated.
@@ -2215,13 +2217,19 @@ impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
         // deallocated.
         if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
             // Another strong pointer exists, so we must clone.
-            // Pre-allocate memory to allow writing the cloned value directly.
-            let mut arc = Self::new_uninit_in(this.alloc.clone());
-            unsafe {
-                let data = Arc::get_mut_unchecked(&mut arc);
-                (**this).clone_to_uninit(data.as_mut_ptr());
-                *this = arc.assume_init();
-            }
+
+            let this_data_ref: &T = &**this;
+            // `in_progress` drops the allocation if we panic before finishing initializing it.
+            let mut in_progress: UniqueArcUninit<T, A> =
+                UniqueArcUninit::new(this_data_ref, this.alloc.clone());
+
+            let initialized_clone = unsafe {
+                // Clone. If the clone panics, `in_progress` will be dropped and clean up.
+                this_data_ref.clone_to_uninit(in_progress.data_ptr());
+                // Cast type of pointer, now that it is initialized.
+                in_progress.into_arc()
+            };
+
+            *this = initialized_clone;
         } else if this.inner().weak.load(Relaxed) != 1 {
             // Relaxed suffices in the above because this is fundamentally an
             // optimization: we are always racing with weak pointers being
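The `compare_exchange(1, 0, Acquire, Relaxed)` in this hunk is the uniqueness test: momentarily pinning the strong count at 0 prevents any concurrent `Weak::upgrade` (which refuses to resurrect a count of 0) from creating a new `Arc` while the value may be moved out. An illustration against a bare counter rather than the real `ArcInner`:

```rust
use std::sync::atomic::{
    AtomicUsize,
    Ordering::{Acquire, Relaxed},
};

/// Returns true if the caller held the only strong reference. On success the
/// count is left at 0, locking out concurrent `Weak::upgrade`-style CASes.
fn try_claim_unique(strong: &AtomicUsize) -> bool {
    // Acquire on success synchronizes with the releasing decrements of other
    // clones, so all writes made through them are visible before we claim.
    strong.compare_exchange(1, 0, Acquire, Relaxed).is_ok()
}

fn main() {
    let strong = AtomicUsize::new(1);
    assert!(try_claim_unique(&strong)); // sole owner: claim succeeds, count pinned at 0
    assert!(!try_claim_unique(&strong)); // count is no longer 1

    let strong = AtomicUsize::new(2);
    assert!(!try_claim_unique(&strong)); // another Arc exists: must clone instead
}
```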
@@ -2240,11 +2248,22 @@ impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
             let _weak = Weak { ptr: this.ptr, alloc: this.alloc.clone() };
 
             // Can just steal the data, all that's left is Weaks
-            let mut arc = Self::new_uninit_in(this.alloc.clone());
+            //
+            // We don't need panic-protection like the above branch does, but we might as well
+            // use the same mechanism.
+            let mut in_progress: UniqueArcUninit<T, A> =
+                UniqueArcUninit::new(&**this, this.alloc.clone());
             unsafe {
-                let data = Arc::get_mut_unchecked(&mut arc);
-                data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);
-                ptr::write(this, arc.assume_init());
+                // Initialize `in_progress` with move of **this.
+                // We have to express this in terms of bytes because `T: ?Sized`; there is no
+                // operation that just copies a value based on its `size_of_val()`.
+                ptr::copy_nonoverlapping(
+                    ptr::from_ref(&**this).cast::<u8>(),
+                    in_progress.data_ptr().cast::<u8>(),
+                    size_of_val,
+                );
+
+                ptr::write(this, in_progress.into_arc());
             }
         } else {
             // We were the sole reference of either kind; bump back up the
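The steal branch can no longer use the typed `copy_from_nonoverlapping(&**this, 1)`, since counting in units of `T` requires `T: Sized`; it copies `size_of_val` raw bytes instead. A standalone sketch of such a byte-wise move of an unsized value:

```rust
use std::{mem, ptr};

fn main() {
    let src: Box<[u32]> = vec![1u32, 2, 3].into_boxed_slice();
    let byte_len = mem::size_of_val::<[u32]>(&*src); // 12 bytes, known only at runtime

    let mut dst = vec![0u32; src.len()];
    unsafe {
        // No `*dst = *src` exists for unsized places; copy raw bytes instead,
        // exactly as the `ptr::copy_nonoverlapping(..., size_of_val)` above does.
        ptr::copy_nonoverlapping(
            ptr::from_ref::<[u32]>(&*src).cast::<u8>(),
            dst.as_mut_ptr().cast::<u8>(),
            byte_len,
        );
    }
    assert_eq!(dst, [1, 2, 3]);
    // (u32 is Copy, so `src` may still be used here; for non-Copy contents the
    // source must be treated as moved-from, which make_mut arranges via its
    // dec_strong/dec_weak bookkeeping.)
}
```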
@@ -3809,6 +3828,68 @@ fn data_offset_align(align: usize) -> usize {
     layout.size() + layout.padding_needed_for(align)
 }
 
+/// A unique owning pointer to a [`ArcInner`] **that does not imply the contents are initialized,**
+/// but will deallocate it (without dropping the value) when dropped.
+///
+/// This is a helper for [`Arc::make_mut()`] to ensure correct cleanup on panic.
+#[cfg(not(no_global_oom_handling))]
+struct UniqueArcUninit<T: ?Sized, A: Allocator> {
+    ptr: NonNull<ArcInner<T>>,
+    layout_for_value: Layout,
+    alloc: Option<A>,
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized, A: Allocator> UniqueArcUninit<T, A> {
+    /// Allocate a ArcInner with layout suitable to contain `for_value` or a clone of it.
+    fn new(for_value: &T, alloc: A) -> UniqueArcUninit<T, A> {
+        let layout = Layout::for_value(for_value);
+        let ptr = unsafe {
+            Arc::allocate_for_layout(
+                layout,
+                |layout_for_arcinner| alloc.allocate(layout_for_arcinner),
+                |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const ArcInner<T>),
+            )
+        };
+        Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) }
+    }
+
+    /// Returns the pointer to be written into to initialize the [`Arc`].
+    fn data_ptr(&mut self) -> *mut T {
+        let offset = data_offset_align(self.layout_for_value.align());
+        unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T }
+    }
+
+    /// Upgrade this into a normal [`Arc`].
+    ///
+    /// # Safety
+    ///
+    /// The data must have been initialized (by writing to [`Self::data_ptr()`]).
+    unsafe fn into_arc(mut self) -> Arc<T, A> {
+        let ptr = self.ptr;
+        let alloc = self.alloc.take().unwrap();
+        mem::forget(self);
+        // SAFETY: The pointer is valid as per `UniqueArcUninit::new`, and the caller is responsible
+        // for having initialized the data.
+        unsafe { Arc::from_ptr_in(ptr.as_ptr(), alloc) }
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized, A: Allocator> Drop for UniqueArcUninit<T, A> {
+    fn drop(&mut self) {
+        // SAFETY:
+        // * new() produced a pointer safe to deallocate.
+        // * We own the pointer unless into_arc() was called, which forgets us.
+        unsafe {
+            self.alloc.take().unwrap().deallocate(
+                self.ptr.cast(),
+                arcinner_layout_for_value_layout(self.layout_for_value),
+            );
+        }
+    }
+}
+
 #[stable(feature = "arc_error", since = "1.52.0")]
 impl<T: core::error::Error + ?Sized> core::error::Error for Arc<T> {
     #[allow(deprecated, deprecated_in_future)]
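A subtlety shared by `UniqueArcUninit::new` and its `Rc` twin: `allocate_for_layout` hands back a thin pointer, and the `|mem| mem.with_metadata_of(...)` closure rebuilds a fat `*mut ArcInner<T>` by copying the pointer metadata (for example a slice length) from `for_value`. A nightly-only sketch of that operation in isolation; `with_metadata_of` is gated behind the unstable `set_ptr_value` feature:

```rust
#![feature(set_ptr_value)] // `with_metadata_of` is unstable at the time of this commit

fn main() {
    let value: &[u8] = &[1, 2, 3, 4];

    // A thin pointer to fresh storage (a stack array standing in for an allocation).
    let mut storage = [0u8; 4];
    let thin: *mut u8 = storage.as_mut_ptr();

    // Copy the address from `thin` and the metadata (the length, 4) from the
    // fat pointer to `value`, yielding a usable fat pointer to the new storage.
    let fat: *mut [u8] = thin.with_metadata_of(std::ptr::from_ref(value));
    assert_eq!(fat.len(), 4); // `len` on a raw slice pointer reads the metadata
}
```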
library/alloc/tests/arc.rs
@@ -209,3 +209,21 @@ fn weak_may_dangle() {
     // `val` dropped here while still borrowed
     // borrow might be used here, when `val` is dropped and runs the `Drop` code for type `std::sync::Weak`
 }
+
+/// This is similar to the doc-test for `Arc::make_mut()`, but on an unsized type (slice).
+#[test]
+fn make_mut_unsized() {
+    use alloc::sync::Arc;
+
+    let mut data: Arc<[i32]> = Arc::new([10, 20, 30]);
+
+    Arc::make_mut(&mut data)[0] += 1; // Won't clone anything
+    let mut other_data = Arc::clone(&data); // Won't clone inner data
+    Arc::make_mut(&mut data)[1] += 1; // Clones inner data
+    Arc::make_mut(&mut data)[2] += 1; // Won't clone anything
+    Arc::make_mut(&mut other_data)[0] *= 10; // Won't clone anything
+
+    // Now `data` and `other_data` point to different allocations.
+    assert_eq!(*data, [11, 21, 31]);
+    assert_eq!(*other_data, [110, 20, 30]);
+}