Auto merge of #116113 - kpreid:arcmut, r=dtolnay
Generalize `{Rc,Arc}::make_mut()` to unsized types.

* `{Rc,Arc}::make_mut()` now accept any type implementing the new unstable trait `core::clone::CloneToUninit`.
* `CloneToUninit` is implemented for `T: Clone` and for `[T] where T: Clone`.
* `CloneToUninit` is a generalization of the existing internal trait `alloc::alloc::WriteCloneIntoRaw`.
* New feature gate: `clone_to_uninit`

This allows performing `make_mut()` on `Rc<[T]>` and `Arc<[T]>`, which was not previously possible.

---

Previous PR description, now obsolete:

> Add `{Rc, Arc}::make_mut_slice()`
>
> These functions behave identically to `make_mut()`, but operate on `Arc<[T]>` instead of `Arc<T>`.
>
> This allows performing the operation on slices, which was not previously possible because `make_mut()` requires `T: Clone` (and slices, being `!Sized`, do not and currently cannot implement `Clone`).
>
> Feature gate: `make_mut_slice`

try-job: test-various
commit f944afe380
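Not part of the diff: a minimal sketch of what the change enables at the call site. `make_mut` is a stable method, so calling it on an `Arc<[T]>` should need no feature gate once this lands (the `clone_to_uninit` gate governs the trait itself), assuming a toolchain that includes this change:

```rust
use std::sync::Arc;

fn main() {
    // `Arc<[i32]>` is unsized, so `make_mut` was previously unavailable here.
    let mut data: Arc<[i32]> = Arc::new([1, 2, 3]);
    let snapshot = Arc::clone(&data);

    // `snapshot` shares the allocation, so `make_mut` clones the slice
    // before handing out `&mut [i32]`.
    Arc::make_mut(&mut data)[0] = 99;

    assert_eq!(*data, [99, 2, 3]);
    assert_eq!(*snapshot, [1, 2, 3]);
}
```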
@@ -424,29 +424,3 @@ pub mod __alloc_error_handler {
         }
     }
 }
-
-#[cfg(not(no_global_oom_handling))]
-/// Specialize clones into pre-allocated, uninitialized memory.
-/// Used by `Box::clone` and `Rc`/`Arc::make_mut`.
-pub(crate) trait WriteCloneIntoRaw: Sized {
-    unsafe fn write_clone_into_raw(&self, target: *mut Self);
-}
-
-#[cfg(not(no_global_oom_handling))]
-impl<T: Clone> WriteCloneIntoRaw for T {
-    #[inline]
-    default unsafe fn write_clone_into_raw(&self, target: *mut Self) {
-        // Having allocated *first* may allow the optimizer to create
-        // the cloned value in-place, skipping the local and move.
-        unsafe { target.write(self.clone()) };
-    }
-}
-
-#[cfg(not(no_global_oom_handling))]
-impl<T: Copy> WriteCloneIntoRaw for T {
-    #[inline]
-    unsafe fn write_clone_into_raw(&self, target: *mut Self) {
-        // We can always copy in-place, without ever involving a local value.
-        unsafe { target.copy_from_nonoverlapping(self, 1) };
-    }
-}
@@ -188,6 +188,8 @@
 use core::any::Any;
 use core::async_iter::AsyncIterator;
 use core::borrow;
+#[cfg(not(no_global_oom_handling))]
+use core::clone::CloneToUninit;
 use core::cmp::Ordering;
 use core::error::Error;
 use core::fmt;
@@ -207,7 +209,7 @@ use core::slice;
 use core::task::{Context, Poll};
 
 #[cfg(not(no_global_oom_handling))]
-use crate::alloc::{handle_alloc_error, WriteCloneIntoRaw};
+use crate::alloc::handle_alloc_error;
 use crate::alloc::{AllocError, Allocator, Global, Layout};
 #[cfg(not(no_global_oom_handling))]
 use crate::borrow::Cow;
@@ -1346,7 +1348,7 @@ impl<T: Clone, A: Allocator + Clone> Clone for Box<T, A> {
         // Pre-allocate memory to allow writing the cloned value directly.
         let mut boxed = Self::new_uninit_in(self.1.clone());
         unsafe {
-            (**self).write_clone_into_raw(boxed.as_mut_ptr());
+            (**self).clone_to_uninit(boxed.as_mut_ptr());
             boxed.assume_init()
         }
     }
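The one-line change above keeps `Box::clone`'s allocate-first strategy. A rough standalone sketch of that strategy follows (a hypothetical helper, not from the diff; it uses `Box::new_uninit`, which was unstable at the time of this PR and has since been stabilized):

```rust
fn clone_boxed_string(src: &String) -> Box<String> {
    // Allocate the destination first: Box<MaybeUninit<String>>.
    let mut boxed = Box::<String>::new_uninit();
    // Clone straight into the final allocation; `MaybeUninit::write` is the
    // safe analogue of what `clone_to_uninit` does through a raw pointer.
    boxed.write(src.clone());
    // SAFETY: the value was just initialized by `write`.
    unsafe { boxed.assume_init() }
}

fn main() {
    let s = String::from("hello");
    assert_eq!(*clone_boxed_string(&s), "hello");
}
```

Allocating before cloning gives the optimizer a chance to construct the clone directly in the heap allocation instead of building it on the stack and moving it.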
@@ -103,6 +103,7 @@
 #![feature(assert_matches)]
 #![feature(async_fn_traits)]
 #![feature(async_iterator)]
+#![feature(clone_to_uninit)]
 #![feature(coerce_unsized)]
 #![feature(const_align_of_val)]
 #![feature(const_box)]
@@ -249,6 +249,8 @@ use std::boxed::Box;
 use core::any::Any;
 use core::borrow;
 use core::cell::Cell;
+#[cfg(not(no_global_oom_handling))]
+use core::clone::CloneToUninit;
 use core::cmp::Ordering;
 use core::fmt;
 use core::hash::{Hash, Hasher};
@@ -268,8 +270,6 @@ use core::slice::from_raw_parts_mut;
 
 #[cfg(not(no_global_oom_handling))]
 use crate::alloc::handle_alloc_error;
-#[cfg(not(no_global_oom_handling))]
-use crate::alloc::WriteCloneIntoRaw;
 use crate::alloc::{AllocError, Allocator, Global, Layout};
 use crate::borrow::{Cow, ToOwned};
 #[cfg(not(no_global_oom_handling))]
@@ -1749,7 +1749,8 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
     }
 }
 
-impl<T: Clone, A: Allocator + Clone> Rc<T, A> {
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Rc<T, A> {
     /// Makes a mutable reference into the given `Rc`.
     ///
     /// If there are other `Rc` pointers to the same allocation, then `make_mut` will
@@ -1800,31 +1801,52 @@ impl<T: Clone, A: Allocator + Clone> Rc<T, A> {
     /// assert!(76 == *data);
     /// assert!(weak.upgrade().is_none());
     /// ```
-    #[cfg(not(no_global_oom_handling))]
     #[inline]
     #[stable(feature = "rc_unique", since = "1.4.0")]
     pub fn make_mut(this: &mut Self) -> &mut T {
+        let size_of_val = size_of_val::<T>(&**this);
+
         if Rc::strong_count(this) != 1 {
             // Gotta clone the data, there are other Rcs.
-            // Pre-allocate memory to allow writing the cloned value directly.
-            let mut rc = Self::new_uninit_in(this.alloc.clone());
-            unsafe {
-                let data = Rc::get_mut_unchecked(&mut rc);
-                (**this).write_clone_into_raw(data.as_mut_ptr());
-                *this = rc.assume_init();
-            }
+
+            let this_data_ref: &T = &**this;
+            // `in_progress` drops the allocation if we panic before finishing initializing it.
+            let mut in_progress: UniqueRcUninit<T, A> =
+                UniqueRcUninit::new(this_data_ref, this.alloc.clone());
+
+            // Initialize with clone of this.
+            let initialized_clone = unsafe {
+                // Clone. If the clone panics, `in_progress` will be dropped and clean up.
+                this_data_ref.clone_to_uninit(in_progress.data_ptr());
+                // Cast type of pointer, now that it is initialized.
+                in_progress.into_rc()
+            };
+
+            // Replace `this` with newly constructed Rc.
+            *this = initialized_clone;
         } else if Rc::weak_count(this) != 0 {
             // Can just steal the data, all that's left is Weaks
-            let mut rc = Self::new_uninit_in(this.alloc.clone());
+
+            // We don't need panic-protection like the above branch does, but we might as well
+            // use the same mechanism.
+            let mut in_progress: UniqueRcUninit<T, A> =
+                UniqueRcUninit::new(&**this, this.alloc.clone());
             unsafe {
-                let data = Rc::get_mut_unchecked(&mut rc);
-                data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);
+                // Initialize `in_progress` with move of **this.
+                // We have to express this in terms of bytes because `T: ?Sized`; there is no
+                // operation that just copies a value based on its `size_of_val()`.
+                ptr::copy_nonoverlapping(
+                    ptr::from_ref(&**this).cast::<u8>(),
+                    in_progress.data_ptr().cast::<u8>(),
+                    size_of_val,
+                );
 
                 this.inner().dec_strong();
                 // Remove implicit strong-weak ref (no need to craft a fake
                 // Weak here -- we know other Weaks can clean up for us)
                 this.inner().dec_weak();
-                ptr::write(this, rc.assume_init());
+                // Replace `this` with newly constructed Rc that has the moved data.
+                ptr::write(this, in_progress.into_rc());
            }
         }
         // This unsafety is ok because we're guaranteed that the pointer
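Worth noting about the `weak_count != 0` branch above: the value is moved byte-wise into a fresh allocation and the old allocation is left for the remaining `Weak`s to clean up, so no clone occurs. A behavioral sketch (not from the PR), assuming a toolchain that includes this change:

```rust
use std::rc::Rc;

fn main() {
    let mut data: Rc<[i32]> = Rc::new([1, 2, 3]);
    let weak = Rc::downgrade(&data);

    // Strong count is 1 but a Weak exists: `make_mut` moves the value into a
    // fresh allocation instead of cloning, and the old Weak is disassociated.
    Rc::make_mut(&mut data)[0] = 99;

    assert_eq!(*data, [99, 2, 3]);
    assert!(weak.upgrade().is_none());
}
```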
@@ -3686,3 +3708,67 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for UniqueRc<T, A> {
         }
     }
 }
+
+/// A unique owning pointer to an [`RcBox`] **that does not imply the contents are initialized,**
+/// but will deallocate it (without dropping the value) when dropped.
+///
+/// This is a helper for [`Rc::make_mut()`] to ensure correct cleanup on panic.
+/// It is nearly a duplicate of `UniqueRc<MaybeUninit<T>, A>` except that it allows `T: !Sized`,
+/// which `MaybeUninit` does not.
+#[cfg(not(no_global_oom_handling))]
+struct UniqueRcUninit<T: ?Sized, A: Allocator> {
+    ptr: NonNull<RcBox<T>>,
+    layout_for_value: Layout,
+    alloc: Option<A>,
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized, A: Allocator> UniqueRcUninit<T, A> {
+    /// Allocate an RcBox with layout suitable to contain `for_value` or a clone of it.
+    fn new(for_value: &T, alloc: A) -> UniqueRcUninit<T, A> {
+        let layout = Layout::for_value(for_value);
+        let ptr = unsafe {
+            Rc::allocate_for_layout(
+                layout,
+                |layout_for_rcbox| alloc.allocate(layout_for_rcbox),
+                |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const RcBox<T>),
+            )
+        };
+        Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) }
+    }
+
+    /// Returns the pointer to be written into to initialize the [`Rc`].
+    fn data_ptr(&mut self) -> *mut T {
+        let offset = data_offset_align(self.layout_for_value.align());
+        unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T }
+    }
+
+    /// Upgrade this into a normal [`Rc`].
+    ///
+    /// # Safety
+    ///
+    /// The data must have been initialized (by writing to [`Self::data_ptr()`]).
+    unsafe fn into_rc(mut self) -> Rc<T, A> {
+        let ptr = self.ptr;
+        let alloc = self.alloc.take().unwrap();
+        mem::forget(self);
+        // SAFETY: The pointer is valid as per `UniqueRcUninit::new`, and the caller is responsible
+        // for having initialized the data.
+        unsafe { Rc::from_ptr_in(ptr.as_ptr(), alloc) }
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized, A: Allocator> Drop for UniqueRcUninit<T, A> {
+    fn drop(&mut self) {
+        // SAFETY:
+        // * new() produced a pointer safe to deallocate.
+        // * We own the pointer unless into_rc() was called, which forgets us.
+        unsafe {
+            self.alloc
+                .take()
+                .unwrap()
+                .deallocate(self.ptr.cast(), rcbox_layout_for_value_layout(self.layout_for_value));
+        }
+    }
+}
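`UniqueRcUninit` is essentially a drop guard: it owns the half-initialized allocation until `into_rc()` disarms it via `mem::forget`. A self-contained sketch of that idiom follows, using a hypothetical `UninitGuard` over a plain byte buffer (names and structure are illustrative, not std internals):

```rust
use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
use std::mem;

/// Owns an uninitialized allocation; frees it on unwind unless disarmed.
struct UninitGuard {
    ptr: *mut u8,
    layout: Layout,
}

impl UninitGuard {
    fn new(layout: Layout) -> UninitGuard {
        assert!(layout.size() > 0, "sketch avoids zero-sized allocations");
        let ptr = unsafe { alloc(layout) };
        if ptr.is_null() {
            handle_alloc_error(layout);
        }
        UninitGuard { ptr, layout }
    }

    /// Disarm the guard: ownership of the (now initialized) memory moves to
    /// the caller and Drop no longer runs.
    fn into_raw(self) -> *mut u8 {
        let ptr = self.ptr;
        mem::forget(self);
        ptr
    }
}

impl Drop for UninitGuard {
    fn drop(&mut self) {
        // Reached only if `into_raw` was never called, e.g. when a panic
        // unwinds mid-initialization: deallocate without dropping contents.
        unsafe { dealloc(self.ptr, self.layout) }
    }
}

fn main() {
    let layout = Layout::array::<u8>(16).unwrap();
    let guard = UninitGuard::new(layout);
    // ... initialization that might panic would go here ...
    let ptr = guard.into_raw();
    unsafe { dealloc(ptr, layout) };
}
```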
@@ -316,6 +316,24 @@ fn test_cowrc_clone_weak() {
     assert!(cow1_weak.upgrade().is_none());
 }
 
+/// This is similar to the doc-test for `Rc::make_mut()`, but on an unsized type (slice).
+#[test]
+fn test_cowrc_unsized() {
+    use std::rc::Rc;
+
+    let mut data: Rc<[i32]> = Rc::new([10, 20, 30]);
+
+    Rc::make_mut(&mut data)[0] += 1; // Won't clone anything
+    let mut other_data = Rc::clone(&data); // Won't clone inner data
+    Rc::make_mut(&mut data)[1] += 1; // Clones inner data
+    Rc::make_mut(&mut data)[2] += 1; // Won't clone anything
+    Rc::make_mut(&mut other_data)[0] *= 10; // Won't clone anything
+
+    // Now `data` and `other_data` point to different allocations.
+    assert_eq!(*data, [11, 21, 31]);
+    assert_eq!(*other_data, [110, 20, 30]);
+}
+
 #[test]
 fn test_show() {
     let foo = Rc::new(75);
@@ -10,6 +10,8 @@
 
 use core::any::Any;
 use core::borrow;
+#[cfg(not(no_global_oom_handling))]
+use core::clone::CloneToUninit;
 use core::cmp::Ordering;
 use core::fmt;
 use core::hash::{Hash, Hasher};
@@ -30,8 +32,6 @@ use core::sync::atomic::Ordering::{Acquire, Relaxed, Release};
 
 #[cfg(not(no_global_oom_handling))]
 use crate::alloc::handle_alloc_error;
-#[cfg(not(no_global_oom_handling))]
-use crate::alloc::WriteCloneIntoRaw;
 use crate::alloc::{AllocError, Allocator, Global, Layout};
 use crate::borrow::{Cow, ToOwned};
 use crate::boxed::Box;
@@ -2150,7 +2150,8 @@ unsafe impl<T: ?Sized, A: Allocator> DerefPure for Arc<T, A> {}
 #[unstable(feature = "receiver_trait", issue = "none")]
 impl<T: ?Sized> Receiver for Arc<T> {}
 
-impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Arc<T, A> {
     /// Makes a mutable reference into the given `Arc`.
     ///
     /// If there are other `Arc` pointers to the same allocation, then `make_mut` will
@@ -2201,10 +2202,11 @@ impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
     /// assert!(76 == *data);
     /// assert!(weak.upgrade().is_none());
     /// ```
-    #[cfg(not(no_global_oom_handling))]
     #[inline]
     #[stable(feature = "arc_unique", since = "1.4.0")]
     pub fn make_mut(this: &mut Self) -> &mut T {
+        let size_of_val = mem::size_of_val::<T>(&**this);
+
         // Note that we hold both a strong reference and a weak reference.
         // Thus, releasing our strong reference only will not, by itself, cause
         // the memory to be deallocated.
@@ -2215,13 +2217,19 @@ impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
         // deallocated.
         if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
             // Another strong pointer exists, so we must clone.
-            // Pre-allocate memory to allow writing the cloned value directly.
-            let mut arc = Self::new_uninit_in(this.alloc.clone());
-            unsafe {
-                let data = Arc::get_mut_unchecked(&mut arc);
-                (**this).write_clone_into_raw(data.as_mut_ptr());
-                *this = arc.assume_init();
-            }
+
+            let this_data_ref: &T = &**this;
+            // `in_progress` drops the allocation if we panic before finishing initializing it.
+            let mut in_progress: UniqueArcUninit<T, A> =
+                UniqueArcUninit::new(this_data_ref, this.alloc.clone());
+
+            let initialized_clone = unsafe {
+                // Clone. If the clone panics, `in_progress` will be dropped and clean up.
+                this_data_ref.clone_to_uninit(in_progress.data_ptr());
+                // Cast type of pointer, now that it is initialized.
+                in_progress.into_arc()
+            };
+            *this = initialized_clone;
         } else if this.inner().weak.load(Relaxed) != 1 {
             // Relaxed suffices in the above because this is fundamentally an
             // optimization: we are always racing with weak pointers being
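The `compare_exchange(1, 0, Acquire, Relaxed)` in the context above is `Arc`'s uniqueness test: it atomically parks the strong count at zero so a concurrent `Weak::upgrade` cannot mint a new strong reference while `make_mut` works. A minimal sketch of that idiom in isolation (not the std internals):

```rust
use std::sync::atomic::{AtomicUsize, Ordering};

/// Returns true if `strong` was exactly 1 and has now been locked to 0.
/// On success, `Acquire` synchronizes with the `Release` decrements performed
/// by other handles as they were dropped.
fn try_lock_unique(strong: &AtomicUsize) -> bool {
    strong
        .compare_exchange(1, 0, Ordering::Acquire, Ordering::Relaxed)
        .is_ok()
}

fn main() {
    let strong = AtomicUsize::new(1);
    assert!(try_lock_unique(&strong)); // we were unique; count locked at 0
    assert!(!try_lock_unique(&strong)); // a second attempt fails
}
```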
@@ -2240,11 +2248,22 @@ impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
             let _weak = Weak { ptr: this.ptr, alloc: this.alloc.clone() };
 
             // Can just steal the data, all that's left is Weaks
-            let mut arc = Self::new_uninit_in(this.alloc.clone());
+            //
+            // We don't need panic-protection like the above branch does, but we might as well
+            // use the same mechanism.
+            let mut in_progress: UniqueArcUninit<T, A> =
+                UniqueArcUninit::new(&**this, this.alloc.clone());
             unsafe {
-                let data = Arc::get_mut_unchecked(&mut arc);
-                data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);
-                ptr::write(this, arc.assume_init());
+                // Initialize `in_progress` with move of **this.
+                // We have to express this in terms of bytes because `T: ?Sized`; there is no
+                // operation that just copies a value based on its `size_of_val()`.
+                ptr::copy_nonoverlapping(
+                    ptr::from_ref(&**this).cast::<u8>(),
+                    in_progress.data_ptr().cast::<u8>(),
+                    size_of_val,
+                );
+
+                ptr::write(this, in_progress.into_arc());
             }
         } else {
             // We were the sole reference of either kind; bump back up the
@@ -3809,6 +3828,68 @@ fn data_offset_align(align: usize) -> usize {
     layout.size() + layout.padding_needed_for(align)
 }
 
+/// A unique owning pointer to an [`ArcInner`] **that does not imply the contents are initialized,**
+/// but will deallocate it (without dropping the value) when dropped.
+///
+/// This is a helper for [`Arc::make_mut()`] to ensure correct cleanup on panic.
+#[cfg(not(no_global_oom_handling))]
+struct UniqueArcUninit<T: ?Sized, A: Allocator> {
+    ptr: NonNull<ArcInner<T>>,
+    layout_for_value: Layout,
+    alloc: Option<A>,
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized, A: Allocator> UniqueArcUninit<T, A> {
+    /// Allocate an ArcInner with layout suitable to contain `for_value` or a clone of it.
+    fn new(for_value: &T, alloc: A) -> UniqueArcUninit<T, A> {
+        let layout = Layout::for_value(for_value);
+        let ptr = unsafe {
+            Arc::allocate_for_layout(
+                layout,
+                |layout_for_arcinner| alloc.allocate(layout_for_arcinner),
+                |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const ArcInner<T>),
+            )
+        };
+        Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) }
+    }
+
+    /// Returns the pointer to be written into to initialize the [`Arc`].
+    fn data_ptr(&mut self) -> *mut T {
+        let offset = data_offset_align(self.layout_for_value.align());
+        unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T }
+    }
+
+    /// Upgrade this into a normal [`Arc`].
+    ///
+    /// # Safety
+    ///
+    /// The data must have been initialized (by writing to [`Self::data_ptr()`]).
+    unsafe fn into_arc(mut self) -> Arc<T, A> {
+        let ptr = self.ptr;
+        let alloc = self.alloc.take().unwrap();
+        mem::forget(self);
+        // SAFETY: The pointer is valid as per `UniqueArcUninit::new`, and the caller is responsible
+        // for having initialized the data.
+        unsafe { Arc::from_ptr_in(ptr.as_ptr(), alloc) }
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+impl<T: ?Sized, A: Allocator> Drop for UniqueArcUninit<T, A> {
+    fn drop(&mut self) {
+        // SAFETY:
+        // * new() produced a pointer safe to deallocate.
+        // * We own the pointer unless into_arc() was called, which forgets us.
+        unsafe {
+            self.alloc.take().unwrap().deallocate(
+                self.ptr.cast(),
+                arcinner_layout_for_value_layout(self.layout_for_value),
+            );
+        }
+    }
+}
 
 #[stable(feature = "arc_error", since = "1.52.0")]
 impl<T: core::error::Error + ?Sized> core::error::Error for Arc<T> {
     #[allow(deprecated, deprecated_in_future)]
@@ -209,3 +209,21 @@ fn weak_may_dangle() {
     // `val` dropped here while still borrowed
     // borrow might be used here, when `val` is dropped and runs the `Drop` code for type `std::sync::Weak`
 }
+
+/// This is similar to the doc-test for `Arc::make_mut()`, but on an unsized type (slice).
+#[test]
+fn make_mut_unsized() {
+    use alloc::sync::Arc;
+
+    let mut data: Arc<[i32]> = Arc::new([10, 20, 30]);
+
+    Arc::make_mut(&mut data)[0] += 1; // Won't clone anything
+    let mut other_data = Arc::clone(&data); // Won't clone inner data
+    Arc::make_mut(&mut data)[1] += 1; // Clones inner data
+    Arc::make_mut(&mut data)[2] += 1; // Won't clone anything
+    Arc::make_mut(&mut other_data)[0] *= 10; // Won't clone anything
+
+    // Now `data` and `other_data` point to different allocations.
+    assert_eq!(*data, [11, 21, 31]);
+    assert_eq!(*other_data, [110, 20, 30]);
+}
@@ -36,6 +36,9 @@
 
 #![stable(feature = "rust1", since = "1.0.0")]
 
+use crate::mem::{self, MaybeUninit};
+use crate::ptr;
+
 /// A common trait for the ability to explicitly duplicate an object.
 ///
 /// Differs from [`Copy`] in that [`Copy`] is implicit and an inexpensive bit-wise copy, while
@@ -204,6 +207,189 @@ pub struct AssertParamIsCopy<T: Copy + ?Sized> {
     _field: crate::marker::PhantomData<T>,
 }
 
+/// A generalization of [`Clone`] to dynamically-sized types stored in arbitrary containers.
+///
+/// This trait is implemented for all types implementing [`Clone`], and also [slices](slice) of all
+/// such types. You may also implement this trait to enable cloning trait objects and custom DSTs
+/// (structures containing dynamically-sized fields).
+///
+/// # Safety
+///
+/// Implementations must ensure that when `.clone_to_uninit(dst)` returns normally rather than
+/// panicking, it always leaves `*dst` initialized as a valid value of type `Self`.
+///
+/// # See also
+///
+/// * [`Clone::clone_from`] is a safe function which may be used instead when `Self` is [`Sized`]
+///   and the destination is already initialized; it may be able to reuse allocations owned by
+///   the destination.
+/// * [`ToOwned`], which allocates a new destination container.
+///
+/// [`ToOwned`]: ../../std/borrow/trait.ToOwned.html
+#[unstable(feature = "clone_to_uninit", issue = "126799")]
+pub unsafe trait CloneToUninit {
+    /// Performs copy-assignment from `self` to `dst`.
+    ///
+    /// This is analogous to `std::ptr::write(dst, self.clone())`,
+    /// except that `self` may be a dynamically-sized type ([`!Sized`](Sized)).
+    ///
+    /// Before this function is called, `dst` may point to uninitialized memory.
+    /// After this function is called, `dst` will point to initialized memory; it will be
+    /// sound to create a `&Self` reference from the pointer.
+    ///
+    /// # Safety
+    ///
+    /// Behavior is undefined if any of the following conditions are violated:
+    ///
+    /// * `dst` must be [valid] for writes.
+    /// * `dst` must be properly aligned.
+    /// * `dst` must have the same [pointer metadata] (slice length or `dyn` vtable) as `self`.
+    ///
+    /// [valid]: ptr#safety
+    /// [pointer metadata]: crate::ptr::metadata()
+    ///
+    /// # Panics
+    ///
+    /// This function may panic. (For example, it might panic if memory allocation for a clone
+    /// of a value owned by `self` fails.)
+    /// If the call panics, then `*dst` should be treated as uninitialized memory; it must not be
+    /// read or dropped, because even if it was previously valid, it may have been partially
+    /// overwritten.
+    ///
+    /// The caller may also need to take care to deallocate the allocation pointed to by `dst`,
+    /// if applicable, to avoid a memory leak, and may need to take other precautions to ensure
+    /// soundness in the presence of unwinding.
+    ///
+    /// Implementors should avoid leaking values by, upon unwinding, dropping all component values
+    /// that might have already been created. (For example, if a `[Foo]` of length 3 is being
+    /// cloned, and the second of the three calls to `Foo::clone()` unwinds, then the first `Foo`
+    /// cloned should be dropped.)
+    unsafe fn clone_to_uninit(&self, dst: *mut Self);
+}
+
+#[unstable(feature = "clone_to_uninit", issue = "126799")]
+unsafe impl<T: Clone> CloneToUninit for T {
+    default unsafe fn clone_to_uninit(&self, dst: *mut Self) {
+        // SAFETY: The safety conditions of clone_to_uninit() are a superset of those of
+        // ptr::write().
+        unsafe {
+            // We hope the optimizer will figure out to create the cloned value in-place,
+            // skipping ever storing it on the stack and the copy to the destination.
+            ptr::write(dst, self.clone());
+        }
+    }
+}
+
+// Specialized implementation for types that are [`Copy`], not just [`Clone`],
+// and can therefore be copied bitwise.
+#[unstable(feature = "clone_to_uninit", issue = "126799")]
+unsafe impl<T: Copy> CloneToUninit for T {
+    unsafe fn clone_to_uninit(&self, dst: *mut Self) {
+        // SAFETY: The safety conditions of clone_to_uninit() are a superset of those of
+        // ptr::copy_nonoverlapping().
+        unsafe {
+            ptr::copy_nonoverlapping(self, dst, 1);
+        }
+    }
+}
+
+#[unstable(feature = "clone_to_uninit", issue = "126799")]
+unsafe impl<T: Clone> CloneToUninit for [T] {
+    #[cfg_attr(debug_assertions, track_caller)]
+    default unsafe fn clone_to_uninit(&self, dst: *mut Self) {
+        let len = self.len();
+        // This is the most likely mistake to make, so check it as a debug assertion.
+        debug_assert_eq!(
+            len,
+            dst.len(),
+            "clone_to_uninit() source and destination must have equal lengths",
+        );
+
+        // SAFETY: The produced `&mut` is valid because:
+        // * The caller is obligated to provide a pointer which is valid for writes.
+        // * All bytes pointed to are in MaybeUninit, so we don't care about the memory's
+        //   initialization status.
+        let uninit_ref = unsafe { &mut *(dst as *mut [MaybeUninit<T>]) };
+
+        // Copy the elements
+        let mut initializing = InitializingSlice::from_fully_uninit(uninit_ref);
+        for element_ref in self.iter() {
+            // If the clone() panics, `initializing` will take care of the cleanup.
+            initializing.push(element_ref.clone());
+        }
+        // If we reach here, then the entire slice is initialized, and we've satisfied our
+        // responsibilities to the caller. Disarm the cleanup guard by forgetting it.
+        mem::forget(initializing);
+    }
+}
+
+#[unstable(feature = "clone_to_uninit", issue = "126799")]
+unsafe impl<T: Copy> CloneToUninit for [T] {
+    #[cfg_attr(debug_assertions, track_caller)]
+    unsafe fn clone_to_uninit(&self, dst: *mut Self) {
+        let len = self.len();
+        // This is the most likely mistake to make, so check it as a debug assertion.
+        debug_assert_eq!(
+            len,
+            dst.len(),
+            "clone_to_uninit() source and destination must have equal lengths",
+        );
+
+        // SAFETY: The safety conditions of clone_to_uninit() are a superset of those of
+        // ptr::copy_nonoverlapping().
+        unsafe {
+            ptr::copy_nonoverlapping(self.as_ptr(), dst.as_mut_ptr(), len);
+        }
+    }
+}
+
+/// Ownership of a collection of values stored in a non-owned `[MaybeUninit<T>]`, some of which
+/// are not yet initialized. This is sort of like a `Vec` that doesn't own its allocation.
+/// Its responsibility is to provide cleanup on unwind by dropping the values that *are*
+/// initialized, unless disarmed by forgetting.
+///
+/// This is a helper for `impl<T: Clone> CloneToUninit for [T]`.
+struct InitializingSlice<'a, T> {
+    data: &'a mut [MaybeUninit<T>],
+    /// Number of elements of `*self.data` that are initialized.
+    initialized_len: usize,
+}
+
+impl<'a, T> InitializingSlice<'a, T> {
+    #[inline]
+    fn from_fully_uninit(data: &'a mut [MaybeUninit<T>]) -> Self {
+        Self { data, initialized_len: 0 }
+    }
+
+    /// Push a value onto the end of the initialized part of the slice.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the slice is already fully initialized.
+    #[inline]
+    fn push(&mut self, value: T) {
+        MaybeUninit::write(&mut self.data[self.initialized_len], value);
+        self.initialized_len += 1;
+    }
+}
+
+impl<'a, T> Drop for InitializingSlice<'a, T> {
+    #[cold] // will only be invoked on unwind
+    fn drop(&mut self) {
+        let initialized_slice = ptr::slice_from_raw_parts_mut(
+            MaybeUninit::slice_as_mut_ptr(self.data),
+            self.initialized_len,
+        );
+        // SAFETY:
+        // * the pointer is valid because it was made from a mutable reference
+        // * `initialized_len` counts the initialized elements as an invariant of this type,
+        //   so each of the pointed-to elements is initialized and may be dropped.
+        unsafe {
+            ptr::drop_in_place::<[T]>(initialized_slice);
+        }
+    }
+}
+
 /// Implementations of `Clone` for primitive types.
 ///
 /// Implementations that cannot be described in Rust
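A small usage sketch for the new trait as defined in this diff (nightly-only behind the `clone_to_uninit` gate; the `*mut Self` signature shown here is the one from this PR). For a `Sized` type the call is equivalent to `ptr::write(dst, self.clone())`:

```rust
#![feature(clone_to_uninit)]

use core::clone::CloneToUninit;
use core::mem::MaybeUninit;

fn main() {
    let src = String::from("hello");
    let mut dst = MaybeUninit::<String>::uninit();

    // SAFETY: `dst` is valid for writes and properly aligned, and `String` is
    // `Sized`, so the pointer metadata trivially matches.
    let copy = unsafe {
        src.clone_to_uninit(dst.as_mut_ptr());
        dst.assume_init()
    };
    assert_eq!(src, copy);
}
```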
@@ -1,3 +1,6 @@
+use core::clone::CloneToUninit;
+use core::mem::MaybeUninit;
+
 #[test]
 #[allow(suspicious_double_ref_op)]
 fn test_borrowed_clone() {
@@ -14,3 +17,66 @@ fn test_clone_from() {
     b.clone_from(&a);
     assert_eq!(*b, 5);
 }
+
+#[test]
+fn test_clone_to_uninit_slice_success() {
+    // Using `String`s to exercise allocation and Drop of the individual elements;
+    // if something is aliased or double-freed, at least Miri will catch that.
+    let a: [String; 3] = ["a", "b", "c"].map(String::from);
+
+    let mut storage: MaybeUninit<[String; 3]> = MaybeUninit::uninit();
+    let b: [String; 3] = unsafe {
+        a[..].clone_to_uninit(storage.as_mut_ptr() as *mut [String]);
+        storage.assume_init()
+    };
+
+    assert_eq!(a, b);
+}
+
+#[test]
+#[cfg(panic = "unwind")]
+fn test_clone_to_uninit_slice_drops_on_panic() {
+    use core::sync::atomic::{AtomicUsize, Ordering::Relaxed};
+
+    /// A static counter is OK to use as long as _this one test_ isn't run several times in
+    /// multiple threads.
+    static COUNTER: AtomicUsize = AtomicUsize::new(0);
+    /// Counts how many instances are live, and panics if a fifth one is created
+    struct CountsDropsAndPanics {}
+    impl CountsDropsAndPanics {
+        fn new() -> Self {
+            COUNTER.fetch_add(1, Relaxed);
+            Self {}
+        }
+    }
+    impl Clone for CountsDropsAndPanics {
+        fn clone(&self) -> Self {
+            if COUNTER.load(Relaxed) == 4 { panic!("intentional panic") } else { Self::new() }
+        }
+    }
+    impl Drop for CountsDropsAndPanics {
+        fn drop(&mut self) {
+            COUNTER.fetch_sub(1, Relaxed);
+        }
+    }
+
+    let a: [CountsDropsAndPanics; 3] = core::array::from_fn(|_| CountsDropsAndPanics::new());
+    assert_eq!(COUNTER.load(Relaxed), 3);
+
+    let panic_payload = std::panic::catch_unwind(|| {
+        let mut storage: MaybeUninit<[CountsDropsAndPanics; 3]> = MaybeUninit::uninit();
+        // This should panic halfway through
+        unsafe {
+            a[..].clone_to_uninit(storage.as_mut_ptr() as *mut [CountsDropsAndPanics]);
+        }
+    })
+    .unwrap_err();
+    assert_eq!(panic_payload.downcast().unwrap(), Box::new("intentional panic"));
+
+    // Check for lack of leak, which is what this test is looking for
+    assert_eq!(COUNTER.load(Relaxed), 3, "leaked during clone!");
+
+    // Might as well exercise the rest of the drops
+    drop(a);
+    assert_eq!(COUNTER.load(Relaxed), 0);
+}
@@ -8,6 +8,7 @@
 #![feature(async_iterator)]
 #![feature(bigint_helper_methods)]
 #![feature(cell_update)]
+#![feature(clone_to_uninit)]
 #![feature(const_align_offset)]
 #![feature(const_align_of_val_raw)]
 #![feature(const_black_box)]
@@ -53,6 +54,7 @@
 #![feature(slice_split_once)]
 #![feature(split_as_slice)]
+#![feature(maybe_uninit_fill)]
 #![feature(maybe_uninit_slice)]
 #![feature(maybe_uninit_uninit_array)]
 #![feature(maybe_uninit_write_slice)]
 #![feature(maybe_uninit_uninit_array_transpose)]