Rollup merge of #120445 - Nemo157:arc-plug, r=Mark-Simulacrum

Fix some `Arc` allocator leaks

This doesn't matter for the stable `Global` allocator as it is a ZST singleton, but other allocators may rely on all instances being dropped.
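
As a sanity check on the mechanics, here is a minimal, self-contained sketch of the bug and the fix. This is not the std implementation: `Counted` and `Owner` are hypothetical stand-ins for an instance-counting allocator and for `Arc`'s ownership of its `alloc` field; only the `ManuallyDrop` + `ptr::read` move-out mirrors what the new `internal_into_inner_with_allocator` helper does.

```rust
use std::mem::{self, ManuallyDrop};
use std::ptr;
use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};

// Hypothetical stand-in for an allocator that counts live instances.
struct Counted<'a>(&'a AtomicUsize);

impl<'a> Counted<'a> {
    fn new(counter: &'a AtomicUsize) -> Self {
        counter.fetch_add(1, SeqCst);
        Counted(counter)
    }
}

impl Clone for Counted<'_> {
    fn clone(&self) -> Self {
        Self::new(self.0)
    }
}

impl Drop for Counted<'_> {
    fn drop(&mut self) {
        self.0.fetch_sub(1, SeqCst);
    }
}

// Hypothetical stand-in for `Arc`'s ownership of its `alloc` field.
struct Owner<'a> {
    alloc: Counted<'a>,
}

fn main() {
    // Old pattern (pre-fix): clone the allocator, then `mem::forget` the
    // owner. The owner's original instance is never dropped.
    let counter = AtomicUsize::new(0);
    let owner = Owner { alloc: Counted::new(&counter) };
    let cloned = owner.alloc.clone();
    mem::forget(owner);
    drop(cloned);
    assert_eq!(counter.load(SeqCst), 1); // one instance leaked

    // New pattern (the fix): suppress the owner's destructor with
    // `ManuallyDrop` and move the allocator out with `ptr::read`,
    // as `internal_into_inner_with_allocator` does below.
    let counter = AtomicUsize::new(0);
    let owner = ManuallyDrop::new(Owner { alloc: Counted::new(&counter) });
    let alloc = unsafe { ptr::read(&owner.alloc) };
    drop(alloc);
    assert_eq!(counter.load(SeqCst), 0); // all instances dropped
}
```

Dropping the `A: Clone` bounds falls out of this: once the allocator is moved out rather than cloned, the conversions no longer need to duplicate it.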
Guillaume Gomez 2024-01-30 16:57:50 +01:00 committed by GitHub
commit a5aa355ab3
2 changed files with 80 additions and 29 deletions

library/alloc/src/sync.rs

@@ -279,6 +279,12 @@ impl<T: ?Sized> Arc<T> {
 }
 
 impl<T: ?Sized, A: Allocator> Arc<T, A> {
+    #[inline]
+    fn internal_into_inner_with_allocator(self) -> (NonNull<ArcInner<T>>, A) {
+        let this = mem::ManuallyDrop::new(self);
+        (this.ptr, unsafe { ptr::read(&this.alloc) })
+    }
+
     #[inline]
     unsafe fn from_inner_in(ptr: NonNull<ArcInner<T>>, alloc: A) -> Self {
         Self { ptr, phantom: PhantomData, alloc }
@@ -1275,12 +1281,9 @@ impl<T, A: Allocator> Arc<mem::MaybeUninit<T>, A> {
 
     #[unstable(feature = "new_uninit", issue = "63291")]
     #[must_use = "`self` will be dropped if the result is not used"]
     #[inline]
-    pub unsafe fn assume_init(self) -> Arc<T, A>
-    where
-        A: Clone,
-    {
-        let md_self = mem::ManuallyDrop::new(self);
-        unsafe { Arc::from_inner_in(md_self.ptr.cast(), md_self.alloc.clone()) }
+    pub unsafe fn assume_init(self) -> Arc<T, A> {
+        let (ptr, alloc) = self.internal_into_inner_with_allocator();
+        unsafe { Arc::from_inner_in(ptr.cast(), alloc) }
     }
 }
@@ -1320,12 +1323,9 @@ impl<T, A: Allocator> Arc<[mem::MaybeUninit<T>], A> {
 
     #[unstable(feature = "new_uninit", issue = "63291")]
     #[must_use = "`self` will be dropped if the result is not used"]
     #[inline]
-    pub unsafe fn assume_init(self) -> Arc<[T], A>
-    where
-        A: Clone,
-    {
-        let md_self = mem::ManuallyDrop::new(self);
-        unsafe { Arc::from_ptr_in(md_self.ptr.as_ptr() as _, md_self.alloc.clone()) }
+    pub unsafe fn assume_init(self) -> Arc<[T], A> {
+        let (ptr, alloc) = self.internal_into_inner_with_allocator();
+        unsafe { Arc::from_ptr_in(ptr.as_ptr() as _, alloc) }
     }
 }
@@ -2413,7 +2413,7 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Arc<T, A> {
     }
 }
 
-impl<A: Allocator + Clone> Arc<dyn Any + Send + Sync, A> {
+impl<A: Allocator> Arc<dyn Any + Send + Sync, A> {
     /// Attempt to downcast the `Arc<dyn Any + Send + Sync>` to a concrete type.
     ///
     /// # Examples
@@ -2440,10 +2440,8 @@ impl<A: Allocator + Clone> Arc<dyn Any + Send + Sync, A> {
     {
         if (*self).is::<T>() {
             unsafe {
-                let ptr = self.ptr.cast::<ArcInner<T>>();
-                let alloc = self.alloc.clone();
-                mem::forget(self);
-                Ok(Arc::from_inner_in(ptr, alloc))
+                let (ptr, alloc) = self.internal_into_inner_with_allocator();
+                Ok(Arc::from_inner_in(ptr.cast(), alloc))
             }
         } else {
             Err(self)
@@ -2483,10 +2481,8 @@ impl<A: Allocator + Clone> Arc<dyn Any + Send + Sync, A> {
         T: Any + Send + Sync,
     {
         unsafe {
-            let ptr = self.ptr.cast::<ArcInner<T>>();
-            let alloc = self.alloc.clone();
-            mem::forget(self);
-            Arc::from_inner_in(ptr, alloc)
+            let (ptr, alloc) = self.internal_into_inner_with_allocator();
+            Arc::from_inner_in(ptr.cast(), alloc)
         }
     }
 }
@@ -3442,13 +3438,13 @@ impl From<Arc<str>> for Arc<[u8]> {
 }
 
 #[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
-impl<T, A: Allocator + Clone, const N: usize> TryFrom<Arc<[T], A>> for Arc<[T; N], A> {
+impl<T, A: Allocator, const N: usize> TryFrom<Arc<[T], A>> for Arc<[T; N], A> {
     type Error = Arc<[T], A>;
 
     fn try_from(boxed_slice: Arc<[T], A>) -> Result<Self, Self::Error> {
         if boxed_slice.len() == N {
-            let alloc = boxed_slice.alloc.clone();
-            Ok(unsafe { Arc::from_raw_in(Arc::into_raw(boxed_slice) as *mut [T; N], alloc) })
+            let (ptr, alloc) = boxed_slice.internal_into_inner_with_allocator();
+            Ok(unsafe { Arc::from_inner_in(ptr.cast(), alloc) })
         } else {
             Err(boxed_slice)
         }

library/alloc/src/sync/tests.rs

@@ -1,13 +1,15 @@
 use super::*;
 
 use std::clone::Clone;
+use std::mem::MaybeUninit;
 use std::option::Option::None;
+use std::sync::atomic::AtomicUsize;
 use std::sync::atomic::Ordering::SeqCst;
 use std::sync::mpsc::channel;
 use std::sync::Mutex;
 use std::thread;
 
-struct Canary(*mut atomic::AtomicUsize);
+struct Canary(*mut AtomicUsize);
 
 impl Drop for Canary {
     fn drop(&mut self) {
@@ -21,6 +23,37 @@ impl Drop for Canary {
     }
 }
 
+struct AllocCanary<'a>(&'a AtomicUsize);
+
+impl<'a> AllocCanary<'a> {
+    fn new(counter: &'a AtomicUsize) -> Self {
+        counter.fetch_add(1, SeqCst);
+        Self(counter)
+    }
+}
+
+unsafe impl Allocator for AllocCanary<'_> {
+    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+        std::alloc::Global.allocate(layout)
+    }
+
+    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+        unsafe { std::alloc::Global.deallocate(ptr, layout) }
+    }
+}
+
+impl Clone for AllocCanary<'_> {
+    fn clone(&self) -> Self {
+        Self::new(self.0)
+    }
+}
+
+impl Drop for AllocCanary<'_> {
+    fn drop(&mut self) {
+        self.0.fetch_sub(1, SeqCst);
+    }
+}
+
 #[test]
 #[cfg_attr(target_os = "emscripten", ignore)]
 fn manually_share_arc() {
@@ -295,16 +328,16 @@ fn weak_self_cyclic() {
 
 #[test]
 fn drop_arc() {
-    let mut canary = atomic::AtomicUsize::new(0);
-    let x = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize));
+    let mut canary = AtomicUsize::new(0);
+    let x = Arc::new(Canary(&mut canary as *mut AtomicUsize));
     drop(x);
     assert!(canary.load(Acquire) == 1);
 }
 
 #[test]
 fn drop_arc_weak() {
-    let mut canary = atomic::AtomicUsize::new(0);
-    let arc = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize));
+    let mut canary = AtomicUsize::new(0);
+    let arc = Arc::new(Canary(&mut canary as *mut AtomicUsize));
     let arc_weak = Arc::downgrade(&arc);
     assert!(canary.load(Acquire) == 0);
     drop(arc);
@@ -660,3 +693,25 @@ fn arc_drop_dereferenceable_race() {
         thread.join().unwrap();
     }
 }
+
+#[test]
+fn arc_doesnt_leak_allocator() {
+    let counter = AtomicUsize::new(0);
+
+    {
+        let arc: Arc<dyn Any + Send + Sync, _> = Arc::new_in(5usize, AllocCanary::new(&counter));
+        drop(arc.downcast::<usize>().unwrap());
+
+        let arc: Arc<dyn Any + Send + Sync, _> = Arc::new_in(5usize, AllocCanary::new(&counter));
+        drop(unsafe { arc.downcast_unchecked::<usize>() });
+
+        let arc = Arc::new_in(MaybeUninit::<usize>::new(5usize), AllocCanary::new(&counter));
+        drop(unsafe { arc.assume_init() });
+
+        let arc: Arc<[MaybeUninit<usize>], _> =
+            Arc::new_zeroed_slice_in(5, AllocCanary::new(&counter));
+        drop(unsafe { arc.assume_init() });
+    }
+
+    assert_eq!(counter.load(SeqCst), 0);
+}