Mirror of https://github.com/rust-lang/rust.git
Fix some Arc allocator leaks
This doesn't matter for the stable `Global` allocator as it is a ZST singleton, but other allocators may rely on all instances being dropped.
commit 6837b812e6
parent 6351247048
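The leaks all follow the same shape: the old code cloned the allocator out of `self` and then suppressed `self`'s destructor (via `mem::forget` or `ManuallyDrop`), so the allocator instance stored inside the `Arc` was never dropped. The commit instead routes every such path through a helper that moves the allocator out. A minimal, self-contained sketch of the difference, using illustrative stand-ins (`Canary`, `Handle`) rather than the real `Arc` internals:

    use std::mem::{self, ManuallyDrop};
    use std::ptr;
    use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};

    // Counts live allocator instances, like the `AllocCanary` test type below.
    static LIVE: AtomicUsize = AtomicUsize::new(0);

    struct Canary;
    impl Canary {
        fn new() -> Self {
            LIVE.fetch_add(1, SeqCst);
            Canary
        }
    }
    impl Clone for Canary {
        fn clone(&self) -> Self {
            Self::new()
        }
    }
    impl Drop for Canary {
        fn drop(&mut self) {
            LIVE.fetch_sub(1, SeqCst);
        }
    }

    // Stand-in for `Arc<T, A>`: a type with a destructor that owns an allocator.
    struct Handle {
        alloc: Canary,
    }
    impl Drop for Handle {
        fn drop(&mut self) { /* would normally release the heap block */ }
    }

    // Old pattern: clone the allocator, then forget `self`. The original
    // `alloc` field is never dropped, so one `Canary` instance leaks.
    fn into_alloc_leaky(h: Handle) -> Canary {
        let alloc = h.alloc.clone();
        mem::forget(h);
        alloc
    }

    // New pattern: suppress the destructor, then *move* the allocator out.
    // Nothing is cloned and nothing leaks.
    fn into_alloc(h: Handle) -> Canary {
        let h = ManuallyDrop::new(h);
        unsafe { ptr::read(&h.alloc) }
    }

    fn main() {
        into_alloc_leaky(Handle { alloc: Canary::new() });
        assert_eq!(LIVE.load(SeqCst), 1); // one instance leaked
        into_alloc(Handle { alloc: Canary::new() });
        assert_eq!(LIVE.load(SeqCst), 1); // still 1: no new leak
    }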
@@ -279,6 +279,12 @@ impl<T: ?Sized> Arc<T> {
 }
 
 impl<T: ?Sized, A: Allocator> Arc<T, A> {
+    #[inline]
+    fn internal_into_inner_with_allocator(self) -> (NonNull<ArcInner<T>>, A) {
+        let this = mem::ManuallyDrop::new(self);
+        (this.ptr, unsafe { ptr::read(&this.alloc) })
+    }
+
     #[inline]
     unsafe fn from_inner_in(ptr: NonNull<ArcInner<T>>, alloc: A) -> Self {
         Self { ptr, phantom: PhantomData, alloc }
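For reference, here is the new helper again with the ownership argument spelled out; the comments are editorial, not part of the commit:

    fn internal_into_inner_with_allocator(self) -> (NonNull<ArcInner<T>>, A) {
        // Wrap `self` in `ManuallyDrop` so `Arc`'s destructor does not run:
        // the caller takes over both the pointer and the allocator.
        let this = mem::ManuallyDrop::new(self);
        // Move the allocator out by value with a bitwise read. This is sound
        // only because `this` is never dropped, so no second owner of
        // `alloc` ever exists.
        (this.ptr, unsafe { ptr::read(&this.alloc) })
    }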
@@ -1271,12 +1277,9 @@ impl<T, A: Allocator> Arc<mem::MaybeUninit<T>, A> {
     #[unstable(feature = "new_uninit", issue = "63291")]
     #[must_use = "`self` will be dropped if the result is not used"]
     #[inline]
-    pub unsafe fn assume_init(self) -> Arc<T, A>
-    where
-        A: Clone,
-    {
-        let md_self = mem::ManuallyDrop::new(self);
-        unsafe { Arc::from_inner_in(md_self.ptr.cast(), md_self.alloc.clone()) }
+    pub unsafe fn assume_init(self) -> Arc<T, A> {
+        let (ptr, alloc) = self.internal_into_inner_with_allocator();
+        unsafe { Arc::from_inner_in(ptr.cast(), alloc) }
     }
 }
 
@@ -1316,12 +1319,9 @@ impl<T, A: Allocator> Arc<[mem::MaybeUninit<T>], A> {
     #[unstable(feature = "new_uninit", issue = "63291")]
     #[must_use = "`self` will be dropped if the result is not used"]
     #[inline]
-    pub unsafe fn assume_init(self) -> Arc<[T], A>
-    where
-        A: Clone,
-    {
-        let md_self = mem::ManuallyDrop::new(self);
-        unsafe { Arc::from_ptr_in(md_self.ptr.as_ptr() as _, md_self.alloc.clone()) }
+    pub unsafe fn assume_init(self) -> Arc<[T], A> {
+        let (ptr, alloc) = self.internal_into_inner_with_allocator();
+        unsafe { Arc::from_ptr_in(ptr.as_ptr() as _, alloc) }
     }
 }
 
@@ -2409,7 +2409,7 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Arc<T, A> {
     }
 }
 
-impl<A: Allocator + Clone> Arc<dyn Any + Send + Sync, A> {
+impl<A: Allocator> Arc<dyn Any + Send + Sync, A> {
     /// Attempt to downcast the `Arc<dyn Any + Send + Sync>` to a concrete type.
     ///
     /// # Examples
@@ -2436,10 +2436,8 @@ impl<A: Allocator + Clone> Arc<dyn Any + Send + Sync, A> {
     {
         if (*self).is::<T>() {
             unsafe {
-                let ptr = self.ptr.cast::<ArcInner<T>>();
-                let alloc = self.alloc.clone();
-                mem::forget(self);
-                Ok(Arc::from_inner_in(ptr, alloc))
+                let (ptr, alloc) = self.internal_into_inner_with_allocator();
+                Ok(Arc::from_inner_in(ptr.cast(), alloc))
             }
         } else {
             Err(self)
@@ -2479,10 +2477,8 @@ impl<A: Allocator + Clone> Arc<dyn Any + Send + Sync, A> {
         T: Any + Send + Sync,
     {
         unsafe {
-            let ptr = self.ptr.cast::<ArcInner<T>>();
-            let alloc = self.alloc.clone();
-            mem::forget(self);
-            Arc::from_inner_in(ptr, alloc)
+            let (ptr, alloc) = self.internal_into_inner_with_allocator();
+            Arc::from_inner_in(ptr.cast(), alloc)
         }
     }
 }
@@ -3438,13 +3434,13 @@ impl From<Arc<str>> for Arc<[u8]> {
 }
 
 #[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
-impl<T, A: Allocator + Clone, const N: usize> TryFrom<Arc<[T], A>> for Arc<[T; N], A> {
+impl<T, A: Allocator, const N: usize> TryFrom<Arc<[T], A>> for Arc<[T; N], A> {
     type Error = Arc<[T], A>;
 
     fn try_from(boxed_slice: Arc<[T], A>) -> Result<Self, Self::Error> {
         if boxed_slice.len() == N {
-            let alloc = boxed_slice.alloc.clone();
-            Ok(unsafe { Arc::from_raw_in(Arc::into_raw(boxed_slice) as *mut [T; N], alloc) })
+            let (ptr, alloc) = boxed_slice.internal_into_inner_with_allocator();
+            Ok(unsafe { Arc::from_inner_in(ptr.cast(), alloc) })
         } else {
             Err(boxed_slice)
         }
The remaining hunks apply to the `Arc` tests module:

@@ -1,13 +1,15 @@
 use super::*;
 
 use std::clone::Clone;
+use std::mem::MaybeUninit;
 use std::option::Option::None;
+use std::sync::atomic::AtomicUsize;
 use std::sync::atomic::Ordering::SeqCst;
 use std::sync::mpsc::channel;
 use std::sync::Mutex;
 use std::thread;
 
-struct Canary(*mut atomic::AtomicUsize);
+struct Canary(*mut AtomicUsize);
 
 impl Drop for Canary {
     fn drop(&mut self) {
@@ -21,6 +23,37 @@ impl Drop for Canary {
     }
 }
 
+struct AllocCanary<'a>(&'a AtomicUsize);
+
+impl<'a> AllocCanary<'a> {
+    fn new(counter: &'a AtomicUsize) -> Self {
+        counter.fetch_add(1, SeqCst);
+        Self(counter)
+    }
+}
+
+unsafe impl Allocator for AllocCanary<'_> {
+    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+        std::alloc::Global.allocate(layout)
+    }
+
+    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+        unsafe { std::alloc::Global.deallocate(ptr, layout) }
+    }
+}
+
+impl Clone for AllocCanary<'_> {
+    fn clone(&self) -> Self {
+        Self::new(self.0)
+    }
+}
+
+impl Drop for AllocCanary<'_> {
+    fn drop(&mut self) {
+        self.0.fetch_sub(1, SeqCst);
+    }
+}
+
 #[test]
 #[cfg_attr(target_os = "emscripten", ignore)]
 fn manually_share_arc() {
@@ -295,16 +328,16 @@ fn weak_self_cyclic() {
 
 #[test]
 fn drop_arc() {
-    let mut canary = atomic::AtomicUsize::new(0);
-    let x = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize));
+    let mut canary = AtomicUsize::new(0);
+    let x = Arc::new(Canary(&mut canary as *mut AtomicUsize));
     drop(x);
     assert!(canary.load(Acquire) == 1);
 }
 
 #[test]
 fn drop_arc_weak() {
-    let mut canary = atomic::AtomicUsize::new(0);
-    let arc = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize));
+    let mut canary = AtomicUsize::new(0);
+    let arc = Arc::new(Canary(&mut canary as *mut AtomicUsize));
     let arc_weak = Arc::downgrade(&arc);
     assert!(canary.load(Acquire) == 0);
     drop(arc);
@@ -660,3 +693,25 @@ fn arc_drop_dereferenceable_race() {
         thread.join().unwrap();
     }
 }
+
+#[test]
+fn arc_doesnt_leak_allocator() {
+    let counter = AtomicUsize::new(0);
+
+    {
+        let arc: Arc<dyn Any + Send + Sync, _> = Arc::new_in(5usize, AllocCanary::new(&counter));
+        drop(arc.downcast::<usize>().unwrap());
+
+        let arc: Arc<dyn Any + Send + Sync, _> = Arc::new_in(5usize, AllocCanary::new(&counter));
+        drop(unsafe { arc.downcast_unchecked::<usize>() });
+
+        let arc = Arc::new_in(MaybeUninit::<usize>::new(5usize), AllocCanary::new(&counter));
+        drop(unsafe { arc.assume_init() });
+
+        let arc: Arc<[MaybeUninit<usize>], _> =
+            Arc::new_zeroed_slice_in(5, AllocCanary::new(&counter));
+        drop(unsafe { arc.assume_init() });
+    }
+
+    assert_eq!(counter.load(SeqCst), 0);
+}
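In a rust-lang/rust checkout, the new regression test can be run on its own with something like the following (exact bootstrap flags may vary between versions):

    ./x.py test library/alloc --test-args arc_doesnt_leak_allocator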