Auto merge of #132231 - lukas-code:rc-plug-leaks, r=tgross35

Rc/Arc: don't leak the allocation if drop panics

Currently, when the last `Rc<T>` or `Arc<T>` is dropped and the destructor of `T` panics, the allocation will be leaked. This leak is unnecessary since the data cannot be (safely) accessed again and `Box` already deallocates in this case, so let's do the same for `Rc` and `Arc`, too.
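As a concrete illustration of the scenario being fixed (a minimal sketch; the type name is invented here, though the tests below use a similar one):

```rust
use std::panic::{AssertUnwindSafe, catch_unwind};
use std::rc::Rc;

/// Illustrative type whose destructor always panics.
struct PanicOnDrop;

impl Drop for PanicOnDrop {
    fn drop(&mut self) {
        panic!("boom");
    }
}

fn main() {
    let rc = Rc::new(PanicOnDrop);
    // Dropping the last `Rc` runs `PanicOnDrop::drop`, which unwinds.
    // Before this change the unwind skipped the weak-count decrement in
    // `Rc::drop`, so the `RcInner` allocation was never deallocated.
    // With this change the allocation is freed even on the panic path,
    // matching what `Box` already does.
    let _ = catch_unwind(AssertUnwindSafe(|| drop(rc)));
}
```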
Committed by bors on 2024-10-29 16:26:00 +00:00, commit e473783d90.
5 changed files with 139 additions and 21 deletions.

library/alloc/src/rc.rs

@@ -376,6 +376,21 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
     unsafe fn from_ptr_in(ptr: *mut RcInner<T>, alloc: A) -> Self {
         unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) }
     }
+
+    // Non-inlined part of `drop`.
+    #[inline(never)]
+    unsafe fn drop_slow(&mut self) {
+        // Reconstruct the "strong weak" pointer and drop it when this
+        // variable goes out of scope. This ensures that the memory is
+        // deallocated even if the destructor of `T` panics.
+        let _weak = Weak { ptr: self.ptr, alloc: &self.alloc };
+
+        // Destroy the contained object.
+        // We cannot use `get_mut_unchecked` here, because `self.alloc` is borrowed.
+        unsafe {
+            ptr::drop_in_place(&mut (*self.ptr.as_ptr()).value);
+        }
+    }
 }

 impl<T> Rc<T> {
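The change above hinges on an RAII guard: `drop_slow` reconstructs the implicit "strong weak" reference *before* running the payload's destructor, so if `ptr::drop_in_place` unwinds, `_weak` is still dropped during unwinding, and `Weak`'s own destructor decrements the weak count and frees the allocation when it reaches zero. The guard pattern can be sketched in isolation; the following is a minimal illustration with made-up names, not the actual `Weak` machinery:

```rust
use std::cell::Cell;
use std::panic::{AssertUnwindSafe, catch_unwind};

/// Guard whose destructor performs cleanup (hypothetical name). Rust
/// drops in-scope locals during unwinding, so the cleanup runs even if
/// later code panics.
struct CleanupGuard<'a> {
    cleaned_up: &'a Cell<bool>,
}

impl Drop for CleanupGuard<'_> {
    fn drop(&mut self) {
        // Stands in for "decrement the weak count and deallocate".
        self.cleaned_up.set(true);
    }
}

fn main() {
    let cleaned_up = Cell::new(false);
    let _ = catch_unwind(AssertUnwindSafe(|| {
        let _guard = CleanupGuard { cleaned_up: &cleaned_up };
        panic!("destructor failed"); // `_guard` is dropped while unwinding
    }));
    assert!(cleaned_up.get()); // cleanup still happened
}
```

The only requirement is that the guard exists before the code that may panic; locals are dropped in reverse declaration order during unwinding.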
@@ -2252,21 +2267,12 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Rc<T, A> {
     /// drop(foo);   // Doesn't print anything
     /// drop(foo2);  // Prints "dropped!"
     /// ```
+    #[inline]
     fn drop(&mut self) {
         unsafe {
             self.inner().dec_strong();
             if self.inner().strong() == 0 {
-                // destroy the contained object
-                ptr::drop_in_place(Self::get_mut_unchecked(self));
-
-                // remove the implicit "strong weak" pointer now that we've
-                // destroyed the contents.
-                self.inner().dec_weak();
-
-                if self.inner().weak() == 0 {
-                    self.alloc
-                        .deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
-                }
+                self.drop_slow();
             }
         }
     }

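Note also that `Rc::drop` gains `#[inline]` while the new `drop_slow` is `#[inline(never)]`: the reference-count decrement is hot and tiny, so it pays to inline it at every drop site, while the cold destruction path is kept out of line to avoid bloating callers. A rough standalone sketch of that shape (names invented for the example):

```rust
use std::cell::Cell;

struct Counted {
    refs: Cell<usize>,
}

impl Counted {
    #[inline]
    fn release(&self) {
        // Hot path: a decrement and a branch, cheap to inline at every
        // call site.
        self.refs.set(self.refs.get() - 1);
        if self.refs.get() == 0 {
            self.release_slow();
        }
    }

    #[inline(never)]
    fn release_slow(&self) {
        // Cold path: the bulky destruction logic stays out of line so
        // that inlined callers remain small.
        println!("last reference released");
    }
}

fn main() {
    let c = Counted { refs: Cell::new(2) };
    c.release(); // fast path only
    c.release(); // count reaches zero, takes the slow path
}
```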
library/alloc/src/sync.rs

@@ -1872,15 +1872,17 @@ impl<T: ?Sized, A: Allocator> Arc<T, A> {
     // Non-inlined part of `drop`.
     #[inline(never)]
     unsafe fn drop_slow(&mut self) {
+        // Drop the weak ref collectively held by all strong references when this
+        // variable goes out of scope. This ensures that the memory is deallocated
+        // even if the destructor of `T` panics.
+        // Take a reference to `self.alloc` instead of cloning because 1. it'll last long
+        // enough, and 2. you should be able to drop `Arc`s with unclonable allocators
+        let _weak = Weak { ptr: self.ptr, alloc: &self.alloc };
+
         // Destroy the data at this time, even though we must not free the box
         // allocation itself (there might still be weak pointers lying around).
-        unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)) };
-
-        // Drop the weak ref collectively held by all strong references
-        // Take a reference to `self.alloc` instead of cloning because 1. it'll
-        // last long enough, and 2. you should be able to drop `Arc`s with
-        // unclonable allocators
-        drop(Weak { ptr: self.ptr, alloc: &self.alloc });
+        // We cannot use `get_mut_unchecked` here, because `self.alloc` is borrowed.
+        unsafe { ptr::drop_in_place(&mut (*self.ptr.as_ptr()).data) };
     }

     /// Returns `true` if the two `Arc`s point to the same allocation in a vein similar to

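One detail worth spelling out: `Weak { ptr: self.ptr, alloc: &self.alloc }` stores a *reference* to the allocator. This works because `&A` implements `Allocator` whenever `A` does, so no `A: Clone` bound is needed; it is also why `get_mut_unchecked` (which would reborrow all of `self` mutably) can no longer be used while `&self.alloc` is held. A minimal nightly sketch of allocating through a borrowed, non-cloneable allocator (using the unstable `allocator_api` feature, as the tests below do; the allocator name is invented):

```rust
#![feature(allocator_api)]

use std::alloc::{AllocError, Allocator, Global, Layout};
use std::ptr::NonNull;

/// A deliberately non-`Clone` allocator, for illustration.
struct NotClone;

unsafe impl Allocator for NotClone {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        Global.allocate(layout)
    }
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        unsafe { Global.deallocate(ptr, layout) }
    }
}

fn main() {
    let alloc = NotClone;
    // `&NotClone` implements `Allocator` too, so the collection borrows
    // the allocator instead of cloning it -- the same trick `drop_slow`
    // uses with `alloc: &self.alloc`.
    let mut v: Vec<u8, &NotClone> = Vec::with_capacity_in(16, &alloc);
    v.push(1);
    drop(v);
}
```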
library/alloc/tests/arc.rs

@@ -1,5 +1,5 @@
 use std::any::Any;
-use std::cell::RefCell;
+use std::cell::{Cell, RefCell};
 use std::iter::TrustedLen;
 use std::mem;
 use std::sync::{Arc, Weak};

@@ -89,7 +89,7 @@ fn eq() {
 // The test code below is identical to that in `rc.rs`.
 // For better maintainability we therefore define this type alias.
-type Rc<T> = Arc<T>;
+type Rc<T, A = std::alloc::Global> = Arc<T, A>;

 const SHARED_ITER_MAX: u16 = 100;
@@ -210,6 +210,42 @@ fn weak_may_dangle() {
     // borrow might be used here, when `val` is dropped and runs the `Drop` code for type `std::sync::Weak`
 }

+/// Test that a panic from a destructor does not leak the allocation.
+#[test]
+#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
+fn panic_no_leak() {
+    use std::alloc::{AllocError, Allocator, Global, Layout};
+    use std::panic::{AssertUnwindSafe, catch_unwind};
+    use std::ptr::NonNull;
+
+    struct AllocCount(Cell<i32>);
+    unsafe impl Allocator for AllocCount {
+        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+            self.0.set(self.0.get() + 1);
+            Global.allocate(layout)
+        }
+        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+            self.0.set(self.0.get() - 1);
+            unsafe { Global.deallocate(ptr, layout) }
+        }
+    }
+
+    struct PanicOnDrop;
+    impl Drop for PanicOnDrop {
+        fn drop(&mut self) {
+            panic!("PanicOnDrop");
+        }
+    }
+
+    let alloc = AllocCount(Cell::new(0));
+    let rc = Rc::new_in(PanicOnDrop, &alloc);
+    assert_eq!(alloc.0.get(), 1);
+
+    let panic_message = catch_unwind(AssertUnwindSafe(|| drop(rc))).unwrap_err();
+    assert_eq!(*panic_message.downcast_ref::<&'static str>().unwrap(), "PanicOnDrop");
+    assert_eq!(alloc.0.get(), 0);
+}
+
 /// This is similar to the doc-test for `Arc::make_mut()`, but on an unsized type (slice).
 #[test]
 fn make_mut_unsized() {

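The test above pairs a balance-counting allocator (allocations minus deallocations must return to zero, proving the memory was freed on the panic path) with `catch_unwind` to observe the panic. For reference, here is the payload-inspection idiom on its own, as a standalone sketch independent of the allocator bookkeeping:

```rust
use std::panic::{AssertUnwindSafe, catch_unwind};

fn main() {
    let payload = catch_unwind(AssertUnwindSafe(|| panic!("PanicOnDrop"))).unwrap_err();
    // A `panic!` with a plain string literal carries a `&'static str`
    // payload; formatted panics (e.g. `panic!("{x}")`) carry a `String`.
    assert_eq!(*payload.downcast_ref::<&'static str>().unwrap(), "PanicOnDrop");
}
```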
library/alloc/tests/boxed.rs

@@ -60,6 +60,44 @@ fn box_deref_lval() {
     assert_eq!(x.get(), 1000);
 }

+/// Test that a panic from a destructor does not leak the allocation.
+#[test]
+#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
+fn panic_no_leak() {
+    use std::alloc::{AllocError, Allocator, Global, Layout};
+    use std::panic::{AssertUnwindSafe, catch_unwind};
+    use std::ptr::NonNull;
+
+    struct AllocCount(Cell<i32>);
+    unsafe impl Allocator for AllocCount {
+        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+            self.0.set(self.0.get() + 1);
+            Global.allocate(layout)
+        }
+        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+            self.0.set(self.0.get() - 1);
+            unsafe { Global.deallocate(ptr, layout) }
+        }
+    }
+
+    struct PanicOnDrop {
+        _data: u8,
+    }
+    impl Drop for PanicOnDrop {
+        fn drop(&mut self) {
+            panic!("PanicOnDrop");
+        }
+    }
+
+    let alloc = AllocCount(Cell::new(0));
+    let b = Box::new_in(PanicOnDrop { _data: 42 }, &alloc);
+    assert_eq!(alloc.0.get(), 1);
+
+    let panic_message = catch_unwind(AssertUnwindSafe(|| drop(b))).unwrap_err();
+    assert_eq!(*panic_message.downcast_ref::<&'static str>().unwrap(), "PanicOnDrop");
+    assert_eq!(alloc.0.get(), 0);
+}
+
 #[allow(unused)]
 pub struct ConstAllocator;

library/alloc/tests/rc.rs

@@ -1,5 +1,5 @@
 use std::any::Any;
-use std::cell::RefCell;
+use std::cell::{Cell, RefCell};
 use std::iter::TrustedLen;
 use std::mem;
 use std::rc::{Rc, Weak};

@@ -206,6 +206,42 @@ fn weak_may_dangle() {
     // borrow might be used here, when `val` is dropped and runs the `Drop` code for type `std::rc::Weak`
 }

+/// Test that a panic from a destructor does not leak the allocation.
+#[test]
+#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
+fn panic_no_leak() {
+    use std::alloc::{AllocError, Allocator, Global, Layout};
+    use std::panic::{AssertUnwindSafe, catch_unwind};
+    use std::ptr::NonNull;
+
+    struct AllocCount(Cell<i32>);
+    unsafe impl Allocator for AllocCount {
+        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+            self.0.set(self.0.get() + 1);
+            Global.allocate(layout)
+        }
+        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+            self.0.set(self.0.get() - 1);
+            unsafe { Global.deallocate(ptr, layout) }
+        }
+    }
+
+    struct PanicOnDrop;
+    impl Drop for PanicOnDrop {
+        fn drop(&mut self) {
+            panic!("PanicOnDrop");
+        }
+    }
+
+    let alloc = AllocCount(Cell::new(0));
+    let rc = Rc::new_in(PanicOnDrop, &alloc);
+    assert_eq!(alloc.0.get(), 1);
+
+    let panic_message = catch_unwind(AssertUnwindSafe(|| drop(rc))).unwrap_err();
+    assert_eq!(*panic_message.downcast_ref::<&'static str>().unwrap(), "PanicOnDrop");
+    assert_eq!(alloc.0.get(), 0);
+}
+
 #[allow(unused)]
 mod pin_coerce_unsized {
     use alloc::rc::{Rc, UniqueRc};