Fix stacked borrows violation in rustc_arena

There was a problem with storing a `Box<T>` in a struct, where
the current aliasing rules would invalidate the value. This makes it
store a raw pointer instead, circumventing the aliasing problems.
This commit is contained in:
Nilstrieb 2022-06-03 22:47:05 +02:00
parent 907ea55841
commit 211fb66810

View File

@ -19,6 +19,7 @@
#![feature(rustc_attrs)] #![feature(rustc_attrs)]
#![cfg_attr(test, feature(test))] #![cfg_attr(test, feature(test))]
#![feature(strict_provenance)] #![feature(strict_provenance)]
#![feature(ptr_const_cast)]
use smallvec::SmallVec; use smallvec::SmallVec;
@ -27,7 +28,7 @@ use std::cell::{Cell, RefCell};
use std::cmp; use std::cmp;
use std::marker::{PhantomData, Send}; use std::marker::{PhantomData, Send};
use std::mem::{self, MaybeUninit}; use std::mem::{self, MaybeUninit};
use std::ptr; use std::ptr::{self, NonNull};
use std::slice; use std::slice;
#[inline(never)] #[inline(never)]
@ -55,15 +56,24 @@ pub struct TypedArena<T> {
struct ArenaChunk<T = u8> { struct ArenaChunk<T = u8> {
/// The raw storage for the arena chunk. /// The raw storage for the arena chunk.
storage: Box<[MaybeUninit<T>]>, storage: NonNull<[MaybeUninit<T>]>,
/// The number of valid entries in the chunk. /// The number of valid entries in the chunk.
entries: usize, entries: usize,
} }
unsafe impl<#[may_dangle] T> Drop for ArenaChunk<T> {
fn drop(&mut self) {
unsafe { Box::from_raw(self.storage.as_mut()) };
}
}
impl<T> ArenaChunk<T> { impl<T> ArenaChunk<T> {
#[inline] #[inline]
unsafe fn new(capacity: usize) -> ArenaChunk<T> { unsafe fn new(capacity: usize) -> ArenaChunk<T> {
ArenaChunk { storage: Box::new_uninit_slice(capacity), entries: 0 } ArenaChunk {
storage: NonNull::new(Box::into_raw(Box::new_uninit_slice(capacity))).unwrap(),
entries: 0,
}
} }
/// Destroys this arena chunk. /// Destroys this arena chunk.
@ -72,14 +82,15 @@ impl<T> ArenaChunk<T> {
// The branch on needs_drop() is an -O1 performance optimization. // The branch on needs_drop() is an -O1 performance optimization.
// Without the branch, dropping TypedArena<u8> takes linear time. // Without the branch, dropping TypedArena<u8> takes linear time.
if mem::needs_drop::<T>() { if mem::needs_drop::<T>() {
ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(&mut self.storage[..len])); let slice = &mut *(self.storage.as_mut());
ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(&mut slice[..len]));
} }
} }
// Returns a pointer to the first allocated object. // Returns a pointer to the first allocated object.
#[inline] #[inline]
fn start(&mut self) -> *mut T { fn start(&mut self) -> *mut T {
MaybeUninit::slice_as_mut_ptr(&mut self.storage) self.storage.as_ptr() as *mut T
} }
// Returns a pointer to the end of the allocated space. // Returns a pointer to the end of the allocated space.
@ -90,7 +101,7 @@ impl<T> ArenaChunk<T> {
// A pointer as large as possible for zero-sized elements. // A pointer as large as possible for zero-sized elements.
ptr::invalid_mut(!0) ptr::invalid_mut(!0)
} else { } else {
self.start().add(self.storage.len()) self.start().add((*self.storage.as_ptr()).len())
} }
} }
} }
@ -274,7 +285,7 @@ impl<T> TypedArena<T> {
// If the previous chunk's len is less than HUGE_PAGE // If the previous chunk's len is less than HUGE_PAGE
// bytes, then this chunk will be least double the previous // bytes, then this chunk will be least double the previous
// chunk's size. // chunk's size.
new_cap = last_chunk.storage.len().min(HUGE_PAGE / elem_size / 2); new_cap = (*last_chunk.storage.as_ptr()).len().min(HUGE_PAGE / elem_size / 2);
new_cap *= 2; new_cap *= 2;
} else { } else {
new_cap = PAGE / elem_size; new_cap = PAGE / elem_size;
@ -382,7 +393,7 @@ impl DroplessArena {
// If the previous chunk's len is less than HUGE_PAGE // If the previous chunk's len is less than HUGE_PAGE
// bytes, then this chunk will be least double the previous // bytes, then this chunk will be least double the previous
// chunk's size. // chunk's size.
new_cap = last_chunk.storage.len().min(HUGE_PAGE / 2); new_cap = (*last_chunk.storage.as_ptr()).len().min(HUGE_PAGE / 2);
new_cap *= 2; new_cap *= 2;
} else { } else {
new_cap = PAGE; new_cap = PAGE;