Rollup merge of #97711 - Nilstrieb:rustc-arena-ub, r=wesleywiser

Improve soundness of rustc_arena

Make the tests runnable in Miri by lowering the loop iteration counts when running under Miri. Also fix a Stacked Borrows issue with `Box`.
Dylan DPC 2022-07-07 18:06:49 +05:30 committed by GitHub
commit d63c713947
2 changed files with 39 additions and 12 deletions
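
For context on the `Box` fix: under Stacked Borrows, a `Box` is treated as a unique pointer, so raw pointers derived from its contents are invalidated whenever the `Box` itself is later used or moved. An arena chunk hands out raw pointers into its storage while continuing to hold the owning `Box`, which is exactly that pattern. The diff below instead owns the storage through a raw `NonNull` and rebuilds the `Box` only in `Drop`. A minimal sketch of that ownership pattern (hypothetical `RawBuf` type, not the rustc code):

use std::ptr::NonNull;

struct RawBuf {
    storage: NonNull<[u8]>,
}

impl RawBuf {
    fn new(len: usize) -> RawBuf {
        let boxed: Box<[u8]> = vec![0u8; len].into_boxed_slice();
        // Give up the Box immediately; from here on, only raw pointers.
        RawBuf { storage: NonNull::new(Box::into_raw(boxed)).unwrap() }
    }

    // Pointers handed out here stay valid: no live Box exists whose
    // unique tag could invalidate them under Stacked Borrows.
    fn start(&mut self) -> *mut u8 {
        self.storage.as_ptr() as *mut u8
    }
}

impl Drop for RawBuf {
    fn drop(&mut self) {
        // Reconstruct the Box exactly once, only to deallocate.
        unsafe { drop(Box::from_raw(self.storage.as_ptr())) };
    }
}

fn main() {
    let mut buf = RawBuf::new(16);
    let p = buf.start();
    unsafe {
        p.write(42);
        assert_eq!(*p, 42);
    }
}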

compiler/rustc_arena/src/lib.rs

@@ -19,6 +19,7 @@
 #![feature(rustc_attrs)]
 #![cfg_attr(test, feature(test))]
 #![feature(strict_provenance)]
+#![feature(ptr_const_cast)]
 
 use smallvec::SmallVec;
@@ -27,7 +28,7 @@ use std::cell::{Cell, RefCell};
 use std::cmp;
 use std::marker::{PhantomData, Send};
 use std::mem::{self, MaybeUninit};
-use std::ptr;
+use std::ptr::{self, NonNull};
 use std::slice;
 
 #[inline(never)]
@@ -55,15 +56,24 @@ pub struct TypedArena<T> {
 struct ArenaChunk<T = u8> {
     /// The raw storage for the arena chunk.
-    storage: Box<[MaybeUninit<T>]>,
+    storage: NonNull<[MaybeUninit<T>]>,
     /// The number of valid entries in the chunk.
     entries: usize,
 }
 
+unsafe impl<#[may_dangle] T> Drop for ArenaChunk<T> {
+    fn drop(&mut self) {
+        unsafe { Box::from_raw(self.storage.as_mut()) };
+    }
+}
+
 impl<T> ArenaChunk<T> {
     #[inline]
     unsafe fn new(capacity: usize) -> ArenaChunk<T> {
-        ArenaChunk { storage: Box::new_uninit_slice(capacity), entries: 0 }
+        ArenaChunk {
+            storage: NonNull::new(Box::into_raw(Box::new_uninit_slice(capacity))).unwrap(),
+            entries: 0,
+        }
     }
 
     /// Destroys this arena chunk.
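
Because the storage is now a raw `NonNull` rather than a `Box`, the chunk needs the manual `Drop` impl added in this hunk, and it is marked `#[may_dangle]` (the nightly `dropck_eyepatch` feature, which rustc itself uses) so the drop checker still allows the arena to be dropped after data referenced by the `T`s inside it. That promise is sound because dropping a `Box<[MaybeUninit<T>]>` frees memory without running any `T` destructor. A minimal sketch with a hypothetical `Chunk` type, assuming a nightly toolchain:

#![feature(dropck_eyepatch)]

use std::mem::MaybeUninit;
use std::ptr::NonNull;

struct Chunk<T> {
    storage: NonNull<[MaybeUninit<T>]>,
}

unsafe impl<#[may_dangle] T> Drop for Chunk<T> {
    fn drop(&mut self) {
        // Deallocate only: Box<[MaybeUninit<T>]> has no drop glue for T,
        // so no possibly-dangling T is ever touched here. That is what
        // makes the #[may_dangle] promise sound.
        unsafe { drop(Box::from_raw(self.storage.as_ptr())) };
    }
}

fn main() {
    let boxed: Box<[MaybeUninit<String>]> = Box::new([MaybeUninit::uninit()]);
    let _chunk = Chunk { storage: NonNull::new(Box::into_raw(boxed)).unwrap() };
    // Dropping _chunk frees the allocation without dropping any String.
}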
@@ -72,14 +82,15 @@ impl<T> ArenaChunk<T> {
         // The branch on needs_drop() is an -O1 performance optimization.
         // Without the branch, dropping TypedArena<u8> takes linear time.
         if mem::needs_drop::<T>() {
-            ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(&mut self.storage[..len]));
+            let slice = &mut *(self.storage.as_mut());
+            ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(&mut slice[..len]));
         }
     }
 
     // Returns a pointer to the first allocated object.
     #[inline]
     fn start(&mut self) -> *mut T {
-        MaybeUninit::slice_as_mut_ptr(&mut self.storage)
+        self.storage.as_ptr() as *mut T
     }
 
     // Returns a pointer to the end of the allocated space.
@@ -90,7 +101,7 @@ impl<T> ArenaChunk<T> {
             // A pointer as large as possible for zero-sized elements.
             ptr::invalid_mut(!0)
         } else {
-            self.start().add(self.storage.len())
+            self.start().add((*self.storage.as_ptr()).len())
         }
     }
 }
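
Several call sites now read the chunk length through the raw pointer as `(*self.storage.as_ptr()).len()`, dereferencing the `NonNull<[MaybeUninit<T>]>` only to ask for the slice's length. A small sketch of that idiom with a hypothetical helper, using initialized memory for simplicity:

use std::ptr::NonNull;

fn raw_len<T>(p: NonNull<[T]>) -> usize {
    // Deref the raw slice pointer just to read its length metadata.
    unsafe { (*p.as_ptr()).len() }
}

fn main() {
    let boxed: Box<[u32]> = vec![1, 2, 3].into_boxed_slice();
    let p = NonNull::new(Box::into_raw(boxed)).unwrap();
    assert_eq!(raw_len(p), 3);
    // Reclaim the Box to free the allocation.
    unsafe { drop(Box::from_raw(p.as_ptr())) };
}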
@@ -274,7 +285,7 @@ impl<T> TypedArena<T> {
                 // If the previous chunk's len is less than HUGE_PAGE
                 // bytes, then this chunk will be least double the previous
                 // chunk's size.
-                new_cap = last_chunk.storage.len().min(HUGE_PAGE / elem_size / 2);
+                new_cap = (*last_chunk.storage.as_ptr()).len().min(HUGE_PAGE / elem_size / 2);
                 new_cap *= 2;
             } else {
                 new_cap = PAGE / elem_size;
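
The context comments in this hunk describe the growth policy: each new chunk is double the previous one, capped so a chunk never exceeds HUGE_PAGE bytes. A sketch of that arithmetic, assuming the PAGE = 4096 and HUGE_PAGE = 2 * 1024 * 1024 constants from the surrounding rustc_arena source:

const PAGE: usize = 4096;
const HUGE_PAGE: usize = 2 * 1024 * 1024;

// Next chunk capacity in elements: double the previous chunk, but cap
// the result so a chunk stays within HUGE_PAGE bytes.
fn next_cap(prev_cap: Option<usize>, elem_size: usize) -> usize {
    match prev_cap {
        Some(last) => last.min(HUGE_PAGE / elem_size / 2) * 2,
        None => PAGE / elem_size, // first chunk: one page worth
    }
}

fn main() {
    assert_eq!(next_cap(None, 8), 512); // 4096 / 8
    assert_eq!(next_cap(Some(512), 8), 1024); // doubled
    assert_eq!(next_cap(Some(1 << 20), 8), 262144); // capped: 262144 * 8 = HUGE_PAGE
}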
@@ -382,7 +393,7 @@ impl DroplessArena {
                 // If the previous chunk's len is less than HUGE_PAGE
                 // bytes, then this chunk will be least double the previous
                 // chunk's size.
-                new_cap = last_chunk.storage.len().min(HUGE_PAGE / 2);
+                new_cap = (*last_chunk.storage.as_ptr()).len().min(HUGE_PAGE / 2);
                 new_cap *= 2;
             } else {
                 new_cap = PAGE;

compiler/rustc_arena/src/tests.rs

@@ -79,7 +79,11 @@ fn test_arena_alloc_nested() {
 #[test]
 pub fn test_copy() {
     let arena = TypedArena::default();
-    for _ in 0..100000 {
+    #[cfg(not(miri))]
+    const N: usize = 100000;
+    #[cfg(miri)]
+    const N: usize = 1000;
+    for _ in 0..N {
         arena.alloc(Point { x: 1, y: 2, z: 3 });
     }
 }
@@ -106,7 +110,11 @@ struct Noncopy {
 #[test]
 pub fn test_noncopy() {
     let arena = TypedArena::default();
-    for _ in 0..100000 {
+    #[cfg(not(miri))]
+    const N: usize = 100000;
+    #[cfg(miri)]
+    const N: usize = 1000;
+    for _ in 0..N {
         arena.alloc(Noncopy { string: "hello world".to_string(), array: vec![1, 2, 3, 4, 5] });
     }
 }
@@ -114,7 +122,11 @@ pub fn test_noncopy() {
 #[test]
 pub fn test_typed_arena_zero_sized() {
     let arena = TypedArena::default();
-    for _ in 0..100000 {
+    #[cfg(not(miri))]
+    const N: usize = 100000;
+    #[cfg(miri)]
+    const N: usize = 1000;
+    for _ in 0..N {
         arena.alloc(());
     }
 }
@@ -124,7 +136,11 @@ pub fn test_typed_arena_clear() {
     let mut arena = TypedArena::default();
     for _ in 0..10 {
         arena.clear();
-        for _ in 0..10000 {
+        #[cfg(not(miri))]
+        const N: usize = 10000;
+        #[cfg(miri)]
+        const N: usize = 100;
+        for _ in 0..N {
             arena.alloc(Point { x: 1, y: 2, z: 3 });
         }
     }
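
All four test hunks apply the same device: choose the iteration count with `cfg(miri)` so Miri's interpreter finishes in reasonable time while ordinary `cargo test` keeps the original stress level. A standalone sketch of the pattern (hypothetical test, not part of the diff):

#[test]
fn stress_alloc() {
    // Miri interprets every step, so use a much smaller count there.
    #[cfg(not(miri))]
    const N: usize = 100_000;
    #[cfg(miri)]
    const N: usize = 1_000;

    let mut v = Vec::with_capacity(N);
    for i in 0..N {
        v.push(i);
    }
    assert_eq!(v.len(), N);
}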