//! The arena, a fast but limited type of allocator.
//!
//! Arenas are a type of allocator that destroy the objects within, all at
//! once, once the arena itself is destroyed. They do not support deallocation
//! of individual objects while the arena itself is still alive. The benefit
//! of an arena is very fast allocation; just a pointer bump.
//!
//! This crate implements several kinds of arena.
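//!
//! A minimal usage sketch (illustrative; `u32` stands in for whatever type the
//! arena holds):
//!
//! ```ignore (illustrative)
//! use rustc_arena::{DroplessArena, TypedArena};
//!
//! let typed: TypedArena<u32> = TypedArena::default();
//! // Allocation is just a pointer bump; the reference lives as long as the arena.
//! let x: &mut u32 = typed.alloc(42);
//!
//! let dropless = DroplessArena::default();
//! // A `DroplessArena` can hold values of many different types, as long as
//! // they don't need `Drop` to run.
//! let xs: &mut [u32] = dropless.alloc_slice(&[1, 2, 3]);
//! // Everything allocated in an arena is freed together when the arena is dropped.
//! ```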

#![doc(
    html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/",
    test(no_crate_inject, attr(deny(warnings)))
)]
#![feature(core_intrinsics)]
#![feature(dropck_eyepatch)]
#![feature(new_uninit)]
#![feature(maybe_uninit_slice)]
#![feature(min_specialization)]
#![feature(decl_macro)]
#![feature(pointer_byte_offsets)]
#![feature(rustc_attrs)]
#![cfg_attr(test, feature(test))]
#![feature(strict_provenance)]
#![deny(unsafe_op_in_unsafe_fn)]
#![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)]
#![allow(internal_features)]
#![allow(clippy::mut_from_ref)] // Arena allocators are one of the places where this pattern is fine.

use smallvec::SmallVec;

use std::alloc::Layout;
use std::cell::{Cell, RefCell};
use std::marker::PhantomData;
use std::mem::{self, MaybeUninit};
use std::ptr::{self, NonNull};
use std::slice;
use std::{cmp, intrinsics};

/// This calls the passed function while ensuring it won't be inlined into the caller.
#[inline(never)]
#[cold]
fn outline<F: FnOnce() -> R, R>(f: F) -> R {
    f()
}

/// An arena that can hold objects of only one type.
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut T>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<ArenaChunk<T>>>,

    /// Marker indicating that dropping the arena causes its owned
    /// instances of `T` to be dropped.
    _own: PhantomData<T>,
}

struct ArenaChunk<T = u8> {
    /// The raw storage for the arena chunk.
    storage: NonNull<[MaybeUninit<T>]>,
    /// The number of valid entries in the chunk.
    entries: usize,
}

unsafe impl<#[may_dangle] T> Drop for ArenaChunk<T> {
    fn drop(&mut self) {
        unsafe { drop(Box::from_raw(self.storage.as_mut())) }
    }
}

impl<T> ArenaChunk<T> {
    #[inline]
    unsafe fn new(capacity: usize) -> ArenaChunk<T> {
        ArenaChunk {
            storage: NonNull::from(Box::leak(Box::new_uninit_slice(capacity))),
            entries: 0,
        }
    }

    /// Destroys this arena chunk.
    ///
    /// # Safety
    ///
    /// The caller must ensure that `len` elements of this chunk have been initialized.
    #[inline]
    unsafe fn destroy(&mut self, len: usize) {
        // The branch on needs_drop() is an -O1 performance optimization.
        // Without the branch, dropping TypedArena<T> takes linear time.
        if mem::needs_drop::<T>() {
            // SAFETY: The caller must ensure that `len` elements of this chunk have
            // been initialized.
            unsafe {
                let slice = self.storage.as_mut();
                ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(&mut slice[..len]));
            }
        }
    }

    // Returns a pointer to the first allocated object.
    #[inline]
    fn start(&mut self) -> *mut T {
        self.storage.as_ptr() as *mut T
    }

    // Returns a pointer to the end of the allocated space.
    #[inline]
    fn end(&mut self) -> *mut T {
        unsafe {
            if mem::size_of::<T>() == 0 {
                // A pointer as large as possible for zero-sized elements.
                ptr::invalid_mut(!0)
            } else {
                self.start().add(self.storage.len())
            }
        }
    }
}

// The arenas start with PAGE-sized chunks, and then each new chunk is twice as
// big as its predecessor, up until we reach HUGE_PAGE-sized chunks, whereupon
// we stop growing. This scales well, from arenas that are barely used up to
// arenas that are used for 100s of MiBs. Note also that the chosen sizes match
// the usual sizes of pages and huge pages on Linux.
const PAGE: usize = 4096;
const HUGE_PAGE: usize = 2 * 1024 * 1024;
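
// For example (illustrative numbers): a `TypedArena` of 8-byte elements starts
// with a 512-element (4 KiB) chunk, then grows to 1024, 2048, ... elements, and
// the doubling stops once a chunk holds 2 MiB worth of elements.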

impl<T> Default for TypedArena<T> {
    /// Creates a new `TypedArena`.
    fn default() -> TypedArena<T> {
        TypedArena {
            // We set both `ptr` and `end` to 0 so that the first call to
            // alloc() will trigger a grow().
            ptr: Cell::new(ptr::null_mut()),
            end: Cell::new(ptr::null_mut()),
            chunks: Default::default(),
            _own: PhantomData,
        }
    }
}

trait IterExt<T> {
    fn alloc_from_iter(self, arena: &TypedArena<T>) -> &mut [T];
}

impl<I, T> IterExt<T> for I
where
    I: IntoIterator<Item = T>,
{
    // This default collects into a `SmallVec` and then allocates by copying
    // from it. The specializations below for types like `Vec` are more
    // efficient, copying directly without the intermediate collecting step.
    // This default could be made more efficient, like
    // `DroplessArena::alloc_from_iter`, but it's not hot enough to bother.
    #[inline]
    default fn alloc_from_iter(self, arena: &TypedArena<T>) -> &mut [T] {
        let vec: SmallVec<[_; 8]> = self.into_iter().collect();
        vec.alloc_from_iter(arena)
    }
}

impl<T, const N: usize> IterExt<T> for std::array::IntoIter<T, N> {
    #[inline]
    fn alloc_from_iter(self, arena: &TypedArena<T>) -> &mut [T] {
        let len = self.len();
        if len == 0 {
            return &mut [];
        }
        // Move the content to the arena by copying and then forgetting it.
        let start_ptr = arena.alloc_raw_slice(len);
        unsafe {
            self.as_slice().as_ptr().copy_to_nonoverlapping(start_ptr, len);
            mem::forget(self);
            slice::from_raw_parts_mut(start_ptr, len)
        }
    }
}

impl<T> IterExt<T> for Vec<T> {
    #[inline]
    fn alloc_from_iter(mut self, arena: &TypedArena<T>) -> &mut [T] {
        let len = self.len();
        if len == 0 {
            return &mut [];
        }
        // Move the content to the arena by copying and then forgetting it.
        let start_ptr = arena.alloc_raw_slice(len);
        unsafe {
            self.as_ptr().copy_to_nonoverlapping(start_ptr, len);
            self.set_len(0);
            slice::from_raw_parts_mut(start_ptr, len)
        }
    }
}

impl<A: smallvec::Array> IterExt<A::Item> for SmallVec<A> {
    #[inline]
    fn alloc_from_iter(mut self, arena: &TypedArena<A::Item>) -> &mut [A::Item] {
        let len = self.len();
        if len == 0 {
            return &mut [];
        }
        // Move the content to the arena by copying and then forgetting it.
        let start_ptr = arena.alloc_raw_slice(len);
        unsafe {
            self.as_ptr().copy_to_nonoverlapping(start_ptr, len);
            self.set_len(0);
            slice::from_raw_parts_mut(start_ptr, len)
        }
    }
}

impl<T> TypedArena<T> {
    /// Allocates an object in the `TypedArena`, returning a reference to it.
    #[inline]
    pub fn alloc(&self, object: T) -> &mut T {
        if self.ptr == self.end {
            self.grow(1)
        }

        unsafe {
            if mem::size_of::<T>() == 0 {
                self.ptr.set(self.ptr.get().wrapping_byte_add(1));
                let ptr = ptr::NonNull::<T>::dangling().as_ptr();
                // Don't drop the object. This `write` is equivalent to `forget`.
                ptr::write(ptr, object);
                &mut *ptr
            } else {
                let ptr = self.ptr.get();
                // Advance the pointer.
                self.ptr.set(self.ptr.get().add(1));
                // Write into uninitialized memory.
                ptr::write(ptr, object);
                &mut *ptr
            }
        }
    }

    #[inline]
    fn can_allocate(&self, additional: usize) -> bool {
        // FIXME: this should *likely* use `offset_from`, but more
        // investigation is needed (including running tests in miri).
        let available_bytes = self.end.get().addr() - self.ptr.get().addr();
        let additional_bytes = additional.checked_mul(mem::size_of::<T>()).unwrap();
        available_bytes >= additional_bytes
    }

    #[inline]
    fn alloc_raw_slice(&self, len: usize) -> *mut T {
        assert!(mem::size_of::<T>() != 0);
        assert!(len != 0);

        // Ensure the current chunk can fit `len` objects.
        if !self.can_allocate(len) {
            self.grow(len);
            debug_assert!(self.can_allocate(len));
        }

        let start_ptr = self.ptr.get();
        // SAFETY: `can_allocate`/`grow` ensures that there is enough space for
        // `len` elements.
        unsafe { self.ptr.set(start_ptr.add(len)) };
        start_ptr
    }

    #[inline]
    pub fn alloc_from_iter<I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        assert!(mem::size_of::<T>() != 0);
        iter.alloc_from_iter(self)
    }

    /// Grows the arena.
    #[inline(never)]
    #[cold]
    fn grow(&self, additional: usize) {
        unsafe {
            // We need the element size to convert chunk sizes (ranging from
            // PAGE to HUGE_PAGE bytes) to element counts.
            let elem_size = cmp::max(1, mem::size_of::<T>());
            let mut chunks = self.chunks.borrow_mut();
            let mut new_cap;
            if let Some(last_chunk) = chunks.last_mut() {
                // If a type is `!needs_drop`, we don't need to keep track of how many elements
                // the chunk stores - the field will be ignored anyway.
                if mem::needs_drop::<T>() {
                    // FIXME: this should *likely* use `offset_from`, but more
                    // investigation is needed (including running tests in miri).
                    let used_bytes = self.ptr.get().addr() - last_chunk.start().addr();
                    last_chunk.entries = used_bytes / mem::size_of::<T>();
                }

                // If the previous chunk's len is less than HUGE_PAGE
                // bytes, then this chunk will be at least double the previous
                // chunk's size.
                new_cap = last_chunk.storage.len().min(HUGE_PAGE / elem_size / 2);
                new_cap *= 2;
            } else {
                new_cap = PAGE / elem_size;
            }
            // Also ensure that this chunk can fit `additional`.
            new_cap = cmp::max(additional, new_cap);

            let mut chunk = ArenaChunk::<T>::new(new_cap);
            self.ptr.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }

    // Drops the contents of the last chunk. The last chunk is partially empty, unlike all other
    // chunks.
    fn clear_last_chunk(&self, last_chunk: &mut ArenaChunk<T>) {
        // Determine how much was filled.
        let start = last_chunk.start().addr();
        // We obtain the value of the pointer to the first uninitialized element.
        let end = self.ptr.get().addr();
        // We then calculate the number of elements to be dropped in the last chunk,
        // which is the filled area's length.
        let diff = if mem::size_of::<T>() == 0 {
            // `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get
            // the number of zero-sized values in the last and only chunk, just out of caution.
            // Recall that `end` was incremented for each allocated value.
            end - start
        } else {
            // FIXME: this should *likely* use `offset_from`, but more
            // investigation is needed (including running tests in miri).
            (end - start) / mem::size_of::<T>()
        };
        // Pass that to the `destroy` method.
        unsafe {
            last_chunk.destroy(diff);
        }
        // Reset the chunk.
        self.ptr.set(last_chunk.start());
    }
}

unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
    fn drop(&mut self) {
        unsafe {
            // Determine how much was filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.pop() {
                // Drop the contents of the last chunk.
                self.clear_last_chunk(&mut last_chunk);
                // The last chunk will be dropped. Destroy all other chunks.
                for chunk in chunks_borrow.iter_mut() {
                    chunk.destroy(chunk.entries);
                }
            }
            // Box handles deallocation of `last_chunk` and `self.chunks`.
        }
    }
}

unsafe impl<T: Send> Send for TypedArena<T> {}

#[inline(always)]
fn align_down(val: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    val & !(align - 1)
}

#[inline(always)]
fn align_up(val: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    (val + align - 1) & !(align - 1)
}
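
// For example (illustrative values): `align_down(0x1007, 8) == 0x1000` and
// `align_up(0x1001, 8) == 0x1008`. Both rely on `align` being a power of two,
// so that `!(align - 1)` is a mask clearing the low bits.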

// Pointer alignment is common in compiler types, so keep `DroplessArena` aligned to them
// to optimize away alignment code.
const DROPLESS_ALIGNMENT: usize = mem::align_of::<usize>();

/// An arena that can hold objects of multiple different types that impl `Copy`
/// and/or satisfy `!mem::needs_drop`.
pub struct DroplessArena {
    /// A pointer to the start of the free space.
    start: Cell<*mut u8>,

    /// A pointer to the end of free space.
    ///
    /// The allocation proceeds downwards from the end of the chunk towards the
    /// start. (This is slightly simpler and faster than allocating upwards,
    /// see <https://fitzgeraldnick.com/2019/11/01/always-bump-downwards.html>.)
    /// When this pointer crosses the start pointer, a new chunk is allocated.
    ///
    /// This is kept aligned to DROPLESS_ALIGNMENT.
    end: Cell<*mut u8>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<ArenaChunk>>,
}

unsafe impl Send for DroplessArena {}

impl Default for DroplessArena {
    #[inline]
    fn default() -> DroplessArena {
        DroplessArena {
            // We set both `start` and `end` to 0 so that the first call to
            // alloc() will trigger a grow().
            start: Cell::new(ptr::null_mut()),
            end: Cell::new(ptr::null_mut()),
            chunks: Default::default(),
        }
    }
}

impl DroplessArena {
    fn grow(&self, layout: Layout) {
        // Add some padding so we can align `self.end` while
        // still fitting in a `layout` allocation.
        let additional = layout.size() + cmp::max(DROPLESS_ALIGNMENT, layout.align()) - 1;

        unsafe {
            let mut chunks = self.chunks.borrow_mut();
            let mut new_cap;
            if let Some(last_chunk) = chunks.last_mut() {
                // There is no need to update `last_chunk.entries` because that
                // field isn't used by `DroplessArena`.

                // If the previous chunk's len is less than HUGE_PAGE
                // bytes, then this chunk will be at least double the previous
                // chunk's size.
                new_cap = last_chunk.storage.len().min(HUGE_PAGE / 2);
                new_cap *= 2;
            } else {
                new_cap = PAGE;
            }
            // Also ensure that this chunk can fit `additional`.
            new_cap = cmp::max(additional, new_cap);

            let mut chunk = ArenaChunk::new(align_up(new_cap, PAGE));
            self.start.set(chunk.start());

            // Align the end to DROPLESS_ALIGNMENT.
            let end = align_down(chunk.end().addr(), DROPLESS_ALIGNMENT);

            // Make sure we don't go past `start`. This should not happen since the allocation
            // should be at least DROPLESS_ALIGNMENT - 1 bytes.
            debug_assert!(chunk.start().addr() <= end);

            self.end.set(chunk.end().with_addr(end));

            chunks.push(chunk);
        }
    }

    #[inline(never)]
    #[cold]
    fn grow_and_alloc_raw(&self, layout: Layout) -> *mut u8 {
        self.grow(layout);
        self.alloc_raw_without_grow(layout).unwrap()
    }

    /// Allocates a byte slice with specified layout from the current memory
    /// chunk. Returns `None` if there is no free space left to satisfy the
    /// request.
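    ///
    /// Worked example (illustrative numbers): if `end` is 0x1000 and the
    /// requested layout has size 20 and alignment 8 (DROPLESS_ALIGNMENT is also
    /// 8 on typical 64-bit targets), then `bytes` is `align_up(20, 8) == 24`
    /// and the new end becomes `align_down(0x1000 - 24, 8) == 0xFE8`, i.e. the
    /// bump proceeds downwards.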
    #[inline]
    fn alloc_raw_without_grow(&self, layout: Layout) -> Option<*mut u8> {
        let start = self.start.get().addr();
        let old_end = self.end.get();
        let end = old_end.addr();

        // Align allocated bytes so that `self.end` stays aligned to DROPLESS_ALIGNMENT
        let bytes = align_up(layout.size(), DROPLESS_ALIGNMENT);

        // Tell LLVM that `end` is aligned to DROPLESS_ALIGNMENT
        unsafe { intrinsics::assume(end == align_down(end, DROPLESS_ALIGNMENT)) };

        let new_end = align_down(end.checked_sub(bytes)?, layout.align());
        if start <= new_end {
            let new_end = old_end.with_addr(new_end);
            // `new_end` is aligned to DROPLESS_ALIGNMENT as `align_down` preserves alignment
            // as both `end` and `bytes` are already aligned to DROPLESS_ALIGNMENT.
            self.end.set(new_end);
            Some(new_end)
        } else {
            None
        }
    }

    #[inline]
    pub fn alloc_raw(&self, layout: Layout) -> *mut u8 {
        assert!(layout.size() != 0);
        if let Some(a) = self.alloc_raw_without_grow(layout) {
            return a;
        }
        // No free space left. Allocate a new chunk to satisfy the request.
        // On failure the grow will panic or abort.
        self.grow_and_alloc_raw(layout)
    }

    #[inline]
    pub fn alloc<T>(&self, object: T) -> &mut T {
        assert!(!mem::needs_drop::<T>());
        assert!(mem::size_of::<T>() != 0);

        let layout = Layout::new::<T>();
        let mem = if let Some(a) = self.alloc_raw_without_grow(layout) {
            a
        } else {
            // No free space left. Allocate a new chunk to satisfy the request.
            // On failure the grow will panic or abort.
            self.grow_and_alloc_raw(layout)
        } as *mut T;

        unsafe {
            // Write into uninitialized memory.
            ptr::write(mem, object);
            &mut *mem
        }
    }

    /// Allocates a slice of objects that are copied into the `DroplessArena`, returning a mutable
    /// reference to it. Will panic if passed a zero-sized type.
    ///
    /// # Panics
    ///
    /// - Zero-sized types
    /// - Zero-length slices
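    ///
    /// A usage sketch (illustrative):
    ///
    /// ```ignore (illustrative)
    /// let arena = DroplessArena::default();
    /// let copied: &mut [u32] = arena.alloc_slice(&[1, 2, 3]);
    /// assert_eq!(*copied, [1, 2, 3]);
    /// ```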
    #[inline]
    pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        assert!(!mem::needs_drop::<T>());
        assert!(mem::size_of::<T>() != 0);
        assert!(!slice.is_empty());

        let mem = self.alloc_raw(Layout::for_value::<[T]>(slice)) as *mut T;

        unsafe {
            mem.copy_from_nonoverlapping(slice.as_ptr(), slice.len());
            slice::from_raw_parts_mut(mem, slice.len())
        }
    }

    /// # Safety
    ///
    /// The caller must ensure that `mem` is valid for writes up to
    /// `size_of::<T>() * len`.
    #[inline]
    unsafe fn write_from_iter<T, I: Iterator<Item = T>>(
        &self,
        mut iter: I,
        len: usize,
        mem: *mut T,
    ) -> &mut [T] {
        let mut i = 0;
        // Use a manual loop since LLVM manages to optimize it better for
        // slice iterators
        loop {
            // SAFETY: The caller must ensure that `mem` is valid for writes up to
            // `size_of::<T>() * len`.
            unsafe {
                match iter.next() {
                    Some(value) if i < len => mem.add(i).write(value),
                    Some(_) | None => {
                        // We only return as many items as the iterator gave us, even
                        // though it was supposed to give us `len`
                        return slice::from_raw_parts_mut(mem, i);
                    }
                }
            }
            i += 1;
        }
    }

    #[inline]
    pub fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        let iter = iter.into_iter();
        assert!(mem::size_of::<T>() != 0);
        assert!(!mem::needs_drop::<T>());

        let size_hint = iter.size_hint();

        match size_hint {
            (min, Some(max)) if min == max => {
                // We know the exact number of elements the iterator will produce here
                let len = min;

                if len == 0 {
                    return &mut [];
                }

                let mem = self.alloc_raw(Layout::array::<T>(len).unwrap()) as *mut T;
                unsafe { self.write_from_iter(iter, len, mem) }
            }
            (_, _) => {
                outline(move || -> &mut [T] {
                    let mut vec: SmallVec<[_; 8]> = iter.collect();
                    if vec.is_empty() {
                        return &mut [];
                    }
                    // Move the content to the arena by copying it and then forgetting
                    // the content of the SmallVec
                    unsafe {
                        let len = vec.len();
                        let start_ptr =
                            self.alloc_raw(Layout::for_value::<[T]>(vec.as_slice())) as *mut T;
                        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
                        vec.set_len(0);
                        slice::from_raw_parts_mut(start_ptr, len)
                    }
                })
            }
        }
    }
}

/// Declare an `Arena` containing one dropless arena and many typed arenas (the
/// types of the typed arenas are specified by the arguments).
///
/// There are three cases of interest.
/// - Types that are `Copy`: these need not be specified in the arguments. They
///   will use the `DroplessArena`.
/// - Types that are `!Copy` and `!Drop`: these must be specified in the
///   arguments. An empty `TypedArena` will be created for each one, but the
///   `DroplessArena` will always be used and the `TypedArena` will stay empty.
///   This is odd but harmless, because an empty arena allocates no memory.
/// - Types that are `!Copy` and `Drop`: these must be specified in the
///   arguments. The `TypedArena` will be used for them.
///
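/// A sketch of an invocation (illustrative; `MyDropType<'tcx>` is a placeholder
/// for a real `!Copy` type that needs `Drop`, and the leading `[]` token tree is
/// accepted per entry but not used by this macro):
///
/// ```ignore (illustrative)
/// rustc_arena::declare_arena!([
///     [] my_drop_ty: MyDropType<'tcx>,
/// ]);
/// ```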
#[rustc_macro_transparency = "semitransparent"]
pub macro declare_arena([$($a:tt $name:ident: $ty:ty,)*]) {
    #[derive(Default)]
    pub struct Arena<'tcx> {
        pub dropless: $crate::DroplessArena,
        $($name: $crate::TypedArena<$ty>,)*
    }

    pub trait ArenaAllocatable<'tcx, C = rustc_arena::IsNotCopy>: Sized {
        #[allow(clippy::mut_from_ref)]
        fn allocate_on<'a>(self, arena: &'a Arena<'tcx>) -> &'a mut Self;
        #[allow(clippy::mut_from_ref)]
        fn allocate_from_iter<'a>(
            arena: &'a Arena<'tcx>,
            iter: impl ::std::iter::IntoIterator<Item = Self>,
        ) -> &'a mut [Self];
    }

    // Any type that impls `Copy` can be arena-allocated in the `DroplessArena`.
    impl<'tcx, T: Copy> ArenaAllocatable<'tcx, rustc_arena::IsCopy> for T {
        #[inline]
        #[allow(clippy::mut_from_ref)]
        fn allocate_on<'a>(self, arena: &'a Arena<'tcx>) -> &'a mut Self {
            arena.dropless.alloc(self)
        }
        #[inline]
        #[allow(clippy::mut_from_ref)]
        fn allocate_from_iter<'a>(
            arena: &'a Arena<'tcx>,
            iter: impl ::std::iter::IntoIterator<Item = Self>,
        ) -> &'a mut [Self] {
            arena.dropless.alloc_from_iter(iter)
        }
    }

    $(
        impl<'tcx> ArenaAllocatable<'tcx, rustc_arena::IsNotCopy> for $ty {
            #[inline]
            fn allocate_on<'a>(self, arena: &'a Arena<'tcx>) -> &'a mut Self {
                if !::std::mem::needs_drop::<Self>() {
                    arena.dropless.alloc(self)
                } else {
                    arena.$name.alloc(self)
                }
            }

            #[inline]
            #[allow(clippy::mut_from_ref)]
            fn allocate_from_iter<'a>(
                arena: &'a Arena<'tcx>,
                iter: impl ::std::iter::IntoIterator<Item = Self>,
            ) -> &'a mut [Self] {
                if !::std::mem::needs_drop::<Self>() {
                    arena.dropless.alloc_from_iter(iter)
                } else {
                    arena.$name.alloc_from_iter(iter)
                }
            }
        }
    )*

    impl<'tcx> Arena<'tcx> {
        #[inline]
        #[allow(clippy::mut_from_ref)]
        pub fn alloc<T: ArenaAllocatable<'tcx, C>, C>(&self, value: T) -> &mut T {
            value.allocate_on(self)
        }

        // Any type that impls `Copy` can have slices be arena-allocated in the `DroplessArena`.
        #[inline]
        #[allow(clippy::mut_from_ref)]
        pub fn alloc_slice<T: ::std::marker::Copy>(&self, value: &[T]) -> &mut [T] {
            if value.is_empty() {
                return &mut [];
            }
            self.dropless.alloc_slice(value)
        }

        #[allow(clippy::mut_from_ref)]
        pub fn alloc_from_iter<T: ArenaAllocatable<'tcx, C>, C>(
            &self,
            iter: impl ::std::iter::IntoIterator<Item = T>,
        ) -> &mut [T] {
            T::allocate_from_iter(self, iter)
        }
    }
}

// Marker types that let us give different behaviour for arenas allocating
// `Copy` types vs `!Copy` types.
pub struct IsCopy;
pub struct IsNotCopy;

#[cfg(test)]
mod tests;