2014-04-03 03:06:55 +00:00
|
|
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
|
2012-12-04 00:48:01 +00:00
|
|
|
// file at the top-level directory of this distribution and at
|
|
|
|
// http://rust-lang.org/COPYRIGHT.
|
|
|
|
//
|
|
|
|
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
|
|
|
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
|
|
|
// option. This file may not be copied, modified, or distributed
|
|
|
|
// except according to those terms.
|
2014-01-07 01:03:30 +00:00
|
|
|
//
|
|
|
|
//! The arena, a fast but limited type of allocator.
|
|
|
|
//!
|
|
|
|
//! Arenas are a type of allocator that destroy the objects within, all at
|
|
|
|
//! once, once the arena itself is destroyed. They do not support deallocation
|
|
|
|
//! of individual objects while the arena itself is still alive. The benefit
|
|
|
|
//! of an arena is very fast allocation; just a pointer bump.
|
2012-08-21 22:32:30 +00:00
|
|
|
|
2014-03-22 01:05:05 +00:00
|
|
|
#![crate_id = "arena#0.10-pre"]
|
|
|
|
#![crate_type = "rlib"]
|
|
|
|
#![crate_type = "dylib"]
|
|
|
|
#![license = "MIT/ASL2"]
|
|
|
|
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
|
|
|
|
html_favicon_url = "http://www.rust-lang.org/favicon.ico",
|
|
|
|
html_root_url = "http://static.rust-lang.org/doc/master")]
|
|
|
|
#![allow(missing_doc)]
|
2014-03-27 22:13:16 +00:00
|
|
|
#![allow(visible_private_types)] // NOTE: remove after a stage0 snap
|
|
|
|
|
2014-02-14 18:10:06 +00:00
|
|
|
extern crate collections;
|
2014-02-03 05:56:49 +00:00
|
|
|
|
2013-06-28 22:32:26 +00:00
|
|
|
use std::cast::{transmute, transmute_mut, transmute_mut_region};
|
|
|
|
use std::cast;
|
2013-12-31 01:32:53 +00:00
|
|
|
use std::cell::{Cell, RefCell};
|
2014-02-09 06:16:42 +00:00
|
|
|
use std::mem;
|
2014-02-21 12:25:17 +00:00
|
|
|
use std::ptr::read;
|
2014-02-06 07:34:33 +00:00
|
|
|
use std::cmp;
|
Replaces the free-standing functions in f32, &c.
The free-standing functions in f32, f64, i8, i16, i32, i64, u8, u16,
u32, u64, float, int, and uint are replaced with generic functions in
num instead.
If you were previously using any of those functions, just replace them
with the corresponding function with the same name in num.
Note: If you were using a function that corresponds to an operator, use
the operator instead.
2013-07-08 16:05:17 +00:00
|
|
|
use std::num;
|
2014-02-01 04:53:11 +00:00
|
|
|
use std::rc::Rc;
|
2014-01-07 01:03:30 +00:00
|
|
|
use std::rt::global_heap;
|
2014-02-16 07:49:08 +00:00
|
|
|
use std::intrinsics::{TyDesc, get_tydesc};
|
|
|
|
use std::intrinsics;
|
2013-03-05 20:21:02 +00:00
|
|
|
|
2012-08-21 22:32:30 +00:00
|
|
|
// The way arena uses arrays is really deeply awful. The arrays are
|
|
|
|
// allocated, and have capacities reserved, but the fill for the array
|
|
|
|
// will always stay at 0.
|
2014-02-25 02:18:19 +00:00
|
|
|
// A chunk of backing storage for an `Arena`.  The buffer is shared via
// `Rc` so a `Chunk` can be cheaply cloned onto the arena's chunk list
// when a fresh head chunk replaces it.
#[deriving(Clone, Eq)]
struct Chunk {
    // Backing byte buffer.  Only its *capacity* is meaningful: the Vec's
    // length stays 0 (see the note at the top of the file) and `fill`
    // below records how many bytes have been handed out.
    data: Rc<RefCell<Vec<u8> >>,
    // Number of bytes of `data` already allocated out of this chunk.
    fill: Cell<uint>,
    // True if this chunk holds POD ("copy") objects, which need no
    // destructor sweep when the arena is dropped.
    is_copy: Cell<bool>,
}
|
2014-02-01 04:53:11 +00:00
|
|
|
impl Chunk {
|
|
|
|
fn capacity(&self) -> uint {
|
2014-03-20 22:05:56 +00:00
|
|
|
self.data.borrow().capacity()
|
2014-02-01 04:53:11 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
unsafe fn as_ptr(&self) -> *u8 {
|
2014-03-20 22:05:56 +00:00
|
|
|
self.data.borrow().as_ptr()
|
2014-02-01 04:53:11 +00:00
|
|
|
}
|
|
|
|
}
|
2012-07-11 22:00:40 +00:00
|
|
|
|
2014-01-07 01:03:30 +00:00
|
|
|
// Arenas are used to quickly allocate objects that share a
|
|
|
|
// lifetime. The arena uses ~[u8] vectors as a backing store to
|
|
|
|
// allocate objects from. For each allocated object, the arena stores
|
|
|
|
// a pointer to the type descriptor followed by the
|
|
|
|
// object. (Potentially with alignment padding after each of them.)
|
|
|
|
// When the arena is destroyed, it iterates through all of its chunks,
|
|
|
|
// and uses the tydesc information to trace through the objects,
|
|
|
|
// calling the destructors on them.
|
|
|
|
// One subtle point that needs to be addressed is how to handle
|
|
|
|
// failures while running the user provided initializer function. It
|
|
|
|
// is important to not run the destructor on uninitialized objects, but
|
|
|
|
// how to detect them is somewhat subtle. Since alloc() can be invoked
|
|
|
|
// recursively, it is not sufficient to simply exclude the most recent
|
|
|
|
// object. To solve this without requiring extra space, we use the low
|
|
|
|
// order bit of the tydesc pointer to encode whether the object it
|
|
|
|
// describes has been fully initialized.
|
|
|
|
|
|
|
|
// As an optimization, objects with destructors are stored in
|
|
|
|
// different chunks than objects without destructors. This reduces
|
|
|
|
// overhead when initializing plain-old-data and means we don't need
|
|
|
|
// to waste time running the destructors of POD.
|
2012-09-28 23:24:57 +00:00
|
|
|
pub struct Arena {
    // The head is separated out from the list as a unbenchmarked
    // microoptimization, to avoid needing to case on the list to
    // access the head.
    //
    // `head` holds objects that need drop glue; `copy_head` holds POD
    // objects that do not (see the POD-vs-non-POD note above).
    head: Chunk,
    copy_head: Chunk,
    // Older, already-filled chunks of both kinds; swept on drop.
    chunks: RefCell<Vec<Chunk>>,
}
|
|
|
|
|
2013-08-05 08:43:40 +00:00
|
|
|
impl Arena {
|
|
|
|
pub fn new() -> Arena {
|
|
|
|
Arena::new_with_size(32u)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn new_with_size(initial_size: uint) -> Arena {
|
|
|
|
Arena {
|
|
|
|
head: chunk(initial_size, false),
|
2014-03-26 23:01:11 +00:00
|
|
|
copy_head: chunk(initial_size, true),
|
2014-03-19 04:31:40 +00:00
|
|
|
chunks: RefCell::new(Vec::new()),
|
2012-08-21 22:32:30 +00:00
|
|
|
}
|
|
|
|
}
|
2012-07-11 22:00:40 +00:00
|
|
|
}
|
2012-03-21 02:06:04 +00:00
|
|
|
|
2014-03-26 23:01:11 +00:00
|
|
|
// Builds a fresh, empty chunk with `size` bytes of reserved capacity.
// `is_copy` marks whether the chunk will hold POD objects (true) or
// objects needing drop glue (false).
fn chunk(size: uint, is_copy: bool) -> Chunk {
    Chunk {
        data: Rc::new(RefCell::new(Vec::with_capacity(size))),
        fill: Cell::new(0u),
        is_copy: Cell::new(is_copy),
    }
}
|
|
|
|
|
2013-08-05 08:43:40 +00:00
|
|
|
#[unsafe_destructor]
impl Drop for Arena {
    fn drop(&mut self) {
        unsafe {
            // Run destructors for everything in the non-POD head chunk.
            destroy_chunk(&self.head);
            // Sweep older chunks too, but only the non-POD ones: copy
            // chunks hold objects with no drop glue, so their memory is
            // simply released when the Rc'd buffer goes away.  For the
            // same reason `copy_head` itself needs no sweep.
            for chunk in self.chunks.borrow().iter() {
                if !chunk.is_copy.get() {
                    destroy_chunk(chunk);
                }
            }
        }
    }
}
|
|
|
|
|
2013-06-18 21:45:18 +00:00
|
|
|
#[inline]
|
2014-01-07 01:03:30 +00:00
|
|
|
fn round_up(base: uint, align: uint) -> uint {
|
|
|
|
(base.checked_add(&(align - 1))).unwrap() & !(&(align - 1))
|
2012-08-21 22:32:30 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Walk down a chunk, running the destructors for any objects stored
|
|
|
|
// in it.
|
2012-09-20 01:14:30 +00:00
|
|
|
// Walk down a chunk, running the destructors for any objects stored
// in it.
//
// Layout per object (see the module comment): a bitpacked `*TyDesc`
// word, alignment padding, then the object itself.  The low bit of the
// tydesc word records whether the object was fully initialized; we only
// run drop glue for initialized objects, so a failure inside a user
// initializer does not destroy garbage.
unsafe fn destroy_chunk(chunk: &Chunk) {
    let mut idx = 0;
    let buf = chunk.as_ptr();
    let fill = chunk.fill.get();

    while idx < fill {
        // Read the bitpacked tydesc word at the current offset.
        let tydesc_data: *uint = transmute(buf.offset(idx as int));
        let (tydesc, is_done) = un_bitpack_tydesc_ptr(*tydesc_data);
        let (size, align) = ((*tydesc).size, (*tydesc).align);

        // The object begins after the tydesc word, rounded up to the
        // object's own alignment.
        let after_tydesc = idx + mem::size_of::<*TyDesc>();

        let start = round_up(after_tydesc, align);

        //debug!("freeing object: idx = {}, size = {}, align = {}, done = {}",
        //       start, size, align, is_done);
        if is_done {
            ((*tydesc).drop_glue)(buf.offset(start as int) as *i8);
        }

        // Find where the next tydesc lives
        idx = round_up(start + size, mem::pref_align_of::<*TyDesc>());
    }
}
|
|
|
|
|
2012-08-21 22:32:30 +00:00
|
|
|
// We encode whether the object a tydesc describes has been
|
|
|
|
// initialized in the arena in the low bit of the tydesc pointer. This
|
|
|
|
// is necessary in order to properly do cleanup if a failure occurs
|
|
|
|
// during an initializer.
|
2013-06-18 21:45:18 +00:00
|
|
|
#[inline]
|
2014-02-21 12:56:09 +00:00
|
|
|
fn bitpack_tydesc_ptr(p: *TyDesc, is_done: bool) -> uint {
|
|
|
|
p as uint | (is_done as uint)
|
2012-08-21 22:32:30 +00:00
|
|
|
}
|
2013-06-18 21:45:18 +00:00
|
|
|
#[inline]
|
2014-02-21 12:56:09 +00:00
|
|
|
fn un_bitpack_tydesc_ptr(p: uint) -> (*TyDesc, bool) {
|
|
|
|
((p & !1) as *TyDesc, p & 1 == 1)
|
2012-08-21 22:32:30 +00:00
|
|
|
}
|
|
|
|
|
2013-05-31 22:17:22 +00:00
|
|
|
impl Arena {
    // Current capacity of the POD head chunk; new chunks are sized
    // relative to this.
    fn chunk_size(&self) -> uint {
        self.copy_head.capacity()
    }

    // Functions for the POD part of the arena

    // Slow path: retire the full POD head chunk onto the chunk list,
    // replace it with a bigger one (next power of two above the larger
    // of the request and the old size), then retry the allocation.
    fn alloc_copy_grow(&mut self, n_bytes: uint, align: uint) -> *u8 {
        // Allocate a new chunk.
        let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
        self.chunks.borrow_mut().push(self.copy_head.clone());
        self.copy_head =
            chunk(num::next_power_of_two(new_min_chunk_size + 1u), true);

        return self.alloc_copy_inner(n_bytes, align);
    }

    // Bump-allocate `n_bytes` (at `align`) from the POD head chunk,
    // growing if it does not fit.  Returns a raw pointer into the chunk.
    #[inline]
    fn alloc_copy_inner(&mut self, n_bytes: uint, align: uint) -> *u8 {
        unsafe {
            // NOTE(review): `transmute_mut_region` launders the borrow's
            // lifetime so `this` can be used alongside `self` below —
            // pre-1.0 idiom to dodge the borrow checker.
            let this = transmute_mut_region(self);
            let start = round_up(this.copy_head.fill.get(), align);
            let end = start + n_bytes;
            if end > self.chunk_size() {
                return this.alloc_copy_grow(n_bytes, align);
            }
            this.copy_head.fill.set(end);

            //debug!("idx = {}, size = {}, align = {}, fill = {}",
            //       start, n_bytes, align, head.fill.get());

            this.copy_head.as_ptr().offset(start as int)
        }
    }

    // Allocate a POD value: reserve space, then move `op()`'s result
    // into it in place.  No tydesc is recorded since no drop glue runs.
    #[inline]
    fn alloc_copy<'a, T>(&'a mut self, op: || -> T) -> &'a T {
        unsafe {
            let ptr = self.alloc_copy_inner(mem::size_of::<T>(), mem::min_align_of::<T>());
            let ptr: *mut T = transmute(ptr);
            mem::move_val_init(&mut (*ptr), op());
            return transmute(ptr);
        }
    }

    // Functions for the non-POD part of the arena

    // Slow path for the non-POD head chunk; mirrors `alloc_copy_grow`.
    fn alloc_noncopy_grow(&mut self, n_bytes: uint, align: uint)
                          -> (*u8, *u8) {
        // Allocate a new chunk.
        let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
        self.chunks.borrow_mut().push(self.head.clone());
        self.head =
            chunk(num::next_power_of_two(new_min_chunk_size + 1u), false);

        return self.alloc_noncopy_inner(n_bytes, align);
    }

    // Reserve room for a tydesc word plus an aligned object of
    // `n_bytes`.  Returns (pointer to the tydesc slot, pointer to the
    // object slot).
    #[inline]
    fn alloc_noncopy_inner(&mut self, n_bytes: uint, align: uint)
                           -> (*u8, *u8) {
        unsafe {
            let start;
            let end;
            let tydesc_start;
            let after_tydesc;

            // Inner scope so the laundered `head` borrow ends before we
            // touch `self` again below.
            {
                let head = transmute_mut_region(&mut self.head);

                tydesc_start = head.fill.get();
                after_tydesc = head.fill.get() + mem::size_of::<*TyDesc>();
                start = round_up(after_tydesc, align);
                end = start + n_bytes;
            }

            if end > self.head.capacity() {
                return self.alloc_noncopy_grow(n_bytes, align);
            }

            let head = transmute_mut_region(&mut self.head);
            // Advance fill to where the *next* tydesc word must begin.
            head.fill.set(round_up(end, mem::pref_align_of::<*TyDesc>()));

            //debug!("idx = {}, size = {}, align = {}, fill = {}",
            //       start, n_bytes, align, head.fill);

            let buf = self.head.as_ptr();
            return (buf.offset(tydesc_start as int), buf.offset(start as int));
        }
    }

    // Allocate a value that needs drop glue.  The tydesc word is first
    // written with the "uninitialized" bit clear, then flipped to
    // initialized only after `op()` succeeds — so a failing initializer
    // never leaves a half-built object that drop would later visit.
    #[inline]
    fn alloc_noncopy<'a, T>(&'a mut self, op: || -> T) -> &'a T {
        unsafe {
            let tydesc = get_tydesc::<T>();
            let (ty_ptr, ptr) =
                self.alloc_noncopy_inner(mem::size_of::<T>(), mem::min_align_of::<T>());
            let ty_ptr: *mut uint = transmute(ty_ptr);
            let ptr: *mut T = transmute(ptr);
            // Write in our tydesc along with a bit indicating that it
            // has *not* been initialized yet.
            *ty_ptr = transmute(tydesc);
            // Actually initialize it
            mem::move_val_init(&mut(*ptr), op());
            // Now that we are done, update the tydesc to indicate that
            // the object is there.
            *ty_ptr = bitpack_tydesc_ptr(tydesc, true);

            return transmute(ptr);
        }
    }

    // The external interface

    /// Allocates the value produced by `op` in the arena and returns a
    /// reference to it, dispatching on whether `T` needs drop glue.
    #[inline]
    pub fn alloc<'a, T>(&'a self, op: || -> T) -> &'a T {
        unsafe {
            // FIXME: Borrow check
            let this = transmute_mut(self);
            if intrinsics::needs_drop::<T>() {
                this.alloc_noncopy(op)
            } else {
                this.alloc_copy(op)
            }
        }
    }
}
|
2012-03-21 02:06:04 +00:00
|
|
|
|
2012-08-21 22:32:30 +00:00
|
|
|
// Smoke test: interleave droppable and plain allocations and rely on
// the arena's Drop impl to run every destructor without leaking.
#[test]
fn test_arena_destructors() {
    let arena = Arena::new();
    for i in range(0u, 10) {
        // Arena allocate something with drop glue to make sure it
        // doesn't leak.
        arena.alloc(|| Rc::new(i));
        // Allocate something with funny size and alignment, to keep
        // things interesting.
        arena.alloc(|| [0u8, 1u8, 2u8]);
    }
}
|
|
|
|
|
2013-04-29 22:23:04 +00:00
|
|
|
// Fill the arena, then fail from inside an initializer closure.  The
// arena must unwind cleanly: the half-allocated slot's tydesc still has
// its "initialized" bit clear, so its destructor is skipped while all
// the earlier, completed objects are still destroyed.
#[test]
#[should_fail]
fn test_arena_destructors_fail() {
    let arena = Arena::new();
    // Put some stuff in the arena.
    for i in range(0u, 10) {
        // Arena allocate something with drop glue to make sure it
        // doesn't leak.
        arena.alloc(|| { Rc::new(i) });
        // Allocate something with funny size and alignment, to keep
        // things interesting.
        arena.alloc(|| { [0u8, 1u8, 2u8] });
    }
    // Now, fail while allocating
    arena.alloc::<Rc<int>>(|| {
        // Now fail.
        fail!();
    });
}
|
2014-01-07 01:03:30 +00:00
|
|
|
|
|
|
|
/// An arena that can hold objects of only one type.
///
/// Safety note: Modifying objects in the arena that have already had their
/// `drop` destructors run can cause leaks, because the destructor will not
/// run again for these objects.
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: *T,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: *T,

    /// A pointer to the first arena segment.
    /// Always `Some` between construction and drop; `grow` briefly takes
    /// it to link the old chunk behind the new one.
    first: Option<~TypedArenaChunk<T>>,
}
|
|
|
|
|
2014-02-21 12:25:17 +00:00
|
|
|
// Header of one segment of a `TypedArena`.  The header is allocated
// together with the element storage in a single raw allocation; the
// elements live immediately after it (see `start()`).
struct TypedArenaChunk<T> {
    /// Pointer to the next arena segment.
    next: Option<~TypedArenaChunk<T>>,

    /// The number of elements that this chunk can hold.
    capacity: uint,

    // Objects follow here, suitably aligned.
}
|
|
|
|
|
2014-02-21 12:25:17 +00:00
|
|
|
impl<T> TypedArenaChunk<T> {
    // Allocates a chunk (header + room for `capacity` elements) in one
    // raw allocation, linking `next` behind it.  All size arithmetic is
    // overflow-checked via `unwrap()`.
    #[inline]
    fn new(next: Option<~TypedArenaChunk<T>>, capacity: uint) -> ~TypedArenaChunk<T> {
        let mut size = mem::size_of::<TypedArenaChunk<T>>();
        size = round_up(size, mem::min_align_of::<T>());
        let elem_size = mem::size_of::<T>();
        let elems_size = elem_size.checked_mul(&capacity).unwrap();
        size = size.checked_add(&elems_size).unwrap();

        let mut chunk = unsafe {
            let chunk = global_heap::exchange_malloc(size);
            let mut chunk: ~TypedArenaChunk<T> = cast::transmute(chunk);
            // `next` is moved into uninitialized memory, so use
            // move_val_init rather than plain assignment (which would
            // try to drop the garbage already there).
            mem::move_val_init(&mut chunk.next, next);
            chunk
        };

        chunk.capacity = capacity;
        chunk
    }

    /// Destroys this arena chunk, running the destructor for each of the
    /// first `len` elements if `T` has drop glue, then recursively
    /// destroying the next chunk in the list (assumed completely full).
    #[inline]
    unsafe fn destroy(&mut self, len: uint) {
        // Destroy all the allocated objects.
        if intrinsics::needs_drop::<T>() {
            let mut start = self.start();
            for _ in range(0, len) {
                read(start as *T); // run the destructor on the pointer
                start = start.offset(mem::size_of::<T>() as int)
            }
        }

        // Destroy the next chunk.
        let next_opt = mem::replace(&mut self.next, None);
        match next_opt {
            None => {}
            Some(mut next) => {
                // We assume that the next chunk is completely filled.
                next.destroy(next.capacity)
            }
        }
    }

    // Returns a pointer to the first allocated object: just past the
    // header, rounded up to T's alignment.
    #[inline]
    fn start(&self) -> *u8 {
        let this: *TypedArenaChunk<T> = self;
        unsafe {
            cast::transmute(round_up(this.offset(1) as uint, mem::min_align_of::<T>()))
        }
    }

    // Returns a pointer to the end of the allocated space.
    #[inline]
    fn end(&self) -> *u8 {
        unsafe {
            let size = mem::size_of::<T>().checked_mul(&self.capacity).unwrap();
            self.start().offset(size as int)
        }
    }
}
|
|
|
|
|
|
|
|
impl<T> TypedArena<T> {
    /// Creates a new arena with preallocated space for 8 objects.
    #[inline]
    pub fn new() -> TypedArena<T> {
        TypedArena::with_capacity(8)
    }

    /// Creates a new arena with preallocated space for the given number of
    /// objects.
    #[inline]
    pub fn with_capacity(capacity: uint) -> TypedArena<T> {
        let chunk = TypedArenaChunk::<T>::new(None, capacity);
        TypedArena {
            ptr: chunk.start() as *T,
            end: chunk.end() as *T,
            first: Some(chunk),
        }
    }

    /// Allocates an object into this arena.
    #[inline]
    pub fn alloc<'a>(&'a self, object: T) -> &'a T {
        unsafe {
            // &self is taken but we mutate the bump pointer, so cast
            // away the immutability (pre-1.0 interior-mutability hack).
            let this = cast::transmute_mut(self);
            if this.ptr == this.end {
                this.grow()
            }

            // Move the object into the slot without dropping the
            // uninitialized memory already there, then bump the pointer.
            let ptr: &'a mut T = cast::transmute(this.ptr);
            mem::move_val_init(ptr, object);
            this.ptr = this.ptr.offset(1);
            let ptr: &'a T = ptr;
            ptr
        }
    }

    /// Grows the arena: allocates a chunk of double the current
    /// capacity, with the (now full) old chunk linked behind it.
    #[inline(never)]
    fn grow(&mut self) {
        let chunk = self.first.take_unwrap();
        let new_capacity = chunk.capacity.checked_mul(&2).unwrap();
        let chunk = TypedArenaChunk::<T>::new(Some(chunk), new_capacity);
        self.ptr = chunk.start() as *T;
        self.end = chunk.end() as *T;
        self.first = Some(chunk)
    }
}
|
|
|
|
|
|
|
|
#[unsafe_destructor]
impl<T> Drop for TypedArena<T> {
    fn drop(&mut self) {
        // Determine how much was filled.
        // The head chunk is only partially full; compute how many
        // elements it holds from the bump pointer.  Older chunks are
        // assumed full by `destroy`.
        let start = self.first.get_ref().start() as uint;
        let end = self.ptr as uint;
        let diff = (end - start) / mem::size_of::<T>();

        // Pass that to the `destroy` method.
        unsafe {
            self.first.get_mut_ref().destroy(diff)
        }
    }
}
|
|
|
|
|
|
|
|
#[cfg(test)]
mod tests {
    extern crate test;

    use self::test::BenchHarness;
    use super::{Arena, TypedArena};

    // A small POD payload for the "copy" tests/benches.
    struct Point {
        x: int,
        y: int,
        z: int,
    }

    // TypedArena with a POD element type: many allocations, no drops.
    #[test]
    pub fn test_copy() {
        let arena = TypedArena::new();
        for _ in range(0, 100000) {
            arena.alloc(Point {
                x: 1,
                y: 2,
                z: 3,
            });
        }
    }

    // Benchmark: POD allocation through TypedArena.
    #[bench]
    pub fn bench_copy(bh: &mut BenchHarness) {
        let arena = TypedArena::new();
        bh.iter(|| {
            arena.alloc(Point {
                x: 1,
                y: 2,
                z: 3,
            })
        })
    }

    // Baseline: the same allocation straight on the exchange heap.
    #[bench]
    pub fn bench_copy_nonarena(bh: &mut BenchHarness) {
        bh.iter(|| {
            ~Point {
                x: 1,
                y: 2,
                z: 3,
            }
        })
    }

    // Baseline: the same allocation through the untyped `Arena`.
    #[bench]
    pub fn bench_copy_old_arena(bh: &mut BenchHarness) {
        let arena = Arena::new();
        bh.iter(|| {
            arena.alloc(|| {
                Point {
                    x: 1,
                    y: 2,
                    z: 3,
                }
            })
        })
    }

    // A payload with drop glue (owned string + vector) for the
    // "noncopy" tests/benches.
    struct Noncopy {
        string: ~str,
        array: Vec<int> ,
    }

    // TypedArena with a droppable element type: exercises the
    // destructor sweep on arena drop.
    #[test]
    pub fn test_noncopy() {
        let arena = TypedArena::new();
        for _ in range(0, 100000) {
            arena.alloc(Noncopy {
                string: ~"hello world",
                array: vec!( 1, 2, 3, 4, 5 ),
            });
        }
    }

    // Benchmark: droppable allocation through TypedArena.
    #[bench]
    pub fn bench_noncopy(bh: &mut BenchHarness) {
        let arena = TypedArena::new();
        bh.iter(|| {
            arena.alloc(Noncopy {
                string: ~"hello world",
                array: vec!( 1, 2, 3, 4, 5 ),
            })
        })
    }

    // Baseline: droppable allocation straight on the exchange heap.
    #[bench]
    pub fn bench_noncopy_nonarena(bh: &mut BenchHarness) {
        bh.iter(|| {
            ~Noncopy {
                string: ~"hello world",
                array: vec!( 1, 2, 3, 4, 5 ),
            }
        })
    }

    // Baseline: droppable allocation through the untyped `Arena`.
    #[bench]
    pub fn bench_noncopy_old_arena(bh: &mut BenchHarness) {
        let arena = Arena::new();
        bh.iter(|| {
            arena.alloc(|| Noncopy {
                string: ~"hello world",
                array: vec!( 1, 2, 3, 4, 5 ),
            })
        })
    }
}
|