Mirror of https://github.com/vulkano-rs/vulkano.git, synced 2024-11-22 06:45:23 +00:00
Add `Suballocator::suballocations` (#2499)

* Add `Suballocator::suballocations`
* Add missing `Send` and `Sync` impls for `free_list::Suballocations`
* Missed docs
* Strange import, what is rust-analyzer smoking

parent f911996534 · commit 984cbeb0c3
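
For orientation, here is a rough usage sketch of the new API. It is not part of the commit; the module paths assume vulkano's public re-exports of the items changed below, and the sizes are arbitrary:

```rust
// Usage sketch (assumed paths, arbitrary sizes): allocate from a region,
// then walk every suballocation, free or not.
use vulkano::memory::allocator::{
    suballocator::{AllocationType, FreeListAllocator, Region, Suballocator},
    DeviceLayout,
};
use vulkano::memory::DeviceAlignment;

fn main() {
    let mut allocator = FreeListAllocator::new(Region::new(0, 4096).unwrap());

    let layout = DeviceLayout::from_size_alignment(1024, 64).unwrap();
    allocator
        .allocate(layout, AllocationType::Linear, DeviceAlignment::MIN)
        .unwrap();

    // The new iterator reports every node, free or not.
    for node in allocator.suballocations() {
        println!("{:>6} (+{:<6}) {:?}", node.offset, node.size, node.allocation_type);
    }
}
```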
Changes to the buddy allocator:

```diff
@@ -1,4 +1,6 @@
-use super::{AllocationType, Region, Suballocation, Suballocator, SuballocatorError};
+use super::{
+    AllocationType, Region, Suballocation, SuballocationNode, Suballocator, SuballocatorError,
+};
 use crate::{
     memory::{
         allocator::{align_up, array_vec::ArrayVec, AllocationHandle, DeviceLayout},
@@ -6,10 +8,7 @@ use crate::{
     },
     DeviceSize, NonZeroDeviceSize,
 };
-use std::{
-    cell::{Cell, UnsafeCell},
-    cmp,
-};
+use std::cmp;
 
 /// A [suballocator] whose structure forms a binary tree of power-of-two-sized suballocations.
 ///
@@ -62,8 +61,11 @@ use std::{
 pub struct BuddyAllocator {
     region_offset: DeviceSize,
     // Total memory remaining in the region.
-    free_size: Cell<DeviceSize>,
-    state: UnsafeCell<BuddyAllocatorState>,
+    free_size: DeviceSize,
+    // Every order has its own free-list for convenience, so that we don't have to traverse a tree.
+    // Each free-list is sorted by offset because we want to find the first-fit as this strategy
+    // minimizes external fragmentation.
+    free_list: ArrayVec<Vec<DeviceSize>, { Self::MAX_ORDERS }>,
 }
 
 impl BuddyAllocator {
@@ -75,6 +77,8 @@ impl BuddyAllocator {
 }
 
 unsafe impl Suballocator for BuddyAllocator {
+    type Suballocations<'a> = std::iter::Empty<SuballocationNode>;
+
     /// Creates a new `BuddyAllocator` for the given [region].
     ///
     /// # Panics
@@ -93,24 +97,21 @@ unsafe impl Suballocator for BuddyAllocator {
 
         assert!(max_order < BuddyAllocator::MAX_ORDERS);
 
-        let free_size = Cell::new(region.size());
-
         let mut free_list =
             ArrayVec::new(max_order + 1, [EMPTY_FREE_LIST; BuddyAllocator::MAX_ORDERS]);
         // The root node has the lowest offset and highest order, so it's the whole region.
         free_list[max_order].push(region.offset());
-        let state = UnsafeCell::new(BuddyAllocatorState { free_list });
 
         BuddyAllocator {
             region_offset: region.offset(),
-            free_size,
-            state,
+            free_size: region.size(),
+            free_list,
         }
     }
 
     #[inline]
     fn allocate(
-        &self,
+        &mut self,
         layout: DeviceLayout,
         allocation_type: AllocationType,
         buffer_image_granularity: DeviceAlignment,
@@ -150,17 +151,16 @@ unsafe impl Suballocator for BuddyAllocator {
         let size = cmp::max(size, BuddyAllocator::MIN_NODE_SIZE).next_power_of_two();
 
         let min_order = (size / BuddyAllocator::MIN_NODE_SIZE).trailing_zeros() as usize;
-        let state = unsafe { &mut *self.state.get() };
 
         // Start searching at the lowest possible order going up.
-        for (order, free_list) in state.free_list.iter_mut().enumerate().skip(min_order) {
+        for (order, free_list) in self.free_list.iter_mut().enumerate().skip(min_order) {
            for (index, &offset) in free_list.iter().enumerate() {
                if is_aligned(offset, alignment) {
                    free_list.remove(index);

                    // Go in the opposite direction, splitting nodes from higher orders. The lowest
                    // order doesn't need any splitting.
-                    for (order, free_list) in state
+                    for (order, free_list) in self
                        .free_list
                        .iter_mut()
                        .enumerate()
@@ -185,7 +185,7 @@ unsafe impl Suballocator for BuddyAllocator {
 
                    // This can't overflow because suballocation sizes in the free-list are
                    // constrained by the remaining size of the region.
-                    self.free_size.set(self.free_size.get() - size);
+                    self.free_size -= size;
 
                    return Ok(Suballocation {
                        offset,
@@ -206,17 +206,16 @@ unsafe impl Suballocator for BuddyAllocator {
     }
 
     #[inline]
-    unsafe fn deallocate(&self, suballocation: Suballocation) {
+    unsafe fn deallocate(&mut self, suballocation: Suballocation) {
         let mut offset = suballocation.offset;
         let order = suballocation.handle.as_index();
 
         let min_order = order;
-        let state = unsafe { &mut *self.state.get() };
 
-        debug_assert!(!state.free_list[order].contains(&offset));
+        debug_assert!(!self.free_list[order].contains(&offset));
 
         // Try to coalesce nodes while incrementing the order.
-        for (order, free_list) in state.free_list.iter_mut().enumerate().skip(min_order) {
+        for (order, free_list) in self.free_list.iter_mut().enumerate().skip(min_order) {
            // This can't discard any bits because `order` is confined to the range
            // [0, log(region.size / BuddyAllocator::MIN_NODE_SIZE)].
            let size = BuddyAllocator::MIN_NODE_SIZE << order;
@@ -241,7 +240,7 @@ unsafe impl Suballocator for BuddyAllocator {
 
                // The sizes of suballocations allocated by `self` are constrained by that of
                // its region, so they can't possibly overflow when added up.
-                self.free_size.set(self.free_size.get() + size);
+                self.free_size += size;
 
                break;
            }
@@ -256,17 +255,14 @@ unsafe impl Suballocator for BuddyAllocator {
     /// [internal fragmentation]: super#internal-fragmentation
     #[inline]
     fn free_size(&self) -> DeviceSize {
-        self.free_size.get()
+        self.free_size
     }
 
     #[inline]
     fn cleanup(&mut self) {}
-}
 
-#[derive(Debug)]
-struct BuddyAllocatorState {
-    // Every order has its own free-list for convenience, so that we don't have to traverse a tree.
-    // Each free-list is sorted by offset because we want to find the first-fit as this strategy
-    // minimizes external fragmentation.
-    free_list: ArrayVec<Vec<DeviceSize>, { BuddyAllocator::MAX_ORDERS }>,
-}
+    #[inline]
+    fn suballocations(&self) -> Self::Suballocations<'_> {
+        todo!()
+    }
+}
```
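A standalone sketch of the order arithmetic used in `allocate` above (`next_power_of_two` then `trailing_zeros`), assuming a hypothetical `MIN_NODE_SIZE` of 16; the real constant lives on `BuddyAllocator`:

```rust
// Standalone sketch: map a request size to a buddy order (free-list index).
const MIN_NODE_SIZE: u64 = 16; // assumption for illustration

fn order_for(size: u64) -> usize {
    // Round the request up to the next power-of-two node size...
    let size = size.max(MIN_NODE_SIZE).next_power_of_two();

    // ...then the order is log2(size / MIN_NODE_SIZE), i.e. which
    // per-order free-list to start searching from.
    (size / MIN_NODE_SIZE).trailing_zeros() as usize
}

fn main() {
    assert_eq!(order_for(1), 0); // rounds up to 16, the smallest node
    assert_eq!(order_for(40), 2); // rounds up to 64; 64 / 16 = 2^2
    assert_eq!(order_for(128), 3); // 128 / 16 = 2^3
}
```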
Changes to the bump allocator, which gains a real two-part `Suballocations` iterator:

```diff
@@ -1,14 +1,15 @@
-use super::{AllocationType, Region, Suballocation, Suballocator, SuballocatorError};
+use super::{
+    are_blocks_on_same_page, AllocationType, Region, Suballocation, SuballocationNode,
+    SuballocationType, Suballocator, SuballocatorError,
+};
 use crate::{
     memory::{
-        allocator::{
-            align_up, suballocator::are_blocks_on_same_page, AllocationHandle, DeviceLayout,
-        },
+        allocator::{align_up, AllocationHandle, DeviceLayout},
         DeviceAlignment,
     },
     DeviceSize,
 };
-use std::cell::Cell;
+use std::iter::FusedIterator;
 
 /// A [suballocator] which can allocate dynamically, but can only free all allocations at once.
 ///
@@ -53,8 +54,8 @@ use std::cell::Cell;
 #[derive(Debug)]
 pub struct BumpAllocator {
     region: Region,
-    free_start: Cell<DeviceSize>,
-    prev_allocation_type: Cell<AllocationType>,
+    free_start: DeviceSize,
+    prev_allocation_type: AllocationType,
 }
 
 impl BumpAllocator {
@@ -63,26 +64,46 @@ impl BumpAllocator {
     /// [region]: Suballocator#regions
     #[inline]
     pub fn reset(&mut self) {
-        *self.free_start.get_mut() = 0;
-        *self.prev_allocation_type.get_mut() = AllocationType::Unknown;
+        self.free_start = 0;
+        self.prev_allocation_type = AllocationType::Unknown;
     }
+
+    fn suballocation_node(&self, part: usize) -> SuballocationNode {
+        if part == 0 {
+            SuballocationNode {
+                offset: self.region.offset(),
+                size: self.free_start,
+                allocation_type: self.prev_allocation_type.into(),
+            }
+        } else {
+            debug_assert_eq!(part, 1);
+
+            SuballocationNode {
+                offset: self.region.offset() + self.free_start,
+                size: self.free_size(),
+                allocation_type: SuballocationType::Free,
+            }
+        }
+    }
 }
 
 unsafe impl Suballocator for BumpAllocator {
+    type Suballocations<'a> = Suballocations<'a>;
+
     /// Creates a new `BumpAllocator` for the given [region].
     ///
     /// [region]: Suballocator#regions
     fn new(region: Region) -> Self {
         BumpAllocator {
             region,
-            free_start: Cell::new(0),
-            prev_allocation_type: Cell::new(AllocationType::Unknown),
+            free_start: 0,
+            prev_allocation_type: AllocationType::Unknown,
         }
     }
 
     #[inline]
     fn allocate(
-        &self,
+        &mut self,
         layout: DeviceLayout,
         allocation_type: AllocationType,
         buffer_image_granularity: DeviceAlignment,
@@ -96,13 +117,13 @@ unsafe impl Suballocator for BumpAllocator {
 
         // These can't overflow because suballocation offsets are bounded by the region, whose end
         // can itself not exceed `DeviceLayout::MAX_SIZE`.
-        let prev_end = self.region.offset() + self.free_start.get();
+        let prev_end = self.region.offset() + self.free_start;
         let mut offset = align_up(prev_end, alignment);
 
         if buffer_image_granularity != DeviceAlignment::MIN
             && prev_end > 0
             && are_blocks_on_same_page(0, prev_end, offset, buffer_image_granularity)
-            && has_granularity_conflict(self.prev_allocation_type.get(), allocation_type)
+            && has_granularity_conflict(self.prev_allocation_type, allocation_type)
         {
             offset = align_up(offset, buffer_image_granularity);
         }
@@ -115,8 +136,8 @@ unsafe impl Suballocator for BumpAllocator {
             return Err(SuballocatorError::OutOfRegionMemory);
         }
 
-        self.free_start.set(free_start);
-        self.prev_allocation_type.set(allocation_type);
+        self.free_start = free_start;
+        self.prev_allocation_type = allocation_type;
 
         Ok(Suballocation {
             offset,
@@ -127,17 +148,91 @@ unsafe impl Suballocator for BumpAllocator {
     }
 
     #[inline]
-    unsafe fn deallocate(&self, _suballocation: Suballocation) {
+    unsafe fn deallocate(&mut self, _suballocation: Suballocation) {
         // such complex, very wow
     }
 
     #[inline]
     fn free_size(&self) -> DeviceSize {
-        self.region.size() - self.free_start.get()
+        self.region.size() - self.free_start
     }
 
     #[inline]
     fn cleanup(&mut self) {
         self.reset();
     }
+
+    #[inline]
+    fn suballocations(&self) -> Self::Suballocations<'_> {
+        let start = if self.free_start == 0 { 1 } else { 0 };
+        let end = if self.free_start == self.region.size() {
+            1
+        } else {
+            2
+        };
+
+        Suballocations {
+            allocator: self,
+            start,
+            end,
+        }
+    }
 }
+
+#[derive(Clone)]
+pub struct Suballocations<'a> {
+    allocator: &'a BumpAllocator,
+    start: usize,
+    end: usize,
+}
+
+impl Iterator for Suballocations<'_> {
+    type Item = SuballocationNode;
+
+    #[inline]
+    fn next(&mut self) -> Option<Self::Item> {
+        if self.len() != 0 {
+            let node = self.allocator.suballocation_node(self.start);
+            self.start += 1;
+
+            Some(node)
+        } else {
+            None
+        }
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        let len = self.len();
+
+        (len, Some(len))
+    }
+
+    #[inline]
+    fn last(mut self) -> Option<Self::Item> {
+        self.next_back()
+    }
+}
+
+impl DoubleEndedIterator for Suballocations<'_> {
+    #[inline]
+    fn next_back(&mut self) -> Option<Self::Item> {
+        if self.len() != 0 {
+            self.end -= 1;
+            let node = self.allocator.suballocation_node(self.end);
+
+            Some(node)
+        } else {
+            None
+        }
+    }
+}
+
+impl ExactSizeIterator for Suballocations<'_> {
+    #[inline]
+    fn len(&self) -> usize {
+        self.end - self.start
+    }
+}
+
+impl FusedIterator for Suballocations<'_> {}
```
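Because a bump allocator is a single watermark, its iterator can only ever report two parts: the bumped prefix and the free suffix. A hedged sketch of the expected nodes (not part of the commit; paths and sizes are assumptions):

```rust
// Sketch of the two-part view above, with assumed module paths.
use vulkano::memory::allocator::{
    suballocator::{AllocationType, BumpAllocator, Region, Suballocator},
    DeviceLayout,
};
use vulkano::memory::DeviceAlignment;

fn main() {
    let mut allocator = BumpAllocator::new(Region::new(0, 1024).unwrap());
    // Nothing bumped yet: only the free suffix (part 1) is reported.
    assert_eq!(allocator.suballocations().len(), 1);

    let layout = DeviceLayout::from_size_alignment(256, 1).unwrap();
    allocator
        .allocate(layout, AllocationType::Linear, DeviceAlignment::MIN)
        .unwrap();

    let mut iter = allocator.suballocations();
    let used = iter.next().unwrap(); // part 0: the bumped prefix
    assert_eq!((used.offset, used.size), (0, 256));
    let free = iter.next().unwrap(); // part 1: the free suffix
    assert_eq!((free.offset, free.size), (256, 768));
}
```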
Changes to the free-list allocator:

```diff
@@ -1,18 +1,15 @@
-use super::{AllocationType, Region, Suballocation, Suballocator, SuballocatorError};
+use super::{
+    are_blocks_on_same_page, AllocationType, Region, Suballocation, SuballocationNode,
+    SuballocationType, Suballocator, SuballocatorError,
+};
 use crate::{
     memory::{
-        allocator::{
-            align_up, suballocator::are_blocks_on_same_page, AllocationHandle, DeviceLayout,
-        },
+        allocator::{align_up, AllocationHandle, DeviceLayout},
         is_aligned, DeviceAlignment,
     },
     DeviceSize,
 };
-use std::{
-    cell::{Cell, UnsafeCell},
-    cmp,
-    ptr::NonNull,
-};
+use std::{cmp, iter::FusedIterator, marker::PhantomData, ptr::NonNull};
 
 /// A [suballocator] that uses the most generic [free-list].
 ///
@@ -65,47 +62,49 @@ use std::{
 pub struct FreeListAllocator {
     region_offset: DeviceSize,
     // Total memory remaining in the region.
-    free_size: Cell<DeviceSize>,
-    state: UnsafeCell<FreeListAllocatorState>,
+    free_size: DeviceSize,
+    suballocations: SuballocationList,
 }
 
-unsafe impl Send for FreeListAllocator {}
-
 unsafe impl Suballocator for FreeListAllocator {
+    type Suballocations<'a> = Suballocations<'a>;
+
     /// Creates a new `FreeListAllocator` for the given [region].
     ///
     /// [region]: Suballocator#regions
     fn new(region: Region) -> Self {
-        let free_size = Cell::new(region.size());
-
         let node_allocator = slabbin::SlabAllocator::<SuballocationListNode>::new(32);
-        let mut free_list = Vec::with_capacity(32);
         let root_ptr = node_allocator.allocate();
         let root = SuballocationListNode {
             prev: None,
             next: None,
             offset: region.offset(),
             size: region.size(),
-            ty: SuballocationType::Free,
+            allocation_type: SuballocationType::Free,
         };
         unsafe { root_ptr.as_ptr().write(root) };
 
+        let mut free_list = Vec::with_capacity(32);
         free_list.push(root_ptr);
 
-        let state = UnsafeCell::new(FreeListAllocatorState {
-            node_allocator,
+        let suballocations = SuballocationList {
+            head: root_ptr,
+            tail: root_ptr,
+            len: 1,
             free_list,
-        });
+            node_allocator,
+        };
 
         FreeListAllocator {
             region_offset: region.offset(),
-            free_size,
-            state,
+            free_size: region.size(),
+            suballocations,
         }
     }
 
     #[inline]
     fn allocate(
-        &self,
+        &mut self,
         layout: DeviceLayout,
         allocation_type: AllocationType,
         buffer_image_granularity: DeviceAlignment,
@@ -122,9 +121,8 @@ unsafe impl Suballocator for FreeListAllocator {
 
         let size = layout.size();
         let alignment = layout.alignment();
-        let state = unsafe { &mut *self.state.get() };
 
-        match state.free_list.last() {
+        match self.suballocations.free_list.last() {
             Some(&last) if unsafe { (*last.as_ptr()).size } >= size => {
                 // We create a dummy node to compare against in the below binary search. The only
                 // fields of importance are `offset` and `size`. It is paramount that we set
@@ -136,7 +134,7 @@ unsafe impl Suballocator for FreeListAllocator {
                     next: None,
                     offset: 0,
                     size,
-                    ty: SuballocationType::Unknown,
+                    allocation_type: SuballocationType::Unknown,
                 };
 
                 // This is almost exclusively going to return `Err`, but that's expected: we are
@@ -149,11 +147,14 @@ unsafe impl Suballocator for FreeListAllocator {
                 //
                 // Note that `index == free_list.len()` can't be because we checked that the
                 // free-list contains a suballocation that is big enough.
-                let (Ok(index) | Err(index)) = state
+                let (Ok(index) | Err(index)) = self
+                    .suballocations
                     .free_list
                     .binary_search_by_key(&dummy_node, |&ptr| unsafe { *ptr.as_ptr() });
 
-                for (index, &node_ptr) in state.free_list.iter().enumerate().skip(index) {
+                for (index, &node_ptr) in
+                    self.suballocations.free_list.iter().enumerate().skip(index)
+                {
                     let node = unsafe { *node_ptr.as_ptr() };
 
                     // This can't overflow because suballocation offsets are bounded by the region,
@@ -171,7 +172,7 @@ unsafe impl Suballocator for FreeListAllocator {
                             prev.size,
                             offset,
                             buffer_image_granularity,
-                        ) && has_granularity_conflict(prev.ty, allocation_type)
+                        ) && has_granularity_conflict(prev.allocation_type, allocation_type)
                         {
                             // This is overflow-safe for the same reason as above.
                             offset = align_up(offset, buffer_image_granularity);
@@ -187,19 +188,19 @@ unsafe impl Suballocator for FreeListAllocator {
                     //
                     // `node.offset + node.size` can't overflow for the same reason as above.
                     if offset + size <= node.offset + node.size {
-                        state.free_list.remove(index);
+                        self.suballocations.free_list.remove(index);
 
                         // SAFETY:
                         // - `node` is free.
                         // - `offset` is that of `node`, possibly rounded up.
                         // - We checked that `offset + size` falls within `node`.
-                        unsafe { state.split(node_ptr, offset, size) };
+                        unsafe { self.suballocations.split(node_ptr, offset, size) };
 
-                        unsafe { (*node_ptr.as_ptr()).ty = allocation_type.into() };
+                        unsafe { (*node_ptr.as_ptr()).allocation_type = allocation_type.into() };
 
                         // This can't overflow because suballocation sizes in the free-list are
                         // constrained by the remaining size of the region.
-                        self.free_size.set(self.free_size.get() - size);
+                        self.free_size -= size;
 
                         return Ok(Suballocation {
                             offset,
@@ -223,7 +224,7 @@ unsafe impl Suballocator for FreeListAllocator {
     }
 
     #[inline]
-    unsafe fn deallocate(&self, suballocation: Suballocation) {
+    unsafe fn deallocate(&mut self, suballocation: Suballocation) {
         let node_ptr = suballocation
             .handle
             .as_ptr()
@@ -235,44 +236,53 @@ unsafe impl Suballocator for FreeListAllocator {
         let node_ptr = unsafe { NonNull::new_unchecked(node_ptr) };
         let node = unsafe { *node_ptr.as_ptr() };
 
-        debug_assert!(node.ty != SuballocationType::Free);
+        debug_assert_ne!(node.allocation_type, SuballocationType::Free);
 
         // Suballocation sizes are constrained by the size of the region, so they can't possibly
         // overflow when added up.
-        self.free_size.set(self.free_size.get() + node.size);
+        self.free_size += node.size;
 
-        unsafe { (*node_ptr.as_ptr()).ty = SuballocationType::Free };
+        unsafe { (*node_ptr.as_ptr()).allocation_type = SuballocationType::Free };
 
-        let state = unsafe { &mut *self.state.get() };
-        unsafe { state.coalesce(node_ptr) };
-        unsafe { state.deallocate(node_ptr) };
+        unsafe { self.suballocations.coalesce(node_ptr) };
+        unsafe { self.suballocations.deallocate(node_ptr) };
     }
 
     #[inline]
     fn free_size(&self) -> DeviceSize {
-        self.free_size.get()
+        self.free_size
     }
 
     #[inline]
     fn cleanup(&mut self) {}
+
+    #[inline]
+    fn suballocations(&self) -> Self::Suballocations<'_> {
+        self.suballocations.iter()
+    }
 }
 
 #[derive(Debug)]
-struct FreeListAllocatorState {
-    node_allocator: slabbin::SlabAllocator<SuballocationListNode>,
+struct SuballocationList {
+    head: NonNull<SuballocationListNode>,
+    tail: NonNull<SuballocationListNode>,
+    len: usize,
     // Free suballocations sorted by size in ascending order. This means we can always find a
     // best-fit in *O*(log(*n*)) time in the worst case, and iterating in order is very efficient.
     free_list: Vec<NonNull<SuballocationListNode>>,
+    node_allocator: slabbin::SlabAllocator<SuballocationListNode>,
 }
 
+unsafe impl Send for SuballocationList {}
+unsafe impl Sync for SuballocationList {}
+
 #[derive(Clone, Copy, Debug)]
 struct SuballocationListNode {
     prev: Option<NonNull<Self>>,
     next: Option<NonNull<Self>>,
     offset: DeviceSize,
     size: DeviceSize,
-    ty: SuballocationType,
+    allocation_type: SuballocationType,
 }
 
 impl PartialEq for SuballocationListNode {
```
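The binary search in `allocate` above leans on `SuballocationListNode`'s ordering, whose impls sit mostly outside this diff. A standalone illustration of the idea with made-up names: ordering free nodes by (size, offset) makes a search for (requested size, 0) land on the lowest-offset best fit.

```rust
// Illustrative only; field and type names are not vulkano's.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
struct FreeNode {
    size: u64,   // compared first: best-fit by size
    offset: u64, // compared second: first-fit among equal sizes
}

fn main() {
    // Sorted by (size, offset), like the free-list above.
    let free_list = [
        FreeNode { size: 64, offset: 512 },
        FreeNode { size: 128, offset: 0 },
        FreeNode { size: 256, offset: 256 },
    ];

    // A 100-byte request: `Err(index)` points at the smallest node that fits.
    let dummy = FreeNode { size: 100, offset: 0 };
    let (Ok(index) | Err(index)) = free_list.binary_search(&dummy);
    assert_eq!(free_list[index].size, 128);
}
```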
The rest of the free-list changes, moving the node bookkeeping into `SuballocationList` and adding its iterator:

```diff
@@ -300,48 +310,7 @@ impl Ord for SuballocationListNode {
     }
 }
 
-/// Tells us if a suballocation is free, and if not, whether it is linear or not. This is needed in
-/// order to be able to respect the buffer-image granularity.
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
-enum SuballocationType {
-    Unknown,
-    Linear,
-    NonLinear,
-    Free,
-}
-
-impl From<AllocationType> for SuballocationType {
-    fn from(ty: AllocationType) -> Self {
-        match ty {
-            AllocationType::Unknown => SuballocationType::Unknown,
-            AllocationType::Linear => SuballocationType::Linear,
-            AllocationType::NonLinear => SuballocationType::NonLinear,
-        }
-    }
-}
-
-impl FreeListAllocatorState {
-    /// Removes the target suballocation from the free-list.
-    ///
-    /// # Safety
-    ///
-    /// - `node_ptr` must refer to a currently free suballocation of `self`.
-    unsafe fn allocate(&mut self, node_ptr: NonNull<SuballocationListNode>) {
-        debug_assert!(self.free_list.contains(&node_ptr));
-
-        let node = unsafe { *node_ptr.as_ptr() };
-
-        match self
-            .free_list
-            .binary_search_by_key(&node, |&ptr| unsafe { *ptr.as_ptr() })
-        {
-            Ok(index) => {
-                self.free_list.remove(index);
-            }
-            Err(_) => unreachable!(),
-        }
-    }
-
+impl SuballocationList {
     /// Fits a suballocation inside the target one, splitting the target at the ends if required.
     ///
     /// # Safety
@@ -356,7 +325,7 @@ impl FreeListAllocatorState {
     ) {
         let node = unsafe { *node_ptr.as_ptr() };
 
-        debug_assert!(node.ty == SuballocationType::Free);
+        debug_assert_eq!(node.allocation_type, SuballocationType::Free);
         debug_assert!(offset >= node.offset);
         debug_assert!(offset + size <= node.offset + node.size);
 
@@ -372,7 +341,7 @@ impl FreeListAllocatorState {
                 next: Some(node_ptr),
                 offset: node.offset,
                 size: padding_front,
-                ty: SuballocationType::Free,
+                allocation_type: SuballocationType::Free,
             };
             unsafe { padding_ptr.as_ptr().write(padding) };
 
@@ -387,6 +356,12 @@ impl FreeListAllocatorState {
             // of the padding, so this can't overflow.
             unsafe { (*node_ptr.as_ptr()).size -= padding.size };
 
+            if node_ptr == self.head {
+                self.head = padding_ptr;
+            }
+
+            self.len += 1;
+
             // SAFETY: We just created this suballocation, so there's no way that it was
             // deallocated already.
             unsafe { self.deallocate(padding_ptr) };
@@ -399,7 +374,7 @@ impl FreeListAllocatorState {
                 next: node.next,
                 offset: offset + size,
                 size: padding_back,
-                ty: SuballocationType::Free,
+                allocation_type: SuballocationType::Free,
             };
             unsafe { padding_ptr.as_ptr().write(padding) };
 
@@ -411,6 +386,12 @@ impl FreeListAllocatorState {
             // This is overflow-safe for the same reason as above.
             unsafe { (*node_ptr.as_ptr()).size -= padding.size };
 
+            if node_ptr == self.tail {
+                self.tail = padding_ptr;
+            }
+
+            self.len += 1;
+
             // SAFETY: Same as above.
             unsafe { self.deallocate(padding_ptr) };
         }
@@ -439,12 +420,12 @@ impl FreeListAllocatorState {
     unsafe fn coalesce(&mut self, node_ptr: NonNull<SuballocationListNode>) {
         let node = unsafe { *node_ptr.as_ptr() };
 
-        debug_assert!(node.ty == SuballocationType::Free);
+        debug_assert_eq!(node.allocation_type, SuballocationType::Free);
 
         if let Some(prev_ptr) = node.prev {
             let prev = unsafe { *prev_ptr.as_ptr() };
 
-            if prev.ty == SuballocationType::Free {
+            if prev.allocation_type == SuballocationType::Free {
                 // SAFETY: We checked that the suballocation is free.
                 self.allocate(prev_ptr);
 
@@ -458,11 +439,18 @@ impl FreeListAllocatorState {
                     unsafe { (*prev_ptr.as_ptr()).next = Some(node_ptr) };
                 }
 
+                if prev_ptr == self.head {
+                    self.head = node_ptr;
+                }
+
+                self.len -= 1;
+
                 // SAFETY:
                 // - The suballocation is free.
                 // - The suballocation was removed from the free-list.
                 // - The next suballocation and possibly a previous suballocation have been updated
                 //   such that they no longer reference the suballocation.
+                // - The head no longer points to the suballocation if it used to.
                 //   All of these conditions combined guarantee that `prev_ptr` cannot be used again.
                 unsafe { self.node_allocator.deallocate(prev_ptr) };
             }
@@ -471,7 +459,7 @@ impl FreeListAllocatorState {
         if let Some(next_ptr) = node.next {
             let next = unsafe { *next_ptr.as_ptr() };
 
-            if next.ty == SuballocationType::Free {
+            if next.allocation_type == SuballocationType::Free {
                 // SAFETY: Same as above.
                 self.allocate(next_ptr);
 
@@ -483,9 +471,123 @@ impl FreeListAllocatorState {
                     unsafe { (*next_ptr.as_ptr()).prev = Some(node_ptr) };
                 }
 
+                if next_ptr == self.tail {
+                    self.tail = node_ptr;
+                }
+
+                self.len -= 1;
+
                 // SAFETY: Same as above.
                 unsafe { self.node_allocator.deallocate(next_ptr) };
             }
         }
     }
+
+    /// Removes the target suballocation from the free-list.
+    ///
+    /// # Safety
+    ///
+    /// - `node_ptr` must refer to a currently free suballocation of `self`.
+    unsafe fn allocate(&mut self, node_ptr: NonNull<SuballocationListNode>) {
+        debug_assert!(self.free_list.contains(&node_ptr));
+
+        let node = unsafe { *node_ptr.as_ptr() };
+
+        match self
+            .free_list
+            .binary_search_by_key(&node, |&ptr| unsafe { *ptr.as_ptr() })
+        {
+            Ok(index) => {
+                self.free_list.remove(index);
+            }
+            Err(_) => unreachable!(),
+        }
+    }
+
+    fn iter(&self) -> Suballocations<'_> {
+        Suballocations {
+            head: Some(self.head),
+            tail: Some(self.tail),
+            len: self.len,
+            marker: PhantomData,
+        }
+    }
 }
+
+#[derive(Clone)]
+pub struct Suballocations<'a> {
+    head: Option<NonNull<SuballocationListNode>>,
+    tail: Option<NonNull<SuballocationListNode>>,
+    len: usize,
+    marker: PhantomData<&'a SuballocationList>,
+}
+
+unsafe impl Send for Suballocations<'_> {}
+unsafe impl Sync for Suballocations<'_> {}
+
+impl Iterator for Suballocations<'_> {
+    type Item = SuballocationNode;
+
+    #[inline]
+    fn next(&mut self) -> Option<Self::Item> {
+        if self.len != 0 {
+            if let Some(head) = self.head {
+                let head = unsafe { *head.as_ptr() };
+                self.head = head.next;
+                self.len -= 1;
+
+                Some(SuballocationNode {
+                    offset: head.offset,
+                    size: head.size,
+                    allocation_type: head.allocation_type,
+                })
+            } else {
+                None
+            }
+        } else {
+            None
+        }
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        (self.len, Some(self.len))
+    }
+
+    #[inline]
+    fn last(mut self) -> Option<Self::Item> {
+        self.next_back()
+    }
+}
+
+impl DoubleEndedIterator for Suballocations<'_> {
+    #[inline]
+    fn next_back(&mut self) -> Option<Self::Item> {
+        if self.len != 0 {
+            if let Some(tail) = self.tail {
+                let tail = unsafe { *tail.as_ptr() };
+                self.tail = tail.prev;
+                self.len -= 1;
+
+                Some(SuballocationNode {
+                    offset: tail.offset,
+                    size: tail.size,
+                    allocation_type: tail.allocation_type,
+                })
+            } else {
+                None
+            }
+        } else {
+            None
+        }
+    }
+}
+
+impl ExactSizeIterator for Suballocations<'_> {
+    #[inline]
+    fn len(&self) -> usize {
+        self.len
+    }
+}
+
+impl FusedIterator for Suballocations<'_> {}
```
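Putting the bookkeeping together, a sketch of what the new iterator should yield after a single allocation (not part of the commit; paths assume vulkano's public re-exports and that no buffer-image-granularity padding is introduced):

```rust
// Sketch with assumed paths; the exact node layout follows from `split`
// creating back padding when the allocation doesn't fill the root node.
use vulkano::memory::allocator::{
    suballocator::{AllocationType, FreeListAllocator, Region, Suballocator},
    DeviceLayout,
};
use vulkano::memory::DeviceAlignment;

fn main() {
    let mut allocator = FreeListAllocator::new(Region::new(0, 1024).unwrap());
    let layout = DeviceLayout::from_size_alignment(256, 1).unwrap();
    allocator
        .allocate(layout, AllocationType::Linear, DeviceAlignment::MIN)
        .unwrap();

    // The root node was split in two: the allocation and the free remainder.
    let nodes: Vec<_> = allocator.suballocations().collect();
    assert_eq!(nodes.len(), 2);
    assert_eq!((nodes[0].offset, nodes[0].size), (0, 256)); // Linear
    assert_eq!((nodes[1].offset, nodes[1].size), (256, 768)); // Free

    // The iterator is double-ended, so the free tail comes first in reverse.
    assert_eq!(allocator.suballocations().next_back().unwrap().size, 768);
}
```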
Changes to the module root, where the `Suballocator` trait gains the associated iterator type:

```diff
@@ -78,6 +78,15 @@ mod free_list;
 /// [page]: super#pages
 /// [buffer-image granularity]: super#buffer-image-granularity
 pub unsafe trait Suballocator {
+    /// The type of iterator returned by [`suballocations`].
+    ///
+    /// [`suballocations`]: Self::suballocations
+    type Suballocations<'a>: Iterator<Item = SuballocationNode>
+        + DoubleEndedIterator
+        + ExactSizeIterator
+    where
+        Self: Sized + 'a;
+
     /// Creates a new suballocator for the given [region].
     ///
     /// [region]: Self#regions
@@ -115,7 +124,7 @@ pub unsafe trait Suballocator {
     /// [buffer-image granularity]: super#buffer-image-granularity
     /// [`DeviceMemory`]: crate::memory::DeviceMemory
     fn allocate(
-        &self,
+        &mut self,
         layout: DeviceLayout,
         allocation_type: AllocationType,
         buffer_image_granularity: DeviceAlignment,
@@ -126,7 +135,7 @@ pub unsafe trait Suballocator {
     /// # Safety
     ///
     /// - `suballocation` must refer to a **currently allocated** suballocation of `self`.
-    unsafe fn deallocate(&self, suballocation: Suballocation);
+    unsafe fn deallocate(&mut self, suballocation: Suballocation);
 
     /// Returns the total amount of free space that is left in the [region].
     ///
@@ -137,6 +146,11 @@ pub unsafe trait Suballocator {
     ///
     /// There must be no current allocations as they might get freed.
     fn cleanup(&mut self);
+
+    /// Returns an iterator over the current suballocations.
+    fn suballocations(&self) -> Self::Suballocations<'_>
+    where
+        Self: Sized;
 }
 
 impl Debug for dyn Suballocator {
```
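With the associated type in place, callers can write generic diagnostics over any sized suballocator. A hedged sketch; `largest_free_block` is an illustrative name, not vulkano API:

```rust
// Generic helper enabled by the new associated iterator type.
use vulkano::memory::allocator::suballocator::{SuballocationType, Suballocator};
use vulkano::DeviceSize;

// `S: Suballocator` implies `S: Sized`, satisfying the `where` clauses above.
fn largest_free_block<S: Suballocator>(allocator: &S) -> DeviceSize {
    allocator
        .suballocations()
        .filter(|node| node.allocation_type == SuballocationType::Free)
        .map(|node| node.size)
        .max()
        .unwrap_or(0)
}
```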
Still in the module root, the now-public node and type definitions:

```diff
@@ -299,6 +313,59 @@ impl Display for SuballocatorError {
         }
     }
 }
+
+/// A node within a [suballocator]'s list/tree of suballocations.
+///
+/// [suballocator]: Suballocator
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct SuballocationNode {
+    /// The **absolute** offset within the [region]. That means that this is already offset by the
+    /// region's offset, **not relative to beginning of the region**.
+    ///
+    /// [region]: Suballocator#regions
+    pub offset: DeviceSize,
+
+    /// The size of the allocation.
+    pub size: DeviceSize,
+
+    /// Tells us if the allocation is free, and if not, what type of resources can be bound to it.
+    pub allocation_type: SuballocationType,
+}
+
+/// Tells us if an allocation within a [suballocator]'s list/tree of suballocations is free, and if
+/// not, what type of resources can be bound to it. The suballocator needs to keep track of this in
+/// order to be able to respect the buffer-image granularity.
+///
+/// [suballocator]: Suballocator
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum SuballocationType {
+    /// The type of resource is unknown, it might be either linear or non-linear. What this means
+    /// is that allocations created with this type must always be aligned to the buffer-image
+    /// granularity.
+    Unknown = 0,
+
+    /// The resource is linear, e.g. buffers, linear images. A linear allocation following another
+    /// linear allocation never needs to be aligned to the buffer-image granularity.
+    Linear = 1,
+
+    /// The resource is non-linear, e.g. optimal images. A non-linear allocation following another
+    /// non-linear allocation never needs to be aligned to the buffer-image granularity.
+    NonLinear = 2,
+
+    /// The allocation is free. It can take on any of the allocation types once allocated.
+    Free = 3,
+}
+
+impl From<AllocationType> for SuballocationType {
+    #[inline]
+    fn from(ty: AllocationType) -> Self {
+        match ty {
+            AllocationType::Unknown => SuballocationType::Unknown,
+            AllocationType::Linear => SuballocationType::Linear,
+            AllocationType::NonLinear => SuballocationType::NonLinear,
+        }
+    }
+}
 
 /// Checks if resouces A and B share a page.
 ///
 /// > **Note**: Assumes `a_offset + a_size > 0` and `a_offset + a_size <= b_offset`.
```
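For reference, one plausible reading of the granularity rule the enum encodes, consistent with its variant docs; the module's actual `has_granularity_conflict` is not shown in this diff and may differ:

```rust
// Hedged sketch, not the module's real helper.
use vulkano::memory::allocator::suballocator::{AllocationType, SuballocationType};

fn conflicts(prev: SuballocationType, next: AllocationType) -> bool {
    match prev {
        // A free neighbor never forces granularity alignment.
        SuballocationType::Free => false,
        // An unknown neighbor always does.
        SuballocationType::Unknown => true,
        // Otherwise only a linear/non-linear mix conflicts.
        _ => prev != SuballocationType::from(next),
    }
}
```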
Finally, the tests, updated for the now-`&mut self` allocation methods:

```diff
@@ -367,7 +434,7 @@ mod tests {
             }
         });
 
-        let allocator = allocator.into_inner();
+        let mut allocator = allocator.into_inner();
 
         assert!(allocator
             .allocate(DUMMY_LAYOUT, AllocationType::Unknown, DeviceAlignment::MIN)
@@ -394,7 +461,7 @@ mod tests {
         const REGION_SIZE: DeviceSize = 10 * 256;
         const LAYOUT: DeviceLayout = unwrap(DeviceLayout::from_size_alignment(1, 256));
 
-        let allocator = FreeListAllocator::new(Region::new(0, REGION_SIZE).unwrap());
+        let mut allocator = FreeListAllocator::new(Region::new(0, REGION_SIZE).unwrap());
         let mut allocs = Vec::with_capacity(10);
 
         for _ in 0..10 {
@@ -420,7 +487,7 @@ mod tests {
         const GRANULARITY: DeviceAlignment = unwrap(DeviceAlignment::new(16));
         const REGION_SIZE: DeviceSize = 2 * GRANULARITY.as_devicesize();
 
-        let allocator = FreeListAllocator::new(Region::new(0, REGION_SIZE).unwrap());
+        let mut allocator = FreeListAllocator::new(Region::new(0, REGION_SIZE).unwrap());
         let mut linear_allocs = Vec::with_capacity(REGION_SIZE as usize / 2);
         let mut nonlinear_allocs = Vec::with_capacity(REGION_SIZE as usize / 2);
 
@@ -479,7 +546,7 @@ mod tests {
         const MAX_ORDER: usize = 10;
         const REGION_SIZE: DeviceSize = BuddyAllocator::MIN_NODE_SIZE << MAX_ORDER;
 
-        let allocator = BuddyAllocator::new(Region::new(0, REGION_SIZE).unwrap());
+        let mut allocator = BuddyAllocator::new(Region::new(0, REGION_SIZE).unwrap());
         let mut allocs = Vec::with_capacity(1 << MAX_ORDER);
 
         for order in 0..=MAX_ORDER {
@@ -541,7 +608,7 @@ mod tests {
     fn buddy_allocator_respects_alignment() {
         const REGION_SIZE: DeviceSize = 4096;
 
-        let allocator = BuddyAllocator::new(Region::new(0, REGION_SIZE).unwrap());
+        let mut allocator = BuddyAllocator::new(Region::new(0, REGION_SIZE).unwrap());
 
         {
             let layout = DeviceLayout::from_size_alignment(1, 4096).unwrap();
@@ -608,7 +675,7 @@ mod tests {
         const GRANULARITY: DeviceAlignment = unwrap(DeviceAlignment::new(256));
         const REGION_SIZE: DeviceSize = 2 * GRANULARITY.as_devicesize();
 
-        let allocator = BuddyAllocator::new(Region::new(0, REGION_SIZE).unwrap());
+        let mut allocator = BuddyAllocator::new(Region::new(0, REGION_SIZE).unwrap());
 
         {
             const ALLOCATIONS: DeviceSize = REGION_SIZE / BuddyAllocator::MIN_NODE_SIZE;
```