Replace Suballocator::cleanup with Suballocator::reset (#2585)

This commit is contained in:
parent a0d45e7497
commit 00fd84726c
Changed files include: Cargo.lock (generated): 4 changed lines
@@ -1537,9 +1537,9 @@ dependencies = [
 
 [[package]]
 name = "slabbin"
-version = "1.0.1"
+version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dd8305086044614627ed85432d27b87cf9fc047204eaa036a11de6cf0120f273"
+checksum = "1fd33b7a607dbd960b5e78bb4740d1f86e84250eb03a12960ee1482c2a256063"
 
 [[package]]
 name = "smallvec"
@@ -67,7 +67,7 @@ raw-window-metal = "1.0"
 serde = "1.0"
 serde_json = "1.0"
 shaderc = "0.8.3"
-slabbin = "1.0"
+slabbin = "1.1"
 smallvec = "1.8"
 syn = "2.0"
 thread_local = "1.1"
@@ -1632,7 +1632,7 @@ impl<S: Suballocator> DeviceMemoryBlock<S> {
 
         // For bump allocators, reset the free-start once there are no remaining allocations.
         if self.allocation_count == 0 {
-            self.suballocator.cleanup();
+            self.suballocator.reset();
         }
     }
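
For context, the hunk above swaps the cleanup call for reset at the point where a memory block's last suballocation is freed. A minimal sketch of that pattern, using hypothetical stand-in types (MiniSuballocator, MemoryBlock) rather than vulkano's actual ones:

// Hypothetical stand-ins for illustration; vulkano's real types and signatures differ.
trait MiniSuballocator {
    fn deallocate(&mut self, offset: u64);
    /// Deallocates every live suballocation at once.
    fn reset(&mut self);
}

struct MemoryBlock<S> {
    suballocator: S,
    allocation_count: usize,
}

impl<S: MiniSuballocator> MemoryBlock<S> {
    fn deallocate(&mut self, offset: u64) {
        self.suballocator.deallocate(offset);
        self.allocation_count -= 1;

        // Mirrors the changed call site: once nothing is live, reset the
        // suballocator so a bump allocator's free-start returns to the region start.
        if self.allocation_count == 0 {
            self.suballocator.reset();
        }
    }
}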
@@ -61,7 +61,7 @@ use std::cmp;
 /// [`BumpAllocator`]: super::BumpAllocator
 #[derive(Debug)]
 pub struct BuddyAllocator {
-    region_offset: DeviceSize,
+    region: Region,
     // Total memory remaining in the region.
     free_size: DeviceSize,
     // Every order has its own free-list for convenience, so that we don't have to traverse a tree.
@@ -105,7 +105,7 @@ unsafe impl Suballocator for BuddyAllocator {
         free_list[max_order].push(region.offset());
 
         BuddyAllocator {
-            region_offset: region.offset(),
+            region,
             free_size: region.size(),
             free_list,
         }
@@ -136,7 +136,7 @@ unsafe impl Suballocator for BuddyAllocator {
         let mut alignment = layout.alignment();
 
         if buffer_image_granularity != DeviceAlignment::MIN {
-            debug_assert!(is_aligned(self.region_offset, buffer_image_granularity));
+            debug_assert!(is_aligned(self.region.offset(), buffer_image_granularity));
 
             if allocation_type == AllocationType::Unknown
                 || allocation_type == AllocationType::NonLinear
@@ -224,7 +224,7 @@ unsafe impl Suballocator for BuddyAllocator {
 
         // This can't overflow because the offsets in the free-list are confined to the range
         // [region.offset, region.offset + region.size).
-        let buddy_offset = ((offset - self.region_offset) ^ size) + self.region_offset;
+        let buddy_offset = ((offset - self.region.offset()) ^ size) + self.region.offset();
 
         match free_list.binary_search(&buddy_offset) {
             // If the buddy is in the free-list, we can coalesce.
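
The buddy computation kept (and re-expressed) above uses the usual XOR trick: relative to the region start, a node's buddy is found by flipping the bit corresponding to the node's size. A small worked example with illustrative numbers (region offset 256, node size 64; these values are not from the source):

fn buddy_offset(offset: u64, size: u64, region_offset: u64) -> u64 {
    // Same arithmetic as the diff: work relative to the region start,
    // flip the `size` bit, then translate back to an absolute offset.
    ((offset - region_offset) ^ size) + region_offset
}

fn main() {
    let region_offset = 256;
    let size = 64;

    // The node at relative offset 64 pairs with the node at relative offset 0...
    assert_eq!(buddy_offset(320, size, region_offset), 256);
    // ...and vice versa, since XOR is its own inverse.
    assert_eq!(buddy_offset(256, size, region_offset), 320);

    println!("buddy pairs check out");
}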
@@ -250,6 +250,15 @@ unsafe impl Suballocator for BuddyAllocator {
         }
     }
 
+    fn reset(&mut self) {
+        self.free_size = self.region.size();
+        self.free_list.iter_mut().for_each(Vec::clear);
+
+        let max_order =
+            (self.region.size() / BuddyAllocator::MIN_NODE_SIZE).trailing_zeros() as usize;
+        self.free_list[max_order].push(self.region.offset());
+    }
+
     /// Returns the total amount of free space left in the [region] that is available to the
     /// allocator, which means that [internal fragmentation] is excluded.
     ///
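
BuddyAllocator::reset above recomputes max_order from the region size so that the whole region becomes a single free node of the largest order again. A worked example of that arithmetic, assuming a minimum node size of 16 purely for illustration (the real MIN_NODE_SIZE constant is not shown in this diff):

fn main() {
    // Assumed for illustration only; the real constant lives in BuddyAllocator.
    const MIN_NODE_SIZE: u64 = 16;

    let region_size: u64 = 1024;

    // 1024 / 16 = 64 = 2^6, so a fully free region is one order-6 node.
    let max_order = (region_size / MIN_NODE_SIZE).trailing_zeros() as usize;
    assert_eq!(max_order, 6);

    // After reset, free_list[max_order] would hold a single entry: the region's offset.
    println!("max_order = {max_order}");
}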
@@ -260,9 +269,6 @@ unsafe impl Suballocator for BuddyAllocator {
         self.free_size
     }
 
-    #[inline]
-    fn cleanup(&mut self) {}
-
     #[inline]
     fn suballocations(&self) -> Self::Suballocations<'_> {
         todo!()
@@ -60,15 +60,6 @@ pub struct BumpAllocator {
 }
 
 impl BumpAllocator {
-    /// Resets the free-start back to the beginning of the [region].
-    ///
-    /// [region]: Suballocator#regions
-    #[inline]
-    pub fn reset(&mut self) {
-        self.free_start = 0;
-        self.prev_allocation_type = AllocationType::Unknown;
-    }
-
     fn suballocation_node(&self, part: usize) -> SuballocationNode {
        if part == 0 {
            SuballocationNode {
@@ -153,14 +144,18 @@ unsafe impl Suballocator for BumpAllocator {
         // such complex, very wow
     }
 
+    /// Resets the free-start back to the beginning of the [region].
+    ///
+    /// [region]: Suballocator#regions
     #[inline]
-    fn free_size(&self) -> DeviceSize {
-        self.region.size() - self.free_start
+    fn reset(&mut self) {
+        self.free_start = 0;
+        self.prev_allocation_type = AllocationType::Unknown;
     }
 
     #[inline]
-    fn cleanup(&mut self) {
-        self.reset();
+    fn free_size(&self) -> DeviceSize {
+        self.region.size() - self.free_start
     }
 
     #[inline]
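
Together with the previous hunk, this moves reset from an inherent BumpAllocator method to the Suballocator trait, so callers can recycle a region without naming the concrete allocator. A hedged sketch of the kind of generic helper this enables, written against a hypothetical minimal trait rather than vulkano's real one:

// Hypothetical minimal trait standing in for vulkano's Suballocator.
trait MiniSuballocator {
    fn reset(&mut self);
    fn free_size(&self) -> u64;
}

// Works for bump, buddy, and free-list allocators alike now that `reset`
// is a trait method instead of an inherent method on BumpAllocator only.
fn recycle<S: MiniSuballocator>(suballocator: &mut S) -> u64 {
    suballocator.reset();
    suballocator.free_size()
}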
@@ -62,7 +62,7 @@ use std::{cmp, iter::FusedIterator, marker::PhantomData, ptr::NonNull};
 /// [alignment requirements]: super#alignment
 #[derive(Debug)]
 pub struct FreeListAllocator {
-    region_offset: DeviceSize,
+    region: Region,
     // Total memory remaining in the region.
     free_size: DeviceSize,
     suballocations: SuballocationList,
@@ -98,7 +98,7 @@ unsafe impl Suballocator for FreeListAllocator {
         };
 
         FreeListAllocator {
-            region_offset: region.offset(),
+            region,
             free_size: region.size(),
             suballocations,
         }
@@ -164,7 +164,7 @@ unsafe impl Suballocator for FreeListAllocator {
         let mut offset = align_up(node.offset, alignment);
 
         if buffer_image_granularity != DeviceAlignment::MIN {
-            debug_assert!(is_aligned(self.region_offset, buffer_image_granularity));
+            debug_assert!(is_aligned(self.region.offset(), buffer_image_granularity));
 
             if let Some(prev_ptr) = node.prev {
                 let prev = unsafe { *prev_ptr.as_ptr() };
@@ -250,14 +250,31 @@ unsafe impl Suballocator for FreeListAllocator {
         unsafe { self.suballocations.deallocate(node_ptr) };
     }
 
+    fn reset(&mut self) {
+        self.free_size = self.region.size();
+        self.suballocations.free_list.clear();
+        unsafe { self.suballocations.node_allocator.reset() };
+
+        let root_ptr = self.suballocations.node_allocator.allocate();
+        let root = SuballocationListNode {
+            prev: None,
+            next: None,
+            offset: self.region.offset(),
+            size: self.region.size(),
+            allocation_type: SuballocationType::Free,
+        };
+        unsafe { root_ptr.as_ptr().write(root) };
+
+        self.suballocations.head = root_ptr;
+        self.suballocations.tail = root_ptr;
+        self.suballocations.len = 1;
+    }
+
     #[inline]
     fn free_size(&self) -> DeviceSize {
         self.free_size
     }
 
-    #[inline]
-    fn cleanup(&mut self) {}
-
     #[inline]
     fn suballocations(&self) -> Self::Suballocations<'_> {
         self.suballocations.iter()
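
FreeListAllocator::reset above tears down the node list and reinstalls a single free node spanning the entire region. A simplified Vec-based sketch of the same idea, without raw pointers or the slabbin node allocator (all names here are illustrative):

#[derive(Debug)]
enum NodeKind {
    Free,
    Allocated,
}

#[derive(Debug)]
struct Node {
    offset: u64,
    size: u64,
    kind: NodeKind,
}

struct SimpleFreeList {
    region_offset: u64,
    region_size: u64,
    nodes: Vec<Node>,
    free_size: u64,
}

impl SimpleFreeList {
    // Analogous to the diff: drop every node and reinstall one Free node
    // spanning the whole region, restoring free_size to the region size.
    fn reset(&mut self) {
        self.free_size = self.region_size;
        self.nodes.clear();
        self.nodes.push(Node {
            offset: self.region_offset,
            size: self.region_size,
            kind: NodeKind::Free,
        });
    }
}

fn main() {
    let mut list = SimpleFreeList {
        region_offset: 0,
        region_size: 4096,
        nodes: vec![
            Node { offset: 0, size: 1024, kind: NodeKind::Allocated },
            Node { offset: 1024, size: 3072, kind: NodeKind::Free },
        ],
        free_size: 3072,
    };

    list.reset();
    assert_eq!(list.nodes.len(), 1);
    assert_eq!(list.free_size, 4096);
}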
@@ -138,16 +138,14 @@ pub unsafe trait Suballocator {
     /// - `suballocation` must refer to a **currently allocated** suballocation of `self`.
     unsafe fn deallocate(&mut self, suballocation: Suballocation);
 
+    /// Resets the suballocator, deallocating all currently allocated suballocations at once.
+    fn reset(&mut self);
+
     /// Returns the total amount of free space that is left in the [region].
     ///
     /// [region]: Self#regions
     fn free_size(&self) -> DeviceSize;
 
-    /// Tries to free some space, if applicable.
-    ///
-    /// There must be no current allocations as they might get freed.
-    fn cleanup(&mut self);
-
     /// Returns an iterator over the current suballocations.
     fn suballocations(&self) -> Self::Suballocations<'_>
     where
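
The trait-level change above removes cleanup (a best-effort "try to free some space" hook) and adds reset as a required method that deallocates every live suballocation at once. A toy end-to-end sketch of that contract with a minimal bump-style implementation (a stand-in, not vulkano's API):

// Minimal stand-in for the reworked trait surface shown in the hunk above.
trait MiniSuballocator {
    fn allocate(&mut self, size: u64) -> Option<u64>;
    /// Deallocates all currently allocated suballocations at once.
    fn reset(&mut self);
    fn free_size(&self) -> u64;
}

struct MiniBump {
    region_size: u64,
    free_start: u64,
}

impl MiniSuballocator for MiniBump {
    fn allocate(&mut self, size: u64) -> Option<u64> {
        (self.free_start + size <= self.region_size).then(|| {
            let offset = self.free_start;
            self.free_start += size;
            offset
        })
    }

    fn reset(&mut self) {
        // A bump allocator can only free everything at once.
        self.free_start = 0;
    }

    fn free_size(&self) -> u64 {
        self.region_size - self.free_start
    }
}

fn main() {
    let mut bump = MiniBump { region_size: 256, free_start: 0 };
    let _ = bump.allocate(64);
    let _ = bump.allocate(64);
    assert_eq!(bump.free_size(), 128);

    bump.reset();
    assert_eq!(bump.free_size(), 256);
}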