diff --git a/vulkano/src/memory/allocator/mod.rs b/vulkano/src/memory/allocator/mod.rs
index 177704ac..a1f0d62d 100644
--- a/vulkano/src/memory/allocator/mod.rs
+++ b/vulkano/src/memory/allocator/mod.rs
@@ -206,10 +206,7 @@
 //! [`Rc`]: std::rc::Rc
 //! [region]: Suballocator#regions

-mod layout;
-pub mod suballocator;
-
-use self::{array_vec::ArrayVec, suballocator::Region};
+use self::{aliasable_box::AliasableBox, array_vec::ArrayVec, suballocator::Region};
 pub use self::{
     layout::DeviceLayout,
     suballocator::{
@@ -229,6 +226,7 @@ use crate::{
 };
 use ash::vk::MAX_MEMORY_TYPES;
 use parking_lot::{Mutex, MutexGuard};
+use slabbin::SlabAllocator;
 use std::{
     error::Error,
     fmt::{Debug, Display, Error as FmtError, Formatter},
@@ -239,6 +237,9 @@ use std::{
     sync::Arc,
 };

+mod layout;
+pub mod suballocator;
+
 /// General-purpose memory allocators which allocate from any memory type dynamically as needed.
 ///
 /// # Safety
@@ -952,7 +953,7 @@ impl<S: Suballocator> GenericMemoryAllocator<S> {
     // This is a false-positive, we only use this const for static initialization.
     #[allow(clippy::declare_interior_mutable_const)]
     const EMPTY_POOL: DeviceMemoryPool<S> = DeviceMemoryPool {
-        blocks: Mutex::new(Vec::new()),
+        blocks: Mutex::new(DeviceMemoryBlockVec::new()),
         property_flags: MemoryPropertyFlags::empty(),
         atom_size: DeviceAlignment::MIN,
         block_size: 0,
@@ -1179,13 +1180,14 @@ unsafe impl<S: Suballocator + Send + Sync + 'static> MemoryAllocator for GenericMemoryA

         layout = layout.align_to(pool.atom_size).unwrap();

-        let mut blocks = pool.blocks.lock();
+        let blocks = &mut *pool.blocks.lock();
+        let vec = &mut blocks.vec;

         // TODO: Incremental sorting
-        blocks.sort_by_key(|block| block.free_size());
-        let (Ok(idx) | Err(idx)) = blocks.binary_search_by_key(&size, |block| block.free_size());
+        vec.sort_by_key(|block| block.free_size());
+        let (Ok(idx) | Err(idx)) = vec.binary_search_by_key(&size, |block| block.free_size());

-        for block in &mut blocks[idx..] {
+        for block in &mut vec[idx..] {
             if let Ok(allocation) =
                 block.allocate(layout, allocation_type, self.buffer_image_granularity)
             {
@@ -1216,7 +1218,7 @@ unsafe impl<S: Suballocator + Send + Sync + 'static> MemoryAllocator for GenericMemoryA
                 export_handle_types,
             ) {
                 Ok(device_memory) => {
-                    break DeviceMemoryBlock::new(device_memory);
+                    break DeviceMemoryBlock::new(device_memory, &blocks.block_allocator);
                 }
                 // Retry up to 3 times, halving the allocation size each time so long as the
                 // resulting size is still large enough.
@@ -1230,8 +1232,8 @@ unsafe impl<S: Suballocator + Send + Sync + 'static> MemoryAllocator for GenericMemoryA
             }
         };

-        blocks.push(block);
-        let block = blocks.last_mut().unwrap();
+        vec.push(block);
+        let block = vec.last_mut().unwrap();

         match block.allocate(layout, allocation_type, self.buffer_image_granularity) {
             Ok(allocation) => Ok(allocation),
@@ -1454,15 +1456,16 @@ unsafe impl<S: Suballocator + Send + Sync + 'static> MemoryAllocator for GenericMemoryA
     unsafe fn deallocate(&self, allocation: MemoryAlloc) {
         if let Some(suballocation) = allocation.suballocation {
             let memory_type_index = allocation.device_memory.memory_type_index();
-            let pool = self.pools[memory_type_index as usize].blocks.lock();
+            let blocks = self.pools[memory_type_index as usize].blocks.lock();
+            let vec = &blocks.vec;
             let block_ptr = allocation
                 .allocation_handle
-                .0
+                .as_ptr()
                 .cast::<DeviceMemoryBlock<S>>();

             // TODO: Maybe do a similar check for dedicated blocks.
             debug_assert!(
-                pool.iter().any(|block| ptr::addr_of!(**block) == block_ptr),
+                vec.iter().any(|block| ptr::addr_of!(**block) == block_ptr),
                 "attempted to deallocate a memory block that does not belong to this allocator",
             );

@@ -1477,8 +1480,6 @@ unsafe impl<S: Suballocator + Send + Sync + 'static> MemoryAllocator for GenericMemoryA
             // SAFETY: The caller must guarantee that `allocation` refers to a currently allocated
             // allocation of `self`.
             block.deallocate(suballocation);
-
-            drop(pool);
         }
     }
 }
@@ -1546,7 +1547,7 @@ unsafe impl<S> DeviceOwned for GenericMemoryAllocator<S> {
 /// A pool of [`DeviceMemory`] blocks within [`GenericMemoryAllocator`], specific to a memory type.
 #[derive(Debug)]
 pub struct DeviceMemoryPool<S> {
-    blocks: Mutex<Vec<Box<DeviceMemoryBlock<S>>>>,
+    blocks: Mutex<DeviceMemoryBlockVec<S>>,
     // This is cached here for faster access, so we don't need to hop through 3 pointers.
     property_flags: MemoryPropertyFlags,
     atom_size: DeviceAlignment,
@@ -1564,13 +1565,38 @@ impl<S> DeviceMemoryPool<S> {
     #[inline]
     pub fn blocks(&self) -> DeviceMemoryBlocks<'_, S> {
         DeviceMemoryBlocks {
-            inner: MutexGuard::leak(self.blocks.lock()).iter(),
+            inner: MutexGuard::leak(self.blocks.lock()).vec.iter(),
             // SAFETY: We have just locked the pool above.
             _guard: unsafe { DeviceMemoryPoolGuard::new(self) },
         }
     }
 }

+impl<S> Drop for DeviceMemoryPool<S> {
+    fn drop(&mut self) {
+        let blocks = self.blocks.get_mut();
+
+        for block in &mut blocks.vec {
+            unsafe { AliasableBox::drop(block, &blocks.block_allocator) };
+        }
+    }
+}
+
+#[derive(Debug)]
+struct DeviceMemoryBlockVec<S> {
+    vec: Vec<AliasableBox<DeviceMemoryBlock<S>>>,
+    block_allocator: SlabAllocator<DeviceMemoryBlock<S>>,
+}
+
+impl<S> DeviceMemoryBlockVec<S> {
+    const fn new() -> Self {
+        DeviceMemoryBlockVec {
+            vec: Vec::new(),
+            block_allocator: SlabAllocator::new(32),
+        }
+    }
+}
+
 /// A [`DeviceMemory`] block within a [`DeviceMemoryPool`].
 #[derive(Debug)]
 pub struct DeviceMemoryBlock<S> {
@@ -1580,36 +1606,23 @@ pub struct DeviceMemoryBlock<S> {
 }

 impl<S: Suballocator> DeviceMemoryBlock<S> {
-    fn new(device_memory: Arc<DeviceMemory>) -> Box<Self> {
+    fn new(
+        device_memory: Arc<DeviceMemory>,
+        block_allocator: &SlabAllocator<Self>,
+    ) -> AliasableBox<Self> {
         let suballocator = S::new(
             Region::new(0, device_memory.allocation_size())
                 .expect("we somehow managed to allocate more than `DeviceLayout::MAX_SIZE` bytes"),
         );

-        Box::new(DeviceMemoryBlock {
-            device_memory,
-            suballocator,
-            allocation_count: 0,
-        })
-    }
-
-    fn allocate(
-        &mut self,
-        layout: DeviceLayout,
-        allocation_type: AllocationType,
-        buffer_image_granularity: DeviceAlignment,
-    ) -> Result<MemoryAlloc, SuballocatorError> {
-        let suballocation =
-            self.suballocator
-                .allocate(layout, allocation_type, buffer_image_granularity)?;
-
-        self.allocation_count += 1;
-
-        Ok(MemoryAlloc {
-            device_memory: self.device_memory.clone(),
-            suballocation: Some(suballocation),
-            allocation_handle: AllocationHandle::from_ptr(<*mut _>::cast(self)),
-        })
+        AliasableBox::new(
+            DeviceMemoryBlock {
+                device_memory,
+                suballocator,
+                allocation_count: 0,
+            },
+            block_allocator,
+        )
     }

     unsafe fn deallocate(&mut self, suballocation: Suballocation) {
@@ -1646,9 +1659,34 @@ impl<S: Suballocator> DeviceMemoryBlock<S> {
     }
 }

+impl<S: Suballocator> AliasableBox<DeviceMemoryBlock<S>> {
+    fn allocate(
+        &mut self,
+        layout: DeviceLayout,
+        allocation_type: AllocationType,
+        buffer_image_granularity: DeviceAlignment,
+    ) -> Result<MemoryAlloc, SuballocatorError> {
+        let suballocation =
+            self.suballocator
+                .allocate(layout, allocation_type, buffer_image_granularity)?;
+
+        self.allocation_count += 1;
+
+        // It is paramount to soundness that the pointer we give out doesn't go through `DerefMut`,
+        // as such a pointer would become invalidated when another allocation is made.
+        let ptr = AliasableBox::as_mut_ptr(self);
+
+        Ok(MemoryAlloc {
+            device_memory: self.device_memory.clone(),
+            suballocation: Some(suballocation),
+            allocation_handle: AllocationHandle::from_ptr(<*mut _>::cast(ptr)),
+        })
+    }
+}
+
 /// An iterator over the [`DeviceMemoryBlock`]s within a [`DeviceMemoryPool`].
 pub struct DeviceMemoryBlocks<'a, S> {
-    inner: slice::Iter<'a, Box<DeviceMemoryBlock<S>>>,
+    inner: slice::Iter<'a, AliasableBox<DeviceMemoryBlock<S>>>,
     _guard: DeviceMemoryPoolGuard<'a, S>,
 }

@@ -1887,3 +1925,71 @@ mod array_vec {
         }
     }
 }
+
+mod aliasable_box {
+    use slabbin::SlabAllocator;
+    use std::{
+        fmt,
+        marker::PhantomData,
+        ops::{Deref, DerefMut},
+        panic::{RefUnwindSafe, UnwindSafe},
+        ptr::NonNull,
+    };
+
+    pub struct AliasableBox<T> {
+        ptr: NonNull<T>,
+        marker: PhantomData<T>,
+    }
+
+    unsafe impl<T: Send> Send for AliasableBox<T> {}
+    unsafe impl<T: Sync> Sync for AliasableBox<T> {}
+
+    impl<T: UnwindSafe> UnwindSafe for AliasableBox<T> {}
+    impl<T: RefUnwindSafe> RefUnwindSafe for AliasableBox<T> {}
+
+    impl<T> Unpin for AliasableBox<T> {}
+
+    impl<T> AliasableBox<T> {
+        pub fn new(value: T, allocator: &SlabAllocator<T>) -> Self {
+            let ptr = allocator.allocate();
+
+            unsafe { ptr.as_ptr().write(value) };
+
+            AliasableBox {
+                ptr,
+                marker: PhantomData,
+            }
+        }
+
+        pub fn as_mut_ptr(this: &mut Self) -> *mut T {
+            this.ptr.as_ptr()
+        }
+
+        pub unsafe fn drop(this: &mut Self, allocator: &SlabAllocator<T>) {
+            unsafe { this.ptr.as_ptr().drop_in_place() };
+            unsafe { allocator.deallocate(this.ptr) };
+        }
+    }
+
+    impl<T: fmt::Debug> fmt::Debug for AliasableBox<T> {
+        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+            fmt::Debug::fmt(&**self, f)
+        }
+    }
+
+    impl<T> Deref for AliasableBox<T> {
+        type Target = T;
+
+        #[inline]
+        fn deref(&self) -> &Self::Target {
+            unsafe { self.ptr.as_ref() }
+        }
+    }
+
+    impl<T> DerefMut for AliasableBox<T> {
+        #[inline]
+        fn deref_mut(&mut self) -> &mut Self::Target {
+            unsafe { self.ptr.as_mut() }
+        }
+    }
+}
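
What follows is a minimal, self-contained sketch of the aliasing argument behind `AliasableBox`, assuming a reduced stand-in backed by the global allocator via `Box::into_raw` rather than `slabbin::SlabAllocator`; the `main` function and its names are hypothetical and not part of the patch. It illustrates the invariant stated in the patch comment: the handle given out must be a copy of the originally allocated `NonNull<T>`, never a pointer freshly derived through `DerefMut`, because a `&mut`-derived pointer is invalidated by the next mutable reborrow, and with `Vec<Box<_>>` also by the moves that `vec.push` and `vec.sort_by_key` perform.

use std::ops::{Deref, DerefMut};
use std::ptr::NonNull;

// Reduced stand-in for the patch's `AliasableBox`. The real one allocates
// its slot from a `SlabAllocator` so blocks get stable addresses; here the
// global allocator plays that role.
struct AliasableBox<T> {
    ptr: NonNull<T>,
}

impl<T> AliasableBox<T> {
    fn new(value: T) -> Self {
        // `Box::into_raw` relinquishes the box's uniqueness claim; raw
        // pointers copied from `ptr` from now on may alias one another.
        let ptr = NonNull::new(Box::into_raw(Box::new(value))).unwrap();
        AliasableBox { ptr }
    }

    // Hands back the *stored* pointer without creating a fresh `&mut T`,
    // mirroring `AliasableBox::as_mut_ptr` in the patch.
    fn as_mut_ptr(this: &mut Self) -> *mut T {
        this.ptr.as_ptr()
    }
}

impl<T> Deref for AliasableBox<T> {
    type Target = T;

    fn deref(&self) -> &T {
        unsafe { self.ptr.as_ref() }
    }
}

impl<T> DerefMut for AliasableBox<T> {
    fn deref_mut(&mut self) -> &mut T {
        unsafe { self.ptr.as_mut() }
    }
}

impl<T> Drop for AliasableBox<T> {
    fn drop(&mut self) {
        unsafe { drop(Box::from_raw(self.ptr.as_ptr())) };
    }
}

fn main() {
    let mut block = AliasableBox::new(0u32);

    // A handle given out to a caller, as `AllocationHandle::from_ptr` does
    // with the pointer to a `DeviceMemoryBlock`.
    let handle: *mut u32 = AliasableBox::as_mut_ptr(&mut block);

    // A later mutation through `DerefMut`, analogous to a subsequent
    // `allocate` call on the same block. Had `handle` been derived from a
    // `&mut` reborrow, this would invalidate it.
    *block += 1;

    // The previously handed-out pointer is still valid to use.
    unsafe { assert_eq!(*handle, 1) };
}

This sketch passes Miri as written; replacing `AliasableBox::as_mut_ptr(&mut block)` with `&mut *block as *mut u32` makes the final read a use of an invalidated borrow, which Miri flags, and which is the same class of UB the patch removes from `GenericMemoryAllocator`.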