Fix UB in GenericMemoryAllocator::deallocate (#2572)

* Fix UB in `GenericMemoryAllocator::deallocate`

* Use the slab allocator
marc0246 2024-10-10 12:21:59 +02:00 committed by GitHub
parent f6bc05df94
commit 3189169a38


@@ -206,10 +206,7 @@
 //! [`Rc`]: std::rc::Rc
 //! [region]: Suballocator#regions
 
-mod layout;
-pub mod suballocator;
-
-use self::{array_vec::ArrayVec, suballocator::Region};
+use self::{aliasable_box::AliasableBox, array_vec::ArrayVec, suballocator::Region};
 pub use self::{
     layout::DeviceLayout,
     suballocator::{
@@ -229,6 +226,7 @@ use crate::{
 };
 use ash::vk::MAX_MEMORY_TYPES;
 use parking_lot::{Mutex, MutexGuard};
+use slabbin::SlabAllocator;
 use std::{
     error::Error,
     fmt::{Debug, Display, Error as FmtError, Formatter},
@@ -239,6 +237,9 @@ use std::{
     sync::Arc,
 };
 
+mod layout;
+pub mod suballocator;
+
 /// General-purpose memory allocators which allocate from any memory type dynamically as needed.
 ///
 /// # Safety
@@ -952,7 +953,7 @@ impl<S> GenericMemoryAllocator<S> {
     // This is a false-positive, we only use this const for static initialization.
     #[allow(clippy::declare_interior_mutable_const)]
     const EMPTY_POOL: DeviceMemoryPool<S> = DeviceMemoryPool {
-        blocks: Mutex::new(Vec::new()),
+        blocks: Mutex::new(DeviceMemoryBlockVec::new()),
         property_flags: MemoryPropertyFlags::empty(),
         atom_size: DeviceAlignment::MIN,
         block_size: 0,
@@ -1179,13 +1180,14 @@ unsafe impl<S: Suballocator + Send + 'static> MemoryAllocator for GenericMemoryA
             layout = layout.align_to(pool.atom_size).unwrap();
 
-            let mut blocks = pool.blocks.lock();
+            let blocks = &mut *pool.blocks.lock();
+            let vec = &mut blocks.vec;
 
             // TODO: Incremental sorting
-            blocks.sort_by_key(|block| block.free_size());
-            let (Ok(idx) | Err(idx)) = blocks.binary_search_by_key(&size, |block| block.free_size());
+            vec.sort_by_key(|block| block.free_size());
+            let (Ok(idx) | Err(idx)) = vec.binary_search_by_key(&size, |block| block.free_size());
 
-            for block in &mut blocks[idx..] {
+            for block in &mut vec[idx..] {
                 if let Ok(allocation) =
                     block.allocate(layout, allocation_type, self.buffer_image_granularity)
                 {
@@ -1216,7 +1218,7 @@ unsafe impl<S: Suballocator + Send + 'static> MemoryAllocator for GenericMemoryA
                     export_handle_types,
                 ) {
                     Ok(device_memory) => {
-                        break DeviceMemoryBlock::new(device_memory);
+                        break DeviceMemoryBlock::new(device_memory, &blocks.block_allocator);
                     }
                     // Retry up to 3 times, halving the allocation size each time so long as the
                     // resulting size is still large enough.
@@ -1230,8 +1232,8 @@ unsafe impl<S: Suballocator + Send + 'static> MemoryAllocator for GenericMemoryA
                 }
             };
 
-            blocks.push(block);
-            let block = blocks.last_mut().unwrap();
+            vec.push(block);
+            let block = vec.last_mut().unwrap();
 
             match block.allocate(layout, allocation_type, self.buffer_image_granularity) {
                 Ok(allocation) => Ok(allocation),
@@ -1454,15 +1456,16 @@ unsafe impl<S: Suballocator + Send + 'static> MemoryAllocator for GenericMemoryA
     unsafe fn deallocate(&self, allocation: MemoryAlloc) {
         if let Some(suballocation) = allocation.suballocation {
             let memory_type_index = allocation.device_memory.memory_type_index();
-            let pool = self.pools[memory_type_index as usize].blocks.lock();
+            let blocks = self.pools[memory_type_index as usize].blocks.lock();
+            let vec = &blocks.vec;
 
             let block_ptr = allocation
                 .allocation_handle
-                .0
+                .as_ptr()
                 .cast::<DeviceMemoryBlock<S>>();
 
             // TODO: Maybe do a similar check for dedicated blocks.
             debug_assert!(
-                pool.iter().any(|block| ptr::addr_of!(**block) == block_ptr),
+                vec.iter().any(|block| ptr::addr_of!(**block) == block_ptr),
                 "attempted to deallocate a memory block that does not belong to this allocator",
             );
@@ -1477,8 +1480,6 @@ unsafe impl<S: Suballocator + Send + 'static> MemoryAllocator for GenericMemoryA
             // SAFETY: The caller must guarantee that `allocation` refers to a currently allocated
             // allocation of `self`.
             block.deallocate(suballocation);
-
-            drop(pool);
         }
     }
 }
@@ -1546,7 +1547,7 @@ unsafe impl<S> DeviceOwned for GenericMemoryAllocator<S> {
 /// A pool of [`DeviceMemory`] blocks within [`GenericMemoryAllocator`], specific to a memory type.
 #[derive(Debug)]
 pub struct DeviceMemoryPool<S> {
-    blocks: Mutex<Vec<Box<DeviceMemoryBlock<S>>>>,
+    blocks: Mutex<DeviceMemoryBlockVec<S>>,
     // This is cached here for faster access, so we don't need to hop through 3 pointers.
     property_flags: MemoryPropertyFlags,
     atom_size: DeviceAlignment,
@@ -1564,13 +1565,38 @@ impl<S> DeviceMemoryPool<S> {
     #[inline]
     pub fn blocks(&self) -> DeviceMemoryBlocks<'_, S> {
         DeviceMemoryBlocks {
-            inner: MutexGuard::leak(self.blocks.lock()).iter(),
+            inner: MutexGuard::leak(self.blocks.lock()).vec.iter(),
             // SAFETY: We have just locked the pool above.
             _guard: unsafe { DeviceMemoryPoolGuard::new(self) },
         }
     }
 }
 
+impl<S> Drop for DeviceMemoryPool<S> {
+    fn drop(&mut self) {
+        let blocks = self.blocks.get_mut();
+
+        for block in &mut blocks.vec {
+            unsafe { AliasableBox::drop(block, &blocks.block_allocator) };
+        }
+    }
+}
+
+#[derive(Debug)]
+struct DeviceMemoryBlockVec<S> {
+    vec: Vec<AliasableBox<DeviceMemoryBlock<S>>>,
+    block_allocator: SlabAllocator<DeviceMemoryBlock<S>>,
+}
+
+impl<S> DeviceMemoryBlockVec<S> {
+    const fn new() -> Self {
+        DeviceMemoryBlockVec {
+            vec: Vec::new(),
+            block_allocator: SlabAllocator::new(32),
+        }
+    }
+}
+
 /// A [`DeviceMemory`] block within a [`DeviceMemoryPool`].
 #[derive(Debug)]
 pub struct DeviceMemoryBlock<S> {
@@ -1580,36 +1606,23 @@ pub struct DeviceMemoryBlock<S> {
 }
 
 impl<S: Suballocator> DeviceMemoryBlock<S> {
-    fn new(device_memory: Arc<DeviceMemory>) -> Box<Self> {
+    fn new(
+        device_memory: Arc<DeviceMemory>,
+        block_allocator: &SlabAllocator<Self>,
+    ) -> AliasableBox<Self> {
         let suballocator = S::new(
             Region::new(0, device_memory.allocation_size())
                 .expect("we somehow managed to allocate more than `DeviceLayout::MAX_SIZE` bytes"),
         );
 
-        Box::new(DeviceMemoryBlock {
-            device_memory,
-            suballocator,
-            allocation_count: 0,
-        })
-    }
-
-    fn allocate(
-        &mut self,
-        layout: DeviceLayout,
-        allocation_type: AllocationType,
-        buffer_image_granularity: DeviceAlignment,
-    ) -> Result<MemoryAlloc, SuballocatorError> {
-        let suballocation =
-            self.suballocator
-                .allocate(layout, allocation_type, buffer_image_granularity)?;
-
-        self.allocation_count += 1;
-
-        Ok(MemoryAlloc {
-            device_memory: self.device_memory.clone(),
-            suballocation: Some(suballocation),
-            allocation_handle: AllocationHandle::from_ptr(<*mut _>::cast(self)),
-        })
+        AliasableBox::new(
+            DeviceMemoryBlock {
+                device_memory,
+                suballocator,
+                allocation_count: 0,
+            },
+            block_allocator,
+        )
     }
 
     unsafe fn deallocate(&mut self, suballocation: Suballocation) {
@@ -1646,9 +1659,34 @@ impl<S: Suballocator> DeviceMemoryBlock<S> {
     }
 }
 
+impl<S: Suballocator> AliasableBox<DeviceMemoryBlock<S>> {
+    fn allocate(
+        &mut self,
+        layout: DeviceLayout,
+        allocation_type: AllocationType,
+        buffer_image_granularity: DeviceAlignment,
+    ) -> Result<MemoryAlloc, SuballocatorError> {
+        let suballocation =
+            self.suballocator
+                .allocate(layout, allocation_type, buffer_image_granularity)?;
+
+        self.allocation_count += 1;
+
+        // It is paramount to soundness that the pointer we give out doesn't go through `DerefMut`,
+        // as such a pointer would become invalidated when another allocation is made.
+        let ptr = AliasableBox::as_mut_ptr(self);
+
+        Ok(MemoryAlloc {
+            device_memory: self.device_memory.clone(),
+            suballocation: Some(suballocation),
+            allocation_handle: AllocationHandle::from_ptr(<*mut _>::cast(ptr)),
+        })
+    }
+}
+
 /// An iterator over the [`DeviceMemoryBlock`]s within a [`DeviceMemoryPool`].
 pub struct DeviceMemoryBlocks<'a, S> {
-    inner: slice::Iter<'a, Box<DeviceMemoryBlock<S>>>,
+    inner: slice::Iter<'a, AliasableBox<DeviceMemoryBlock<S>>>,
     _guard: DeviceMemoryPoolGuard<'a, S>,
 }
@@ -1887,3 +1925,71 @@ mod array_vec {
         }
     }
 }
+
+mod aliasable_box {
+    use slabbin::SlabAllocator;
+    use std::{
+        fmt,
+        marker::PhantomData,
+        ops::{Deref, DerefMut},
+        panic::{RefUnwindSafe, UnwindSafe},
+        ptr::NonNull,
+    };
+
+    pub struct AliasableBox<T> {
+        ptr: NonNull<T>,
+        marker: PhantomData<T>,
+    }
+
+    unsafe impl<T: Send> Send for AliasableBox<T> {}
+    unsafe impl<T: Sync> Sync for AliasableBox<T> {}
+
+    impl<T: UnwindSafe> UnwindSafe for AliasableBox<T> {}
+    impl<T: RefUnwindSafe> RefUnwindSafe for AliasableBox<T> {}
+
+    impl<T> Unpin for AliasableBox<T> {}
+
+    impl<T> AliasableBox<T> {
+        pub fn new(value: T, allocator: &SlabAllocator<T>) -> Self {
+            let ptr = allocator.allocate();
+            unsafe { ptr.as_ptr().write(value) };
+
+            AliasableBox {
+                ptr,
+                marker: PhantomData,
+            }
+        }
+
+        pub fn as_mut_ptr(this: &mut Self) -> *mut T {
+            this.ptr.as_ptr()
+        }
+
+        pub unsafe fn drop(this: &mut Self, allocator: &SlabAllocator<T>) {
+            unsafe { this.ptr.as_ptr().drop_in_place() };
+            unsafe { allocator.deallocate(this.ptr) };
+        }
+    }
+
+    impl<T: fmt::Debug> fmt::Debug for AliasableBox<T> {
+        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+            fmt::Debug::fmt(&**self, f)
+        }
+    }
+
+    impl<T> Deref for AliasableBox<T> {
+        type Target = T;
+
+        #[inline]
+        fn deref(&self) -> &Self::Target {
+            unsafe { self.ptr.as_ref() }
+        }
+    }
+
+    impl<T> DerefMut for AliasableBox<T> {
+        #[inline]
+        fn deref_mut(&mut self) -> &mut Self::Target {
+            unsafe { self.ptr.as_mut() }
+        }
+    }
+}