Descriptor set allocator API 2.0 (#2400)

* Make `DescriptorSetAllocator` object-safe, remove the generics (see the usage sketch after this list)

* Fix tests

* Fix examples

* Fix docs

* Avoid panics in `deallocate`

* Typo

* Clarify docs

* Clarify safety preconditions of `allocate`

* Remove unnecessary `Arc` clone

* Tidy up a little

* Debug assert
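
As a quick orientation before the per-file diffs: the allocator is now created once, wrapped in an `Arc`, and passed by (cloned) `Arc` instead of by reference, and `PersistentDescriptorSet` no longer carries an allocator type parameter. A minimal sketch of the new calling convention, mirroring the example diffs below; the helper function and the concrete pipeline/buffer types are illustrative only, not part of this commit:

```rust
use std::sync::Arc;
use vulkano::{
    buffer::Subbuffer,
    descriptor_set::{
        allocator::StandardDescriptorSetAllocator, PersistentDescriptorSet, WriteDescriptorSet,
    },
    device::Device,
    pipeline::{GraphicsPipeline, Pipeline},
};

// Hypothetical helper; `device`, `pipeline` and `buffer` come from the usual setup code.
fn make_uniform_set(
    device: Arc<Device>,
    pipeline: &Arc<GraphicsPipeline>,
    buffer: Subbuffer<[f32]>,
) -> Arc<PersistentDescriptorSet> {
    // Create the allocator once and share it behind an `Arc`.
    let descriptor_set_allocator = Arc::new(StandardDescriptorSetAllocator::new(
        device,
        Default::default(),
    ));

    // `PersistentDescriptorSet::new` now takes an `Arc<dyn DescriptorSetAllocator>` rather
    // than `&A` with a generic parameter, so the allocator is passed as a cloned `Arc`.
    PersistentDescriptorSet::new(
        descriptor_set_allocator.clone(),
        pipeline.layout().set_layouts()[0].clone(),
        [WriteDescriptorSet::buffer(0, buffer)],
        [],
    )
    .unwrap()
}
```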
marc0246 2023-11-12 15:10:22 +01:00 committed by GitHub
parent d637450887
commit bbd1de36ef
32 changed files with 493 additions and 503 deletions

View File

@ -476,8 +476,10 @@ fn main() -> Result<(), impl Error> {
};
let mut framebuffers = window_size_dependent_setup(&images, render_pass.clone(), &mut viewport);
let descriptor_set_allocator =
StandardDescriptorSetAllocator::new(device.clone(), Default::default());
let descriptor_set_allocator = Arc::new(StandardDescriptorSetAllocator::new(
device.clone(),
Default::default(),
));
// A byproduct of always using the same set of uniform buffers is that we can also create one
// descriptor set for each, reusing them in the same way as the buffers.
@ -485,7 +487,7 @@ fn main() -> Result<(), impl Error> {
.iter()
.map(|buffer| {
PersistentDescriptorSet::new(
&descriptor_set_allocator,
descriptor_set_allocator.clone(),
pipeline.layout().set_layouts()[0].clone(),
[WriteDescriptorSet::buffer(0, buffer.clone())],
[],
@ -498,7 +500,7 @@ fn main() -> Result<(), impl Error> {
let sampler = Sampler::new(device.clone(), SamplerCreateInfo::simple_repeat_linear()).unwrap();
let sampler_sets = textures.map(|texture| {
PersistentDescriptorSet::new(
&descriptor_set_allocator,
descriptor_set_allocator.clone(),
pipeline.layout().set_layouts()[1].clone(),
[
WriteDescriptorSet::sampler(0, sampler.clone()),

View File

@ -151,8 +151,10 @@ fn main() {
};
let memory_allocator = Arc::new(StandardMemoryAllocator::new_default(device.clone()));
let descriptor_set_allocator =
StandardDescriptorSetAllocator::new(device.clone(), Default::default());
let descriptor_set_allocator = Arc::new(StandardDescriptorSetAllocator::new(
device.clone(),
Default::default(),
));
let command_buffer_allocator =
StandardCommandBufferAllocator::new(device.clone(), Default::default());
@ -183,7 +185,7 @@ fn main() {
// descriptor sets that each contain the buffer you want to run the shader on.
let layout = pipeline.layout().set_layouts().get(0).unwrap();
let set = PersistentDescriptorSet::new(
&descriptor_set_allocator,
descriptor_set_allocator,
layout.clone(),
[WriteDescriptorSet::buffer(0, data_buffer.clone())],
[],

View File

@ -169,7 +169,7 @@ impl AmbientLightingSystem {
let layout = self.pipeline.layout().set_layouts().get(0).unwrap();
let descriptor_set = PersistentDescriptorSet::new(
&self.descriptor_set_allocator,
self.descriptor_set_allocator.clone(),
layout.clone(),
[WriteDescriptorSet::image_view(0, color_input)],
[],

View File

@ -180,7 +180,7 @@ impl DirectionalLightingSystem {
let layout = self.pipeline.layout().set_layouts().get(0).unwrap();
let descriptor_set = PersistentDescriptorSet::new(
&self.descriptor_set_allocator,
self.descriptor_set_allocator.clone(),
layout.clone(),
[
WriteDescriptorSet::image_view(0, color_input),

View File

@ -192,7 +192,7 @@ impl PointLightingSystem {
let layout = self.pipeline.layout().set_layouts().get(0).unwrap();
let descriptor_set = PersistentDescriptorSet::new(
&self.descriptor_set_allocator,
self.descriptor_set_allocator.clone(),
layout.clone(),
[
WriteDescriptorSet::image_view(0, color_input),

View File

@ -145,8 +145,10 @@ fn main() {
};
let memory_allocator = Arc::new(StandardMemoryAllocator::new_default(device.clone()));
let descriptor_set_allocator =
StandardDescriptorSetAllocator::new(device.clone(), Default::default());
let descriptor_set_allocator = Arc::new(StandardDescriptorSetAllocator::new(
device.clone(),
Default::default(),
));
let command_buffer_allocator =
StandardCommandBufferAllocator::new(device.clone(), Default::default());
@ -211,7 +213,7 @@ fn main() {
let layout = pipeline.layout().set_layouts().get(0).unwrap();
let set = PersistentDescriptorSet::new(
&descriptor_set_allocator,
descriptor_set_allocator,
layout.clone(),
[
// When writing to the dynamic buffer binding, the range of the buffer that the shader

View File

@ -202,8 +202,10 @@ fn main() {
};
let memory_allocator = Arc::new(StandardMemoryAllocator::new_default(device.clone()));
let descriptor_set_allocator =
StandardDescriptorSetAllocator::new(device.clone(), Default::default());
let descriptor_set_allocator = Arc::new(StandardDescriptorSetAllocator::new(
device.clone(),
Default::default(),
));
let command_buffer_allocator =
StandardCommandBufferAllocator::new(device.clone(), Default::default());
@ -223,7 +225,7 @@ fn main() {
let layout = pipeline.layout().set_layouts().get(0).unwrap();
let set = PersistentDescriptorSet::new(
&descriptor_set_allocator,
descriptor_set_allocator,
layout.clone(),
[WriteDescriptorSet::image_view(0, view)],
[],

View File

@ -269,15 +269,17 @@ mod linux {
}
});
let descriptor_set_allocator =
StandardDescriptorSetAllocator::new(device.clone(), Default::default());
let descriptor_set_allocator = Arc::new(StandardDescriptorSetAllocator::new(
device.clone(),
Default::default(),
));
let command_buffer_allocator =
StandardCommandBufferAllocator::new(device.clone(), Default::default());
let layout = pipeline.layout().set_layouts().get(0).unwrap();
let set = PersistentDescriptorSet::new(
&descriptor_set_allocator,
descriptor_set_allocator,
layout.clone(),
[
WriteDescriptorSet::sampler(0, sampler),

View File

@ -201,8 +201,10 @@ fn main() -> Result<(), impl Error> {
)
.unwrap();
let descriptor_set_allocator =
StandardDescriptorSetAllocator::new(device.clone(), Default::default());
let descriptor_set_allocator = Arc::new(StandardDescriptorSetAllocator::new(
device.clone(),
Default::default(),
));
let command_buffer_allocator =
StandardCommandBufferAllocator::new(device.clone(), Default::default());
let mut uploads = AutoCommandBufferBuilder::primary(
@ -379,7 +381,7 @@ fn main() -> Result<(), impl Error> {
let layout = pipeline.layout().set_layouts().get(0).unwrap();
let set = PersistentDescriptorSet::new(
&descriptor_set_allocator,
descriptor_set_allocator,
layout.clone(),
[
WriteDescriptorSet::sampler(0, sampler),

View File

@ -201,8 +201,10 @@ fn main() -> Result<(), impl Error> {
)
.unwrap();
let descriptor_set_allocator =
StandardDescriptorSetAllocator::new(device.clone(), Default::default());
let descriptor_set_allocator = Arc::new(StandardDescriptorSetAllocator::new(
device.clone(),
Default::default(),
));
let command_buffer_allocator =
StandardCommandBufferAllocator::new(device.clone(), Default::default());
let mut uploads = AutoCommandBufferBuilder::primary(
@ -327,7 +329,7 @@ fn main() -> Result<(), impl Error> {
let layout = pipeline.layout().set_layouts().get(0).unwrap();
let set = PersistentDescriptorSet::new(
&descriptor_set_allocator,
descriptor_set_allocator,
layout.clone(),
[
WriteDescriptorSet::sampler(0, sampler),

View File

@ -207,8 +207,10 @@ fn main() -> Result<(), impl Error> {
)
.unwrap();
let descriptor_set_allocator =
StandardDescriptorSetAllocator::new(device.clone(), Default::default());
let descriptor_set_allocator = Arc::new(StandardDescriptorSetAllocator::new(
device.clone(),
Default::default(),
));
let command_buffer_allocator =
StandardCommandBufferAllocator::new(device.clone(), Default::default());
let mut uploads = AutoCommandBufferBuilder::primary(
@ -348,7 +350,7 @@ fn main() -> Result<(), impl Error> {
// Use `image_view` instead of `image_view_sampler`, since the sampler is already in the
// layout.
let set = PersistentDescriptorSet::new(
&descriptor_set_allocator,
descriptor_set_allocator,
layout.clone(),
[WriteDescriptorSet::image_view(1, texture)],
[],

View File

@ -361,8 +361,10 @@ fn main() -> Result<(), impl Error> {
let mut recreate_swapchain = false;
let mut previous_frame_end = Some(sync::now(device.clone()).boxed());
let descriptor_set_allocator =
StandardDescriptorSetAllocator::new(device.clone(), Default::default());
let descriptor_set_allocator = Arc::new(StandardDescriptorSetAllocator::new(
device.clone(),
Default::default(),
));
let command_buffer_allocator =
StandardCommandBufferAllocator::new(device.clone(), Default::default());
@ -453,7 +455,7 @@ fn main() -> Result<(), impl Error> {
// Pass the two buffers to the compute shader.
let layout = compute_pipeline.layout().set_layouts().get(0).unwrap();
let cs_desciptor_set = PersistentDescriptorSet::new(
&descriptor_set_allocator,
descriptor_set_allocator.clone(),
layout.clone(),
[
WriteDescriptorSet::buffer(0, vertices.clone()),

View File

@ -139,7 +139,7 @@ impl FractalComputePipeline {
let pipeline_layout = self.pipeline.layout();
let desc_layout = pipeline_layout.set_layouts().get(0).unwrap();
let set = PersistentDescriptorSet::new(
&self.descriptor_set_allocator,
self.descriptor_set_allocator.clone(),
desc_layout.clone(),
[
WriteDescriptorSet::image_view(0, image_view),

View File

@ -187,7 +187,7 @@ impl PixelsDrawPipeline {
.unwrap();
PersistentDescriptorSet::new(
&self.descriptor_set_allocator,
self.descriptor_set_allocator.clone(),
layout.clone(),
[
WriteDescriptorSet::sampler(0, sampler),

View File

@ -174,7 +174,7 @@ impl GameOfLifeComputePipeline {
let pipeline_layout = self.compute_life_pipeline.layout();
let desc_layout = pipeline_layout.set_layouts().get(0).unwrap();
let set = PersistentDescriptorSet::new(
&self.descriptor_set_allocator,
self.descriptor_set_allocator.clone(),
desc_layout.clone(),
[
WriteDescriptorSet::image_view(0, self.image.clone()),

View File

@ -183,7 +183,7 @@ impl PixelsDrawPipeline {
.unwrap();
PersistentDescriptorSet::new(
&self.descriptor_set_allocator,
self.descriptor_set_allocator.clone(),
layout.clone(),
[
WriteDescriptorSet::sampler(0, sampler),

View File

@ -133,8 +133,10 @@ fn main() {
};
let memory_allocator = Arc::new(StandardMemoryAllocator::new_default(device.clone()));
let descriptor_set_allocator =
StandardDescriptorSetAllocator::new(device.clone(), Default::default());
let descriptor_set_allocator = Arc::new(StandardDescriptorSetAllocator::new(
device.clone(),
Default::default(),
));
let command_buffer_allocator =
StandardCommandBufferAllocator::new(device.clone(), Default::default());
@ -155,7 +157,7 @@ fn main() {
let layout = pipeline.layout().set_layouts().get(0).unwrap();
let set = PersistentDescriptorSet::new(
&descriptor_set_allocator,
descriptor_set_allocator,
layout.clone(),
[WriteDescriptorSet::buffer(0, data_buffer.clone())],
[],

View File

@ -261,8 +261,10 @@ fn main() -> Result<(), impl Error> {
)
.unwrap();
let descriptor_set_allocator =
StandardDescriptorSetAllocator::new(device.clone(), Default::default());
let descriptor_set_allocator = Arc::new(StandardDescriptorSetAllocator::new(
device.clone(),
Default::default(),
));
let command_buffer_allocator =
StandardCommandBufferAllocator::new(device.clone(), Default::default());
let mut uploads = AutoCommandBufferBuilder::primary(
@ -447,7 +449,7 @@ fn main() -> Result<(), impl Error> {
let layout = pipeline.layout().set_layouts().get(0).unwrap();
let set = PersistentDescriptorSet::new_variable(
&descriptor_set_allocator,
descriptor_set_allocator,
layout.clone(),
2,
[

View File

@ -125,8 +125,10 @@ fn main() {
};
let memory_allocator = Arc::new(StandardMemoryAllocator::new_default(device.clone()));
let descriptor_set_allocator =
StandardDescriptorSetAllocator::new(device.clone(), Default::default());
let descriptor_set_allocator = Arc::new(StandardDescriptorSetAllocator::new(
device.clone(),
Default::default(),
));
let command_buffer_allocator =
StandardCommandBufferAllocator::new(device.clone(), Default::default());
@ -151,7 +153,7 @@ fn main() {
let layout = pipeline.layout().set_layouts().get(0).unwrap();
let set = PersistentDescriptorSet::new(
&descriptor_set_allocator,
descriptor_set_allocator,
layout.clone(),
[WriteDescriptorSet::buffer(0, data_buffer.clone())],
[],

View File

@ -133,8 +133,10 @@ fn main() {
};
let memory_allocator = Arc::new(StandardMemoryAllocator::new_default(device.clone()));
let descriptor_set_allocator =
StandardDescriptorSetAllocator::new(device.clone(), Default::default());
let descriptor_set_allocator = Arc::new(StandardDescriptorSetAllocator::new(
device.clone(),
Default::default(),
));
let command_buffer_allocator =
StandardCommandBufferAllocator::new(device.clone(), Default::default());
@ -155,7 +157,7 @@ fn main() {
let layout = pipeline.layout().set_layouts().get(0).unwrap();
let set = PersistentDescriptorSet::new(
&descriptor_set_allocator,
descriptor_set_allocator,
layout.clone(),
[WriteDescriptorSet::buffer(0, data_buffer.clone())],
[],

View File

@ -181,7 +181,7 @@ fn main() {
data_buffer: Subbuffer<[u32]>,
parameters: shaders::Parameters,
command_buffer_allocator: &StandardCommandBufferAllocator,
descriptor_set_allocator: &StandardDescriptorSetAllocator,
descriptor_set_allocator: Arc<StandardDescriptorSetAllocator>,
) {
let layout = pipeline.layout().set_layouts().get(0).unwrap();
let set = PersistentDescriptorSet::new(
@ -226,8 +226,10 @@ fn main() {
let memory_allocator = Arc::new(StandardMemoryAllocator::new_default(device.clone()));
let command_buffer_allocator =
StandardCommandBufferAllocator::new(device.clone(), Default::default());
let descriptor_set_allocator =
StandardDescriptorSetAllocator::new(device.clone(), Default::default());
let descriptor_set_allocator = Arc::new(StandardDescriptorSetAllocator::new(
device.clone(),
Default::default(),
));
// Prepare test array `[0, 1, 2, 3....]`.
let data_buffer = Buffer::from_iter(
@ -300,7 +302,7 @@ fn main() {
data_buffer.clone(),
shaders::Parameters { value: 2 },
&command_buffer_allocator,
&descriptor_set_allocator,
descriptor_set_allocator.clone(),
);
// Then add 1 to each value.
@ -310,7 +312,7 @@ fn main() {
data_buffer.clone(),
shaders::Parameters { value: 1 },
&command_buffer_allocator,
&descriptor_set_allocator,
descriptor_set_allocator.clone(),
);
// Then multiply each value by 3.
@ -320,7 +322,7 @@ fn main() {
data_buffer.clone(),
shaders::Parameters { value: 3 },
&command_buffer_allocator,
&descriptor_set_allocator,
descriptor_set_allocator,
);
let data_buffer_content = data_buffer.read().unwrap();

View File

@ -318,8 +318,10 @@ fn main() -> Result<(), impl Error> {
}
let memory_allocator = Arc::new(StandardMemoryAllocator::new_default(device.clone()));
let descriptor_set_allocator =
StandardDescriptorSetAllocator::new(device.clone(), Default::default());
let descriptor_set_allocator = Arc::new(StandardDescriptorSetAllocator::new(
device.clone(),
Default::default(),
));
let command_buffer_allocator =
StandardCommandBufferAllocator::new(device.clone(), Default::default());
@ -431,7 +433,7 @@ fn main() -> Result<(), impl Error> {
// Create a new descriptor set for binding vertices as a storage buffer.
use vulkano::pipeline::Pipeline; // Required to access the `layout` method of pipeline.
let descriptor_set = PersistentDescriptorSet::new(
&descriptor_set_allocator,
descriptor_set_allocator.clone(),
compute_pipeline
.layout()
.set_layouts()

View File

@ -134,8 +134,10 @@ fn main() {
};
let memory_allocator = Arc::new(StandardMemoryAllocator::new_default(device.clone()));
let descriptor_set_allocator =
StandardDescriptorSetAllocator::new(device.clone(), Default::default());
let descriptor_set_allocator = Arc::new(StandardDescriptorSetAllocator::new(
device.clone(),
Default::default(),
));
let command_buffer_allocator =
StandardCommandBufferAllocator::new(device.clone(), Default::default());
@ -156,7 +158,7 @@ fn main() {
let layout = pipeline.layout().set_layouts().get(0).unwrap();
let set = PersistentDescriptorSet::new(
&descriptor_set_allocator,
descriptor_set_allocator,
layout.clone(),
[WriteDescriptorSet::buffer(0, data_buffer.clone())],
[],

View File

@ -251,8 +251,10 @@ fn main() -> Result<(), impl Error> {
let mut previous_frame_end = Some(sync::now(device.clone()).boxed());
let rotation_start = Instant::now();
let descriptor_set_allocator =
StandardDescriptorSetAllocator::new(device.clone(), Default::default());
let descriptor_set_allocator = Arc::new(StandardDescriptorSetAllocator::new(
device.clone(),
Default::default(),
));
let command_buffer_allocator =
StandardCommandBufferAllocator::new(device.clone(), Default::default());
@ -340,7 +342,7 @@ fn main() -> Result<(), impl Error> {
let layout = pipeline.layout().set_layouts().get(0).unwrap();
let set = PersistentDescriptorSet::new(
&descriptor_set_allocator,
descriptor_set_allocator.clone(),
layout.clone(),
[WriteDescriptorSet::buffer(0, uniform_buffer_subbuffer)],
[],

View File

@ -203,8 +203,10 @@ fn main() -> Result<(), impl Error> {
)
.unwrap();
let descriptor_set_allocator =
StandardDescriptorSetAllocator::new(device.clone(), Default::default());
let descriptor_set_allocator = Arc::new(StandardDescriptorSetAllocator::new(
device.clone(),
Default::default(),
));
let command_buffer_allocator =
StandardCommandBufferAllocator::new(device.clone(), Default::default());
let mut uploads = AutoCommandBufferBuilder::primary(
@ -338,7 +340,7 @@ fn main() -> Result<(), impl Error> {
let layout = pipeline.layout().set_layouts().get(0).unwrap();
let set = PersistentDescriptorSet::new(
&descriptor_set_allocator,
descriptor_set_allocator,
layout.clone(),
[
WriteDescriptorSet::sampler(0, sampler),

View File

@ -780,11 +780,13 @@ mod tests {
)
.unwrap();
let ds_allocator =
StandardDescriptorSetAllocator::new(device.clone(), Default::default());
let ds_allocator = Arc::new(StandardDescriptorSetAllocator::new(
device.clone(),
Default::default(),
));
let set = PersistentDescriptorSet::new(
&ds_allocator,
ds_allocator.clone(),
set_layout.clone(),
[WriteDescriptorSet::sampler(
0,
@ -855,7 +857,7 @@ mod tests {
.unwrap();
let set = PersistentDescriptorSet::new(
&ds_allocator,
ds_allocator.clone(),
set_layout,
[WriteDescriptorSet::sampler(
0,

View File

@ -22,7 +22,14 @@ use crate::{
Validated, VulkanError,
};
use crossbeam_queue::ArrayQueue;
use std::{cell::UnsafeCell, mem::ManuallyDrop, num::NonZeroU64, sync::Arc, thread};
use std::{
cell::UnsafeCell,
fmt::{Debug, Error as FmtError, Formatter},
mem,
num::NonZeroU64,
ptr,
sync::Arc,
};
use thread_local::ThreadLocal;
const MAX_POOLS: usize = 32;
@ -31,36 +38,121 @@ const MAX_POOLS: usize = 32;
///
/// # Safety
///
/// A Vulkan descriptor pool must be externally synchronized as if it owned the descriptor sets that
/// were allocated from it. This includes allocating from the pool, freeing from the pool and
/// resetting the pool or individual descriptor sets. The implementation of `DescriptorSetAllocator`
/// is expected to manage this.
/// A Vulkan descriptor pool must be externally synchronized as if it owned the descriptor sets
/// that were allocated from it. This includes allocating from the pool, freeing from the pool and
/// resetting the pool. The implementation of `DescriptorSetAllocator` is expected to manage this.
///
/// The destructor of the [`DescriptorSetAlloc`] is expected to free the descriptor set, reset the
/// descriptor set, or add it to a pool so that it gets reused. If the implementation frees or
/// resets the descriptor set, it must not forget that this operation must be externally
/// synchronized.
pub unsafe trait DescriptorSetAllocator: DeviceOwned {
/// Object that represented an allocated descriptor set.
///
/// The destructor of this object should free the descriptor set.
type Alloc: DescriptorSetAlloc;
/// The implementation of `allocate` must return a valid allocation that stays allocated until
/// either `deallocate` is called on it or the allocator is dropped. If the allocator is cloned, it
/// must produce the same allocator, and an allocation must stay allocated until either
/// `deallocate` is called on any of the clones or all clones have been dropped.
///
/// The implementation of `deallocate` is expected to free the descriptor set, reset its descriptor
/// pool, or add it to a pool so that it gets reused. If the implementation frees the descriptor
/// set or resets the descriptor pool, it must not forget that this operation must be externally
/// synchronized. The implementation should not panic as it is used while dropping descriptor sets.
pub unsafe trait DescriptorSetAllocator: DeviceOwned + Send + Sync + 'static {
/// Allocates a descriptor set.
fn allocate(
&self,
layout: &Arc<DescriptorSetLayout>,
variable_descriptor_count: u32,
) -> Result<Self::Alloc, Validated<VulkanError>>;
) -> Result<DescriptorSetAlloc, Validated<VulkanError>>;
/// Deallocates the given `allocation`.
///
/// # Safety
///
/// - `allocation` must refer to a **currently allocated** allocation of `self`.
unsafe fn deallocate(&self, allocation: DescriptorSetAlloc);
}
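
Because the trait is now object-safe and returns a concrete `DescriptorSetAlloc`, a custom allocator only has to provide these two methods plus `DeviceOwned`. A minimal sketch of a delegating implementation under the contract above; `CountingAllocator` and its fields are hypothetical, not part of this commit:

```rust
use std::sync::{
    atomic::{AtomicUsize, Ordering},
    Arc,
};
use vulkano::{
    descriptor_set::{
        allocator::{DescriptorSetAlloc, DescriptorSetAllocator},
        layout::DescriptorSetLayout,
    },
    device::{Device, DeviceOwned},
    Validated, VulkanError,
};

/// Wraps another allocator and counts live allocations.
struct CountingAllocator {
    inner: Arc<dyn DescriptorSetAllocator>,
    live: AtomicUsize,
}

unsafe impl DescriptorSetAllocator for CountingAllocator {
    fn allocate(
        &self,
        layout: &Arc<DescriptorSetLayout>,
        variable_descriptor_count: u32,
    ) -> Result<DescriptorSetAlloc, Validated<VulkanError>> {
        let allocation = self.inner.allocate(layout, variable_descriptor_count)?;
        self.live.fetch_add(1, Ordering::Relaxed);
        Ok(allocation)
    }

    unsafe fn deallocate(&self, allocation: DescriptorSetAlloc) {
        self.live.fetch_sub(1, Ordering::Relaxed);
        // Forward to the inner allocator, which upholds the synchronization requirements
        // and must not panic here, since this runs while descriptor sets are dropped.
        unsafe { self.inner.deallocate(allocation) };
    }
}

unsafe impl DeviceOwned for CountingAllocator {
    fn device(&self) -> &Arc<Device> {
        self.inner.device()
    }
}
```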
/// An allocated descriptor set.
pub trait DescriptorSetAlloc: Send + Sync {
/// Returns the internal object that contains the descriptor set.
fn inner(&self) -> &DescriptorPoolAlloc;
impl Debug for dyn DescriptorSetAllocator {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), FmtError> {
f.debug_struct("DescriptorSetAllocator")
.finish_non_exhaustive()
}
}
/// Returns the descriptor pool that the descriptor set was allocated from.
fn pool(&self) -> &DescriptorPool;
/// An allocation made using a [descriptor set allocator].
///
/// [descriptor set allocator]: DescriptorSetAllocator
#[derive(Debug)]
pub struct DescriptorSetAlloc {
/// The internal object that contains the descriptor set.
pub inner: DescriptorPoolAlloc,
/// The descriptor pool that the descriptor set was allocated from.
///
/// Using this for anything other than looking at the pool's metadata will lead to a Bad
/// Time<sup>TM</sup>. That includes making additional references.
pub pool: Arc<DescriptorPool>,
/// An opaque handle identifying the allocation inside the allocator.
pub handle: AllocationHandle,
}
unsafe impl Send for DescriptorSetAlloc {}
unsafe impl Sync for DescriptorSetAlloc {}
/// An opaque handle identifying an allocation inside an allocator.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
#[cfg_attr(not(doc), repr(transparent))]
pub struct AllocationHandle(*mut ());
unsafe impl Send for AllocationHandle {}
unsafe impl Sync for AllocationHandle {}
impl AllocationHandle {
/// Creates a null `AllocationHandle`.
///
/// Use this if you don't have anything that you need to associate with the allocation.
#[inline]
pub const fn null() -> Self {
AllocationHandle(ptr::null_mut())
}
/// Stores a pointer in an `AllocationHandle`.
///
/// Use this if you want to associate an allocation with some (host) heap allocation.
#[inline]
pub const fn from_ptr(ptr: *mut ()) -> Self {
AllocationHandle(ptr)
}
/// Stores an index inside an `AllocationHandle`.
///
/// Use this if you want to associate an allocation with some index.
#[allow(clippy::useless_transmute)]
#[inline]
pub const fn from_index(index: usize) -> Self {
// SAFETY: `usize` and `*mut ()` have the same layout.
AllocationHandle(unsafe { mem::transmute::<usize, *mut ()>(index) })
}
/// Retrieves a previously-stored pointer from the `AllocationHandle`.
///
/// If this handle hasn't been created using [`from_ptr`] then this will return an invalid
/// pointer, dereferencing which is undefined behavior.
///
/// [`from_ptr`]: Self::from_ptr
#[inline]
pub const fn as_ptr(self) -> *mut () {
self.0
}
/// Retrieves a previously-stored index from the `AllocationHandle`.
///
/// If this handle hasn't been created using [`from_index`] then this will return a bogus
/// result.
///
/// [`from_index`]: Self::from_index
#[allow(clippy::transmutes_expressible_as_ptr_casts)]
#[inline]
pub const fn as_index(self) -> usize {
// SAFETY: `usize` and `*mut ()` have the same layout.
unsafe { mem::transmute::<*mut (), usize>(self.0) }
}
}
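
A standalone illustration of the two intended uses of the handle; the `Box<u32>` payload is arbitrary and only stands in for whatever bookkeeping an allocator wants to attach:

```rust
use vulkano::descriptor_set::allocator::AllocationHandle;

fn main() {
    // Tag an allocation with a plain index and read it back.
    let handle = AllocationHandle::from_index(3);
    assert_eq!(handle.as_index(), 3);

    // Or smuggle a host heap allocation through the handle, similar to how the standard
    // allocator stores a raw `Arc` to its reserve.
    let payload: Box<u32> = Box::new(42);
    let handle = AllocationHandle::from_ptr(Box::into_raw(payload).cast::<()>());

    // SAFETY: the handle was created from `Box::into_raw` above and is reclaimed exactly once.
    let payload = unsafe { Box::from_raw(handle.as_ptr().cast::<u32>()) };
    assert_eq!(*payload, 42);
}
```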
/// Standard implementation of a descriptor set allocator.
@ -135,44 +227,87 @@ impl StandardDescriptorSetAllocator {
}
unsafe impl DescriptorSetAllocator for StandardDescriptorSetAllocator {
type Alloc = StandardDescriptorSetAlloc;
/// Allocates a descriptor set.
#[inline]
fn allocate(
&self,
layout: &Arc<DescriptorSetLayout>,
variable_descriptor_count: u32,
) -> Result<StandardDescriptorSetAlloc, Validated<VulkanError>> {
let max_count = layout.variable_descriptor_count();
let pools = self.pools.get_or(Default::default);
) -> Result<DescriptorSetAlloc, Validated<VulkanError>> {
let is_fixed = layout.variable_descriptor_count() == 0;
let pools = self.pools.get_or_default();
let entry = unsafe { &mut *pools.get() }.get_or_try_insert(layout.id(), || {
if max_count == 0 {
FixedEntry::new(layout.clone(), &self.create_info).map(Entry::Fixed)
if is_fixed {
FixedEntry::new(layout, &self.create_info).map(Entry::Fixed)
} else {
VariableEntry::new(layout.clone(), &self.create_info).map(Entry::Variable)
VariableEntry::new(
layout,
&self.create_info,
Arc::new(ArrayQueue::new(MAX_POOLS)),
)
.map(Entry::Variable)
}
})?;
match entry {
Entry::Fixed(entry) => entry.allocate(&self.create_info),
Entry::Variable(entry) => entry.allocate(variable_descriptor_count, &self.create_info),
Entry::Fixed(entry) => entry.allocate(layout, &self.create_info),
Entry::Variable(entry) => {
entry.allocate(layout, variable_descriptor_count, &self.create_info)
}
}
}
#[inline]
unsafe fn deallocate(&self, allocation: DescriptorSetAlloc) {
let is_fixed = allocation.inner.variable_descriptor_count() == 0;
let ptr = allocation.handle.as_ptr();
if is_fixed {
// SAFETY: The caller must guarantee that `allocation` refers to one allocated by
// `self`, therefore `ptr` must be the same one we gave out on allocation. We also know
// that the pointer must be valid, because the caller must guarantee that the same
// allocation isn't deallocated more than once. That means that since we cloned the
// `Arc` on allocation, at least that strong reference must still keep it alive, and we
// can safely drop this clone at the end of the scope here.
let reserve = unsafe { Arc::from_raw(ptr.cast::<ArrayQueue<DescriptorPoolAlloc>>()) };
// This cannot happen because every allocation is (supposed to be) returned to the pool
// whence it came, so there must be enough room for it.
debug_assert!(reserve.push(allocation.inner).is_ok());
} else {
// SAFETY: Same as the `Arc::from_raw` above.
let reserve = unsafe { Arc::from_raw(ptr.cast::<ArrayQueue<Arc<DescriptorPool>>>()) };
let pool = allocation.pool;
// We have to make sure that we only reset the pool under this condition, because there
// could be other references in other allocations. If the user cloned the pool
// themselves, that will most certainly cause a leak.
if Arc::strong_count(&pool) == 1 {
// If there is not enough space in the reserve, we destroy the pool. The only way
// this can happen is if something is resource hogging, forcing new pools to be
// created such that the number exceeds `MAX_POOLS`, and then drops them all at
// once.
let _ = reserve.push(pool);
}
}
}
}
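
The pointer juggling in `allocate`/`deallocate` above boils down to handing a cloned `Arc` of the reserve out through the allocation handle and reclaiming it on deallocation. A stripped-down sketch of that pattern using only `crossbeam-queue` and plain types (`PoolAlloc` is a stand-in, not a vulkano type):

```rust
use crossbeam_queue::ArrayQueue;
use std::sync::Arc;

// Stand-in for `DescriptorPoolAlloc`.
struct PoolAlloc;

fn main() {
    let reserve: Arc<ArrayQueue<PoolAlloc>> = Arc::new(ArrayQueue::new(4));

    // On allocation: give out the set together with a raw pointer to a clone of the reserve
    // (this is what ends up inside `AllocationHandle::from_ptr`).
    let handle_ptr = Arc::into_raw(reserve.clone()) as *mut ();

    // On deallocation: rebuild the `Arc` from the handle and return the set to the reserve.
    // SAFETY: the pointer came from `Arc::into_raw` above and is consumed exactly once.
    let reclaimed = unsafe { Arc::from_raw(handle_ptr.cast::<ArrayQueue<PoolAlloc>>()) };
    let _ = reclaimed.push(PoolAlloc);

    assert_eq!(reserve.len(), 1);
}
```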
unsafe impl<T: DescriptorSetAllocator> DescriptorSetAllocator for Arc<T> {
type Alloc = T::Alloc;
#[inline]
fn allocate(
&self,
layout: &Arc<DescriptorSetLayout>,
variable_descriptor_count: u32,
) -> Result<Self::Alloc, Validated<VulkanError>> {
) -> Result<DescriptorSetAlloc, Validated<VulkanError>> {
(**self).allocate(layout, variable_descriptor_count)
}
#[inline]
unsafe fn deallocate(&self, allocation: DescriptorSetAlloc) {
(**self).deallocate(allocation)
}
}
unsafe impl DeviceOwned for StandardDescriptorSetAllocator {
@ -184,59 +319,16 @@ unsafe impl DeviceOwned for StandardDescriptorSetAllocator {
#[derive(Debug)]
struct FixedEntry {
// The `FixedPool` struct contains an actual Vulkan pool. Every time it is full we create
// a new pool and replace the current one with the new one.
pool: Arc<FixedPool>,
// The descriptor set layout that this pool is for.
layout: Arc<DescriptorSetLayout>,
pool: Arc<DescriptorPool>,
reserve: Arc<ArrayQueue<DescriptorPoolAlloc>>,
}
impl FixedEntry {
fn new(
layout: Arc<DescriptorSetLayout>,
create_info: &StandardDescriptorSetAllocatorCreateInfo,
) -> Result<Self, Validated<VulkanError>> {
Ok(FixedEntry {
pool: FixedPool::new(&layout, create_info)?,
layout,
})
}
fn allocate(
&mut self,
create_info: &StandardDescriptorSetAllocatorCreateInfo,
) -> Result<StandardDescriptorSetAlloc, Validated<VulkanError>> {
let inner = if let Some(inner) = self.pool.reserve.pop() {
inner
} else {
self.pool = FixedPool::new(&self.layout, create_info)?;
self.pool.reserve.pop().unwrap()
};
Ok(StandardDescriptorSetAlloc {
inner: ManuallyDrop::new(inner),
parent: AllocParent::Fixed(self.pool.clone()),
})
}
}
#[derive(Debug)]
struct FixedPool {
// The actual Vulkan descriptor pool. This field isn't actually used anywhere, but we need to
// keep the pool alive in order to keep the descriptor sets valid.
inner: DescriptorPool,
// List of descriptor sets. When `alloc` is called, a descriptor will be extracted from this
// list. When a `SingleLayoutPoolAlloc` is dropped, its descriptor set is put back in this list.
reserve: ArrayQueue<DescriptorPoolAlloc>,
}
impl FixedPool {
fn new(
layout: &Arc<DescriptorSetLayout>,
create_info: &StandardDescriptorSetAllocatorCreateInfo,
) -> Result<Arc<Self>, Validated<VulkanError>> {
let inner = DescriptorPool::new(
) -> Result<Self, Validated<VulkanError>> {
let pool = DescriptorPool::new(
layout.device().clone(),
DescriptorPoolCreateInfo {
flags: create_info
@ -260,135 +352,68 @@ impl FixedPool {
let allocate_infos =
(0..create_info.set_count).map(|_| DescriptorSetAllocateInfo::new(layout.clone()));
let allocs = unsafe {
inner
.allocate_descriptor_sets(allocate_infos)
.map_err(|err| match err {
let allocs =
unsafe { pool.allocate_descriptor_sets(allocate_infos) }.map_err(|err| match err {
Validated::ValidationError(_) => err,
Validated::Error(vk_err) => match vk_err {
VulkanError::OutOfHostMemory | VulkanError::OutOfDeviceMemory => err,
VulkanError::FragmentedPool => {
// This can't happen as we don't free individual sets.
unreachable!();
}
VulkanError::OutOfPoolMemory => {
VulkanError::FragmentedPool => unreachable!(),
// We created the pool with an exact size.
unreachable!();
}
_ => {
VulkanError::OutOfPoolMemory => unreachable!(),
// Shouldn't ever be returned.
unreachable!();
}
_ => unreachable!(),
},
})?
};
})?;
let reserve = ArrayQueue::new(create_info.set_count);
for alloc in allocs {
let _ = reserve.push(alloc);
}
Ok(Arc::new(FixedPool { inner, reserve }))
Ok(FixedEntry {
pool: Arc::new(pool),
reserve: Arc::new(reserve),
})
}
fn allocate(
&mut self,
layout: &Arc<DescriptorSetLayout>,
create_info: &StandardDescriptorSetAllocatorCreateInfo,
) -> Result<DescriptorSetAlloc, Validated<VulkanError>> {
let inner = if let Some(inner) = self.reserve.pop() {
inner
} else {
*self = FixedEntry::new(layout, create_info)?;
self.reserve.pop().unwrap()
};
Ok(DescriptorSetAlloc {
inner,
pool: self.pool.clone(),
handle: AllocationHandle::from_ptr(Arc::into_raw(self.reserve.clone()) as _),
})
}
}
#[derive(Debug)]
struct VariableEntry {
// The `VariablePool` struct contains an actual Vulkan pool. Every time it is full
// we grab one from the reserve, or create a new pool if there are none.
pool: Arc<VariablePool>,
// When a `VariablePool` is dropped, it returns its Vulkan pool here for reuse.
reserve: Arc<ArrayQueue<DescriptorPool>>,
// The descriptor set layout that this pool is for.
layout: Arc<DescriptorSetLayout>,
pool: Arc<DescriptorPool>,
reserve: Arc<ArrayQueue<Arc<DescriptorPool>>>,
// The number of sets currently allocated from the Vulkan pool.
allocations: usize,
}
impl VariableEntry {
fn new(
layout: Arc<DescriptorSetLayout>,
layout: &DescriptorSetLayout,
create_info: &StandardDescriptorSetAllocatorCreateInfo,
reserve: Arc<ArrayQueue<Arc<DescriptorPool>>>,
) -> Result<Self, Validated<VulkanError>> {
let reserve = Arc::new(ArrayQueue::new(MAX_POOLS));
Ok(VariableEntry {
pool: VariablePool::new(&layout, reserve.clone(), create_info)?,
reserve,
layout,
allocations: 0,
})
}
fn allocate(
&mut self,
variable_descriptor_count: u32,
create_info: &StandardDescriptorSetAllocatorCreateInfo,
) -> Result<StandardDescriptorSetAlloc, Validated<VulkanError>> {
if self.allocations >= create_info.set_count {
self.pool = if let Some(inner) = self.reserve.pop() {
Arc::new(VariablePool {
inner: ManuallyDrop::new(inner),
reserve: self.reserve.clone(),
})
} else {
VariablePool::new(&self.layout, self.reserve.clone(), create_info)?
};
self.allocations = 0;
}
let allocate_info = DescriptorSetAllocateInfo {
variable_descriptor_count,
..DescriptorSetAllocateInfo::new(self.layout.clone())
};
let mut sets = unsafe {
self.pool
.inner
.allocate_descriptor_sets([allocate_info])
.map_err(|err| match err {
Validated::ValidationError(_) => err,
Validated::Error(vk_err) => match vk_err {
VulkanError::OutOfHostMemory | VulkanError::OutOfDeviceMemory => err,
VulkanError::FragmentedPool => {
// This can't happen as we don't free individual sets.
unreachable!();
}
VulkanError::OutOfPoolMemory => {
// We created the pool to fit the maximum variable descriptor count.
unreachable!();
}
_ => {
// Shouldn't ever be returned.
unreachable!();
}
},
})?
};
self.allocations += 1;
Ok(StandardDescriptorSetAlloc {
inner: ManuallyDrop::new(sets.next().unwrap()),
parent: AllocParent::Variable(self.pool.clone()),
})
}
}
#[derive(Debug)]
struct VariablePool {
// The actual Vulkan descriptor pool.
inner: ManuallyDrop<DescriptorPool>,
// Where we return the Vulkan descriptor pool in our `Drop` impl.
reserve: Arc<ArrayQueue<DescriptorPool>>,
}
impl VariablePool {
fn new(
layout: &Arc<DescriptorSetLayout>,
reserve: Arc<ArrayQueue<DescriptorPool>>,
create_info: &StandardDescriptorSetAllocatorCreateInfo,
) -> Result<Arc<Self>, VulkanError> {
DescriptorPool::new(
let pool = DescriptorPool::new(
layout.device().clone(),
DescriptorPoolCreateInfo {
flags: create_info
@ -407,30 +432,82 @@ impl VariablePool {
..Default::default()
},
)
.map(|inner| {
Arc::new(Self {
inner: ManuallyDrop::new(inner),
.map_err(Validated::unwrap)?;
Ok(VariableEntry {
pool: Arc::new(pool),
reserve,
allocations: 0,
})
})
.map_err(Validated::unwrap)
}
}
impl Drop for VariablePool {
fn drop(&mut self) {
let inner = unsafe { ManuallyDrop::take(&mut self.inner) };
if thread::panicking() {
return;
}
unsafe { inner.reset() }.unwrap();
fn allocate(
&mut self,
layout: &Arc<DescriptorSetLayout>,
variable_descriptor_count: u32,
create_info: &StandardDescriptorSetAllocatorCreateInfo,
) -> Result<DescriptorSetAlloc, Validated<VulkanError>> {
if self.allocations >= create_info.set_count {
// This can happen if there's only ever one allocation alive at any point in time. In
// that case, when deallocating the last set before reaching `set_count`, there will be
// 2 references to the pool (one here and one in the allocation) and so the pool won't
// be returned to the reserve when deallocating. However, since there are no other
// allocations alive, there would be no other allocations that could return it to the
// reserve. To avoid dropping the pool unnecessarily, we simply continue using it. In
// the case where there are other references, we drop ours, at which point an
// allocation still holding a reference will be able to put the pool into the reserve
// when deallocated. If the user created a reference themselves, that will most certainly
// lead to a memory leak.
//
// TODO: This can still run into the A/B/A problem causing the pool to be dropped.
if Arc::strong_count(&self.pool) == 1 {
// SAFETY: We checked that the pool has a single strong reference above, meaning
// that all the allocations we gave out must have been deallocated.
unsafe { self.pool.reset() }?;
// If there is not enough space in the reserve, we destroy the pool. The only way this can
// happen is if something is resource hogging, forcing new pools to be created such that
// the number exceeds `MAX_POOLS`, and then drops them all at once.
let _ = self.reserve.push(inner);
self.allocations = 0;
} else {
if let Some(pool) = self.reserve.pop() {
// SAFETY: We checked that the pool has a single strong reference when
// deallocating, meaning that all the allocations we gave out must have been
// deallocated.
unsafe { pool.reset() }?;
self.pool = pool;
self.allocations = 0;
} else {
*self = VariableEntry::new(layout, create_info, self.reserve.clone())?;
}
}
}
let allocate_info = DescriptorSetAllocateInfo {
variable_descriptor_count,
..DescriptorSetAllocateInfo::new(layout.clone())
};
let mut sets = unsafe { self.pool.allocate_descriptor_sets([allocate_info]) }.map_err(
|err| match err {
Validated::ValidationError(_) => err,
Validated::Error(vk_err) => match vk_err {
VulkanError::OutOfHostMemory | VulkanError::OutOfDeviceMemory => err,
// This can't happen as we don't free individual sets.
VulkanError::FragmentedPool => unreachable!(),
// We created the pool to fit the maximum variable descriptor count.
VulkanError::OutOfPoolMemory => unreachable!(),
// Shouldn't ever be returned.
_ => unreachable!(),
},
},
)?;
self.allocations += 1;
Ok(DescriptorSetAlloc {
inner: sets.next().unwrap(),
pool: self.pool.clone(),
handle: AllocationHandle::from_ptr(Arc::into_raw(self.reserve.clone()) as _),
})
}
}
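
The comment above is essentially a sole-ownership check: the Vulkan pool may only be reset when the entry holds the last strong reference, because otherwise some live allocation could still point into it. A toy sketch of that decision, detached from any vulkano types:

```rust
use std::sync::Arc;

// Toy stand-in; in the real allocator this is an `Arc<DescriptorPool>` and `reset` is the
// unsafe `DescriptorPool::reset`, sound only once no allocation references the pool.
struct Pool;

impl Pool {
    fn reset(&self) {}
}

/// Reuse the pool in place if we are its sole owner; otherwise it has to be replaced, and an
/// outstanding allocation will return it to the reserve later.
fn try_reuse(pool: &Arc<Pool>) -> bool {
    if Arc::strong_count(pool) == 1 {
        pool.reset();
        true
    } else {
        false
    }
}

fn main() {
    let pool = Arc::new(Pool);
    assert!(try_reuse(&pool)); // sole owner: safe to reset and keep using

    let outstanding = pool.clone(); // simulates a live allocation still holding the pool
    assert!(!try_reuse(&pool));
    drop(outstanding);
}
```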
@ -475,63 +552,6 @@ impl Default for StandardDescriptorSetAllocatorCreateInfo {
}
}
/// A descriptor set allocated from a [`StandardDescriptorSetAllocator`].
#[derive(Debug)]
pub struct StandardDescriptorSetAlloc {
// The actual descriptor set.
inner: ManuallyDrop<DescriptorPoolAlloc>,
// The pool where we allocated from. Needed for our `Drop` impl.
parent: AllocParent,
}
#[derive(Debug)]
enum AllocParent {
Fixed(Arc<FixedPool>),
Variable(Arc<VariablePool>),
}
impl AllocParent {
#[inline]
fn pool(&self) -> &DescriptorPool {
match self {
Self::Fixed(pool) => &pool.inner,
Self::Variable(pool) => &pool.inner,
}
}
}
// This is needed because of the blanket impl of `Send` on `Arc<T>`, which requires that `T` is
// `Send + Sync`. `FixedPool` and `VariablePool` are `Send + !Sync` because `DescriptorPool` is
// `!Sync`. That's fine however because we never access the `DescriptorPool` concurrently.
unsafe impl Send for StandardDescriptorSetAlloc {}
unsafe impl Sync for StandardDescriptorSetAlloc {}
impl DescriptorSetAlloc for StandardDescriptorSetAlloc {
#[inline]
fn inner(&self) -> &DescriptorPoolAlloc {
&self.inner
}
#[inline]
fn pool(&self) -> &DescriptorPool {
self.parent.pool()
}
}
impl Drop for StandardDescriptorSetAlloc {
#[inline]
fn drop(&mut self) {
let inner = unsafe { ManuallyDrop::take(&mut self.inner) };
match &self.parent {
AllocParent::Fixed(pool) => {
let _ = pool.reserve.push(inner);
}
AllocParent::Variable(_) => {}
}
}
}
mod sorted_map {
use smallvec::SmallVec;
@ -573,58 +593,3 @@ mod sorted_map {
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::{
descriptor_set::layout::{
DescriptorSetLayoutBinding, DescriptorSetLayoutCreateInfo, DescriptorType,
},
shader::ShaderStages,
VulkanObject,
};
use std::thread;
#[test]
fn threads_use_different_pools() {
let (device, _) = gfx_dev_and_queue!();
let layout = DescriptorSetLayout::new(
device.clone(),
DescriptorSetLayoutCreateInfo {
bindings: [(
0,
DescriptorSetLayoutBinding {
stages: ShaderStages::all_graphics(),
..DescriptorSetLayoutBinding::descriptor_type(DescriptorType::UniformBuffer)
},
)]
.into(),
..Default::default()
},
)
.unwrap();
let allocator = StandardDescriptorSetAllocator::new(device, Default::default());
let pool1 =
if let AllocParent::Fixed(pool) = &allocator.allocate(&layout, 0).unwrap().parent {
pool.inner.handle()
} else {
unreachable!()
};
thread::spawn(move || {
let pool2 =
if let AllocParent::Fixed(pool) = &allocator.allocate(&layout, 0).unwrap().parent {
pool.inner.handle()
} else {
unreachable!()
};
assert_ne!(pool1, pool2);
})
.join()
.unwrap();
}
}

View File

@ -19,9 +19,8 @@ use super::{
};
use crate::{
descriptor_set::{
allocator::{DescriptorSetAlloc, DescriptorSetAllocator, StandardDescriptorSetAlloc},
update::WriteDescriptorSet,
DescriptorSet, DescriptorSetLayout, DescriptorSetResources,
allocator::DescriptorSetAllocator, update::WriteDescriptorSet, DescriptorSet,
DescriptorSetLayout, DescriptorSetResources,
},
device::{Device, DeviceOwned},
Validated, ValidationError, VulkanError, VulkanObject,
@ -33,8 +32,8 @@ use std::{
};
/// A simple, immutable descriptor set that is expected to be long-lived.
pub struct PersistentDescriptorSet<P = StandardDescriptorSetAlloc> {
inner: UnsafeDescriptorSet<P>,
pub struct PersistentDescriptorSet {
inner: UnsafeDescriptorSet,
resources: DescriptorSetResources,
}
@ -43,15 +42,12 @@ impl PersistentDescriptorSet {
///
/// See `new_with_pool` for more.
#[inline]
pub fn new<A>(
allocator: &A,
pub fn new(
allocator: Arc<dyn DescriptorSetAllocator>,
layout: Arc<DescriptorSetLayout>,
descriptor_writes: impl IntoIterator<Item = WriteDescriptorSet>,
descriptor_copies: impl IntoIterator<Item = CopyDescriptorSet>,
) -> Result<Arc<PersistentDescriptorSet<A::Alloc>>, Validated<VulkanError>>
where
A: DescriptorSetAllocator + ?Sized,
{
) -> Result<Arc<PersistentDescriptorSet>, Validated<VulkanError>> {
Self::new_variable(allocator, layout, 0, descriptor_writes, descriptor_copies)
}
@ -62,16 +58,13 @@ impl PersistentDescriptorSet {
///
/// - Panics if `layout` was created for push descriptors rather than descriptor sets.
/// - Panics if `variable_descriptor_count` is too large for the given `layout`.
pub fn new_variable<A>(
allocator: &A,
pub fn new_variable(
allocator: Arc<dyn DescriptorSetAllocator>,
layout: Arc<DescriptorSetLayout>,
variable_descriptor_count: u32,
descriptor_writes: impl IntoIterator<Item = WriteDescriptorSet>,
descriptor_copies: impl IntoIterator<Item = CopyDescriptorSet>,
) -> Result<Arc<PersistentDescriptorSet<A::Alloc>>, Validated<VulkanError>>
where
A: DescriptorSetAllocator + ?Sized,
{
) -> Result<Arc<PersistentDescriptorSet>, Validated<VulkanError>> {
let mut set = PersistentDescriptorSet {
inner: UnsafeDescriptorSet::new(allocator, &layout, variable_descriptor_count)?,
resources: DescriptorSetResources::new(&layout, variable_descriptor_count),
@ -83,12 +76,7 @@ impl PersistentDescriptorSet {
Ok(Arc::new(set))
}
}
impl<P> PersistentDescriptorSet<P>
where
P: DescriptorSetAlloc,
{
unsafe fn update(
&mut self,
descriptor_writes: impl IntoIterator<Item = WriteDescriptorSet>,
@ -113,58 +101,50 @@ where
}
}
unsafe impl<P> DescriptorSet for PersistentDescriptorSet<P>
where
P: DescriptorSetAlloc,
{
unsafe impl DescriptorSet for PersistentDescriptorSet {
#[inline]
fn alloc(&self) -> &DescriptorPoolAlloc {
self.inner.alloc().inner()
&self.inner.alloc().inner
}
#[inline]
fn pool(&self) -> &DescriptorPool {
self.inner.alloc().pool()
&self.inner.alloc().pool
}
#[inline]
fn resources(&self) -> &DescriptorSetResources {
&self.resources
}
}
unsafe impl<P> VulkanObject for PersistentDescriptorSet<P>
where
P: DescriptorSetAlloc,
{
unsafe impl VulkanObject for PersistentDescriptorSet {
type Handle = ash::vk::DescriptorSet;
#[inline]
fn handle(&self) -> Self::Handle {
self.inner.handle()
}
}
unsafe impl<P> DeviceOwned for PersistentDescriptorSet<P>
where
P: DescriptorSetAlloc,
{
unsafe impl DeviceOwned for PersistentDescriptorSet {
#[inline]
fn device(&self) -> &Arc<Device> {
self.layout().device()
}
}
impl<P> PartialEq for PersistentDescriptorSet<P>
where
P: DescriptorSetAlloc,
{
impl PartialEq for PersistentDescriptorSet {
#[inline]
fn eq(&self, other: &Self) -> bool {
self.inner == other.inner
}
}
impl<P> Eq for PersistentDescriptorSet<P> where P: DescriptorSetAlloc {}
impl Eq for PersistentDescriptorSet {}
impl<P> Hash for PersistentDescriptorSet<P>
where
P: DescriptorSetAlloc,
{
impl Hash for PersistentDescriptorSet {
#[inline]
fn hash<H: Hasher>(&self, state: &mut H) {
self.inner.hash(state);
}

View File

@ -1,7 +1,7 @@
//! Low-level descriptor set.
use super::{
allocator::{DescriptorSetAlloc, DescriptorSetAllocator, StandardDescriptorSetAlloc},
allocator::{DescriptorSetAlloc, DescriptorSetAllocator},
pool::DescriptorPool,
CopyDescriptorSet,
};
@ -14,60 +14,61 @@ use crate::{
Validated, ValidationError, VulkanError, VulkanObject,
};
use smallvec::SmallVec;
use std::{fmt::Debug, hash::Hash, sync::Arc};
use std::{
fmt::Debug,
hash::{Hash, Hasher},
mem::ManuallyDrop,
sync::Arc,
};
/// Low-level descriptor set.
///
/// This descriptor set does not keep track of synchronization,
/// nor does it store any information on what resources have been written to each descriptor.
#[derive(Debug)]
pub struct UnsafeDescriptorSet<P = StandardDescriptorSetAlloc> {
alloc: P,
pub struct UnsafeDescriptorSet {
allocation: ManuallyDrop<DescriptorSetAlloc>,
allocator: Arc<dyn DescriptorSetAllocator>,
}
impl UnsafeDescriptorSet {
/// Allocates a new descriptor set and returns it.
#[inline]
pub fn new<A>(
allocator: &A,
pub fn new(
allocator: Arc<dyn DescriptorSetAllocator>,
layout: &Arc<DescriptorSetLayout>,
variable_descriptor_count: u32,
) -> Result<UnsafeDescriptorSet<A::Alloc>, Validated<VulkanError>>
where
A: DescriptorSetAllocator + ?Sized,
{
) -> Result<UnsafeDescriptorSet, Validated<VulkanError>> {
let allocation = allocator.allocate(layout, variable_descriptor_count)?;
Ok(UnsafeDescriptorSet {
alloc: allocator.allocate(layout, variable_descriptor_count)?,
allocation: ManuallyDrop::new(allocation),
allocator,
})
}
}
impl<P> UnsafeDescriptorSet<P>
where
P: DescriptorSetAlloc,
{
/// Returns the allocation of this descriptor set.
#[inline]
pub fn alloc(&self) -> &P {
&self.alloc
pub fn alloc(&self) -> &DescriptorSetAlloc {
&self.allocation
}
/// Returns the descriptor pool that the descriptor set was allocated from.
#[inline]
pub fn pool(&self) -> &DescriptorPool {
self.alloc.pool()
&self.allocation.pool
}
/// Returns the layout of this descriptor set.
#[inline]
pub fn layout(&self) -> &Arc<DescriptorSetLayout> {
self.alloc.inner().layout()
self.allocation.inner.layout()
}
/// Returns the variable descriptor count that this descriptor set was allocated with.
#[inline]
pub fn variable_descriptor_count(&self) -> u32 {
self.alloc.inner().variable_descriptor_count()
self.allocation.inner.variable_descriptor_count()
}
/// Updates the descriptor set with new values.
@ -205,46 +206,42 @@ where
}
}
unsafe impl<P> VulkanObject for UnsafeDescriptorSet<P>
where
P: DescriptorSetAlloc,
{
impl Drop for UnsafeDescriptorSet {
#[inline]
fn drop(&mut self) {
let allocation = unsafe { ManuallyDrop::take(&mut self.allocation) };
unsafe { self.allocator.deallocate(allocation) };
}
}
unsafe impl VulkanObject for UnsafeDescriptorSet {
type Handle = ash::vk::DescriptorSet;
#[inline]
fn handle(&self) -> Self::Handle {
self.alloc.inner().handle()
self.allocation.inner.handle()
}
}
unsafe impl<P> DeviceOwned for UnsafeDescriptorSet<P>
where
P: DescriptorSetAlloc,
{
unsafe impl DeviceOwned for UnsafeDescriptorSet {
#[inline]
fn device(&self) -> &Arc<Device> {
self.alloc.inner().device()
self.allocation.inner.device()
}
}
impl<P> PartialEq for UnsafeDescriptorSet<P>
where
P: DescriptorSetAlloc,
{
impl PartialEq for UnsafeDescriptorSet {
#[inline]
fn eq(&self, other: &Self) -> bool {
self.alloc.inner() == other.alloc.inner()
self.allocation.inner == other.allocation.inner
}
}
impl<P> Eq for UnsafeDescriptorSet<P> where P: DescriptorSetAlloc {}
impl Eq for UnsafeDescriptorSet {}
impl<P> Hash for UnsafeDescriptorSet<P>
where
P: DescriptorSetAlloc,
{
impl Hash for UnsafeDescriptorSet {
#[inline]
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.alloc.inner().hash(state);
fn hash<H: Hasher>(&self, state: &mut H) {
self.allocation.inner.hash(state);
}
}
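
With the allocator now stored inside the set, deallocation is RAII: dropping an `UnsafeDescriptorSet` hands its allocation back through `deallocate`, as the `Drop` impl above shows. A small usage sketch of the new constructor, assuming `UnsafeDescriptorSet` remains exported from `vulkano::descriptor_set::sys`; the helper name is illustrative:

```rust
use std::sync::Arc;
use vulkano::{
    descriptor_set::{
        allocator::DescriptorSetAllocator, layout::DescriptorSetLayout, sys::UnsafeDescriptorSet,
    },
    Validated, VulkanError,
};

/// Allocates a raw, fixed-size set. When the returned value is dropped, its allocation goes
/// back to `allocator` via the new `Drop` impl.
fn allocate_raw_set(
    allocator: Arc<dyn DescriptorSetAllocator>,
    layout: &Arc<DescriptorSetLayout>,
) -> Result<UnsafeDescriptorSet, Validated<VulkanError>> {
    UnsafeDescriptorSet::new(allocator, layout, 0)
}
```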

View File

@ -1,5 +1,4 @@
use super::{
allocator::DescriptorSetAlloc,
layout::{DescriptorSetLayout, DescriptorType},
sys::UnsafeDescriptorSet,
DescriptorSet,
@ -1625,13 +1624,10 @@ impl CopyDescriptorSet {
}
}
pub(crate) fn validate<P>(
pub(crate) fn validate(
&self,
dst_set: &UnsafeDescriptorSet<P>,
) -> Result<(), Box<ValidationError>>
where
P: DescriptorSetAlloc,
{
dst_set: &UnsafeDescriptorSet,
) -> Result<(), Box<ValidationError>> {
let &Self {
ref src_set,
src_binding,

View File

@ -40,19 +40,25 @@
//!
//! # let device: std::sync::Arc<vulkano::device::Device> = return;
//! # let memory_allocator: std::sync::Arc<vulkano::memory::allocator::StandardMemoryAllocator> = return;
//! # let descriptor_set_allocator: vulkano::descriptor_set::allocator::StandardDescriptorSetAllocator = return;
//! # let descriptor_set_allocator: std::sync::Arc<vulkano::descriptor_set::allocator::StandardDescriptorSetAllocator> = return;
//! #
//! let conversion = SamplerYcbcrConversion::new(device.clone(), SamplerYcbcrConversionCreateInfo {
//! let conversion = SamplerYcbcrConversion::new(
//! device.clone(),
//! SamplerYcbcrConversionCreateInfo {
//! format: Format::G8_B8_R8_3PLANE_420_UNORM,
//! ycbcr_model: SamplerYcbcrModelConversion::YcbcrIdentity,
//! ..Default::default()
//! })
//! },
//! )
//! .unwrap();
//!
//! let sampler = Sampler::new(device.clone(), SamplerCreateInfo {
//! let sampler = Sampler::new(
//! device.clone(),
//! SamplerCreateInfo {
//! sampler_ycbcr_conversion: Some(conversion.clone()),
//! ..Default::default()
//! })
//! },
//! )
//! .unwrap();
//!
//! let descriptor_set_layout = DescriptorSetLayout::new(
@ -63,7 +69,9 @@
//! DescriptorSetLayoutBinding {
//! stages: ShaderStage::Fragment.into(),
//! immutable_samplers: vec![sampler],
//! ..DescriptorSetLayoutBinding::descriptor_type(DescriptorType::CombinedImageSampler)
//! ..DescriptorSetLayoutBinding::descriptor_type(
//! DescriptorType::CombinedImageSampler
//! )
//! },
//! )]
//! .into(),
@ -92,7 +100,7 @@
//! let image_view = ImageView::new(image, create_info).unwrap();
//!
//! let descriptor_set = PersistentDescriptorSet::new(
//! &descriptor_set_allocator,
//! descriptor_set_allocator.clone(),
//! descriptor_set_layout.clone(),
//! [WriteDescriptorSet::image_view(0, image_view)],
//! [],

View File

@ -540,9 +540,12 @@ mod tests {
)
.unwrap();
let ds_allocator = StandardDescriptorSetAllocator::new(device.clone(), Default::default());
let ds_allocator = Arc::new(StandardDescriptorSetAllocator::new(
device.clone(),
Default::default(),
));
let set = PersistentDescriptorSet::new(
&ds_allocator,
ds_allocator,
pipeline.layout().set_layouts().get(0).unwrap().clone(),
[WriteDescriptorSet::buffer(0, data_buffer.clone())],
[],
@ -681,9 +684,12 @@ mod tests {
)
.unwrap();
let ds_allocator = StandardDescriptorSetAllocator::new(device.clone(), Default::default());
let ds_allocator = Arc::new(StandardDescriptorSetAllocator::new(
device.clone(),
Default::default(),
));
let set = PersistentDescriptorSet::new(
&ds_allocator,
ds_allocator,
pipeline.layout().set_layouts().get(0).unwrap().clone(),
[WriteDescriptorSet::buffer(0, data_buffer.clone())],
[],