Add basic synchronization tracking to CommandBufferBuilder (#2099)

This commit is contained in:
Rua 2022-12-07 11:06:06 +01:00 committed by GitHub
parent def369dced
commit 10d7349556
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
16 changed files with 2898 additions and 267 deletions

View File

@ -23,6 +23,7 @@ crossbeam-queue = "0.3"
half = "2"
libloading = "0.7"
nalgebra = { version = "0.31.0", optional = true }
once_cell = "1.16"
parking_lot = { version = "0.12", features = ["send_guard"] }
smallvec = "1.8"
thread_local = "1.1"

View File

@ -597,6 +597,7 @@ pub enum ResourceInCommand {
DescriptorSet { set: u32, binding: u32, index: u32 },
Destination,
FramebufferAttachment { index: u32 },
ImageMemoryBarrier { index: u32 },
IndexBuffer,
IndirectBuffer,
SecondaryCommandBuffer { index: u32 },

View File

@ -166,7 +166,7 @@ where
dynamic_offsets.as_ptr(),
);
let state = self.current_state.invalidate_descriptor_sets(
let state = self.builder_state.invalidate_descriptor_sets(
pipeline_bind_point,
pipeline_layout.clone(),
first_set,
@ -189,6 +189,7 @@ where
self.resources.push(Box::new(pipeline_layout));
self.next_command_index += 1;
self
}
@ -271,9 +272,10 @@ where
index_type.into(),
);
self.current_state.index_buffer = Some((buffer.clone(), index_type));
self.builder_state.index_buffer = Some((buffer.clone(), index_type));
self.resources.push(Box::new(buffer));
self.next_command_index += 1;
self
}
@ -321,9 +323,10 @@ where
pipeline.handle(),
);
self.current_state.pipeline_compute = Some(pipeline.clone());
self.builder_state.pipeline_compute = Some(pipeline.clone());
self.resources.push(Box::new(pipeline));
self.next_command_index += 1;
self
}
@ -357,12 +360,12 @@ where
assert_eq!(self.device(), pipeline.device());
if let Some(last_pipeline) =
self.current_state
self.builder_state
.render_pass
.as_ref()
.and_then(|render_pass_state| match &render_pass_state.render_pass {
RenderPassStateType::BeginRendering(state) if state.pipeline_used => {
self.current_state.pipeline_graphics.as_ref()
self.builder_state.pipeline_graphics.as_ref()
}
_ => None,
})
@ -416,15 +419,16 @@ where
// Reset any states that are fixed in the new pipeline. The pipeline bind command will
// overwrite these states.
self.current_state.reset_dynamic_states(
self.builder_state.reset_dynamic_states(
pipeline
.dynamic_states()
.filter(|(_, d)| !d) // not dynamic
.map(|(s, _)| s),
);
self.current_state.pipeline_graphics = Some(pipeline.clone());
self.builder_state.pipeline_graphics = Some(pipeline.clone());
self.resources.push(Box::new(pipeline));
self.next_command_index += 1;
self
}
@ -532,12 +536,13 @@ where
self.resources.reserve(buffers.len());
for (i, buffer) in buffers.into_iter().enumerate() {
self.current_state
self.builder_state
.vertex_buffers
.insert(first_binding + i as u32, buffer.clone());
self.resources.push(Box::new(buffer));
}
self.next_command_index += 1;
self
}
@ -667,13 +672,13 @@ where
// push constants as set, and never unsets them. See:
// https://github.com/KhronosGroup/Vulkan-Docs/issues/1485
// https://github.com/KhronosGroup/Vulkan-ValidationLayers/issues/2711
self.current_state
self.builder_state
.push_constants
.insert(offset..offset + push_constants.len() as u32);
self.current_state.push_constants_pipeline_layout = Some(pipeline_layout.clone());
self.builder_state.push_constants_pipeline_layout = Some(pipeline_layout.clone());
self.resources.push(Box::new(pipeline_layout));
self.next_command_index += 1;
self
}
@ -841,7 +846,7 @@ where
writes.as_ptr(),
);
let state = self.current_state.invalidate_descriptor_sets(
let state = self.builder_state.invalidate_descriptor_sets(
pipeline_bind_point,
pipeline_layout.clone(),
set_num,
@ -863,6 +868,7 @@ where
self.resources.push(Box::new(pipeline_layout));
self.next_command_index += 1;
self
}
}

View File

@ -13,10 +13,11 @@ use super::{
};
use crate::{
buffer::{BufferAccess, BufferContents, BufferUsage, TypedBufferAccess},
command_buffer::allocator::CommandBufferAllocator,
command_buffer::{allocator::CommandBufferAllocator, ResourceInCommand, ResourceUseRef},
device::{DeviceOwned, QueueFlags},
format::FormatFeatures,
image::{ImageAccess, ImageAspects, ImageLayout, ImageUsage},
sync::PipelineStageAccess,
DeviceSize, RequiresOneOf, Version, VulkanObject,
};
use smallvec::SmallVec;
@ -50,7 +51,7 @@ where
let device = self.device();
// VUID-vkCmdClearColorImage-renderpass
if self.current_state.render_pass.is_some() {
if self.builder_state.render_pass.is_some() {
return Err(ClearError::ForbiddenInsideRenderPass);
}
@ -193,6 +194,7 @@ where
return self;
}
let image_inner = image.inner();
let clear_value = clear_value.into();
let ranges: SmallVec<[_; 8]> = regions
.iter()
@ -203,17 +205,40 @@ where
let fns = self.device().fns();
(fns.v1_0.cmd_clear_color_image)(
self.handle(),
image.inner().image.handle(),
image_inner.image.handle(),
image_layout.into(),
&clear_value,
ranges.len() as u32,
ranges.as_ptr(),
);
let command_index = self.next_command_index;
let command_name = "clear_color_image";
let use_ref = ResourceUseRef {
command_index,
command_name,
resource_in_command: ResourceInCommand::Destination,
secondary_use_ref: None,
};
for mut subresource_range in regions {
subresource_range.array_layers.start += image_inner.first_layer;
subresource_range.array_layers.end += image_inner.first_layer;
subresource_range.mip_levels.start += image_inner.first_mipmap_level;
subresource_range.mip_levels.end += image_inner.first_mipmap_level;
self.resources_usage_state.record_image_access(
&use_ref,
image_inner.image,
subresource_range,
PipelineStageAccess::Clear_TransferWrite,
image_layout,
);
}
self.resources.push(Box::new(image));
// TODO: sync state update
self.next_command_index += 1;
self
}
@ -241,7 +266,7 @@ where
let device = self.device();
// VUID-vkCmdClearDepthStencilImage-renderpass
if self.current_state.render_pass.is_some() {
if self.builder_state.render_pass.is_some() {
return Err(ClearError::ForbiddenInsideRenderPass);
}
@ -402,6 +427,7 @@ where
return self;
}
let image_inner = image.inner();
let clear_value = clear_value.into();
let ranges: SmallVec<[_; 8]> = regions
.iter()
@ -412,17 +438,40 @@ where
let fns = self.device().fns();
(fns.v1_0.cmd_clear_depth_stencil_image)(
self.handle(),
image.inner().image.handle(),
image_inner.image.handle(),
image_layout.into(),
&clear_value,
ranges.len() as u32,
ranges.as_ptr(),
);
let command_index = self.next_command_index;
let command_name = "clear_depth_stencil_image";
let use_ref = ResourceUseRef {
command_index,
command_name,
resource_in_command: ResourceInCommand::Destination,
secondary_use_ref: None,
};
for mut subresource_range in regions {
subresource_range.array_layers.start += image_inner.first_layer;
subresource_range.array_layers.end += image_inner.first_layer;
subresource_range.mip_levels.start += image_inner.first_mipmap_level;
subresource_range.mip_levels.end += image_inner.first_mipmap_level;
self.resources_usage_state.record_image_access(
&use_ref,
image_inner.image,
subresource_range,
PipelineStageAccess::Clear_TransferWrite,
image_layout,
);
}
self.resources.push(Box::new(image));
// TODO: sync state update
self.next_command_index += 1;
self
}
@ -453,7 +502,7 @@ where
let device = self.device();
// VUID-vkCmdFillBuffer-renderpass
if self.current_state.render_pass.is_some() {
if self.builder_state.render_pass.is_some() {
return Err(ClearError::ForbiddenInsideRenderPass);
}
@ -554,10 +603,28 @@ where
data,
);
let command_index = self.next_command_index;
let command_name = "fill_buffer";
let use_ref = ResourceUseRef {
command_index,
command_name,
resource_in_command: ResourceInCommand::Destination,
secondary_use_ref: None,
};
let mut dst_range = dst_offset..dst_offset + size;
dst_range.start += dst_buffer_inner.offset;
dst_range.end += dst_buffer_inner.offset;
self.resources_usage_state.record_buffer_access(
&use_ref,
dst_buffer_inner.buffer,
dst_range,
PipelineStageAccess::Clear_TransferWrite,
);
self.resources.push(Box::new(dst_buffer));
// TODO: sync state update
self.next_command_index += 1;
self
}
@ -596,7 +663,7 @@ where
let device = self.device();
// VUID-vkCmdUpdateBuffer-renderpass
if self.current_state.render_pass.is_some() {
if self.builder_state.render_pass.is_some() {
return Err(ClearError::ForbiddenInsideRenderPass);
}
@ -684,10 +751,28 @@ where
data.as_bytes().as_ptr() as *const _,
);
let command_index = self.next_command_index;
let command_name = "update_buffer";
let use_ref = ResourceUseRef {
command_index,
command_name,
resource_in_command: ResourceInCommand::Destination,
secondary_use_ref: None,
};
let mut dst_range = dst_offset..dst_offset + size_of_val(data) as DeviceSize;
dst_range.start += dst_buffer_inner.offset;
dst_range.end += dst_buffer_inner.offset;
self.resources_usage_state.record_buffer_access(
&use_ref,
dst_buffer_inner.buffer,
dst_range,
PipelineStageAccess::Clear_TransferWrite,
);
self.resources.push(Box::new(dst_buffer));
// TODO: sync state update
self.next_command_index += 1;
self
}
}

View File

@ -14,14 +14,18 @@ use super::{
};
use crate::{
buffer::{BufferAccess, BufferUsage},
command_buffer::{allocator::CommandBufferAllocator, ImageBlit, ImageResolve},
command_buffer::{
allocator::CommandBufferAllocator, ImageBlit, ImageResolve, ResourceInCommand,
ResourceUseRef,
},
device::{DeviceOwned, QueueFlags},
format::{Format, FormatFeatures, NumericType},
image::{
ImageAccess, ImageAspects, ImageDimensions, ImageLayout, ImageSubresourceLayers, ImageType,
ImageUsage, SampleCount, SampleCounts,
ImageAccess, ImageAspects, ImageDimensions, ImageLayout, ImageSubresourceLayers,
ImageSubresourceRange, ImageType, ImageUsage, SampleCount, SampleCounts,
},
sampler::Filter,
sync::PipelineStageAccess,
DeviceSize, Version, VulkanObject,
};
use smallvec::SmallVec;
@ -57,7 +61,7 @@ where
let device = self.device();
// VUID-vkCmdCopyBuffer2-renderpass
if self.current_state.render_pass.is_some() {
if self.builder_state.render_pass.is_some() {
return Err(CopyError::ForbiddenInsideRenderPass);
}
@ -257,11 +261,54 @@ where
);
}
let command_index = self.next_command_index;
let command_name = "copy_buffer";
let src_use_ref = ResourceUseRef {
command_index,
command_name,
resource_in_command: ResourceInCommand::Source,
secondary_use_ref: None,
};
let dst_use_ref = ResourceUseRef {
command_index,
command_name,
resource_in_command: ResourceInCommand::Destination,
secondary_use_ref: None,
};
for region in regions {
let BufferCopy {
src_offset,
dst_offset,
size,
_ne: _,
} = region;
let mut src_range = src_offset..src_offset + size;
src_range.start += src_buffer_inner.offset;
src_range.end += src_buffer_inner.offset;
self.resources_usage_state.record_buffer_access(
&src_use_ref,
src_buffer_inner.buffer,
src_range,
PipelineStageAccess::Copy_TransferRead,
);
let mut dst_range = dst_offset..dst_offset + size;
dst_range.start += dst_buffer_inner.offset;
dst_range.end += dst_buffer_inner.offset;
self.resources_usage_state.record_buffer_access(
&dst_use_ref,
dst_buffer_inner.buffer,
dst_range,
PipelineStageAccess::Copy_TransferWrite,
);
}
self.resources.push(Box::new(src_buffer));
self.resources.push(Box::new(dst_buffer));
// TODO: sync state update
self.next_command_index += 1;
self
}
@ -305,7 +352,7 @@ where
let device = self.device();
// VUID-vkCmdCopyImage2-renderpass
if self.current_state.render_pass.is_some() {
if self.builder_state.render_pass.is_some() {
return Err(CopyError::ForbiddenInsideRenderPass);
}
@ -1127,11 +1174,62 @@ where
);
}
let command_index = self.next_command_index;
let command_name = "copy_image";
let src_use_ref = ResourceUseRef {
command_index,
command_name,
resource_in_command: ResourceInCommand::Source,
secondary_use_ref: None,
};
let dst_use_ref = ResourceUseRef {
command_index,
command_name,
resource_in_command: ResourceInCommand::Destination,
secondary_use_ref: None,
};
for region in regions {
let ImageCopy {
src_subresource,
src_offset: _,
dst_subresource,
dst_offset: _,
extent: _,
_ne: _,
} = region;
let mut src_subresource_range = ImageSubresourceRange::from(src_subresource);
src_subresource_range.array_layers.start += src_image_inner.first_layer;
src_subresource_range.array_layers.end += src_image_inner.first_layer;
src_subresource_range.mip_levels.start += src_image_inner.first_mipmap_level;
src_subresource_range.mip_levels.end += src_image_inner.first_mipmap_level;
self.resources_usage_state.record_image_access(
&src_use_ref,
src_image_inner.image,
src_subresource_range,
PipelineStageAccess::Copy_TransferRead,
src_image_layout,
);
let mut dst_subresource_range = ImageSubresourceRange::from(dst_subresource);
dst_subresource_range.array_layers.start += dst_image_inner.first_layer;
dst_subresource_range.array_layers.end += dst_image_inner.first_layer;
dst_subresource_range.mip_levels.start += dst_image_inner.first_mipmap_level;
dst_subresource_range.mip_levels.end += dst_image_inner.first_mipmap_level;
self.resources_usage_state.record_image_access(
&dst_use_ref,
dst_image_inner.image,
dst_subresource_range,
PipelineStageAccess::Copy_TransferWrite,
dst_image_layout,
);
}
self.resources.push(Box::new(src_image));
self.resources.push(Box::new(dst_image));
// TODO: sync state update
self.next_command_index += 1;
self
}
@ -1159,7 +1257,7 @@ where
let device = self.device();
// VUID-vkCmdCopyBufferToImage2-renderpass
if self.current_state.render_pass.is_some() {
if self.builder_state.render_pass.is_some() {
return Err(CopyError::ForbiddenInsideRenderPass);
}
@ -1705,11 +1803,62 @@ where
);
}
let command_index = self.next_command_index;
let command_name = "copy_buffer_to_image";
let src_use_ref = ResourceUseRef {
command_index,
command_name,
resource_in_command: ResourceInCommand::Source,
secondary_use_ref: None,
};
let dst_use_ref = ResourceUseRef {
command_index,
command_name,
resource_in_command: ResourceInCommand::Destination,
secondary_use_ref: None,
};
for region in regions {
let buffer_copy_size = region.buffer_copy_size(dst_image.format());
let BufferImageCopy {
buffer_offset,
buffer_row_length: _,
buffer_image_height: _,
image_subresource,
image_offset: _,
image_extent: _,
_ne: _,
} = region;
let mut src_range = buffer_offset..buffer_offset + buffer_copy_size;
src_range.start += src_buffer_inner.offset;
src_range.end += src_buffer_inner.offset;
self.resources_usage_state.record_buffer_access(
&src_use_ref,
src_buffer_inner.buffer,
src_range,
PipelineStageAccess::Copy_TransferRead,
);
let mut dst_subresource_range = ImageSubresourceRange::from(image_subresource);
dst_subresource_range.array_layers.start += dst_image_inner.first_layer;
dst_subresource_range.array_layers.end += dst_image_inner.first_layer;
dst_subresource_range.mip_levels.start += dst_image_inner.first_mipmap_level;
dst_subresource_range.mip_levels.end += dst_image_inner.first_mipmap_level;
self.resources_usage_state.record_image_access(
&dst_use_ref,
dst_image_inner.image,
dst_subresource_range,
PipelineStageAccess::Copy_TransferWrite,
dst_image_layout,
);
}
self.resources.push(Box::new(src_buffer));
self.resources.push(Box::new(dst_image));
// TODO: sync state update
self.next_command_index += 1;
self
}
@ -1737,7 +1886,7 @@ where
let device = self.device();
// VUID-vkCmdCopyImageToBuffer2-renderpass
if self.current_state.render_pass.is_some() {
if self.builder_state.render_pass.is_some() {
return Err(CopyError::ForbiddenInsideRenderPass);
}
@ -2273,11 +2422,62 @@ where
);
}
let command_index = self.next_command_index;
let command_name = "copy_image_to_buffer";
let src_use_ref = ResourceUseRef {
command_index,
command_name,
resource_in_command: ResourceInCommand::Source,
secondary_use_ref: None,
};
let dst_use_ref = ResourceUseRef {
command_index,
command_name,
resource_in_command: ResourceInCommand::Destination,
secondary_use_ref: None,
};
for region in regions {
let buffer_copy_size = region.buffer_copy_size(src_image.format());
let BufferImageCopy {
buffer_offset,
buffer_row_length: _,
buffer_image_height: _,
image_subresource,
image_offset: _,
image_extent: _,
_ne: _,
} = region;
let mut src_subresource_range = ImageSubresourceRange::from(image_subresource);
src_subresource_range.array_layers.start += src_image_inner.first_layer;
src_subresource_range.array_layers.end += src_image_inner.first_layer;
src_subresource_range.mip_levels.start += src_image_inner.first_mipmap_level;
src_subresource_range.mip_levels.end += src_image_inner.first_mipmap_level;
self.resources_usage_state.record_image_access(
&src_use_ref,
src_image_inner.image,
src_subresource_range,
PipelineStageAccess::Copy_TransferRead,
src_image_layout,
);
let mut dst_range = buffer_offset..buffer_offset + buffer_copy_size;
dst_range.start += dst_buffer_inner.offset;
dst_range.end += dst_buffer_inner.offset;
self.resources_usage_state.record_buffer_access(
&dst_use_ref,
dst_buffer_inner.buffer,
dst_range,
PipelineStageAccess::Copy_TransferWrite,
);
}
self.resources.push(Box::new(src_image));
self.resources.push(Box::new(dst_buffer));
// TODO: sync state update
self.next_command_index += 1;
self
}
@ -2331,7 +2531,7 @@ where
let device = self.device();
// VUID-vkCmdBlitImage2-renderpass
if self.current_state.render_pass.is_some() {
if self.builder_state.render_pass.is_some() {
return Err(CopyError::ForbiddenInsideRenderPass);
}
@ -2983,11 +3183,61 @@ where
);
}
let command_index = self.next_command_index;
let command_name = "blit_image";
let src_use_ref = ResourceUseRef {
command_index,
command_name,
resource_in_command: ResourceInCommand::Source,
secondary_use_ref: None,
};
let dst_use_ref = ResourceUseRef {
command_index,
command_name,
resource_in_command: ResourceInCommand::Destination,
secondary_use_ref: None,
};
for region in regions {
let ImageBlit {
src_subresource,
src_offsets: _,
dst_subresource,
dst_offsets: _,
_ne: _,
} = region;
let mut src_subresource_range = ImageSubresourceRange::from(src_subresource);
src_subresource_range.array_layers.start += src_image_inner.first_layer;
src_subresource_range.array_layers.end += src_image_inner.first_layer;
src_subresource_range.mip_levels.start += src_image_inner.first_mipmap_level;
src_subresource_range.mip_levels.end += src_image_inner.first_mipmap_level;
self.resources_usage_state.record_image_access(
&src_use_ref,
src_image_inner.image,
src_subresource_range,
PipelineStageAccess::Blit_TransferRead,
src_image_layout,
);
let mut dst_subresource_range = ImageSubresourceRange::from(dst_subresource);
dst_subresource_range.array_layers.start += dst_image_inner.first_layer;
dst_subresource_range.array_layers.end += dst_image_inner.first_layer;
dst_subresource_range.mip_levels.start += dst_image_inner.first_mipmap_level;
dst_subresource_range.mip_levels.end += dst_image_inner.first_mipmap_level;
self.resources_usage_state.record_image_access(
&dst_use_ref,
dst_image_inner.image,
dst_subresource_range,
PipelineStageAccess::Blit_TransferWrite,
dst_image_layout,
);
}
self.resources.push(Box::new(src_image));
self.resources.push(Box::new(dst_image));
// TODO: sync state update
self.next_command_index += 1;
self
}
@ -3020,7 +3270,7 @@ where
let device = self.device();
// VUID-vkCmdResolveImage2-renderpass
if self.current_state.render_pass.is_some() {
if self.builder_state.render_pass.is_some() {
return Err(CopyError::ForbiddenInsideRenderPass);
}
@ -3412,11 +3662,62 @@ where
);
}
let command_index = self.next_command_index;
let command_name = "resolve_image";
let src_use_ref = ResourceUseRef {
command_index,
command_name,
resource_in_command: ResourceInCommand::Source,
secondary_use_ref: None,
};
let dst_use_ref = ResourceUseRef {
command_index,
command_name,
resource_in_command: ResourceInCommand::Destination,
secondary_use_ref: None,
};
for region in regions {
let ImageResolve {
src_subresource,
src_offset: _,
dst_subresource,
dst_offset: _,
extent: _,
_ne: _,
} = region;
let mut src_subresource_range = ImageSubresourceRange::from(src_subresource);
src_subresource_range.array_layers.start += src_image_inner.first_layer;
src_subresource_range.array_layers.end += src_image_inner.first_layer;
src_subresource_range.mip_levels.start += src_image_inner.first_mipmap_level;
src_subresource_range.mip_levels.end += src_image_inner.first_mipmap_level;
self.resources_usage_state.record_image_access(
&src_use_ref,
src_image_inner.image,
src_subresource_range,
PipelineStageAccess::Resolve_TransferRead,
src_image_layout,
);
let mut dst_subresource_range = ImageSubresourceRange::from(dst_subresource);
dst_subresource_range.array_layers.start += dst_image_inner.first_layer;
dst_subresource_range.array_layers.end += dst_image_inner.first_layer;
dst_subresource_range.mip_levels.start += dst_image_inner.first_mipmap_level;
dst_subresource_range.mip_levels.end += dst_image_inner.first_mipmap_level;
self.resources_usage_state.record_image_access(
&dst_use_ref,
dst_image_inner.image,
dst_subresource_range,
PipelineStageAccess::Resolve_TransferWrite,
dst_image_layout,
);
}
self.resources.push(Box::new(src_image));
self.resources.push(Box::new(dst_image));
// TODO: sync state update
self.next_command_index += 1;
self
}
}

View File

@ -84,6 +84,7 @@ where
let fns = self.device().instance().fns();
(fns.ext_debug_utils.cmd_begin_debug_utils_label_ext)(self.handle(), &label_info);
self.next_command_index += 1;
self
}
@ -141,6 +142,7 @@ where
let fns = self.device().instance().fns();
(fns.ext_debug_utils.cmd_end_debug_utils_label_ext)(self.handle());
self.next_command_index += 1;
self
}
@ -208,6 +210,7 @@ where
let fns = self.device().instance().fns();
(fns.ext_debug_utils.cmd_insert_debug_utils_label_ext)(self.handle(), &label_info);
self.next_command_index += 1;
self
}
}

View File

@ -40,7 +40,7 @@ where
) -> Result<(), SetDynamicStateError> {
// VUID-vkCmdDispatch-None-02859
if self
.current_state
.builder_state
.pipeline_graphics
.as_ref()
.map_or(false, |pipeline| {
@ -90,8 +90,9 @@ where
let fns = self.device().fns();
(fns.v1_0.cmd_set_blend_constants)(self.handle(), &constants);
self.current_state.blend_constants = Some(constants);
self.builder_state.blend_constants = Some(constants);
self.next_command_index += 1;
self
}
@ -148,7 +149,7 @@ where
}
if let Some(color_blend_state) = self
.current_state
.builder_state
.pipeline_graphics
.as_ref()
.and_then(|pipeline| pipeline.color_blend_state())
@ -193,8 +194,9 @@ where
enables_vk.as_ptr(),
);
self.current_state.color_write_enable = Some(enables);
self.builder_state.color_write_enable = Some(enables);
self.next_command_index += 1;
self
}
@ -263,8 +265,9 @@ where
(fns.ext_extended_dynamic_state.cmd_set_cull_mode_ext)(self.handle(), cull_mode.into());
}
self.current_state.cull_mode = Some(cull_mode);
self.builder_state.cull_mode = Some(cull_mode);
self.next_command_index += 1;
self
}
@ -333,12 +336,13 @@ where
let fns = self.device().fns();
(fns.v1_0.cmd_set_depth_bias)(self.handle(), constant_factor, clamp, slope_factor);
self.current_state.depth_bias = Some(DepthBias {
self.builder_state.depth_bias = Some(DepthBias {
constant_factor,
clamp,
slope_factor,
});
self.next_command_index += 1;
self
}
@ -405,8 +409,9 @@ where
.cmd_set_depth_bias_enable_ext)(self.handle(), enable.into());
}
self.current_state.depth_bias_enable = Some(enable);
self.builder_state.depth_bias_enable = Some(enable);
self.next_command_index += 1;
self
}
@ -467,8 +472,9 @@ where
let fns = self.device().fns();
(fns.v1_0.cmd_set_depth_bounds)(self.handle(), *bounds.start(), *bounds.end());
self.current_state.depth_bounds = Some(bounds);
self.builder_state.depth_bounds = Some(bounds);
self.next_command_index += 1;
self
}
@ -538,8 +544,9 @@ where
.cmd_set_depth_bounds_test_enable_ext)(self.handle(), enable.into());
}
self.current_state.depth_bounds_test_enable = Some(enable);
self.builder_state.depth_bounds_test_enable = Some(enable);
self.next_command_index += 1;
self
}
@ -613,8 +620,9 @@ where
);
}
self.current_state.depth_compare_op = Some(compare_op);
self.builder_state.depth_compare_op = Some(compare_op);
self.next_command_index += 1;
self
}
@ -682,8 +690,9 @@ where
);
}
self.current_state.depth_test_enable = Some(enable);
self.builder_state.depth_test_enable = Some(enable);
self.next_command_index += 1;
self
}
@ -749,8 +758,9 @@ where
.cmd_set_depth_write_enable_ext)(self.handle(), enable.into());
}
self.current_state.depth_write_enable = Some(enable);
self.builder_state.depth_write_enable = Some(enable);
self.next_command_index += 1;
self
}
@ -775,11 +785,7 @@ where
self.validate_set_discard_rectangle(first_rectangle, &rectangles)
.unwrap();
unsafe {
self.set_discard_rectangle_unchecked(first_rectangle, rectangles);
}
self
unsafe { self.set_discard_rectangle_unchecked(first_rectangle, rectangles) }
}
fn validate_set_discard_rectangle(
@ -872,9 +878,10 @@ where
for (num, rectangle) in rectangles.iter().enumerate() {
let num = num as u32 + first_rectangle;
self.current_state.discard_rectangle.insert(num, *rectangle);
self.builder_state.discard_rectangle.insert(num, *rectangle);
}
self.next_command_index += 1;
self
}
@ -942,8 +949,9 @@ where
(fns.ext_extended_dynamic_state.cmd_set_front_face_ext)(self.handle(), face.into());
}
self.current_state.front_face = Some(face);
self.builder_state.front_face = Some(face);
self.next_command_index += 1;
self
}
@ -1004,8 +1012,9 @@ where
let fns = self.device().fns();
(fns.ext_line_rasterization.cmd_set_line_stipple_ext)(self.handle(), factor, pattern);
self.current_state.line_stipple = Some(LineStipple { factor, pattern });
self.builder_state.line_stipple = Some(LineStipple { factor, pattern });
self.next_command_index += 1;
self
}
@ -1055,8 +1064,9 @@ where
let fns = self.device().fns();
(fns.v1_0.cmd_set_line_width)(self.handle(), line_width);
self.current_state.line_width = Some(line_width);
self.builder_state.line_width = Some(line_width);
self.next_command_index += 1;
self
}
@ -1125,8 +1135,9 @@ where
let fns = self.device().fns();
(fns.ext_extended_dynamic_state2.cmd_set_logic_op_ext)(self.handle(), logic_op.into());
self.current_state.logic_op = Some(logic_op);
self.builder_state.logic_op = Some(logic_op);
self.next_command_index += 1;
self
}
@ -1213,8 +1224,9 @@ where
(fns.ext_extended_dynamic_state2
.cmd_set_patch_control_points_ext)(self.handle(), num);
self.current_state.patch_control_points = Some(num);
self.builder_state.patch_control_points = Some(num);
self.next_command_index += 1;
self
}
@ -1283,8 +1295,9 @@ where
.cmd_set_primitive_restart_enable_ext)(self.handle(), enable.into());
}
self.current_state.primitive_restart_enable = Some(enable);
self.builder_state.primitive_restart_enable = Some(enable);
self.next_command_index += 1;
self
}
@ -1409,8 +1422,9 @@ where
.cmd_set_primitive_topology_ext)(self.handle(), topology.into());
}
self.current_state.primitive_topology = Some(topology);
self.builder_state.primitive_topology = Some(topology);
self.next_command_index += 1;
self
}
@ -1479,8 +1493,9 @@ where
.cmd_set_rasterizer_discard_enable_ext)(self.handle(), enable.into());
}
self.current_state.rasterizer_discard_enable = Some(enable);
self.builder_state.rasterizer_discard_enable = Some(enable);
self.next_command_index += 1;
self
}
@ -1588,9 +1603,10 @@ where
for (num, scissor) in scissors.iter().enumerate() {
let num = num as u32 + first_scissor;
self.current_state.scissor.insert(num, *scissor);
self.builder_state.scissor.insert(num, *scissor);
}
self.next_command_index += 1;
self
}
@ -1709,8 +1725,9 @@ where
);
}
self.current_state.scissor_with_count = Some(scissors);
self.builder_state.scissor_with_count = Some(scissors);
self.next_command_index += 1;
self
}
@ -1766,13 +1783,14 @@ where
let faces = ash::vk::StencilFaceFlags::from(faces);
if faces.intersects(ash::vk::StencilFaceFlags::FRONT) {
self.current_state.stencil_compare_mask.front = Some(compare_mask);
self.builder_state.stencil_compare_mask.front = Some(compare_mask);
}
if faces.intersects(ash::vk::StencilFaceFlags::BACK) {
self.current_state.stencil_compare_mask.back = Some(compare_mask);
self.builder_state.stencil_compare_mask.back = Some(compare_mask);
}
self.next_command_index += 1;
self
}
@ -1891,7 +1909,7 @@ where
let faces = ash::vk::StencilFaceFlags::from(faces);
if faces.intersects(ash::vk::StencilFaceFlags::FRONT) {
self.current_state.stencil_op.front = Some(StencilOps {
self.builder_state.stencil_op.front = Some(StencilOps {
fail_op,
pass_op,
depth_fail_op,
@ -1900,7 +1918,7 @@ where
}
if faces.intersects(ash::vk::StencilFaceFlags::BACK) {
self.current_state.stencil_op.back = Some(StencilOps {
self.builder_state.stencil_op.back = Some(StencilOps {
fail_op,
pass_op,
depth_fail_op,
@ -1908,6 +1926,7 @@ where
});
}
self.next_command_index += 1;
self
}
@ -1959,13 +1978,14 @@ where
let faces = ash::vk::StencilFaceFlags::from(faces);
if faces.intersects(ash::vk::StencilFaceFlags::FRONT) {
self.current_state.stencil_reference.front = Some(reference);
self.builder_state.stencil_reference.front = Some(reference);
}
if faces.intersects(ash::vk::StencilFaceFlags::BACK) {
self.current_state.stencil_reference.back = Some(reference);
self.builder_state.stencil_reference.back = Some(reference);
}
self.next_command_index += 1;
self
}
@ -2031,8 +2051,9 @@ where
.cmd_set_stencil_test_enable_ext)(self.handle(), enable.into());
}
self.current_state.stencil_test_enable = Some(enable);
self.builder_state.stencil_test_enable = Some(enable);
self.next_command_index += 1;
self
}
@ -2084,13 +2105,14 @@ where
let faces = ash::vk::StencilFaceFlags::from(faces);
if faces.intersects(ash::vk::StencilFaceFlags::FRONT) {
self.current_state.stencil_write_mask.front = Some(write_mask);
self.builder_state.stencil_write_mask.front = Some(write_mask);
}
if faces.intersects(ash::vk::StencilFaceFlags::BACK) {
self.current_state.stencil_write_mask.back = Some(write_mask);
self.builder_state.stencil_write_mask.back = Some(write_mask);
}
self.next_command_index += 1;
self
}
@ -2198,9 +2220,10 @@ where
for (num, viewport) in viewports.iter().enumerate() {
let num = num as u32 + first_viewport;
self.current_state.viewport.insert(num, viewport.clone());
self.builder_state.viewport.insert(num, viewport.clone());
}
self.next_command_index += 1;
self
}
@ -2319,8 +2342,9 @@ where
);
}
self.current_state.viewport_with_count = Some(viewports);
self.builder_state.viewport_with_count = Some(viewports);
self.next_command_index += 1;
self
}
}

View File

@ -21,7 +21,7 @@ pub use crate::command_buffer::{
RenderingAttachmentInfo, RenderingAttachmentResolveInfo, RenderingInfo, ResolveImageInfo,
};
use crate::{
buffer::BufferAccess,
buffer::{sys::Buffer, BufferAccess},
command_buffer::{
allocator::{
CommandBufferAllocator, CommandBufferBuilderAlloc, StandardCommandBufferAllocator,
@ -30,12 +30,12 @@ use crate::{
BuildError, CommandBufferBeginError, CommandBufferInheritanceInfo,
CommandBufferInheritanceRenderPassInfo, CommandBufferInheritanceRenderPassType,
CommandBufferInheritanceRenderingInfo, CommandBufferLevel, CommandBufferUsage,
SubpassContents,
ResourceInCommand, ResourceUseRef, SubpassContents,
},
descriptor_set::{DescriptorSetResources, DescriptorSetWithOffsets},
device::{Device, DeviceOwned, QueueFamilyProperties},
device::{Device, DeviceOwned, QueueFamilyProperties, QueueFlags},
format::{Format, FormatFeatures},
image::ImageAspects,
image::{sys::Image, ImageAspects, ImageLayout, ImageSubresourceRange},
pipeline::{
graphics::{
color_blend::LogicOp,
@ -47,17 +47,23 @@ use crate::{
ComputePipeline, DynamicState, GraphicsPipeline, PipelineBindPoint, PipelineLayout,
},
query::{QueryControlFlags, QueryType},
range_map::RangeMap,
range_set::RangeSet,
render_pass::{Framebuffer, Subpass},
OomError, RequiresOneOf, VulkanError, VulkanObject,
sync::{
BufferMemoryBarrier, DependencyInfo, ImageMemoryBarrier, PipelineStage,
PipelineStageAccess, PipelineStageAccessSet, PipelineStages,
},
DeviceSize, OomError, RequiresOneOf, VulkanError, VulkanObject,
};
use ahash::HashMap;
use parking_lot::Mutex;
use smallvec::SmallVec;
use std::{
any::Any,
collections::{hash_map::Entry, HashMap},
collections::hash_map::Entry,
marker::PhantomData,
ops::RangeInclusive,
ops::{Range, RangeInclusive},
ptr,
sync::{atomic::AtomicBool, Arc},
};
@ -83,8 +89,10 @@ where
queue_family_index: u32,
usage: CommandBufferUsage,
next_command_index: usize,
resources: Vec<Box<dyn Any + Send + Sync>>,
current_state: CurrentState,
builder_state: CommandBufferBuilderState,
resources_usage_state: ResourcesState,
_data: PhantomData<L>,
}
@ -542,7 +550,7 @@ where
.map_err(VulkanError::from)?;
}
let mut current_state: CurrentState = Default::default();
let mut builder_state: CommandBufferBuilderState = Default::default();
if let Some(inheritance_info) = &inheritance_info {
let &CommandBufferInheritanceInfo {
@ -553,7 +561,7 @@ where
} = inheritance_info;
if let Some(render_pass) = render_pass {
current_state.render_pass = Some(RenderPassState::from_inheritance(render_pass));
builder_state.render_pass = Some(RenderPassState::from_inheritance(render_pass));
}
}
@ -563,8 +571,10 @@ where
queue_family_index,
usage,
next_command_index: 0,
resources: Vec::new(),
current_state,
builder_state,
resources_usage_state: Default::default(),
_data: PhantomData,
})
@ -581,11 +591,11 @@ where
{
/// Builds the command buffer.
pub fn build(self) -> Result<PrimaryCommandBuffer<A::Alloc>, BuildError> {
if self.current_state.render_pass.is_some() {
if self.builder_state.render_pass.is_some() {
return Err(BuildError::RenderPassActive);
}
if !self.current_state.queries.is_empty() {
if !self.builder_state.queries.is_empty() {
return Err(BuildError::QueryActive);
}
@ -615,7 +625,7 @@ where
{
/// Builds the command buffer.
pub fn build(self) -> Result<SecondaryCommandBuffer<A::Alloc>, BuildError> {
if !self.current_state.queries.is_empty() {
if !self.builder_state.queries.is_empty() {
return Err(BuildError::QueryActive);
}
@ -653,7 +663,7 @@ where
/// Holds the current binding and setting state.
#[derive(Default)]
struct CurrentState {
struct CommandBufferBuilderState {
// Render pass
render_pass: Option<RenderPassState>,
@ -700,7 +710,7 @@ struct CurrentState {
queries: HashMap<ash::vk::QueryType, QueryState>,
}
impl CurrentState {
impl CommandBufferBuilderState {
fn reset_dynamic_states(&mut self, states: impl IntoIterator<Item = DynamicState>) {
for state in states {
match state {
@ -962,3 +972,433 @@ struct QueryState {
flags: QueryControlFlags,
in_subpass: bool,
}
/// Tracks how the resources recorded into a command buffer have been used so far,
/// at sub-resource granularity: each buffer maps byte ranges, and each image maps
/// flattened subresource ranges, to their accumulated usage/synchronization state.
#[derive(Debug, Default)]
struct ResourcesState {
    // Per-buffer state, keyed by byte range within the buffer.
    buffers: HashMap<Arc<Buffer>, RangeMap<DeviceSize, BufferRangeState>>,
    // Per-image state, keyed by the image's flattened subresource index range.
    images: HashMap<Arc<Image>, RangeMap<DeviceSize, ImageRangeState>>,
}
/// Tracking state for one byte range of a buffer, as stored in `ResourcesState::buffers`.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
struct BufferRangeState {
    // Every recorded command use that touched this range, in recording order.
    resource_uses: Vec<ResourceUseRef>,
    // Read/write/barrier bookkeeping for this range.
    memory_access: MemoryAccessState,
}
/// Tracking state for one subresource range of an image, as stored in `ResourcesState::images`.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
struct ImageRangeState {
    // Every recorded command use that touched this range, in recording order.
    resource_uses: Vec<ResourceUseRef>,
    // Read/write/barrier bookkeeping for this range.
    memory_access: MemoryAccessState,
    // The layout supplied with the first recorded use of this range; presumably the
    // layout the image must already be in when the command buffer starts executing
    // -- TODO confirm against submission-time validation.
    expected_layout: ImageLayout,
    // The layout after the most recently recorded layout-transition barrier.
    current_layout: ImageLayout,
}
impl ResourcesState {
    /// Records that the command identified by `use_ref` accesses `range` of `buffer`
    /// with the given pipeline stage + access type.
    fn record_buffer_access(
        &mut self,
        use_ref: &ResourceUseRef,
        buffer: &Arc<Buffer>,
        range: Range<DeviceSize>,
        stage_access: PipelineStageAccess,
    ) {
        // Lazily initialize the buffer's range map with one default-state entry
        // covering the whole buffer.
        let range_map = self.buffers.entry(buffer.clone()).or_insert_with(|| {
            [(0..buffer.size(), Default::default())]
                .into_iter()
                .collect()
        });
        // Split so the accessed range is covered by whole entries, then update each.
        range_map.split_at(&range.start);
        range_map.split_at(&range.end);

        for (_range, state) in range_map.range_mut(&range) {
            state.resource_uses.push(*use_ref);
            state.memory_access.record_access(use_ref, stage_access);
        }
    }

    /// Records that the command identified by `use_ref` accesses `subresource_range`
    /// of `image` with the given pipeline stage + access type, in `image_layout`.
    fn record_image_access(
        &mut self,
        use_ref: &ResourceUseRef,
        image: &Arc<Image>,
        subresource_range: ImageSubresourceRange,
        stage_access: PipelineStageAccess,
        image_layout: ImageLayout,
    ) {
        // Lazily initialize the image's range map with one default-state entry
        // covering all of the image's flattened subresource indices.
        let range_map = self.images.entry(image.clone()).or_insert_with(|| {
            [(0..image.range_size(), Default::default())]
                .into_iter()
                .collect()
        });

        for range in image.iter_ranges(subresource_range) {
            range_map.split_at(&range.start);
            range_map.split_at(&range.end);

            for (_range, state) in range_map.range_mut(&range) {
                // The layout of the first recorded use becomes the layout that the
                // image is expected to be in at the start of execution.
                if state.resource_uses.is_empty() {
                    state.expected_layout = image_layout;
                }

                state.resource_uses.push(*use_ref);
                state.memory_access.record_access(use_ref, stage_access);
            }
        }
    }

    /// Records the effect of a pipeline barrier (`dependency_info`) on all tracked
    /// resources. The update happens in two phases: each barrier first stores
    /// *pending* changes on the affected ranges, and only afterwards are all pending
    /// changes applied. This way, every barrier within the same `DependencyInfo` is
    /// matched against the state as it was before the whole dependency.
    fn record_pipeline_barrier(
        &mut self,
        command_index: usize,
        command_name: &'static str,
        dependency_info: &DependencyInfo,
        queue_flags: QueueFlags,
    ) {
        // Phase 1a: record pending changes for each buffer memory barrier.
        for barrier in &dependency_info.buffer_memory_barriers {
            let barrier_scopes = BarrierScopes::from_buffer_memory_barrier(barrier, queue_flags);
            let &BufferMemoryBarrier {
                src_stages: _,
                src_access: _,
                dst_stages: _,
                dst_access: _,
                queue_family_ownership_transfer: _,
                ref buffer,
                ref range,
                _ne: _,
            } = barrier;

            // A barrier may reference a buffer that has no recorded uses yet.
            let range_map = self.buffers.entry(buffer.clone()).or_insert_with(|| {
                [(0..buffer.size(), Default::default())]
                    .into_iter()
                    .collect()
            });
            range_map.split_at(&range.start);
            range_map.split_at(&range.end);

            for (_range, state) in range_map.range_mut(range) {
                // Buffers have no layouts, so there is never a layout transition.
                state.memory_access.record_barrier(&barrier_scopes, None);
            }
        }

        // Phase 1b: record pending changes for each image memory barrier,
        // including any layout transition it performs.
        for (index, barrier) in dependency_info.image_memory_barriers.iter().enumerate() {
            let index = index as u32;
            let barrier_scopes = BarrierScopes::from_image_memory_barrier(barrier, queue_flags);
            let &ImageMemoryBarrier {
                src_stages: _,
                src_access: _,
                dst_stages: _,
                dst_access: _,
                old_layout,
                new_layout,
                queue_family_ownership_transfer: _,
                ref image,
                ref subresource_range,
                _ne,
            } = barrier;

            // This is only used if there is a layout transition.
            let use_ref = ResourceUseRef {
                command_index,
                command_name,
                resource_in_command: ResourceInCommand::ImageMemoryBarrier { index },
                secondary_use_ref: None,
            };
            // Equal old/new layouts mean no transition takes place.
            let layout_transition = (old_layout != new_layout).then_some(&use_ref);

            let range_map = self.images.entry(image.clone()).or_insert_with(|| {
                [(0..image.range_size(), Default::default())]
                    .into_iter()
                    .collect()
            });

            for range in image.iter_ranges(subresource_range.clone()) {
                range_map.split_at(&range.start);
                range_map.split_at(&range.end);

                for (_range, state) in range_map.range_mut(&range) {
                    if old_layout != new_layout {
                        // A layout transition on a previously untouched range fixes
                        // the layout the image is expected to start execution in.
                        if state.resource_uses.is_empty() {
                            state.expected_layout = old_layout;
                        }

                        // NOTE(review): this rebuilds the same value as `use_ref`
                        // above; `use_ref` is `Copy` via `*use_ref` elsewhere, so it
                        // could likely be pushed directly -- behavior is identical.
                        state.resource_uses.push(ResourceUseRef {
                            command_index,
                            command_name,
                            resource_in_command: ResourceInCommand::ImageMemoryBarrier { index },
                            secondary_use_ref: None,
                        });
                        state.current_layout = new_layout;
                    }

                    state
                        .memory_access
                        .record_barrier(&barrier_scopes, layout_transition);
                }
            }
        }

        // Phase 2a: commit the pending changes on every buffer range touched above.
        // The entries are guaranteed to exist since phase 1 inserted them.
        for barrier in &dependency_info.buffer_memory_barriers {
            let &BufferMemoryBarrier {
                ref buffer,
                ref range,
                ..
            } = barrier;

            let range_map = self.buffers.get_mut(buffer).unwrap();
            for (_range, state) in range_map.range_mut(range) {
                state.memory_access.apply_pending();
            }
        }

        // Phase 2b: commit the pending changes on every image range touched above.
        for barrier in &dependency_info.image_memory_barriers {
            let &ImageMemoryBarrier {
                ref image,
                ref subresource_range,
                ..
            } = barrier;

            let range_map = self.images.get_mut(image).unwrap();
            for range in image.iter_ranges(subresource_range.clone()) {
                for (_range, state) in range_map.range_mut(&range) {
                    state.memory_access.apply_pending();
                }
            }
        }
    }
}
/// Read/write/barrier bookkeeping for a single resource range: the most recent write,
/// all reads recorded since that write, and which barriers protect them.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
struct MemoryAccessState {
    // Set to `true` once any write (including a layout transition) is recorded.
    mutable: bool,
    // The most recent write access, if any.
    last_write: Option<WriteState>,
    // Reads recorded after `last_write`, at most one per pipeline stage.
    reads_since_last_write: HashMap<PipelineStage, ReadState>,

    /// Pending changes that have not yet been applied. This is used during barrier recording.
    pending: Option<PendingWriteState>,
}
/// State of the most recent write access to a resource range.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct WriteState {
    // The command use that performed the write.
    use_ref: ResourceUseRef,
    // The stage + access type of the write.
    access: PipelineStageAccess,

    /// The `dst_stages` and `dst_access` of all barriers that protect against this write.
    barriers_since: PipelineStageAccessSet,

    /// The `dst_stages` of all barriers that form a dependency chain with this write.
    dependency_chain: PipelineStages,

    /// The union of all `barriers_since` of all `reads_since_last_write`.
    read_barriers_since: PipelineStages,
}
/// Changes to a `WriteState` that a barrier currently being recorded will make,
/// held back until all barriers in the same dependency have been recorded.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
struct PendingWriteState {
    /// If this is `Some`, then the barrier is treated as a new write,
    /// and the previous `last_write` is discarded.
    /// Otherwise, the values below are added to the existing `last_write`.
    layout_transition: Option<ResourceUseRef>,

    // Additional barrier scopes to merge into `WriteState::barriers_since`
    // and `WriteState::dependency_chain` respectively.
    barriers_since: PipelineStageAccessSet,
    dependency_chain: PipelineStages,
}
/// State of one read access (per pipeline stage) recorded since the last write.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct ReadState {
    // The command use that performed the read.
    use_ref: ResourceUseRef,
    // The stage + access type of the read.
    access: PipelineStageAccess,

    /// The `dst_stages` of all barriers that protect against this read.
    /// This always includes the stage of `self`.
    barriers_since: PipelineStages,

    /// Stages of reads recorded after this read,
    /// that were in scope of `barriers_since` at the time of recording.
    /// This always includes the stage of `self`.
    barriered_reads_since: PipelineStages,

    /// Pending changes that have not yet been applied. This is used during barrier recording.
    pending: Option<PendingReadState>,
}
/// Changes to a `ReadState` that a barrier currently being recorded will make,
/// held back until all barriers in the same dependency have been recorded.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
struct PendingReadState {
    // Additional barrier stages to merge into `ReadState::barriers_since`.
    barriers_since: PipelineStages,
}
impl MemoryAccessState {
    /// Records a read or write access by `use_ref`.
    /// A write becomes the new `last_write` and clears all recorded reads;
    /// a read is inserted (or replaced) under its pipeline stage.
    fn record_access(&mut self, use_ref: &ResourceUseRef, access: PipelineStageAccess) {
        if access.is_write() {
            self.mutable = true;
            // A new write supersedes the previous one and starts with no barriers
            // protecting it.
            self.last_write = Some(WriteState {
                use_ref: *use_ref,
                access,
                barriers_since: Default::default(),
                dependency_chain: Default::default(),
                read_barriers_since: Default::default(),
            });
            self.reads_since_last_write.clear();
        } else {
            // Reads are keyed by their single pipeline stage; a non-write
            // `PipelineStageAccess` is expected to map to exactly one stage.
            let pipeline_stage = PipelineStage::try_from(access).unwrap();
            let pipeline_stages = PipelineStages::from(pipeline_stage);

            // For every earlier read: if the new read's stage is already covered by
            // a barrier after that read, mark it as a barriered successor read;
            // otherwise the new read supersedes any previous such marking.
            for read_state in self.reads_since_last_write.values_mut() {
                if read_state.barriers_since.intersects(pipeline_stages) {
                    read_state.barriered_reads_since |= pipeline_stages;
                } else {
                    read_state.barriered_reads_since -= pipeline_stages;
                }
            }

            self.reads_since_last_write.insert(
                pipeline_stage,
                ReadState {
                    use_ref: *use_ref,
                    access,
                    // A read is always "barriered" with respect to its own stage.
                    barriers_since: pipeline_stages,
                    barriered_reads_since: pipeline_stages,
                    pending: None,
                },
            );
        }
    }

    /// Records the effect of one barrier on this state as *pending* changes.
    /// `layout_transition` is `Some` if the barrier performs an image layout
    /// transition, which counts as a write. Call `apply_pending` afterwards to
    /// commit the changes.
    fn record_barrier(
        &mut self,
        barrier_scopes: &BarrierScopes,
        layout_transition: Option<&ResourceUseRef>,
    ) {
        // If this barrier (or an earlier pending one) performs a layout transition,
        // the reads don't need updating: the transition counts as a write, which
        // will clear `reads_since_last_write` when the pending changes are applied.
        let skip_reads = if let Some(use_ref) = layout_transition {
            let pending = self.pending.get_or_insert_with(Default::default);
            pending.layout_transition = Some(*use_ref);
            true
        } else {
            self.pending
                .map_or(false, |pending| pending.layout_transition.is_some())
        };

        // If the last write is in the src scope of the barrier, then add the dst scopes.
        // If the barrier includes a layout transition, then that layout transition is
        // considered the last write, and it is always in the src scope of the barrier.
        if layout_transition.is_some()
            || self.last_write.as_ref().map_or(false, |write_state| {
                barrier_scopes
                    .src_access_scope
                    .contains_enum(write_state.access)
                    || barrier_scopes
                        .src_exec_scope
                        .intersects(write_state.dependency_chain)
            })
        {
            let pending = self.pending.get_or_insert_with(Default::default);
            pending.barriers_since |= barrier_scopes.dst_access_scope;
            pending.dependency_chain |= barrier_scopes.dst_exec_scope;
        }

        // A layout transition counts as a write, which means that `reads_since_last_write` will
        // be cleared when applying pending operations.
        // Therefore, there is no need to update the reads.
        if !skip_reads {
            // Gather all reads for which `barriers_since` is in the barrier's `src_exec_scope`.
            let reads_in_src_exec_scope = self.reads_since_last_write.iter().fold(
                PipelineStages::empty(),
                |total, (&stage, read_state)| {
                    if barrier_scopes
                        .src_exec_scope
                        .intersects(read_state.barriers_since)
                    {
                        total.union(stage.into())
                    } else {
                        total
                    }
                },
            );

            // Any read that has a barriered successor read in that set is itself
            // (transitively) protected by this barrier's dst scope.
            for read_state in self.reads_since_last_write.values_mut() {
                if reads_in_src_exec_scope.intersects(read_state.barriered_reads_since) {
                    let pending = read_state.pending.get_or_insert_with(Default::default);
                    pending.barriers_since |= barrier_scopes.dst_exec_scope;
                }
            }
        }
    }

    /// Commits all pending changes recorded by `record_barrier`.
    fn apply_pending(&mut self) {
        if let Some(PendingWriteState {
            layout_transition,
            barriers_since,
            dependency_chain,
        }) = self.pending.take()
        {
            // If there is a pending layout transition, it is treated as the new `last_write`.
            if let Some(use_ref) = layout_transition {
                self.mutable = true;
                self.last_write = Some(WriteState {
                    use_ref,
                    access: PipelineStageAccess::ImageLayoutTransition,
                    barriers_since,
                    dependency_chain,
                    read_barriers_since: Default::default(),
                });
                self.reads_since_last_write.clear();
            } else if let Some(write_state) = &mut self.last_write {
                // Otherwise the barrier scopes extend the protection of the
                // existing last write.
                write_state.barriers_since |= barriers_since;
                write_state.dependency_chain |= dependency_chain;
            }
        }

        // Commit pending read barriers, and mirror each read's protection onto the
        // last write's `read_barriers_since`.
        for read_state in self.reads_since_last_write.values_mut() {
            if let Some(PendingReadState { barriers_since }) = read_state.pending.take() {
                read_state.barriers_since |= barriers_since;

                if let Some(write_state) = &mut self.last_write {
                    write_state.read_barriers_since |= read_state.barriers_since;
                }
            }
        }
    }
}
/// The execution and access scopes of one memory barrier, precomputed from its
/// stage and access masks.
struct BarrierScopes {
    // Source execution scope: the barrier's src stages plus logically earlier stages.
    src_exec_scope: PipelineStages,
    // Source access scope, restricted to accesses possible in the src stages.
    src_access_scope: PipelineStageAccessSet,
    // Destination execution scope: the barrier's dst stages plus logically later stages.
    dst_exec_scope: PipelineStages,
    // Destination access scope, restricted to accesses possible in the dst stages.
    dst_access_scope: PipelineStageAccessSet,
}
impl BarrierScopes {
fn from_buffer_memory_barrier(barrier: &BufferMemoryBarrier, queue_flags: QueueFlags) -> Self {
let src_stages_expanded = barrier.src_stages.expand(queue_flags);
let src_exec_scope = src_stages_expanded.with_earlier();
let src_access_scope = PipelineStageAccessSet::from(barrier.src_access)
& PipelineStageAccessSet::from(src_stages_expanded);
let dst_stages_expanded = barrier.dst_stages.expand(queue_flags);
let dst_exec_scope = dst_stages_expanded.with_later();
let dst_access_scope = PipelineStageAccessSet::from(barrier.dst_access)
& PipelineStageAccessSet::from(dst_stages_expanded);
Self {
src_exec_scope,
src_access_scope,
dst_exec_scope,
dst_access_scope,
}
}
fn from_image_memory_barrier(barrier: &ImageMemoryBarrier, queue_flags: QueueFlags) -> Self {
let src_stages_expanded = barrier.src_stages.expand(queue_flags);
let src_exec_scope = src_stages_expanded.with_earlier();
let src_access_scope = PipelineStageAccessSet::from(barrier.src_access)
& PipelineStageAccessSet::from(src_stages_expanded);
let dst_stages_expanded = barrier.dst_stages.expand(queue_flags);
let dst_exec_scope = dst_stages_expanded.with_later();
let dst_access_scope = PipelineStageAccessSet::from(barrier.dst_access)
& PipelineStageAccessSet::from(dst_stages_expanded);
Self {
src_exec_scope,
src_access_scope,
dst_exec_scope,
dst_access_scope,
}
}
}

View File

@ -7,28 +7,36 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use super::{CommandBufferBuilder, PipelineExecutionError, RenderPassState, RenderPassStateType};
use super::{
CommandBufferBuilder, DescriptorSetState, PipelineExecutionError, RenderPassState,
RenderPassStateType, ResourcesState,
};
use crate::{
buffer::{view::BufferViewAbstract, BufferAccess, BufferUsage, TypedBufferAccess},
command_buffer::{
allocator::CommandBufferAllocator, commands::pipeline::DescriptorResourceInvalidError,
DispatchIndirectCommand, DrawIndexedIndirectCommand, DrawIndirectCommand, SubpassContents,
DispatchIndirectCommand, DrawIndexedIndirectCommand, DrawIndirectCommand,
ResourceInCommand, ResourceUseRef, SubpassContents,
},
descriptor_set::{layout::DescriptorType, DescriptorBindingResources},
device::{DeviceOwned, QueueFlags},
format::FormatFeatures,
image::{ImageAspects, ImageViewAbstract, SampleCount},
image::{ImageAccess, ImageAspects, ImageViewAbstract, SampleCount},
pipeline::{
graphics::{
input_assembly::PrimitiveTopology, render_pass::PipelineRenderPassType,
input_assembly::{IndexType, PrimitiveTopology},
render_pass::PipelineRenderPassType,
vertex_input::VertexInputRate,
},
DynamicState, GraphicsPipeline, PartialStateMode, Pipeline, PipelineLayout,
DynamicState, GraphicsPipeline, PartialStateMode, Pipeline, PipelineBindPoint,
PipelineLayout,
},
sampler::Sampler,
shader::{DescriptorBindingRequirements, ShaderScalarType, ShaderStage},
shader::{DescriptorBindingRequirements, ShaderScalarType, ShaderStage, ShaderStages},
sync::PipelineStageAccess,
RequiresOneOf, VulkanObject,
};
use ahash::HashMap;
use std::{cmp::min, mem::size_of, sync::Arc};
impl<L, A> CommandBufferBuilder<L, A>
@ -69,13 +77,13 @@ where
}
// VUID-vkCmdDispatch-renderpass
if self.current_state.render_pass.is_some() {
if self.builder_state.render_pass.is_some() {
return Err(PipelineExecutionError::ForbiddenInsideRenderPass);
}
// VUID-vkCmdDispatch-None-02700
let pipeline = self
.current_state
.builder_state
.pipeline_compute
.as_ref()
.ok_or(PipelineExecutionError::PipelineNotBound)?
@ -115,8 +123,23 @@ where
group_counts[2],
);
// TODO: sync state update
let command_index = self.next_command_index;
let command_name = "dispatch";
let pipeline = self
.builder_state
.pipeline_compute
.as_ref()
.unwrap()
.as_ref();
record_descriptor_sets_access(
&mut self.resources_usage_state,
command_index,
command_name,
&self.builder_state.descriptor_sets,
pipeline,
);
self.next_command_index += 1;
self
}
@ -158,13 +181,13 @@ where
}
// VUID-vkCmdDispatchIndirect-renderpass
if self.current_state.render_pass.is_some() {
if self.builder_state.render_pass.is_some() {
return Err(PipelineExecutionError::ForbiddenInsideRenderPass);
}
// VUID-vkCmdDispatchIndirect-None-02700
let pipeline = self
.current_state
.builder_state
.pipeline_compute
.as_ref()
.ok_or(PipelineExecutionError::PipelineNotBound)?
@ -193,10 +216,31 @@ where
indirect_buffer_inner.offset,
);
let command_index = self.next_command_index;
let command_name = "dispatch_indirect";
let pipeline = self
.builder_state
.pipeline_compute
.as_ref()
.unwrap()
.as_ref();
record_descriptor_sets_access(
&mut self.resources_usage_state,
command_index,
command_name,
&self.builder_state.descriptor_sets,
pipeline,
);
record_indirect_buffer_access(
&mut self.resources_usage_state,
command_index,
command_name,
&indirect_buffer,
);
self.resources.push(Box::new(indirect_buffer));
// TODO: sync state update
self.next_command_index += 1;
self
}
@ -242,14 +286,14 @@ where
) -> Result<(), PipelineExecutionError> {
// VUID-vkCmdDraw-renderpass
let render_pass_state = self
.current_state
.builder_state
.render_pass
.as_ref()
.ok_or(PipelineExecutionError::ForbiddenOutsideRenderPass)?;
// VUID-vkCmdDraw-None-02700
let pipeline = self
.current_state
.builder_state
.pipeline_graphics
.as_ref()
.ok_or(PipelineExecutionError::PipelineNotBound)?
@ -287,14 +331,36 @@ where
first_instance,
);
let command_index = self.next_command_index;
let command_name = "draw";
let pipeline = self
.builder_state
.pipeline_graphics
.as_ref()
.unwrap()
.as_ref();
record_descriptor_sets_access(
&mut self.resources_usage_state,
command_index,
command_name,
&self.builder_state.descriptor_sets,
pipeline,
);
record_vertex_buffers_access(
&mut self.resources_usage_state,
command_index,
command_name,
&self.builder_state.vertex_buffers,
pipeline,
);
if let RenderPassStateType::BeginRendering(state) =
&mut self.current_state.render_pass.as_mut().unwrap().render_pass
&mut self.builder_state.render_pass.as_mut().unwrap().render_pass
{
state.pipeline_used = true;
}
// TODO: sync state update
self.next_command_index += 1;
self
}
@ -340,14 +406,14 @@ where
) -> Result<(), PipelineExecutionError> {
// VUID-vkCmdDrawIndirect-renderpass
let render_pass_state = self
.current_state
.builder_state
.render_pass
.as_ref()
.ok_or(PipelineExecutionError::ForbiddenOutsideRenderPass)?;
// VUID-vkCmdDrawIndirect-None-02700
let pipeline = self
.current_state
.builder_state
.pipeline_graphics
.as_ref()
.ok_or(PipelineExecutionError::PipelineNotBound)?
@ -409,16 +475,44 @@ where
stride,
);
let command_index = self.next_command_index;
let command_name = "draw_indirect";
let pipeline = self
.builder_state
.pipeline_graphics
.as_ref()
.unwrap()
.as_ref();
record_descriptor_sets_access(
&mut self.resources_usage_state,
command_index,
command_name,
&self.builder_state.descriptor_sets,
pipeline,
);
record_vertex_buffers_access(
&mut self.resources_usage_state,
command_index,
command_name,
&self.builder_state.vertex_buffers,
pipeline,
);
record_indirect_buffer_access(
&mut self.resources_usage_state,
command_index,
command_name,
&indirect_buffer,
);
if let RenderPassStateType::BeginRendering(state) =
&mut self.current_state.render_pass.as_mut().unwrap().render_pass
&mut self.builder_state.render_pass.as_mut().unwrap().render_pass
{
state.pipeline_used = true;
}
self.resources.push(Box::new(indirect_buffer));
// TODO: sync state update
self.next_command_index += 1;
self
}
@ -487,14 +581,14 @@ where
// VUID-vkCmdDrawIndexed-renderpass
let render_pass_state = self
.current_state
.builder_state
.render_pass
.as_ref()
.ok_or(PipelineExecutionError::ForbiddenOutsideRenderPass)?;
// VUID-vkCmdDrawIndexed-None-02700
let pipeline = self
.current_state
.builder_state
.pipeline_graphics
.as_ref()
.ok_or(PipelineExecutionError::PipelineNotBound)?
@ -536,14 +630,42 @@ where
first_instance,
);
let command_index = self.next_command_index;
let command_name = "draw_indexed";
let pipeline = self
.builder_state
.pipeline_graphics
.as_ref()
.unwrap()
.as_ref();
record_descriptor_sets_access(
&mut self.resources_usage_state,
command_index,
command_name,
&self.builder_state.descriptor_sets,
pipeline,
);
record_vertex_buffers_access(
&mut self.resources_usage_state,
command_index,
command_name,
&self.builder_state.vertex_buffers,
pipeline,
);
record_index_buffer_access(
&mut self.resources_usage_state,
command_index,
command_name,
&self.builder_state.index_buffer,
);
if let RenderPassStateType::BeginRendering(state) =
&mut self.current_state.render_pass.as_mut().unwrap().render_pass
&mut self.builder_state.render_pass.as_mut().unwrap().render_pass
{
state.pipeline_used = true;
}
// TODO: sync state update
self.next_command_index += 1;
self
}
@ -596,14 +718,14 @@ where
) -> Result<(), PipelineExecutionError> {
// VUID-vkCmdDrawIndexedIndirect-renderpass
let render_pass_state = self
.current_state
.builder_state
.render_pass
.as_ref()
.ok_or(PipelineExecutionError::ForbiddenOutsideRenderPass)?;
// VUID-vkCmdDrawIndexedIndirect-None-02700
let pipeline = self
.current_state
.builder_state
.pipeline_graphics
.as_ref()
.ok_or(PipelineExecutionError::PipelineNotBound)?
@ -666,16 +788,50 @@ where
stride,
);
let command_index = self.next_command_index;
let command_name = "draw_indexed_indirect";
let pipeline = self
.builder_state
.pipeline_graphics
.as_ref()
.unwrap()
.as_ref();
record_descriptor_sets_access(
&mut self.resources_usage_state,
command_index,
command_name,
&self.builder_state.descriptor_sets,
pipeline,
);
record_vertex_buffers_access(
&mut self.resources_usage_state,
command_index,
command_name,
&self.builder_state.vertex_buffers,
pipeline,
);
record_index_buffer_access(
&mut self.resources_usage_state,
command_index,
command_name,
&self.builder_state.index_buffer,
);
record_indirect_buffer_access(
&mut self.resources_usage_state,
command_index,
command_name,
&indirect_buffer,
);
if let RenderPassStateType::BeginRendering(state) =
&mut self.current_state.render_pass.as_mut().unwrap().render_pass
&mut self.builder_state.render_pass.as_mut().unwrap().render_pass
{
state.pipeline_used = true;
}
self.resources.push(Box::new(indirect_buffer));
// TODO: sync state update
self.next_command_index += 1;
self
}
@ -685,7 +841,7 @@ where
) -> Result<(), PipelineExecutionError> {
// VUID?
let (index_buffer, index_type) = self
.current_state
.builder_state
.index_buffer
.as_ref()
.ok_or(PipelineExecutionError::IndexBufferNotBound)?;
@ -787,7 +943,7 @@ where
// VUID-vkCmdDispatch-None-02697
let descriptor_set_state = self
.current_state
.builder_state
.descriptor_sets
.get(&pipeline.bind_point())
.ok_or(PipelineExecutionError::PipelineLayoutNotCompatible)?;
@ -1137,7 +1293,7 @@ where
// VUID-vkCmdDispatch-maintenance4-06425
let constants_pipeline_layout = self
.current_state
.builder_state
.push_constants_pipeline_layout
.as_ref()
.ok_or(PipelineExecutionError::PushConstantsMissing)?;
@ -1150,7 +1306,7 @@ where
return Err(PipelineExecutionError::PushConstantsNotCompatible);
}
let set_bytes = &self.current_state.push_constants;
let set_bytes = &self.builder_state.push_constants;
// VUID-vkCmdDispatch-maintenance4-06425
if !pipeline_layout
@ -1179,13 +1335,13 @@ where
match dynamic_state {
DynamicState::BlendConstants => {
// VUID?
if self.current_state.blend_constants.is_none() {
if self.builder_state.blend_constants.is_none() {
return Err(PipelineExecutionError::DynamicStateNotSet { dynamic_state });
}
}
DynamicState::ColorWriteEnable => {
// VUID-vkCmdDraw-attachmentCount-06667
let enables = self.current_state.color_write_enable.as_ref().ok_or(PipelineExecutionError::DynamicStateNotSet { dynamic_state })?;
let enables = self.builder_state.color_write_enable.as_ref().ok_or(PipelineExecutionError::DynamicStateNotSet { dynamic_state })?;
// VUID-vkCmdDraw-attachmentCount-06667
if enables.len() < pipeline.color_blend_state().unwrap().attachments.len() {
@ -1203,49 +1359,49 @@ where
}
DynamicState::CullMode => {
// VUID?
if self.current_state.cull_mode.is_none() {
if self.builder_state.cull_mode.is_none() {
return Err(PipelineExecutionError::DynamicStateNotSet { dynamic_state });
}
}
DynamicState::DepthBias => {
// VUID?
if self.current_state.depth_bias.is_none() {
if self.builder_state.depth_bias.is_none() {
return Err(PipelineExecutionError::DynamicStateNotSet { dynamic_state });
}
}
DynamicState::DepthBiasEnable => {
// VUID-vkCmdDraw-None-04877
if self.current_state.depth_bias_enable.is_none() {
if self.builder_state.depth_bias_enable.is_none() {
return Err(PipelineExecutionError::DynamicStateNotSet { dynamic_state });
}
}
DynamicState::DepthBounds => {
// VUID?
if self.current_state.depth_bounds.is_none() {
if self.builder_state.depth_bounds.is_none() {
return Err(PipelineExecutionError::DynamicStateNotSet { dynamic_state });
}
}
DynamicState::DepthBoundsTestEnable => {
// VUID?
if self.current_state.depth_bounds_test_enable.is_none() {
if self.builder_state.depth_bounds_test_enable.is_none() {
return Err(PipelineExecutionError::DynamicStateNotSet { dynamic_state });
}
}
DynamicState::DepthCompareOp => {
// VUID?
if self.current_state.depth_compare_op.is_none() {
if self.builder_state.depth_compare_op.is_none() {
return Err(PipelineExecutionError::DynamicStateNotSet { dynamic_state });
}
}
DynamicState::DepthTestEnable => {
// VUID?
if self.current_state.depth_test_enable.is_none() {
if self.builder_state.depth_test_enable.is_none() {
return Err(PipelineExecutionError::DynamicStateNotSet { dynamic_state });
}
}
DynamicState::DepthWriteEnable => {
// VUID?
if self.current_state.depth_write_enable.is_none() {
if self.builder_state.depth_write_enable.is_none() {
return Err(PipelineExecutionError::DynamicStateNotSet { dynamic_state });
}
@ -1260,7 +1416,7 @@ where
for num in 0..discard_rectangle_count {
// VUID?
if !self.current_state.discard_rectangle.contains_key(&num) {
if !self.builder_state.discard_rectangle.contains_key(&num) {
return Err(PipelineExecutionError::DynamicStateNotSet { dynamic_state });
}
}
@ -1269,38 +1425,38 @@ where
DynamicState::FragmentShadingRate => todo!(),
DynamicState::FrontFace => {
// VUID?
if self.current_state.front_face.is_none() {
if self.builder_state.front_face.is_none() {
return Err(PipelineExecutionError::DynamicStateNotSet { dynamic_state });
}
}
DynamicState::LineStipple => {
// VUID?
if self.current_state.line_stipple.is_none() {
if self.builder_state.line_stipple.is_none() {
return Err(PipelineExecutionError::DynamicStateNotSet { dynamic_state });
}
}
DynamicState::LineWidth => {
// VUID?
if self.current_state.line_width.is_none() {
if self.builder_state.line_width.is_none() {
return Err(PipelineExecutionError::DynamicStateNotSet { dynamic_state });
}
}
DynamicState::LogicOp => {
// VUID-vkCmdDraw-logicOp-04878
if self.current_state.logic_op.is_none() {
if self.builder_state.logic_op.is_none() {
return Err(PipelineExecutionError::DynamicStateNotSet { dynamic_state });
}
}
DynamicState::PatchControlPoints => {
// VUID-vkCmdDraw-None-04875
if self.current_state.patch_control_points.is_none() {
if self.builder_state.patch_control_points.is_none() {
return Err(PipelineExecutionError::DynamicStateNotSet { dynamic_state });
}
}
DynamicState::PrimitiveRestartEnable => {
// VUID-vkCmdDraw-None-04879
let primitive_restart_enable =
if let Some(enable) = self.current_state.primitive_restart_enable {
if let Some(enable) = self.builder_state.primitive_restart_enable {
enable
} else {
return Err(PipelineExecutionError::DynamicStateNotSet { dynamic_state });
@ -1310,7 +1466,7 @@ where
let topology = match pipeline.input_assembly_state().topology {
PartialStateMode::Fixed(topology) => topology,
PartialStateMode::Dynamic(_) => {
if let Some(topology) = self.current_state.primitive_topology {
if let Some(topology) = self.builder_state.primitive_topology {
topology
} else {
return Err(PipelineExecutionError::DynamicStateNotSet {
@ -1364,7 +1520,7 @@ where
}
DynamicState::PrimitiveTopology => {
// VUID-vkCmdDraw-primitiveTopology-03420
let topology = if let Some(topology) = self.current_state.primitive_topology {
let topology = if let Some(topology) = self.builder_state.primitive_topology {
topology
} else {
return Err(PipelineExecutionError::DynamicStateNotSet { dynamic_state });
@ -1405,7 +1561,7 @@ where
}
DynamicState::RasterizerDiscardEnable => {
// VUID-vkCmdDraw-None-04876
if self.current_state.rasterizer_discard_enable.is_none() {
if self.builder_state.rasterizer_discard_enable.is_none() {
return Err(PipelineExecutionError::DynamicStateNotSet { dynamic_state });
}
}
@ -1416,7 +1572,7 @@ where
DynamicState::Scissor => {
for num in 0..pipeline.viewport_state().unwrap().count().unwrap() {
// VUID?
if !self.current_state.scissor.contains_key(&num) {
if !self.builder_state.scissor.contains_key(&num) {
return Err(PipelineExecutionError::DynamicStateNotSet { dynamic_state });
}
}
@ -1424,7 +1580,7 @@ where
DynamicState::ScissorWithCount => {
// VUID-vkCmdDraw-scissorCount-03418
// VUID-vkCmdDraw-viewportCount-03419
let scissor_count = self.current_state.scissor_with_count.as_ref().ok_or(PipelineExecutionError::DynamicStateNotSet { dynamic_state })?.len() as u32;
let scissor_count = self.builder_state.scissor_with_count.as_ref().ok_or(PipelineExecutionError::DynamicStateNotSet { dynamic_state })?.len() as u32;
// Check if the counts match, but only if the viewport count is fixed.
// If the viewport count is also dynamic, then the
@ -1442,7 +1598,7 @@ where
}
}
DynamicState::StencilCompareMask => {
let state = self.current_state.stencil_compare_mask;
let state = self.builder_state.stencil_compare_mask;
// VUID?
if state.front.is_none() || state.back.is_none() {
@ -1450,7 +1606,7 @@ where
}
}
DynamicState::StencilOp => {
let state = self.current_state.stencil_op;
let state = self.builder_state.stencil_op;
// VUID?
if state.front.is_none() || state.back.is_none() {
@ -1458,7 +1614,7 @@ where
}
}
DynamicState::StencilReference => {
let state = self.current_state.stencil_reference;
let state = self.builder_state.stencil_reference;
// VUID?
if state.front.is_none() || state.back.is_none() {
@ -1467,14 +1623,14 @@ where
}
DynamicState::StencilTestEnable => {
// VUID?
if self.current_state.stencil_test_enable.is_none() {
if self.builder_state.stencil_test_enable.is_none() {
return Err(PipelineExecutionError::DynamicStateNotSet { dynamic_state });
}
// TODO: Check if the stencil buffer is writable
}
DynamicState::StencilWriteMask => {
let state = self.current_state.stencil_write_mask;
let state = self.builder_state.stencil_write_mask;
// VUID?
if state.front.is_none() || state.back.is_none() {
@ -1486,7 +1642,7 @@ where
DynamicState::Viewport => {
for num in 0..pipeline.viewport_state().unwrap().count().unwrap() {
// VUID?
if !self.current_state.viewport.contains_key(&num) {
if !self.builder_state.viewport.contains_key(&num) {
return Err(PipelineExecutionError::DynamicStateNotSet { dynamic_state });
}
}
@ -1495,7 +1651,7 @@ where
DynamicState::ViewportShadingRatePalette => todo!(),
DynamicState::ViewportWithCount => {
// VUID-vkCmdDraw-viewportCount-03417
let viewport_count = self.current_state.viewport_with_count.as_ref().ok_or(PipelineExecutionError::DynamicStateNotSet { dynamic_state })?.len() as u32;
let viewport_count = self.builder_state.viewport_with_count.as_ref().ok_or(PipelineExecutionError::DynamicStateNotSet { dynamic_state })?.len() as u32;
let scissor_count = if let Some(scissor_count) =
pipeline.viewport_state().unwrap().count()
@ -1505,7 +1661,7 @@ where
} else {
// VUID-vkCmdDraw-viewportCount-03419
// The scissor count is also dynamic.
self.current_state.scissor_with_count.as_ref().ok_or(PipelineExecutionError::DynamicStateNotSet { dynamic_state })?.len() as u32
self.builder_state.scissor_with_count.as_ref().ok_or(PipelineExecutionError::DynamicStateNotSet { dynamic_state })?.len() as u32
};
// VUID-vkCmdDraw-viewportCount-03417
@ -1710,7 +1866,7 @@ where
for (&binding_num, binding_desc) in &vertex_input.bindings {
// VUID-vkCmdDraw-None-04007
let vertex_buffer = match self.current_state.vertex_buffers.get(&binding_num) {
let vertex_buffer = match self.builder_state.vertex_buffers.get(&binding_num) {
Some(x) => x,
None => return Err(PipelineExecutionError::VertexBufferNotBound { binding_num }),
};
@ -1807,3 +1963,252 @@ where
Ok(())
}
}
/// Records the memory accesses performed through the currently bound
/// descriptor sets into the synchronization tracking state.
///
/// For every `(set, binding)` pair that `pipeline` declares binding
/// requirements for, the bound resource(s) are looked up in
/// `descriptor_sets_state`, and one access is recorded per descriptor array
/// element and per pipeline stage that reads or writes it.
fn record_descriptor_sets_access(
    resources_usage_state: &mut ResourcesState,
    command_index: usize,
    command_name: &'static str,
    descriptor_sets_state: &HashMap<PipelineBindPoint, DescriptorSetState>,
    pipeline: &impl Pipeline,
) {
    // If no descriptor sets are bound to this pipeline's bind point, there is
    // nothing to record.
    let descriptor_sets_state = match descriptor_sets_state.get(&pipeline.bind_point()) {
        Some(x) => x,
        None => return,
    };

    for (&(set, binding), binding_reqs) in pipeline.descriptor_binding_requirements() {
        let descriptor_type = descriptor_sets_state.pipeline_layout.set_layouts()[set as usize]
            .bindings()[&binding]
            .descriptor_type;

        // TODO: Should input attachments be handled here or in attachment access?
        if descriptor_type == DescriptorType::InputAttachment {
            continue;
        }

        // Builds, for one descriptor array element, the `ResourceUseRef` and
        // the iterator of stage/access pairs to record. The requirements
        // stored under `Some(index)` are combined with those under the `None`
        // key (presumably requirements that apply to every element of the
        // binding — confirm against `DescriptorBindingRequirements`).
        let use_iter = move |index: u32| {
            let (stages_read, stages_write) = [Some(index), None]
                .into_iter()
                .filter_map(|index| binding_reqs.descriptors.get(&index))
                .fold(
                    (ShaderStages::empty(), ShaderStages::empty()),
                    |(stages_read, stages_write), desc_reqs| {
                        (
                            stages_read | desc_reqs.memory_read,
                            stages_write | desc_reqs.memory_write,
                        )
                    },
                );
            let use_ref = ResourceUseRef {
                command_index,
                command_name,
                resource_in_command: ResourceInCommand::DescriptorSet {
                    set,
                    binding,
                    index,
                },
                secondary_use_ref: None,
            };
            let stage_access_iter = PipelineStageAccess::iter_descriptor_stages(
                descriptor_type,
                stages_read,
                stages_write,
            );
            (use_ref, stage_access_iter)
        };

        match descriptor_sets_state.descriptor_sets[&set]
            .resources()
            .binding(binding)
            .unwrap()
        {
            DescriptorBindingResources::None(_) => continue,
            DescriptorBindingResources::Buffer(elements) => {
                for (index, element) in elements.iter().enumerate() {
                    if let Some(buffer) = element {
                        let buffer_inner = buffer.inner();
                        let (use_ref, stage_access_iter) = use_iter(index as u32);

                        // Shift the buffer-relative range into the coordinate
                        // space of the underlying allocation.
                        let mut range = 0..buffer.size(); // TODO:
                        range.start += buffer_inner.offset;
                        range.end += buffer_inner.offset;

                        for stage_access in stage_access_iter {
                            resources_usage_state.record_buffer_access(
                                &use_ref,
                                buffer_inner.buffer,
                                range.clone(),
                                stage_access,
                            );
                        }
                    }
                }
            }
            DescriptorBindingResources::BufferView(elements) => {
                for (index, element) in elements.iter().enumerate() {
                    if let Some(buffer_view) = element {
                        let buffer = buffer_view.buffer();
                        let buffer_inner = buffer.inner();
                        let (use_ref, stage_access_iter) = use_iter(index as u32);

                        // The view's range is relative to the buffer; shift it
                        // into the underlying allocation's coordinates.
                        let mut range = buffer_view.range();
                        range.start += buffer_inner.offset;
                        range.end += buffer_inner.offset;

                        for stage_access in stage_access_iter {
                            resources_usage_state.record_buffer_access(
                                &use_ref,
                                buffer_inner.buffer,
                                range.clone(),
                                stage_access,
                            );
                        }
                    }
                }
            }
            DescriptorBindingResources::ImageView(elements) => {
                for (index, element) in elements.iter().enumerate() {
                    if let Some(image_view) = element {
                        let image = image_view.image();
                        let image_inner = image.inner();
                        // The expected image layout depends on how this
                        // descriptor type accesses the image.
                        let layout = image
                            .descriptor_layouts()
                            .expect(
                                "descriptor_layouts must return Some when used in an image view",
                            )
                            .layout_for(descriptor_type);
                        let (use_ref, stage_access_iter) = use_iter(index as u32);

                        // Shift the view's subresource range into the
                        // coordinates of the underlying image.
                        let mut subresource_range = image_view.subresource_range().clone();
                        subresource_range.array_layers.start += image_inner.first_layer;
                        subresource_range.array_layers.end += image_inner.first_layer;
                        subresource_range.mip_levels.start += image_inner.first_mipmap_level;
                        subresource_range.mip_levels.end += image_inner.first_mipmap_level;

                        for stage_access in stage_access_iter {
                            resources_usage_state.record_image_access(
                                &use_ref,
                                image_inner.image,
                                subresource_range.clone(),
                                stage_access,
                                layout,
                            );
                        }
                    }
                }
            }
            DescriptorBindingResources::ImageViewSampler(elements) => {
                // Same handling as `ImageView`; the sampler itself is not a
                // memory resource.
                for (index, element) in elements.iter().enumerate() {
                    if let Some((image_view, _)) = element {
                        let image = image_view.image();
                        let image_inner = image.inner();
                        let layout = image
                            .descriptor_layouts()
                            .expect(
                                "descriptor_layouts must return Some when used in an image view",
                            )
                            .layout_for(descriptor_type);
                        let (use_ref, stage_access_iter) = use_iter(index as u32);

                        let mut subresource_range = image_view.subresource_range().clone();
                        subresource_range.array_layers.start += image_inner.first_layer;
                        subresource_range.array_layers.end += image_inner.first_layer;
                        subresource_range.mip_levels.start += image_inner.first_mipmap_level;
                        subresource_range.mip_levels.end += image_inner.first_mipmap_level;

                        for stage_access in stage_access_iter {
                            resources_usage_state.record_image_access(
                                &use_ref,
                                image_inner.image,
                                subresource_range.clone(),
                                stage_access,
                                layout,
                            );
                        }
                    }
                }
            }
            // Samplers do not access memory; nothing to record.
            DescriptorBindingResources::Sampler(_) => (),
        }
    }
}
/// Records the vertex-attribute reads of all bound vertex buffers into the
/// synchronization tracking state.
///
/// One access is recorded per vertex input binding declared by `pipeline`,
/// covering the bound buffer's full range.
fn record_vertex_buffers_access(
    resources_usage_state: &mut ResourcesState,
    command_index: usize,
    command_name: &'static str,
    vertex_buffers_state: &HashMap<u32, Arc<dyn BufferAccess>>,
    pipeline: &GraphicsPipeline,
) {
    for &binding in pipeline.vertex_input_state().bindings.keys() {
        let buffer = &vertex_buffers_state[&binding];
        let inner = buffer.inner();

        // Shift the buffer-relative range into the underlying allocation's
        // coordinates. TODO: take range from draw command
        let accessed_range = inner.offset..inner.offset + buffer.size();

        let use_ref = ResourceUseRef {
            command_index,
            command_name,
            resource_in_command: ResourceInCommand::VertexBuffer { binding },
            secondary_use_ref: None,
        };

        resources_usage_state.record_buffer_access(
            &use_ref,
            inner.buffer,
            accessed_range,
            PipelineStageAccess::VertexAttributeInput_VertexAttributeRead,
        );
    }
}
/// Records the read of the bound index buffer into the synchronization
/// tracking state.
///
/// # Panics
///
/// Panics if `index_buffer_state` is `None`; the caller must have validated
/// that an index buffer is bound before recording an indexed draw.
fn record_index_buffer_access(
    resources_usage_state: &mut ResourcesState,
    command_index: usize,
    command_name: &'static str,
    index_buffer_state: &Option<(Arc<dyn BufferAccess>, IndexType)>,
) {
    // State the invariant instead of a bare `unwrap`: reaching this point
    // without a bound index buffer is a bug in the draw-command validation.
    let (buffer, _index_type) = index_buffer_state
        .as_ref()
        .expect("an index buffer must be bound when recording an indexed draw");
    let buffer_inner = buffer.inner();

    let use_ref = ResourceUseRef {
        command_index,
        command_name,
        resource_in_command: ResourceInCommand::IndexBuffer,
        secondary_use_ref: None,
    };

    // Shift the buffer-relative range into the underlying allocation's
    // coordinates. TODO: take range from draw command
    let mut range = 0..buffer.size();
    range.start += buffer_inner.offset;
    range.end += buffer_inner.offset;

    resources_usage_state.record_buffer_access(
        &use_ref,
        buffer_inner.buffer,
        range,
        PipelineStageAccess::IndexInput_IndexRead,
    );
}
/// Records the read of the indirect parameters buffer by the draw-indirect
/// stage into the synchronization tracking state.
fn record_indirect_buffer_access(
    resources_usage_state: &mut ResourcesState,
    command_index: usize,
    command_name: &'static str,
    buffer: &Arc<dyn BufferAccess>,
) {
    let inner = buffer.inner();

    // Shift the buffer-relative range into the underlying allocation's
    // coordinates. TODO: take range from draw command
    let accessed_range = inner.offset..inner.offset + buffer.size();

    let use_ref = ResourceUseRef {
        command_index,
        command_name,
        resource_in_command: ResourceInCommand::IndirectBuffer,
        secondary_use_ref: None,
    };

    resources_usage_state.record_buffer_access(
        &use_ref,
        inner.buffer,
        accessed_range,
        PipelineStageAccess::DrawIndirect_IndirectCommandRead,
    );
}

View File

@ -9,11 +9,11 @@
use super::{CommandBufferBuilder, QueryError, QueryState};
use crate::{
buffer::{BufferUsage, TypedBufferAccess},
command_buffer::allocator::CommandBufferAllocator,
buffer::{BufferAccess, BufferUsage, TypedBufferAccess},
command_buffer::{allocator::CommandBufferAllocator, ResourceInCommand, ResourceUseRef},
device::{DeviceOwned, QueueFlags},
query::{QueryControlFlags, QueryPool, QueryResultElement, QueryResultFlags, QueryType},
sync::{PipelineStage, PipelineStages},
sync::{PipelineStage, PipelineStageAccess, PipelineStages},
DeviceSize, RequiresOneOf, Version, VulkanObject,
};
use std::{ops::Range, sync::Arc};
@ -122,14 +122,14 @@ where
// VUID-vkCmdBeginQuery-queryPool-01922
if self
.current_state
.builder_state
.queries
.contains_key(&query_pool.query_type().into())
{
return Err(QueryError::QueryIsActive);
}
if let Some(render_pass_state) = &self.current_state.render_pass {
if let Some(render_pass_state) = &self.builder_state.render_pass {
// VUID-vkCmdBeginQuery-query-00808
if query + render_pass_state.view_mask.count_ones() > query_pool.query_count() {
return Err(QueryError::OutOfRangeMultiview);
@ -154,19 +154,20 @@ where
(fns.v1_0.cmd_begin_query)(self.handle(), query_pool.handle(), query, flags.into());
let ty = query_pool.query_type();
self.current_state.queries.insert(
self.builder_state.queries.insert(
ty.into(),
QueryState {
query_pool: query_pool.handle(),
query,
ty,
flags,
in_subpass: self.current_state.render_pass.is_some(),
in_subpass: self.builder_state.render_pass.is_some(),
},
);
self.resources.push(Box::new(query_pool));
self.next_command_index += 1;
self
}
@ -200,7 +201,7 @@ where
// VUID-vkCmdEndQuery-None-01923
if !self
.current_state
.builder_state
.queries
.get(&query_pool.query_type().into())
.map_or(false, |state| {
@ -213,7 +214,7 @@ where
// VUID-vkCmdEndQuery-query-00810
query_pool.query(query).ok_or(QueryError::OutOfRange)?;
if let Some(render_pass_state) = &self.current_state.render_pass {
if let Some(render_pass_state) = &self.builder_state.render_pass {
// VUID-vkCmdEndQuery-query-00812
if query + render_pass_state.view_mask.count_ones() > query_pool.query_count() {
return Err(QueryError::OutOfRangeMultiview);
@ -232,12 +233,13 @@ where
let fns = self.device().fns();
(fns.v1_0.cmd_end_query)(self.handle(), query_pool.handle(), query);
self.current_state
self.builder_state
.queries
.remove(&query_pool.query_type().into());
self.resources.push(Box::new(query_pool));
self.next_command_index += 1;
self
}
@ -444,7 +446,7 @@ where
// VUID-vkCmdWriteTimestamp2-query-04903
query_pool.query(query).ok_or(QueryError::OutOfRange)?;
if let Some(render_pass_state) = &self.current_state.render_pass {
if let Some(render_pass_state) = &self.builder_state.render_pass {
// VUID-vkCmdWriteTimestamp2-query-03865
if query + render_pass_state.view_mask.count_ones() > query_pool.query_count() {
return Err(QueryError::OutOfRangeMultiview);
@ -491,6 +493,7 @@ where
self.resources.push(Box::new(query_pool));
self.next_command_index += 1;
self
}
@ -514,7 +517,7 @@ where
&mut self,
query_pool: Arc<QueryPool>,
queries: Range<u32>,
destination: Arc<D>,
dst_buffer: Arc<D>,
flags: QueryResultFlags,
) -> Result<&mut Self, QueryError>
where
@ -524,7 +527,7 @@ where
self.validate_copy_query_pool_results(
&query_pool,
queries.clone(),
destination.as_ref(),
dst_buffer.as_ref(),
flags,
)?;
@ -532,13 +535,8 @@ where
let per_query_len = query_pool.query_type().result_len()
+ flags.intersects(QueryResultFlags::WITH_AVAILABILITY) as DeviceSize;
let stride = per_query_len * std::mem::size_of::<T>() as DeviceSize;
Ok(self.copy_query_pool_results_unchecked(
query_pool,
queries,
destination,
stride,
flags,
))
Ok(self
.copy_query_pool_results_unchecked(query_pool, queries, dst_buffer, stride, flags))
}
}
@ -546,7 +544,7 @@ where
&self,
query_pool: &QueryPool,
queries: Range<u32>,
destination: &D,
dst_buffer: &D,
flags: QueryResultFlags,
) -> Result<(), QueryError>
where
@ -564,18 +562,18 @@ where
}
// VUID-vkCmdCopyQueryPoolResults-renderpass
if self.current_state.render_pass.is_some() {
if self.builder_state.render_pass.is_some() {
return Err(QueryError::ForbiddenInsideRenderPass);
}
let device = self.device();
let buffer_inner = destination.inner();
let buffer_inner = dst_buffer.inner();
// VUID-vkCmdCopyQueryPoolResults-commonparent
assert_eq!(device, buffer_inner.buffer.device());
assert_eq!(device, query_pool.device());
assert!(destination.len() > 0);
assert!(dst_buffer.len() > 0);
// VUID-vkCmdCopyQueryPoolResults-flags-00822
// VUID-vkCmdCopyQueryPoolResults-flags-00823
@ -593,10 +591,10 @@ where
let required_len = per_query_len * count as DeviceSize;
// VUID-vkCmdCopyQueryPoolResults-dstBuffer-00824
if destination.len() < required_len {
if dst_buffer.len() < required_len {
return Err(QueryError::BufferTooSmall {
required_len,
actual_len: destination.len(),
actual_len: dst_buffer.len(),
});
}
@ -626,7 +624,7 @@ where
&mut self,
query_pool: Arc<QueryPool>,
queries: Range<u32>,
destination: Arc<D>,
dst_buffer: Arc<D>,
stride: DeviceSize,
flags: QueryResultFlags,
) -> &mut Self
@ -634,7 +632,7 @@ where
D: TypedBufferAccess<Content = [T]> + 'static,
T: QueryResultElement,
{
let destination_inner = destination.inner();
let dst_buffer_inner = dst_buffer.inner();
let fns = self.device().fns();
(fns.v1_0.cmd_copy_query_pool_results)(
@ -642,17 +640,35 @@ where
query_pool.handle(),
queries.start,
queries.end - queries.start,
destination_inner.buffer.handle(),
destination_inner.offset,
dst_buffer_inner.buffer.handle(),
dst_buffer_inner.offset,
stride,
ash::vk::QueryResultFlags::from(flags) | T::FLAG,
);
let command_index = self.next_command_index;
let command_name = "copy_query_pool_results";
let use_ref = ResourceUseRef {
command_index,
command_name,
resource_in_command: ResourceInCommand::Destination,
secondary_use_ref: None,
};
let mut dst_range = 0..dst_buffer.size(); // TODO:
dst_range.start += dst_buffer_inner.offset;
dst_range.end += dst_buffer_inner.offset;
self.resources_usage_state.record_buffer_access(
&use_ref,
dst_buffer_inner.buffer,
dst_range,
PipelineStageAccess::Copy_TransferWrite,
);
self.resources.push(Box::new(query_pool));
self.resources.push(Box::new(destination));
// TODO: sync state update
self.resources.push(Box::new(dst_buffer));
self.next_command_index += 1;
self
}
@ -680,7 +696,7 @@ where
queries: Range<u32>,
) -> Result<(), QueryError> {
// VUID-vkCmdResetQueryPool-renderpass
if self.current_state.render_pass.is_some() {
if self.builder_state.render_pass.is_some() {
return Err(QueryError::ForbiddenInsideRenderPass);
}
@ -707,7 +723,7 @@ where
// VUID-vkCmdResetQueryPool-None-02841
if self
.current_state
.builder_state
.queries
.values()
.any(|state| state.query_pool == query_pool.handle() && queries.contains(&state.query))
@ -734,6 +750,7 @@ where
self.resources.push(Box::new(query_pool));
self.next_command_index += 1;
self
}
}

View File

@ -71,7 +71,7 @@ where
}
// VUID-vkCmdBeginRenderPass2-renderpass
if self.current_state.render_pass.is_some() {
if self.builder_state.render_pass.is_some() {
return Err(RenderPassError::ForbiddenInsideRenderPass);
}
@ -446,7 +446,7 @@ where
let subpass = render_pass.clone().first_subpass();
let view_mask = subpass.subpass_desc().view_mask;
self.current_state.render_pass = Some(RenderPassState {
self.builder_state.render_pass = Some(RenderPassState {
contents,
render_area_offset,
render_area_extent,
@ -463,6 +463,7 @@ where
// TODO: sync state update
self.next_command_index += 1;
self
}
@ -491,7 +492,7 @@ where
// VUID-vkCmdNextSubpass2-renderpass
let render_pass_state = self
.current_state
.builder_state
.render_pass
.as_ref()
.ok_or(RenderPassError::ForbiddenOutsideRenderPass)?;
@ -512,7 +513,7 @@ where
// VUID?
if self
.current_state
.builder_state
.queries
.values()
.any(|state| state.in_subpass)
@ -561,7 +562,7 @@ where
(fns.v1_0.cmd_next_subpass)(self.handle(), subpass_begin_info.contents);
}
let render_pass_state = self.current_state.render_pass.as_mut().unwrap();
let render_pass_state = self.builder_state.render_pass.as_mut().unwrap();
let begin_render_pass_state = match &mut render_pass_state.render_pass {
RenderPassStateType::BeginRenderPass(x) => x,
_ => unreachable!(),
@ -574,11 +575,12 @@ where
if render_pass_state.view_mask != 0 {
// When multiview is enabled, at the beginning of each subpass, all
// non-render pass state is undefined.
self.current_state = Default::default();
self.builder_state = Default::default();
}
// TODO: sync state update
self.next_command_index += 1;
self
}
@ -601,7 +603,7 @@ where
fn validate_end_render_pass(&self) -> Result<(), RenderPassError> {
// VUID-vkCmdEndRenderPass2-renderpass
let render_pass_state = self
.current_state
.builder_state
.render_pass
.as_ref()
.ok_or(RenderPassError::ForbiddenOutsideRenderPass)?;
@ -628,7 +630,7 @@ where
// VUID?
if self
.current_state
.builder_state
.queries
.values()
.any(|state| state.in_subpass)
@ -671,10 +673,11 @@ where
(fns.v1_0.cmd_end_render_pass)(self.handle());
}
self.current_state.render_pass = None;
self.builder_state.render_pass = None;
// TODO: sync state update
self.next_command_index += 1;
self
}
}
@ -732,7 +735,7 @@ where
}
// VUID-vkCmdBeginRendering-renderpass
if self.current_state.render_pass.is_some() {
if self.builder_state.render_pass.is_some() {
return Err(RenderPassError::ForbiddenInsideRenderPass);
}
@ -1360,7 +1363,7 @@ where
(fns.khr_dynamic_rendering.cmd_begin_rendering_khr)(self.handle(), &rendering_info);
}
self.current_state.render_pass = Some(RenderPassState {
self.builder_state.render_pass = Some(RenderPassState {
contents,
render_area_offset,
render_area_extent,
@ -1460,6 +1463,7 @@ where
// TODO: sync state update
self.next_command_index += 1;
self
}
@ -1480,7 +1484,7 @@ where
fn validate_end_rendering(&self) -> Result<(), RenderPassError> {
// VUID-vkCmdEndRendering-renderpass
let render_pass_state = self
.current_state
.builder_state
.render_pass
.as_ref()
.ok_or(RenderPassError::ForbiddenOutsideRenderPass)?;
@ -1521,10 +1525,11 @@ where
(fns.khr_dynamic_rendering.cmd_end_rendering_khr)(self.handle());
}
self.current_state.render_pass = None;
self.builder_state.render_pass = None;
// TODO: sync state update
self.next_command_index += 1;
self
}
@ -1569,7 +1574,7 @@ where
) -> Result<(), RenderPassError> {
// VUID-vkCmdClearAttachments-renderpass
let render_pass_state = self
.current_state
.builder_state
.render_pass
.as_ref()
.ok_or(RenderPassError::ForbiddenOutsideRenderPass)?;
@ -1829,6 +1834,7 @@ where
// TODO: sync state update
self.next_command_index += 1;
self
}
}

View File

@ -67,7 +67,7 @@ where
// TODO:
// VUID-vkCmdExecuteCommands-pCommandBuffers-00094
if let Some(render_pass_state) = &self.current_state.render_pass {
if let Some(render_pass_state) = &self.builder_state.render_pass {
// VUID-vkCmdExecuteCommands-contents-06018
// VUID-vkCmdExecuteCommands-flags-06024
if render_pass_state.contents != SubpassContents::SecondaryCommandBuffers {
@ -261,7 +261,7 @@ where
}
// VUID-vkCmdExecuteCommands-commandBuffer-00101
if !self.current_state.queries.is_empty()
if !self.builder_state.queries.is_empty()
&& !self.device().enabled_features().inherited_queries
{
return Err(ExecuteCommandsError::RequirementNotMet {
@ -273,7 +273,7 @@ where
});
}
for state in self.current_state.queries.values() {
for state in self.builder_state.queries.values() {
match state.ty {
QueryType::Occlusion => {
// VUID-vkCmdExecuteCommands-commandBuffer-00102
@ -375,15 +375,19 @@ where
(fns.v1_0.cmd_execute_commands)(self.handle(), 1, &command_buffer.handle());
// The secondary command buffer could leave the primary in any state.
self.current_state = Default::default();
self.builder_state = Default::default();
// If the secondary is non-concurrent or one-time use, that restricts the primary as well.
self.usage = std::cmp::min(self.usage, command_buffer.usage);
self.resources.push(Box::new(command_buffer));
let _command_index = self.next_command_index;
let _command_name = "execute_commands";
// TODO: sync state update
self.resources.push(Box::new(command_buffer));
self.next_command_index += 1;
self
}
}

View File

@ -999,7 +999,7 @@ where
Checks for current render pass
*/
if let Some(render_pass_state) = self.current_state.render_pass.as_ref() {
if let Some(render_pass_state) = self.builder_state.render_pass.as_ref() {
// VUID-vkCmdPipelineBarrier2-None-06191
let begin_render_pass_state = match &render_pass_state.render_pass {
RenderPassStateType::BeginRenderPass(x) => x,
@ -1088,13 +1088,13 @@ where
return self;
}
let DependencyInfo {
let &DependencyInfo {
dependency_flags,
memory_barriers,
buffer_memory_barriers,
image_memory_barriers,
ref memory_barriers,
ref buffer_memory_barriers,
ref image_memory_barriers,
_ne: _,
} = dependency_info;
} = &dependency_info;
if self.device().enabled_features().synchronization2 {
let memory_barriers_vk: SmallVec<[_; 2]> = memory_barriers
@ -1341,43 +1341,27 @@ where
);
}
let command_index = self.next_command_index;
let command_name = "pipeline_barrier";
self.resources_usage_state.record_pipeline_barrier(
command_index,
command_name,
&dependency_info,
self.queue_family_properties().queue_flags,
);
self.resources
.reserve(buffer_memory_barriers.len() + image_memory_barriers.len());
for barrier in buffer_memory_barriers {
let BufferMemoryBarrier {
src_stages: _,
src_access: _,
dst_stages: _,
dst_access: _,
queue_family_ownership_transfer: _, // TODO:
buffer,
range: _,
_ne: _,
} = barrier;
self.resources.push(Box::new(buffer));
for barrier in dependency_info.buffer_memory_barriers {
self.resources.push(Box::new(barrier.buffer));
}
for barrier in image_memory_barriers {
let ImageMemoryBarrier {
src_stages: _,
src_access: _,
dst_stages: _,
dst_access: _,
old_layout: _, // TODO:
new_layout: _, // TODO:
queue_family_ownership_transfer: _, // TODO:
image,
subresource_range: _,
_ne: _,
} = barrier;
self.resources.push(Box::new(image));
for barrier in dependency_info.image_memory_barriers {
self.resources.push(Box::new(barrier.image));
}
// TODO: sync state update
self.next_command_index += 1;
self
}
@ -1406,7 +1390,7 @@ where
dependency_info: &DependencyInfo,
) -> Result<(), SynchronizationError> {
// VUID-vkCmdSetEvent2-renderpass
if self.current_state.render_pass.is_some() {
if self.builder_state.render_pass.is_some() {
return Err(SynchronizationError::ForbiddenInsideRenderPass);
}
@ -2508,6 +2492,7 @@ where
// TODO: sync state update
self.next_command_index += 1;
self
}
@ -2981,7 +2966,7 @@ where
}
// VUID-vkCmdWaitEvents2-dependencyFlags-03844
if self.current_state.render_pass.is_some()
if self.builder_state.render_pass.is_some()
&& src_stages.intersects(PipelineStages::HOST)
{
todo!()
@ -3830,6 +3815,7 @@ where
// TODO: sync state update
self.next_command_index += 1;
self
}
@ -3859,7 +3845,7 @@ where
stages: PipelineStages,
) -> Result<(), SynchronizationError> {
// VUID-vkCmdResetEvent2-renderpass
if self.current_state.render_pass.is_some() {
if self.builder_state.render_pass.is_some() {
return Err(SynchronizationError::ForbiddenInsideRenderPass);
}
@ -4081,6 +4067,7 @@ where
// TODO: sync state update
self.next_command_index += 1;
self
}
}

View File

@ -199,6 +199,13 @@ vulkan_enum! {
},*/
}
impl Default for ImageLayout {
    /// Returns [`ImageLayout::Undefined`], the layout of an image whose
    /// contents have not yet been defined by any operation.
    #[inline]
    fn default() -> Self {
        ImageLayout::Undefined
    }
}
/// The set of layouts to use for an image when used in descriptor of various kinds.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct ImageDescriptorLayouts {

View File

@ -16,6 +16,7 @@
//! This safety is enforced at runtime by vulkano but it is not magic and you will require some
//! knowledge if you want to avoid errors.
pub(crate) use self::pipeline::{PipelineStageAccess, PipelineStageAccessSet};
pub use self::{
future::{now, FlushError, GpuFuture},
pipeline::{

File diff suppressed because it is too large Load Diff