Rework command buffers again

Pierre Krieger 2017-01-03 15:42:03 +01:00
parent 8119d9d54a
commit 372a6f7d14
44 changed files with 2176 additions and 1671 deletions

View File

@ -34,8 +34,9 @@ use vulkano_win::VkSurfaceBuild;
use vulkano::buffer::BufferUsage;
use vulkano::buffer::CpuAccessibleBuffer;
use vulkano::command_buffer;
use vulkano::command_buffer::AutoCommandBufferBuilder;
use vulkano::command_buffer::CommandBufferBuilder;
use vulkano::command_buffer::DynamicState;
use vulkano::command_buffer::CommandsList;
use vulkano::command_buffer::Submit;
use vulkano::command_buffer::Submission;
use vulkano::descriptor::pipeline_layout::PipelineLayout;
@ -257,7 +258,7 @@ fn main() {
// Before we draw we have to create what is called a pipeline. This is similar to an OpenGL
// program, but much more specific.
let pipeline = GraphicsPipeline::new(&device, GraphicsPipelineParams {
let pipeline = Arc::new(GraphicsPipeline::new(&device, GraphicsPipelineParams {
// We need to indicate the layout of the vertices.
// The type `SingleBufferDefinition` actually contains a template parameter corresponding
// to the type of each vertex. But in this code it is automatically inferred.
@ -313,7 +314,7 @@ fn main() {
// We have to indicate which subpass of which render pass this pipeline is going to be used
// in. The pipeline will only be usable from this particular subpass.
render_pass: Subpass::from(render_pass.clone(), 0).unwrap(),
}).unwrap();
}).unwrap());
// The render pass we created above only describes the layout of our framebuffers. Before we
// can draw we also need to create the actual framebuffers.
@ -370,7 +371,7 @@ fn main() {
//
// Note that we have to pass a queue family when we create the command buffer. The command
// buffer will only be executable on that given queue family.
let command_buffer = command_buffer::empty()
let command_buffer = AutoCommandBufferBuilder::new(device.clone(), queue.family()).unwrap()
// Before we can draw, we have to *enter a render pass*. There are two methods to do
// this: `draw_inline` and `draw_secondary`. The latter is a bit more advanced and is
// not covered here.
@ -390,10 +391,10 @@ fn main() {
// We leave the render pass by calling `draw_end`. Note that if we had multiple
// subpasses we could have called `next_inline` (or `next_secondary`) to jump to the
// next subpass.
.end_render_pass().unwrap()
.end_render_pass()
// Finish building the command buffer by calling `build`.
.build_primary(&device, queue.family()).unwrap();
.build();
// Now all we need to do is submit the command buffer to the queue.
submissions.push(command_buffer.submit(&queue).unwrap());

View File

@ -0,0 +1,117 @@
// Copyright (c) 2016 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use std::error::Error;
use std::sync::Arc;
use command_buffer::cb;
use command_buffer::cmd;
use command_buffer::cb::AddCommand;
use command_buffer::cb::CommandBufferBuild;
use command_buffer::pool::CommandPool;
use command_buffer::pool::StandardCommandPool;
use command_buffer::Submit;
use command_buffer::SubmitBuilder;
use device::Device;
use device::Queue;
use instance::QueueFamily;
use OomError;
type Cb<L, P> = cb::DeviceCheckLayer<cb::QueueTyCheckLayer<cb::ContextCheckLayer<cb::StateCacheLayer<cb::AutoPipelineBarriersLayer<cb::UnsafeCommandBufferBuilder<P>, L>>>>>;
pub struct AutoCommandBufferBuilder<L, P = Arc<StandardCommandPool>> where P: CommandPool {
inner: Cb<L, P>
}
impl AutoCommandBufferBuilder<Arc<StandardCommandPool>> {
pub fn new(device: Arc<Device>, queue_family: QueueFamily)
-> Result<AutoCommandBufferBuilder<(), Arc<StandardCommandPool>>, OomError>
{
let pool = Device::standard_command_pool(&device, queue_family);
let cmd = unsafe {
let c = try!(cb::UnsafeCommandBufferBuilder::new(pool, cb::Kind::primary(), cb::Flags::SimultaneousUse /* TODO: */));
let c = cb::BufferedCommandsListLayer::new(c);
let c = cb::AutoPipelineBarriersLayer::new(c);
let c = cb::StateCacheLayer::new(c);
let c = cb::ContextCheckLayer::new(c);
let c = cb::QueueTyCheckLayer::new(c);
let c = cb::DeviceCheckLayer::new(c);
c
};
Ok(AutoCommandBufferBuilder {
inner: cmd,
})
}
}
unsafe impl<L, P, O> CommandBufferBuild for AutoCommandBufferBuilder<L, P>
where Cb<L, P>: CommandBufferBuild<Out = O>,
P: CommandPool
{
type Out = O;
#[inline]
fn build(self) -> Self::Out {
// TODO: wrap around?
self.inner.build()
}
}
unsafe impl<L, P> Submit for AutoCommandBufferBuilder<L, P>
where Cb<L, P>: Submit,
P: CommandPool
{
#[inline]
fn device(&self) -> &Arc<Device> {
self.inner.device()
}
#[inline]
unsafe fn append_submission<'a>(&'a self, base: SubmitBuilder<'a>, queue: &Arc<Queue>)
-> Result<SubmitBuilder<'a>, Box<Error>>
{
self.inner.append_submission(base, queue)
}
}
macro_rules! pass_through {
(($($param:ident),*), $cmd:ty) => {
unsafe impl<L, P $(, $param)*> AddCommand<$cmd> for AutoCommandBufferBuilder<L, P>
where P: CommandPool,
Cb<L, P>: AddCommand<$cmd, Out = Cb<(L, $cmd), P>>
{
type Out = AutoCommandBufferBuilder<(L, $cmd), P>;
#[inline]
fn add(self, command: $cmd) -> Self::Out {
AutoCommandBufferBuilder {
inner: self.inner.add(command),
}
}
}
}
}
pass_through!((Rp, F), cmd::CmdBeginRenderPass<Rp, F>);
pass_through!((S, Pl), cmd::CmdBindDescriptorSets<S, Pl>);
pass_through!((B), cmd::CmdBindIndexBuffer<B>);
pass_through!((Pl), cmd::CmdBindPipeline<Pl>);
pass_through!((V), cmd::CmdBindVertexBuffers<V>);
pass_through!((), cmd::CmdClearAttachments);
pass_through!((S, D), cmd::CmdCopyBuffer<S, D>);
pass_through!((), cmd::CmdDrawRaw);
pass_through!((), cmd::CmdEndRenderPass);
pass_through!((C), cmd::CmdExecuteCommands<C>);
pass_through!((B), cmd::CmdFillBuffer<B>);
pass_through!((), cmd::CmdNextSubpass);
pass_through!((Pc, Pl), cmd::CmdPushConstants<Pc, Pl>);
pass_through!((), cmd::CmdSetState);
//pass_through!((B), cmd::CmdUpdateBuffer<B>);

View File

@ -0,0 +1,84 @@
// Copyright (c) 2016 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use command_buffer::cb::AddCommand;
use command_buffer::cb::BufferedCommandsListLayer;
use command_buffer::cb::CommandBufferBuild;
use command_buffer::cmd;
pub struct AutoPipelineBarriersLayer<I, L> {
inner: BufferedCommandsListLayer<I, L>,
}
impl<I> AutoPipelineBarriersLayer<I, ()> {
#[inline] // TODO: remove inline maybe?
pub fn new(inner: BufferedCommandsListLayer<I, ()>) -> AutoPipelineBarriersLayer<I, ()> {
AutoPipelineBarriersLayer {
inner: inner,
}
}
}
/*unsafe impl<C, I, L> AddCommand<C> for AutoPipelineBarriersLayer<I, L>
where I: for<'r> AddCommand<&'r C, Out = I>
{
type Out = AutoPipelineBarriersLayer<I, (L, C)>;
#[inline]
fn add(self, command: C) -> Self::Out {
AutoPipelineBarriersLayer {
inner: AddCommand::add(self.inner, command),
}
}
}*/
unsafe impl<I, L, O> CommandBufferBuild for AutoPipelineBarriersLayer<I, L>
where BufferedCommandsListLayer<I, L>: CommandBufferBuild<Out = O>
{
type Out = O;
#[inline]
fn build(self) -> O {
self.inner.build()
}
}
macro_rules! pass_through {
(($($param:ident),*), $cmd:ty) => {
unsafe impl<I, L $(, $param)*> AddCommand<$cmd> for AutoPipelineBarriersLayer<I, L>
where I: for<'r> AddCommand<&'r $cmd, Out = I>
{
type Out = AutoPipelineBarriersLayer<I, (L, $cmd)>;
#[inline]
fn add(self, command: $cmd) -> Self::Out {
AutoPipelineBarriersLayer {
inner: AddCommand::add(self.inner, command),
}
}
}
}
}
pass_through!((Rp, F), cmd::CmdBeginRenderPass<Rp, F>);
pass_through!((S, Pl), cmd::CmdBindDescriptorSets<S, Pl>);
pass_through!((B), cmd::CmdBindIndexBuffer<B>);
pass_through!((Pl), cmd::CmdBindPipeline<Pl>);
pass_through!((V), cmd::CmdBindVertexBuffers<V>);
pass_through!((), cmd::CmdClearAttachments);
pass_through!((S, D), cmd::CmdCopyBuffer<S, D>);
pass_through!((), cmd::CmdDrawRaw);
pass_through!((), cmd::CmdEndRenderPass);
pass_through!((C), cmd::CmdExecuteCommands<C>);
pass_through!((B), cmd::CmdFillBuffer<B>);
pass_through!((), cmd::CmdNextSubpass);
pass_through!((Pc, Pl), cmd::CmdPushConstants<Pc, Pl>);
pass_through!((), cmd::CmdSetState);
//pass_through!((B), cmd::CmdUpdateBuffer<B>);

View File

@ -0,0 +1,186 @@
// Copyright (c) 2016 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use std::error::Error;
use std::sync::Arc;
use command_buffer::cb::AddCommand;
use command_buffer::cb::CommandBufferBuild;
use command_buffer::cb::CommandsList;
use command_buffer::cmd;
use command_buffer::Submit;
use command_buffer::SubmitBuilder;
use device::Device;
use device::Queue;
/// Layer around a command buffer builder or a command buffer that stores the commands and has a
/// buffering mechanism.
///
/// Whenever you add a command (with the `AddCommand` trait), the command is not immediately added
/// to the underlying builder. Pending commands are added for real when you call `flush()` or when
/// you build the builder into a real command buffer.
///
/// The purpose of this buffering mechanism is to allow inserting pipeline barrier commands between
/// commands that are already submitted and commands that are pending thanks to the
/// `add_non_buffered_pipeline_barrier` method.
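///
/// # Example
///
/// A minimal sketch of the intended usage, assuming that `unsafe_builder` is an
/// `UnsafeCommandBufferBuilder` and that `cmd_a` and `cmd_b` are command objects from the
/// `cmd` module (all three names are placeholders):
///
/// ```ignore
/// let layer = BufferedCommandsListLayer::new(unsafe_builder);
/// let layer = layer.add(cmd_a);   // recorded in the layer's commands list
/// let layer = layer.add(cmd_b);   // idem
/// let built = layer.build();      // flushes any pending command, then builds the inner builder
/// ```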
pub struct BufferedCommandsListLayer<I, L> {
inner: Option<I>,
commands: L,
// Number of commands in the list that haven't been flushed. Since the latest commands appear
// first in the list, it is more convenient to store the number of commands that haven't been
// flushed rather than the number of commands that have been flushed.
non_flushed: u32,
}
/// Helper trait for `BufferedCommandsListLayer`.
///
/// Whenever you manipulate a `BufferedCommandsListLayer<I, L>`, the template parameter `L` should
/// implement `BufferedCommandsListLayerCommands<I>`.
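///
/// A rough sketch of how the recursive implementations below unroll, for a hypothetical list
/// `((((), a), b), c)` where `c` is the most recently added command:
///
/// ```ignore
/// // Forwarding only the two most recent commands to the destination builder:
/// // ((((), a), b), c).flush(2, dest)  ==  dest.add(&b).add(&c)
/// ```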
pub unsafe trait BufferedCommandsListLayerCommands<I> {
/// Sends the `num` last commands of the list to `dest`.
fn flush(&self, num: u32, dest: I) -> I;
}
unsafe impl<I> BufferedCommandsListLayerCommands<I> for () {
#[inline]
fn flush(&self, num: u32, dest: I) -> I {
debug_assert_eq!(num, 0);
dest
}
}
unsafe impl<I, L, C> BufferedCommandsListLayerCommands<I> for (L, C)
where I: for<'r> AddCommand<&'r C, Out = I>,
L: BufferedCommandsListLayerCommands<I>,
{
#[inline]
fn flush(&self, num: u32, dest: I) -> I {
if num == 0 {
dest
} else {
self.0.flush(num - 1, dest).add(&self.1)
}
}
}
unsafe impl<I, L> CommandsList for BufferedCommandsListLayer<I, L> {
type List = L;
#[inline]
unsafe fn list(&self) -> &L {
&self.commands
}
}
impl<I> BufferedCommandsListLayer<I, ()> {
/// Builds a new `BufferedCommandsListLayer`.
#[inline]
pub fn new(inner: I) -> BufferedCommandsListLayer<I, ()> {
BufferedCommandsListLayer {
inner: Some(inner),
commands: (),
non_flushed: 0,
}
}
}
impl<'a, I, L> BufferedCommandsListLayer<I, L>
where I: AddCommand<&'a cmd::CmdPipelineBarrier<'a>, Out = I>
{
/// Adds a pipeline barrier to the underlying command buffer and bypasses the flushing
/// mechanism.
#[inline]
pub fn add_non_buffered_pipeline_barrier(&mut self, cmd: &'a cmd::CmdPipelineBarrier<'a>) {
let inner = self.inner.take().unwrap();
self.inner = Some(inner.add(cmd));
}
}
impl<I, L> BufferedCommandsListLayer<I, L> where L: BufferedCommandsListLayerCommands<I> {
/// Flushes all the commands that haven't been flushed to the inner builder.
#[inline]
pub fn flush(&mut self) {
let inner = self.inner.take().unwrap();
self.inner = Some(self.commands.flush(self.non_flushed, inner));
self.non_flushed = 0;
}
}
unsafe impl<I, L, O> CommandBufferBuild for BufferedCommandsListLayer<I, L>
where I: CommandBufferBuild<Out = O>,
L: BufferedCommandsListLayerCommands<I> // Necessary in order to flush
{
type Out = BufferedCommandsListLayer<O, L>;
#[inline]
fn build(mut self) -> Self::Out {
self.flush();
debug_assert_eq!(self.non_flushed, 0);
let inner = self.inner.take().unwrap().build();
BufferedCommandsListLayer {
inner: Some(inner),
commands: self.commands,
non_flushed: 0,
}
}
}
unsafe impl<I, L> Submit for BufferedCommandsListLayer<I, L> where I: Submit {
#[inline]
fn device(&self) -> &Arc<Device> {
self.inner.as_ref().unwrap().device()
}
#[inline]
unsafe fn append_submission<'a>(&'a self, base: SubmitBuilder<'a>, queue: &Arc<Queue>)
-> Result<SubmitBuilder<'a>, Box<Error>>
{
self.inner.as_ref().unwrap().append_submission(base, queue)
}
}
macro_rules! pass_through {
(($($param:ident),*), $cmd:ty) => {
unsafe impl<'a, I, L $(, $param)*> AddCommand<$cmd> for BufferedCommandsListLayer<I, L>
where I: for<'r> AddCommand<&'r $cmd, Out = I>
{
type Out = BufferedCommandsListLayer<I, (L, $cmd)>;
#[inline]
fn add(mut self, command: $cmd) -> Self::Out {
debug_assert!(self.inner.is_some());
BufferedCommandsListLayer {
inner: Some(AddCommand::add(self.inner.take().unwrap(), &command)),
commands: (self.commands, command),
non_flushed: self.non_flushed + 1,
}
}
}
}
}
pass_through!((Rp, F), cmd::CmdBeginRenderPass<Rp, F>);
pass_through!((S, Pl), cmd::CmdBindDescriptorSets<S, Pl>);
pass_through!((B), cmd::CmdBindIndexBuffer<B>);
pass_through!((Pl), cmd::CmdBindPipeline<Pl>);
pass_through!((V), cmd::CmdBindVertexBuffers<V>);
pass_through!((), cmd::CmdClearAttachments);
pass_through!((S, D), cmd::CmdCopyBuffer<S, D>);
pass_through!((), cmd::CmdDrawIndexedRaw);
pass_through!((B), cmd::CmdDrawIndirectRaw<B>);
pass_through!((), cmd::CmdDrawRaw);
pass_through!((), cmd::CmdEndRenderPass);
pass_through!((C), cmd::CmdExecuteCommands<C>);
pass_through!((B), cmd::CmdFillBuffer<B>);
pass_through!((), cmd::CmdNextSubpass);
pass_through!((Pc, Pl), cmd::CmdPushConstants<Pc, Pl>);
pass_through!((), cmd::CmdSetState);
pass_through!((B, D), cmd::CmdUpdateBuffer<'a, B, D>);

View File

@ -0,0 +1,108 @@
// Copyright (c) 2016 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use std::error::Error;
use std::sync::Arc;
use command_buffer::cb::AddCommand;
use command_buffer::cmd;
use command_buffer::Submit;
use command_buffer::SubmitBuilder;
use device::Device;
use device::Queue;
pub struct CommandsListLayer<I, L> {
inner: I,
commands: L,
}
/// Trait for command buffers or command buffer builders that expose their list of commands.
// TODO: consider changing this to a more flexible API because right now we're forcing
// implementations to hold a tuple of commands
pub unsafe trait CommandsList {
type List;
/// Returns the list of commands.
///
/// # Safety
///
/// This function is unsafe because the commands must not be modified through
/// interior mutability.
unsafe fn list(&self) -> &Self::List;
}
unsafe impl<I, L> CommandsList for CommandsListLayer<I, L> {
type List = L;
#[inline]
unsafe fn list(&self) -> &L {
&self.commands
}
}
impl<I> CommandsListLayer<I, ()> {
#[inline]
pub fn new(inner: I) -> CommandsListLayer<I, ()> {
CommandsListLayer {
inner: inner,
commands: (),
}
}
}
// TODO: implement CommandBufferBuild
unsafe impl<I, L> Submit for CommandsListLayer<I, L> where I: Submit {
#[inline]
fn device(&self) -> &Arc<Device> {
self.inner.device()
}
#[inline]
unsafe fn append_submission<'a>(&'a self, base: SubmitBuilder<'a>, queue: &Arc<Queue>)
-> Result<SubmitBuilder<'a>, Box<Error>>
{
self.inner.append_submission(base, queue)
}
}
macro_rules! pass_through {
(($($param:ident),*), $cmd:ty) => {
unsafe impl<'a, I, L $(, $param)*> AddCommand<$cmd> for CommandsListLayer<I, L>
where I: for<'r> AddCommand<&'r $cmd, Out = I>
{
type Out = CommandsListLayer<I, (L, $cmd)>;
#[inline]
fn add(self, command: $cmd) -> Self::Out {
CommandsListLayer {
inner: AddCommand::add(self.inner, &command),
commands: (self.commands, command),
}
}
}
}
}
pass_through!((Rp, F), cmd::CmdBeginRenderPass<Rp, F>);
pass_through!((S, Pl), cmd::CmdBindDescriptorSets<S, Pl>);
pass_through!((B), cmd::CmdBindIndexBuffer<B>);
pass_through!((Pl), cmd::CmdBindPipeline<Pl>);
pass_through!((V), cmd::CmdBindVertexBuffers<V>);
pass_through!((), cmd::CmdClearAttachments);
pass_through!((S, D), cmd::CmdCopyBuffer<S, D>);
pass_through!((), cmd::CmdDrawIndexedRaw);
pass_through!((B), cmd::CmdDrawIndirectRaw<B>);
pass_through!((), cmd::CmdDrawRaw);
pass_through!((), cmd::CmdEndRenderPass);
pass_through!((C), cmd::CmdExecuteCommands<C>);
pass_through!((B), cmd::CmdFillBuffer<B>);
pass_through!((), cmd::CmdNextSubpass);
pass_through!((Pc, Pl), cmd::CmdPushConstants<Pc, Pl>);
pass_through!((), cmd::CmdSetState);
pass_through!((B, D), cmd::CmdUpdateBuffer<'a, B, D>);

View File

@ -0,0 +1,85 @@
// Copyright (c) 2016 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use command_buffer::cb::AddCommand;
use command_buffer::cb::CommandBufferBuild;
use command_buffer::cmd;
/// Layer around a command buffer builder that checks whether the commands can be executed in the
/// given context.
///
/// "The given context" here means being inside/outside a render pass or a secondary command
/// buffer.
pub struct ContextCheckLayer<I> {
inner: I,
}
impl<I> ContextCheckLayer<I> {
/// Builds a new `ContextCheckLayer`.
#[inline]
pub fn new(inner: I) -> ContextCheckLayer<I> {
ContextCheckLayer {
inner: inner,
}
}
/// Destroys the layer and returns the underlying command buffer.
#[inline]
pub fn into_inner(self) -> I {
self.inner
}
}
unsafe impl<I, O> CommandBufferBuild for ContextCheckLayer<I>
where I: CommandBufferBuild<Out = O>
{
type Out = O;
#[inline]
fn build(self) -> O {
self.inner.build()
}
}
// TODO: actually implement
macro_rules! pass_through {
(($($param:ident),*), $cmd:ty) => {
unsafe impl<'a, I, O $(, $param)*> AddCommand<$cmd> for ContextCheckLayer<I>
where I: AddCommand<$cmd, Out = O>
{
type Out = ContextCheckLayer<O>;
#[inline]
fn add(self, command: $cmd) -> Self::Out {
ContextCheckLayer {
inner: self.inner.add(command),
}
}
}
}
}
pass_through!((Rp, F), cmd::CmdBeginRenderPass<Rp, F>);
pass_through!((S, Pl), cmd::CmdBindDescriptorSets<S, Pl>);
pass_through!((B), cmd::CmdBindIndexBuffer<B>);
pass_through!((Pl), cmd::CmdBindPipeline<Pl>);
pass_through!((V), cmd::CmdBindVertexBuffers<V>);
pass_through!((), cmd::CmdClearAttachments);
pass_through!((S, D), cmd::CmdCopyBuffer<S, D>);
pass_through!((), cmd::CmdDrawIndexedRaw);
pass_through!((B), cmd::CmdDrawIndirectRaw<B>);
pass_through!((), cmd::CmdDrawRaw);
pass_through!((), cmd::CmdEndRenderPass);
pass_through!((C), cmd::CmdExecuteCommands<C>);
pass_through!((B), cmd::CmdFillBuffer<B>);
pass_through!((), cmd::CmdNextSubpass);
pass_through!((Pc, Pl), cmd::CmdPushConstants<Pc, Pl>);
pass_through!((), cmd::CmdSetState);
pass_through!((B, D), cmd::CmdUpdateBuffer<'a, B, D>);

View File

@ -0,0 +1,82 @@
// Copyright (c) 2016 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use command_buffer::cb::AddCommand;
use command_buffer::cb::CommandBufferBuild;
use command_buffer::cmd;
/// Layer around a command buffer builder that checks whether the commands added to it belong to
/// the same device as the command buffer.
pub struct DeviceCheckLayer<I> {
inner: I,
}
impl<I> DeviceCheckLayer<I> {
/// Builds a new `DeviceCheckLayer`.
#[inline]
pub fn new(inner: I) -> DeviceCheckLayer<I> {
DeviceCheckLayer {
inner: inner,
}
}
/// Destroys the layer and returns the underlying command buffer.
#[inline]
pub fn into_inner(self) -> I {
self.inner
}
}
unsafe impl<I, O> CommandBufferBuild for DeviceCheckLayer<I>
where I: CommandBufferBuild<Out = O>
{
type Out = O;
#[inline]
fn build(self) -> O {
self.inner.build()
}
}
// TODO: actually implement
macro_rules! pass_through {
(($($param:ident),*), $cmd:ty) => {
unsafe impl<'a, I, O $(, $param)*> AddCommand<$cmd> for DeviceCheckLayer<I>
where I: AddCommand<$cmd, Out = O>
{
type Out = DeviceCheckLayer<O>;
#[inline]
fn add(self, command: $cmd) -> Self::Out {
DeviceCheckLayer {
inner: self.inner.add(command),
}
}
}
}
}
pass_through!((Rp, F), cmd::CmdBeginRenderPass<Rp, F>);
pass_through!((S, Pl), cmd::CmdBindDescriptorSets<S, Pl>);
pass_through!((B), cmd::CmdBindIndexBuffer<B>);
pass_through!((Pl), cmd::CmdBindPipeline<Pl>);
pass_through!((V), cmd::CmdBindVertexBuffers<V>);
pass_through!((), cmd::CmdClearAttachments);
pass_through!((S, D), cmd::CmdCopyBuffer<S, D>);
pass_through!((), cmd::CmdDrawIndexedRaw);
pass_through!((B), cmd::CmdDrawIndirectRaw<B>);
pass_through!((), cmd::CmdDrawRaw);
pass_through!((), cmd::CmdEndRenderPass);
pass_through!((C), cmd::CmdExecuteCommands<C>);
pass_through!((B), cmd::CmdFillBuffer<B>);
pass_through!((), cmd::CmdNextSubpass);
pass_through!((Pc, Pl), cmd::CmdPushConstants<Pc, Pl>);
pass_through!((), cmd::CmdSetState);
pass_through!((B, D), cmd::CmdUpdateBuffer<'a, B, D>);

View File

@ -7,38 +7,92 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use std::sync::Arc;
use command_buffer::pool::StandardCommandPool;
use device::Device;
use instance::QueueFamily;
use OomError;
//! Internals of vulkano's command buffer building.
//!
//! You probably don't need to look inside this module if you're a beginner. The
//! `AutoCommandBufferBuilder` provided in the parent module should be good for most needs.
//!
//! # Builder basics
//!
//! The lowest-level command buffer types are `UnsafeCommandBufferBuilder` and
//! `UnsafeCommandBuffer`. These two types have zero overhead over Vulkan command buffers but are
//! very unsafe to use.
//!
//! Before you can add a command to an unsafe command buffer builder, you should:
//!
//! - Make sure that the buffers or images used by the command stay alive for the duration of the
//! command buffer.
//! - Check that the device used by the buffers or images of the command is the same as the device
//! of the command buffer.
//! - If the command buffer is inside/outside a render pass, check that the command can be executed
//! inside/outside a render pass. Same for secondary command buffers.
//! - Check that the command can be executed on the queue family of the command buffer. Some queue
//! families don't support graphics and/or compute operations.
//! - Make sure that when the command buffer is submitted the buffers and images of the command
//! will be properly synchronized.
//! - Make sure that pipeline barriers are correctly inserted in order to avoid race conditions.
//!
//! In order to allow you to customize which checks are performed, vulkano provides *layers*. They
//! are structs that can be put around a command buffer builder and that perform these checks for
//! you. Keep in mind that all the conditions above must be respected, but if you can guarantee at
//! compile-time that some requirements are always met, you can avoid paying some runtime cost by
//! not using all the layers.
//!
//! Adding a command to a command buffer builder is done in two steps:
//!
//! - First you must build a struct that represents the command to add. The struct's constructor
//! can perform various checks to make sure that the command itself is valid, or it can provide
//! an unsafe constructor that doesn't perform any check.
//! - Then use the `AddCommand` trait to add it. The trait is implemented on the command buffer
//! builder and on the various layers, and its template parameter is the struct representing
//! the command.
//!
//! Since the `UnsafeCommandBufferBuilder` doesn't keep the command structs alive (as it would
//! incur an overhead), it implements `AddCommand<&T>`.
//!
//! The role of the `CommandsListLayer` and `BufferedCommandsListLayer` layers is to keep the
//! commands alive. They implement `AddCommand<T>` if the builder they wrap around implements
//! `AddCommand<&T>`. In other words they are the lowest level that you should put around an
//! `UnsafeCommandBufferBuilder`.
//!
//! The other layers of this module implement `AddCommand<T>` if the builder they wrap around
//! implements `AddCommand<T>`.
//!
//! # Building a command buffer
//!
//! Once you are satisfied with the commands you added to a builder, use the `CommandBufferBuild`
//! trait to build it.
//!
//! This trait is implemented on the `UnsafeCommandBufferBuilder` but also on all the layers.
//! The builder's layers can choose to add layers around the finished command buffer.
//!
//! # The `CommandsList` trait
//!
//! The `CommandsList` trait is implemented on any command buffer or command buffer builder that
//! exposes a list of commands. It is required by some of the layers.
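//!
//! # Example
//!
//! A rough sketch of stacking the layers by hand, in the same order as `AutoCommandBufferBuilder`
//! does it. The `device` and `queue_family` values and the error handling are assumed to already
//! exist; this is illustrative only:
//!
//! ```ignore
//! let pool = Device::standard_command_pool(&device, queue_family);
//! let cb = unsafe {
//!     // Lowest level: zero-overhead, unchecked builder.
//!     let cb = UnsafeCommandBufferBuilder::new(pool, Kind::primary(), Flags::SimultaneousUse)?;
//!     // Keeps the command objects alive, since the unsafe builder only accepts `&T`.
//!     let cb = BufferedCommandsListLayer::new(cb);
//!     // Intended to automatically insert pipeline barriers between commands.
//!     let cb = AutoPipelineBarriersLayer::new(cb);
//!     // Caches state-setting commands to avoid redundant ones.
//!     let cb = StateCacheLayer::new(cb);
//!     // The remaining layers each perform one category of safety check.
//!     let cb = ContextCheckLayer::new(cb);
//!     let cb = QueueTyCheckLayer::new(cb);
//!     DeviceCheckLayer::new(cb)
//! };
//! // Commands are then added with the `AddCommand` trait, and the builder is turned into a
//! // command buffer with `CommandBufferBuild::build()`.
//! ```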
pub use self::autobarriers::AutobarriersCommandBuffer;
pub use self::unsynced::Flags;
pub use self::unsynced::Kind;
pub use self::unsynced::UnsyncedCommandBuffer;
pub use self::auto_barriers::AutoPipelineBarriersLayer;
pub use self::buffered::BufferedCommandsListLayer;
pub use self::buffered::BufferedCommandsListLayerCommands;
pub use self::commands_list::CommandsList;
pub use self::commands_list::CommandsListLayer;
pub use self::context_check::ContextCheckLayer;
pub use self::device_check::DeviceCheckLayer;
pub use self::queue_ty_check::QueueTyCheckLayer;
pub use self::state_cache::StateCacheLayer;
pub use self::sys::Kind;
pub use self::sys::Flags;
pub use self::sys::UnsafeCommandBufferBuilder;
pub use self::sys::UnsafeCommandBuffer;
pub use self::traits::AddCommand;
pub use self::traits::CommandBufferBuild;
mod autobarriers;
mod unsynced;
pub trait CommandsListBuildPrimaryPool<L, P> {
fn build_primary_with_pool(pool: P, list: L) -> Result<Self, OomError>
where Self: Sized;
}
pub trait CommandsListBuildPrimary<L> {
fn build_primary(device: &Arc<Device>, queue_family: QueueFamily, list: L)
-> Result<Self, OomError>
where Self: Sized;
}
impl<T, L> CommandsListBuildPrimary<L> for T
where T: CommandsListBuildPrimaryPool<L, Arc<StandardCommandPool>>
{
fn build_primary(device: &Arc<Device>, queue_family: QueueFamily, list: L)
-> Result<Self, OomError>
{
let pool = Device::standard_command_pool(device, queue_family);
CommandsListBuildPrimaryPool::build_primary_with_pool(pool, list)
}
}
mod auto_barriers;
mod buffered;
mod commands_list;
mod device_check;
mod context_check;
mod queue_ty_check;
mod state_cache;
mod sys;
mod traits;

View File

@ -0,0 +1,85 @@
// Copyright (c) 2016 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use command_buffer::cb::AddCommand;
use command_buffer::cb::CommandBufferBuild;
use command_buffer::cmd;
/// Layer around a command buffer builder that checks whether the commands added to it match the
/// type of the queue family of the underlying builder.
///
/// Commands that perform graphical or compute operations can only be executed on queue families
/// that support graphical or compute operations. This is what this layer verifies.
pub struct QueueTyCheckLayer<I> {
inner: I,
}
impl<I> QueueTyCheckLayer<I> {
/// Builds a new `QueueTyCheckLayer`.
#[inline]
pub fn new(inner: I) -> QueueTyCheckLayer<I> {
QueueTyCheckLayer {
inner: inner,
}
}
/// Destroys the layer and returns the underlying command buffer.
#[inline]
pub fn into_inner(self) -> I {
self.inner
}
}
unsafe impl<I, O> CommandBufferBuild for QueueTyCheckLayer<I>
where I: CommandBufferBuild<Out = O>
{
type Out = O;
#[inline]
fn build(self) -> O {
self.inner.build()
}
}
// TODO: actually implement
macro_rules! pass_through {
(($($param:ident),*), $cmd:ty) => {
unsafe impl<'a, I, O $(, $param)*> AddCommand<$cmd> for QueueTyCheckLayer<I>
where I: AddCommand<$cmd, Out = O>
{
type Out = QueueTyCheckLayer<O>;
#[inline]
fn add(self, command: $cmd) -> Self::Out {
QueueTyCheckLayer {
inner: self.inner.add(command),
}
}
}
}
}
pass_through!((Rp, F), cmd::CmdBeginRenderPass<Rp, F>);
pass_through!((S, Pl), cmd::CmdBindDescriptorSets<S, Pl>);
pass_through!((B), cmd::CmdBindIndexBuffer<B>);
pass_through!((Pl), cmd::CmdBindPipeline<Pl>);
pass_through!((V), cmd::CmdBindVertexBuffers<V>);
pass_through!((), cmd::CmdClearAttachments);
pass_through!((S, D), cmd::CmdCopyBuffer<S, D>);
pass_through!((), cmd::CmdDrawIndexedRaw);
pass_through!((B), cmd::CmdDrawIndirectRaw<B>);
pass_through!((), cmd::CmdDrawRaw);
pass_through!((), cmd::CmdEndRenderPass);
pass_through!((C), cmd::CmdExecuteCommands<C>);
pass_through!((B), cmd::CmdFillBuffer<B>);
pass_through!((), cmd::CmdNextSubpass);
pass_through!((Pc, Pl), cmd::CmdPushConstants<Pc, Pl>);
pass_through!((), cmd::CmdSetState);
pass_through!((B, D), cmd::CmdUpdateBuffer<'a, B, D>);

View File

@ -0,0 +1,197 @@
// Copyright (c) 2016 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use command_buffer::cb::AddCommand;
use command_buffer::cb::CommandBufferBuild;
use command_buffer::cmd;
use command_buffer::DynamicState;
use vk;
/// Layer around a command buffer builder that caches the current state of the command buffer and
/// avoids redundant state changes.
///
/// For example if you add a command that sets the current vertex buffer, then later another
/// command that sets the current vertex buffer to the same value, then the second one will be
/// discarded by this layer.
///
/// As a general rule there's no reason not to use this layer unless you know that your commands
/// are already optimized in this regard.
///
/// # Safety
///
/// This layer expects that the commands passed to it all belong to the same device.
///
/// Since this layer can potentially optimize out some commands, a mismatch between devices could
/// go undetected even if it is checked in a lower layer.
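///
/// # Example
///
/// A rough sketch of the effect of this layer, assuming `inner` is any builder that accepts
/// `cmd::CmdSetState` commands and `set_line_width` is a hypothetical helper that builds a
/// `CmdSetState` setting only the line width:
///
/// ```ignore
/// let cb = StateCacheLayer::new(inner);
/// let cb = cb.add(set_line_width(2.0)); // forwarded to `inner` as-is
/// let cb = cb.add(set_line_width(2.0)); // the cached line width is already 2.0, so the
///                                       // command that reaches `inner` has it stripped out
/// ```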
pub struct StateCacheLayer<I> {
// The inner builder that will actually execute the stuff.
inner: I,
// The dynamic state to synchronize with `CmdSetState`.
dynamic_state: DynamicState,
// The compute pipeline currently bound. 0 if nothing bound.
compute_pipeline: vk::Pipeline,
// The graphics pipeline currently bound. 0 if nothing bound.
graphics_pipeline: vk::Pipeline,
}
impl<I> StateCacheLayer<I> {
/// Builds a new `StateCacheLayer`.
///
/// It is safe to start caching at any point of the construction of a command buffer.
#[inline]
pub fn new(inner: I) -> StateCacheLayer<I> {
StateCacheLayer {
inner: inner,
dynamic_state: DynamicState::none(),
compute_pipeline: 0,
graphics_pipeline: 0,
}
}
/// Destroys the layer and returns the underlying command buffer.
#[inline]
pub fn into_inner(self) -> I {
self.inner
}
}
// TODO: issue with trait; Arc<GraphicsPipeline> doesn't implement VulkanObject
/*unsafe impl<Pl, I, O> AddCommand<cmd::CmdBindPipeline<Pl>> for StateCacheLayer<I>
where I: AddCommand<cmd::CmdBindPipeline<Pl>, Out = O>,
Pl: VulkanObject<Object = vk::Pipeline>, // TODO: better trait?
{
type Out = StateCacheLayer<O>;
#[inline]
fn add(mut self, command: cmd::CmdBindPipeline<Pl>) -> Self::Out {
let new_command = unsafe {
if command.is_graphics() {
if command.pipeline().internal_object() == self.graphics_pipeline {
command.disabled()
} else {
self.graphics_pipeline = command.pipeline().internal_object();
command
}
} else {
if command.pipeline().internal_object() == self.compute_pipeline {
command.disabled()
} else {
self.compute_pipeline = command.pipeline().internal_object();
command
}
}
};
StateCacheLayer {
inner: self.inner.add(new_command),
dynamic_state: DynamicState::none(),
graphics_pipeline: self.graphics_pipeline,
compute_pipeline: self.compute_pipeline,
}
}
}*/
unsafe impl<Cb, I, O> AddCommand<cmd::CmdExecuteCommands<Cb>> for StateCacheLayer<I>
where I: AddCommand<cmd::CmdExecuteCommands<Cb>, Out = O>
{
type Out = StateCacheLayer<O>;
#[inline]
fn add(self, command: cmd::CmdExecuteCommands<Cb>) -> Self::Out {
// After a secondary command buffer is added, all states are reset to the "unknown" state.
let new_inner = self.inner.add(command);
StateCacheLayer {
inner: new_inner,
dynamic_state: DynamicState::none(),
compute_pipeline: 0,
graphics_pipeline: 0,
}
}
}
unsafe impl<I, O> AddCommand<cmd::CmdSetState> for StateCacheLayer<I>
where I: AddCommand<cmd::CmdSetState, Out = O>
{
type Out = StateCacheLayer<O>;
#[inline]
fn add(mut self, command: cmd::CmdSetState) -> Self::Out {
// We need to synchronize `self.dynamic_state` with the state in `command`.
// While doing so, we tweak `command` to erase the states that are the same as what's
// already in `self.dynamic_state`.
let mut command_state = command.state().clone();
// Handle line width.
if let Some(new_val) = command_state.line_width {
if self.dynamic_state.line_width == Some(new_val) {
command_state.line_width = None;
} else {
self.dynamic_state.line_width = Some(new_val);
}
}
// TODO: missing implementations
StateCacheLayer {
inner: self.inner.add(cmd::CmdSetState::new(command.device().clone(), command_state)),
dynamic_state: self.dynamic_state,
graphics_pipeline: self.graphics_pipeline,
compute_pipeline: self.compute_pipeline,
}
}
}
unsafe impl<I, O> CommandBufferBuild for StateCacheLayer<I>
where I: CommandBufferBuild<Out = O>
{
type Out = O;
#[inline]
fn build(self) -> O {
self.inner.build()
}
}
macro_rules! pass_through {
(($($param:ident),*), $cmd:ty) => {
unsafe impl<'a, I, O $(, $param)*> AddCommand<$cmd> for StateCacheLayer<I>
where I: AddCommand<$cmd, Out = O>
{
type Out = StateCacheLayer<O>;
#[inline]
fn add(self, command: $cmd) -> Self::Out {
StateCacheLayer {
inner: self.inner.add(command),
dynamic_state: self.dynamic_state,
graphics_pipeline: self.graphics_pipeline,
compute_pipeline: self.compute_pipeline,
}
}
}
}
}
pass_through!((Rp, F), cmd::CmdBeginRenderPass<Rp, F>);
pass_through!((S, Pl), cmd::CmdBindDescriptorSets<S, Pl>);
pass_through!((B), cmd::CmdBindIndexBuffer<B>);
pass_through!((Pl), cmd::CmdBindPipeline<Pl>);
pass_through!((V), cmd::CmdBindVertexBuffers<V>);
pass_through!((), cmd::CmdClearAttachments);
pass_through!((S, D), cmd::CmdCopyBuffer<S, D>);
pass_through!((), cmd::CmdDrawIndexedRaw);
pass_through!((B), cmd::CmdDrawIndirectRaw<B>);
pass_through!((), cmd::CmdDrawRaw);
pass_through!((), cmd::CmdEndRenderPass);
pass_through!((B), cmd::CmdFillBuffer<B>);
pass_through!((), cmd::CmdNextSubpass);
pass_through!((Pc, Pl), cmd::CmdPushConstants<Pc, Pl>);
pass_through!((B, D), cmd::CmdUpdateBuffer<'a, B, D>);

View File

@ -7,31 +7,24 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use std::marker::PhantomData;
use std::error::Error;
use std::ptr;
use std::sync::atomic::AtomicBool;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
use buffer::Buffer;
use command_buffer::cb::CommandBufferBuild;
use command_buffer::pool::AllocatedCommandBuffer;
use command_buffer::pool::CommandPool;
use command_buffer::CommandsList;
use command_buffer::CommandsListSink;
use command_buffer::CommandsListSinkCaller;
use command_buffer::DynamicState;
use command_buffer::RawCommandBufferPrototype;
use command_buffer::Submit;
use command_buffer::SubmitBuilder;
use device::Device;
use device::Queue;
use framebuffer::EmptySinglePassRenderPassDesc;
use framebuffer::Framebuffer;
use framebuffer::FramebufferRef;
use framebuffer::RenderPass;
use framebuffer::RenderPassRef;
use framebuffer::Framebuffer;
use framebuffer::Subpass;
use framebuffer::FramebufferRef;
use image::Layout;
use image::Image;
use sync::AccessFlagBits;
use sync::PipelineStages;
use OomError;
use VulkanObject;
use VulkanPointers;
@ -40,7 +33,7 @@ use vk;
/// Determines the kind of command buffer that we want to create.
#[derive(Debug, Clone)]
pub enum Kind<'a, R, F: 'a> {
pub enum Kind<R, F> {
/// A primary command buffer can execute all commands and can call secondary command buffers.
Primary,
@ -53,20 +46,21 @@ pub enum Kind<'a, R, F: 'a> {
SecondaryRenderPass {
/// Which subpass this secondary command buffer can be called from.
subpass: Subpass<R>,
/// The framebuffer object that will be used when calling the command buffer.
/// This parameter is optional and is an optimization hint for the implementation.
framebuffer: Option<&'a F>,
framebuffer: Option<F>,
},
}
impl<'a> Kind<'a, RenderPass<EmptySinglePassRenderPassDesc>, Framebuffer<RenderPass<EmptySinglePassRenderPassDesc>, ()>> {
impl Kind<RenderPass<EmptySinglePassRenderPassDesc>, Framebuffer<RenderPass<EmptySinglePassRenderPassDesc>, ()>> {
/// Equivalent to `Kind::Primary`.
///
/// > **Note**: If you use `let kind = Kind::Primary;` in your code, you will probably get a
/// > compilation error because the Rust compiler cannot determine the template parameters
/// > of `Kind`. To solve that problem in an easy way you can use this function instead.
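///
/// A minimal illustrative sketch:
///
/// ```ignore
/// // let kind = Kind::Primary;   // may fail: the compiler cannot infer `R` and `F`
/// let kind = Kind::primary();    // `R` and `F` are fixed to placeholder types
/// ```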
#[inline]
pub fn primary() -> Kind<'a, RenderPass<EmptySinglePassRenderPassDesc>, Framebuffer<RenderPass<EmptySinglePassRenderPassDesc>, ()>> {
pub fn primary() -> Kind<RenderPass<EmptySinglePassRenderPassDesc>, Framebuffer<RenderPass<EmptySinglePassRenderPassDesc>, ()>> {
Kind::Primary
}
}
@ -87,34 +81,38 @@ pub enum Flags {
OneTimeSubmit,
}
pub struct UnsyncedCommandBuffer<L, P> where P: CommandPool {
// The Vulkan command buffer.
pub struct UnsafeCommandBufferBuilder<P> where P: CommandPool {
// The Vulkan command buffer. Will be 0 if `build()` has been called.
cmd: vk::CommandBuffer,
// Device that owns the command buffer.
device: Arc<Device>,
// Pool that owns the command buffer.
pool: P::Finished,
pool: Option<P>,
// Flags that were used at creation.
flags: Flags,
// True if the command buffer has already been submitted once. Only relevant if `flags` is
// `OneTimeSubmit`.
already_submitted: AtomicBool,
// True if we are a secondary command buffer.
secondary_cb: bool,
// The commands list. Holds resources of the resources list alive.
commands_list: L,
}
impl<L, P> UnsyncedCommandBuffer<L, P> where L: CommandsList, P: CommandPool {
impl<P> UnsafeCommandBufferBuilder<P> where P: CommandPool {
/// Creates a new builder.
pub unsafe fn new<R, F>(list: L, pool: P, kind: Kind<R, F>, flags: Flags)
-> Result<UnsyncedCommandBuffer<L, P>, OomError>
///
/// # Safety
///
/// Creating and destroying an unsafe command buffer is not unsafe per se, but the commands
/// that you add to it are unchecked and do not have any synchronization.
///
/// In other words, it is your job to make sure that the commands you add are valid and that
/// they do not introduce any race condition.
///
/// > **Note**: Some checks are still made with `debug_assert!`. Do not expect to be able to
/// > submit invalid commands.
pub unsafe fn new<R, F>(pool: P, kind: Kind<R, F>, flags: Flags)
-> Result<UnsafeCommandBufferBuilder<P>, OomError>
where R: RenderPassRef, F: FramebufferRef
{
let secondary = match kind {
@ -124,7 +122,7 @@ impl<L, P> UnsyncedCommandBuffer<L, P> where L: CommandsList, P: CommandPool {
let cmd = try!(pool.alloc(secondary, 1)).next().unwrap();
match UnsyncedCommandBuffer::already_allocated(list, pool, cmd, kind, flags) {
match UnsafeCommandBufferBuilder::already_allocated(pool, cmd, kind, flags) {
Ok(cmd) => Ok(cmd),
Err(err) => {
// FIXME: uncomment this and solve the fact that `pool` has been moved
@ -138,12 +136,14 @@ impl<L, P> UnsyncedCommandBuffer<L, P> where L: CommandsList, P: CommandPool {
///
/// # Safety
///
/// - The allocated command buffer must belong to the pool and must not be used anywhere else
/// in the code for the duration of this command buffer.
/// See also `new`.
///
pub unsafe fn already_allocated<R, F>(list: L, pool: P, cmd: AllocatedCommandBuffer,
/// The allocated command buffer must belong to the pool and must not be used anywhere else
/// in the code for the duration of this command buffer.
///
pub unsafe fn already_allocated<R, F>(pool: P, cmd: AllocatedCommandBuffer,
kind: Kind<R, F>, flags: Flags)
-> Result<UnsyncedCommandBuffer<L, P>, OomError>
-> Result<UnsafeCommandBufferBuilder<P>, OomError>
where R: RenderPassRef, F: FramebufferRef
{
let device = pool.device().clone();
@ -201,33 +201,15 @@ impl<L, P> UnsyncedCommandBuffer<L, P> where L: CommandsList, P: CommandPool {
try!(check_errors(vk.BeginCommandBuffer(cmd, &infos)));
{
let mut builder = RawCommandBufferPrototype {
device: device.clone(),
command_buffer: Some(cmd),
current_state: DynamicState::none(),
bound_graphics_pipeline: 0,
bound_compute_pipeline: 0,
bound_index_buffer: (0, 0, 0),
marker: PhantomData,
};
list.append(&mut Sink(&mut builder, &device));
};
try!(check_errors(vk.EndCommandBuffer(cmd)));
Ok(UnsyncedCommandBuffer {
Ok(UnsafeCommandBufferBuilder {
device: device.clone(),
pool: pool.finish(),
pool: Some(pool),
cmd: cmd,
flags: flags,
secondary_cb: match kind {
Kind::Primary => false,
Kind::Secondary | Kind::SecondaryRenderPass { .. } => true,
},
already_submitted: AtomicBool::new(false),
commands_list: list,
})
}
@ -236,22 +218,9 @@ impl<L, P> UnsyncedCommandBuffer<L, P> where L: CommandsList, P: CommandPool {
pub fn device(&self) -> &Arc<Device> {
&self.device
}
/// Returns the list of commands of this command buffer.
///
/// > **Note**: It is important that this getter is not used to modify the list of commands
/// > with interior mutability so that `append` returns something different. Doing so is
/// > unsafe. However this function is not unsafe, because this corner case is already covered
/// > by the unsafety of the `CommandsList` trait.
#[inline]
pub fn commands_list(&self) -> &L {
&self.commands_list
}
}
unsafe impl<L, P> VulkanObject for UnsyncedCommandBuffer<L, P>
where P: CommandPool
{
unsafe impl<P> VulkanObject for UnsafeCommandBufferBuilder<P> where P: CommandPool {
type Object = vk::CommandBuffer;
#[inline]
@ -260,34 +229,81 @@ unsafe impl<L, P> VulkanObject for UnsyncedCommandBuffer<L, P>
}
}
// Helper object for UnsyncedCommandBuffer. Implementation detail.
struct Sink<'a>(&'a mut RawCommandBufferPrototype<'a>, &'a Arc<Device>);
impl<'a> CommandsListSink<'a> for Sink<'a> {
impl<P> Drop for UnsafeCommandBufferBuilder<P> where P: CommandPool {
#[inline]
fn device(&self) -> &Arc<Device> {
self.1
}
fn drop(&mut self) {
//unsafe {
if self.cmd == 0 {
return;
}
#[inline]
fn add_command(&mut self, f: Box<CommandsListSinkCaller<'a> + 'a>) {
f.call(self.0)
}
#[inline]
fn add_buffer_transition(&mut self, _: &Buffer, _: usize, _: usize, _: bool,
_: PipelineStages, _: AccessFlagBits)
{
}
#[inline]
fn add_image_transition(&mut self, _: &Image, _: u32, _: u32, _: u32, _: u32,
_: bool, _: Layout, _: PipelineStages, _: AccessFlagBits)
{
}
#[inline]
fn add_image_transition_notification(&mut self, _: &Image, _: u32, _: u32, _: u32,
_: u32, _: Layout, _: PipelineStages, _: AccessFlagBits)
{
// FIXME: vk.FreeCommandBuffers()
//}
}
}
unsafe impl<P> CommandBufferBuild for UnsafeCommandBufferBuilder<P>
where P: CommandPool
{
type Out = UnsafeCommandBuffer<P>;
#[inline]
fn build(mut self) -> Self::Out {
unsafe {
debug_assert_ne!(self.cmd, 0);
let cmd = self.cmd;
let vk = self.device.pointers();
check_errors(vk.EndCommandBuffer(cmd)).unwrap(); // TODO: handle error
self.cmd = 0; // Prevents the `Drop` impl of the builder from destroying the cb.
UnsafeCommandBuffer {
cmd: cmd,
device: self.device.clone(),
pool: self.pool.take().unwrap().finish(),
flags: self.flags,
already_submitted: AtomicBool::new(false),
}
}
}
}
pub struct UnsafeCommandBuffer<P> where P: CommandPool {
// The Vulkan command buffer.
cmd: vk::CommandBuffer,
// Device that owns the command buffer.
device: Arc<Device>,
// Pool that owns the command buffer.
pool: P::Finished,
// Flags that were used at creation.
flags: Flags,
// True if the command buffer has already been submitted once. Only relevant if `flags` is
// `OneTimeSubmit`.
already_submitted: AtomicBool,
}
unsafe impl<P> Submit for UnsafeCommandBuffer<P> where P: CommandPool {
#[inline]
fn device(&self) -> &Arc<Device> {
&self.device
}
#[inline]
unsafe fn append_submission<'a>(&'a self, base: SubmitBuilder<'a>, _queue: &Arc<Queue>)
-> Result<SubmitBuilder<'a>, Box<Error>>
{
Ok(base.add_command_buffer(self))
}
}
unsafe impl<P> VulkanObject for UnsafeCommandBuffer<P> where P: CommandPool {
type Object = vk::CommandBuffer;
#[inline]
fn internal_object(&self) -> vk::CommandBuffer {
self.cmd
}
}

View File

@ -0,0 +1,35 @@
// Copyright (c) 2016 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
/// Adds a command to a command buffer builder.
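///
/// A rough usage sketch, assuming `builder` is one of the builder layers from the `cb` module and
/// `cmd_bind` and `cmd_draw` are command objects (all placeholder names):
///
/// ```ignore
/// // Each call consumes the builder and returns a new builder whose type also records the command.
/// let builder = builder.add(cmd_bind).add(cmd_draw);
/// ```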
pub unsafe trait AddCommand<C> {
/// The new command buffer builder type.
type Out;
/// Adds the command. This takes ownership of the builder and returns a new builder with the
/// command appended at the end of it.
fn add(self, cmd: C) -> Self::Out;
}
/// Turns a command buffer builder into a real command buffer.
pub unsafe trait CommandBufferBuild {
/// The type of the built command buffer.
type Out;
/// Builds the command buffer.
fn build(self) -> Self::Out;
}
/*trait Builder {
fn cmd1<B>(self, b: B) -> <Self as AddCommand<Cmd1<B>>>::Out
where Self: Sized + AddCommand<Cmd1<B>>
{
self.add(Cmd1(b))
}
}*/

View File

@ -12,12 +12,11 @@ use std::ops::Range;
use std::ptr;
use smallvec::SmallVec;
use command_buffer::RawCommandBufferPrototype;
use command_buffer::CommandsList;
use command_buffer::CommandsListSink;
use command_buffer::cb::AddCommand;
use command_buffer::cb::UnsafeCommandBufferBuilder;
use command_buffer::pool::CommandPool;
use device::Device;
use format::ClearValue;
use framebuffer::AttachmentsList;
use framebuffer::FramebufferRef;
use framebuffer::RenderPass;
use framebuffer::RenderPassClearValues;
@ -27,9 +26,7 @@ use VulkanPointers;
use vk;
/// Wraps around a commands list and adds to the end of it a command that enters a render pass.
pub struct CmdBeginRenderPass<L, Rp, F> where L: CommandsList {
// Parent commands list.
previous: L,
pub struct CmdBeginRenderPass<Rp, F> {
// True if only secondary command buffers can be added.
secondary: bool,
// The draw area.
@ -48,13 +45,13 @@ pub struct CmdBeginRenderPass<L, Rp, F> where L: CommandsList {
framebuffer: F,
}
impl<L, F> CmdBeginRenderPass<L, Arc<RenderPass>, F>
where L: CommandsList, F: FramebufferRef
impl<F> CmdBeginRenderPass<Arc<RenderPass>, F>
where F: FramebufferRef
{
/// See the documentation of the `begin_render_pass` method.
// TODO: allow setting more parameters
pub fn new<C>(previous: L, framebuffer: F, secondary: bool, clear_values: C)
-> CmdBeginRenderPass<L, Arc<RenderPass>, F>
pub fn new<C>(framebuffer: F, secondary: bool, clear_values: C)
-> CmdBeginRenderPass<Arc<RenderPass>, F>
where <<F as FramebufferRef>::RenderPass as RenderPassRef>::Desc: RenderPassClearValues<C>
{
let raw_render_pass = framebuffer.inner().render_pass().inner().internal_object();
@ -100,7 +97,6 @@ impl<L, F> CmdBeginRenderPass<L, Arc<RenderPass>, F>
0 .. framebuffer.inner().dimensions()[1]];
CmdBeginRenderPass {
previous: previous,
secondary: secondary,
rect: rect,
clear_values: clear_values,
@ -113,49 +109,42 @@ impl<L, F> CmdBeginRenderPass<L, Arc<RenderPass>, F>
}
}
unsafe impl<L, Rp, F> CommandsList for CmdBeginRenderPass<L, Rp, F>
where L: CommandsList, F: FramebufferRef, F::Attachments: AttachmentsList
unsafe impl<'a, P, Rp, F> AddCommand<&'a CmdBeginRenderPass<Rp, F>> for UnsafeCommandBufferBuilder<P>
where P: CommandPool
{
type Out = UnsafeCommandBufferBuilder<P>;
#[inline]
fn append<'a>(&'a self, builder: &mut CommandsListSink<'a>) {
self.previous.append(builder);
fn add(self, command: &'a CmdBeginRenderPass<Rp, F>) -> Self::Out {
unsafe {
let vk = self.device().pointers();
let cmd = self.internal_object();
assert_eq!(self.device.internal_object(), builder.device().internal_object());
debug_assert!(self.rect[0].start <= self.rect[0].end);
debug_assert!(self.rect[1].start <= self.rect[1].end);
self.framebuffer.inner().add_transition(builder);
builder.add_command(Box::new(move |raw: &mut RawCommandBufferPrototype| {
unsafe {
let vk = raw.device.pointers();
let cmd = raw.command_buffer.clone().take().unwrap();
let begin = vk::RenderPassBeginInfo {
sType: vk::STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
pNext: ptr::null(),
renderPass: self.raw_render_pass,
framebuffer: self.raw_framebuffer,
renderArea: vk::Rect2D {
offset: vk::Offset2D {
x: self.rect[0].start as i32,
y: self.rect[1].start as i32,
},
extent: vk::Extent2D {
width: self.rect[0].end - self.rect[0].start,
height: self.rect[1].end - self.rect[1].start,
},
let begin = vk::RenderPassBeginInfo {
sType: vk::STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
pNext: ptr::null(),
renderPass: command.raw_render_pass,
framebuffer: command.raw_framebuffer,
renderArea: vk::Rect2D {
offset: vk::Offset2D {
x: command.rect[0].start as i32,
y: command.rect[1].start as i32,
},
clearValueCount: self.clear_values.len() as u32,
pClearValues: self.clear_values.as_ptr(),
};
extent: vk::Extent2D {
width: command.rect[0].end - command.rect[0].start,
height: command.rect[1].end - command.rect[1].start,
},
},
clearValueCount: command.clear_values.len() as u32,
pClearValues: command.clear_values.as_ptr(),
};
let contents = if self.secondary { vk::SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS }
else { vk::SUBPASS_CONTENTS_INLINE };
vk.CmdBeginRenderPass(cmd, &begin, contents);
}
}));
let contents = if command.secondary { vk::SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS }
else { vk::SUBPASS_CONTENTS_INLINE };
vk.CmdBeginRenderPass(cmd, &begin, contents);
}
self
}
}

View File

@ -13,10 +13,10 @@ use std::ptr;
use std::sync::Arc;
use smallvec::SmallVec;
use command_buffer::RawCommandBufferPrototype;
use command_buffer::CommandsList;
use command_buffer::CommandsListSink;
use descriptor::descriptor_set::TrackedDescriptorSetsCollection;
use command_buffer::cb::AddCommand;
use command_buffer::cb::UnsafeCommandBufferBuilder;
use command_buffer::pool::CommandPool;
use descriptor::descriptor_set::DescriptorSetsCollection;
use descriptor::pipeline_layout::PipelineLayoutRef;
use descriptor::pipeline_layout::PipelineLayoutSetsCompatible;
use device::Device;
@ -25,9 +25,7 @@ use VulkanPointers;
use vk;
/// Wraps around a commands list and adds at the end of it a command that binds descriptor sets.
pub struct CmdBindDescriptorSets<L, S, P> where L: CommandsList {
// Parent commands list.
previous: L,
pub struct CmdBindDescriptorSets<S, P> {
// The raw Vulkan enum representing the kind of pipeline.
pipeline_ty: vk::PipelineBindPoint,
// The raw pipeline object to bind.
@ -44,8 +42,8 @@ pub struct CmdBindDescriptorSets<L, S, P> where L: CommandsList {
pipeline_layout: P,
}
impl<L, S, P> CmdBindDescriptorSets<L, S, P>
where L: CommandsList, S: TrackedDescriptorSetsCollection, P: PipelineLayoutRef
impl<S, P> CmdBindDescriptorSets<S, P>
where P: PipelineLayoutRef, S: DescriptorSetsCollection
{
/// Builds the command.
///
@ -54,8 +52,8 @@ impl<L, S, P> CmdBindDescriptorSets<L, S, P>
///
/// Returns an error if the sets are not compatible with the pipeline layout.
#[inline]
pub fn new(previous: L, graphics: bool, pipeline_layout: P, sets: S)
-> Result<CmdBindDescriptorSets<L, S, P>, CmdBindDescriptorSetsError>
pub fn new(graphics: bool, pipeline_layout: P, sets: S)
-> Result<CmdBindDescriptorSets<S, P>, CmdBindDescriptorSetsError>
{
if !PipelineLayoutSetsCompatible::is_compatible(pipeline_layout.desc(), &sets) {
return Err(CmdBindDescriptorSetsError::IncompatibleSets);
@ -85,7 +83,6 @@ impl<L, S, P> CmdBindDescriptorSets<L, S, P>
};
Ok(CmdBindDescriptorSets {
previous: previous,
raw_pipeline_layout: raw_pipeline_layout,
raw_sets: raw_sets,
pipeline_ty: if graphics { vk::PIPELINE_BIND_POINT_GRAPHICS }
@ -97,29 +94,25 @@ impl<L, S, P> CmdBindDescriptorSets<L, S, P>
}
}
unsafe impl<L, S, P> CommandsList for CmdBindDescriptorSets<L, S, P>
where L: CommandsList, S: TrackedDescriptorSetsCollection
unsafe impl<'a, P, Pl, S> AddCommand<&'a CmdBindDescriptorSets<S, Pl>> for UnsafeCommandBufferBuilder<P>
where P: CommandPool
{
type Out = UnsafeCommandBufferBuilder<P>;
#[inline]
fn append<'a>(&'a self, builder: &mut CommandsListSink<'a>) {
self.previous.append(builder);
fn add(self, command: &'a CmdBindDescriptorSets<S, Pl>) -> Self::Out {
unsafe {
let vk = self.device().pointers();
let cmd = self.internal_object();
assert_eq!(self.device.internal_object(), builder.device().internal_object());
self.sets.add_transition(builder);
builder.add_command(Box::new(move |raw: &mut RawCommandBufferPrototype| {
unsafe {
let vk = raw.device.pointers();
let cmd = raw.command_buffer.clone().take().unwrap();
for &(first_set, ref sets) in self.raw_sets.iter() {
vk.CmdBindDescriptorSets(cmd, self.pipeline_ty, self.raw_pipeline_layout,
first_set, sets.len() as u32, sets.as_ptr(),
0, ptr::null()); // TODO: dynamic offset not supported
}
for &(first_set, ref sets) in command.raw_sets.iter() {
vk.CmdBindDescriptorSets(cmd, command.pipeline_ty, command.raw_pipeline_layout,
first_set, sets.len() as u32, sets.as_ptr(),
0, ptr::null()); // TODO: dynamic offset not supported
}
}));
}
self
}
}

View File

@ -11,21 +11,17 @@ use std::sync::Arc;
use buffer::Buffer;
use buffer::TypedBuffer;
use command_buffer::RawCommandBufferPrototype;
use command_buffer::CommandsList;
use command_buffer::CommandsListSink;
use command_buffer::cb::AddCommand;
use command_buffer::cb::UnsafeCommandBufferBuilder;
use command_buffer::pool::CommandPool;
use device::Device;
use pipeline::input_assembly::Index;
use sync::AccessFlagBits;
use sync::PipelineStages;
use VulkanObject;
use VulkanPointers;
use vk;
/// Wraps around a commands list and adds a command that binds an index buffer at the end of it.
pub struct CmdBindIndexBuffer<L, B> where L: CommandsList {
// Parent commands list.
previous: L,
pub struct CmdBindIndexBuffer<B> {
// Raw handle of the buffer to bind.
raw_buffer: vk::Buffer,
// Raw offset of the buffer to bind.
@ -38,14 +34,13 @@ pub struct CmdBindIndexBuffer<L, B> where L: CommandsList {
buffer: B,
}
impl<L, B, I> CmdBindIndexBuffer<L, B>
where L: CommandsList,
B: Buffer + TypedBuffer<Content = [I]>,
impl<B, I> CmdBindIndexBuffer<B>
where B: Buffer + TypedBuffer<Content = [I]>,
I: Index + 'static
{
/// Builds the command.
#[inline]
pub fn new(previous: L, buffer: B) -> CmdBindIndexBuffer<L, B> {
pub fn new(buffer: B) -> CmdBindIndexBuffer<B> {
let device;
let raw_buffer;
let offset;
@ -61,7 +56,6 @@ impl<L, B, I> CmdBindIndexBuffer<L, B>
}
CmdBindIndexBuffer {
previous: previous,
raw_buffer: raw_buffer,
offset: offset,
index_type: I::ty() as vk::IndexType,
@ -71,35 +65,19 @@ impl<L, B, I> CmdBindIndexBuffer<L, B>
}
}
unsafe impl<L, B> CommandsList for CmdBindIndexBuffer<L, B>
where L: CommandsList, B: Buffer
unsafe impl<'a, P, B> AddCommand<&'a CmdBindIndexBuffer<B>> for UnsafeCommandBufferBuilder<P>
where P: CommandPool
{
type Out = UnsafeCommandBufferBuilder<P>;
#[inline]
fn append<'a>(&'a self, builder: &mut CommandsListSink<'a>) {
self.previous.append(builder);
assert_eq!(self.device.internal_object(), builder.device().internal_object());
{
let stages = PipelineStages { vertex_input: true, .. PipelineStages::none() };
let access = AccessFlagBits { index_read: true, .. AccessFlagBits::none() };
builder.add_buffer_transition(&self.buffer, 0, self.buffer.size(), false,
stages, access);
fn add(self, command: &'a CmdBindIndexBuffer<B>) -> Self::Out {
unsafe {
let vk = self.device().pointers();
let cmd = self.internal_object();
vk.CmdBindIndexBuffer(cmd, command.raw_buffer, command.offset, command.index_type);
}
builder.add_command(Box::new(move |raw: &mut RawCommandBufferPrototype| {
let params = (self.raw_buffer, self.offset, self.index_type);
if raw.bound_index_buffer == params {
return;
}
raw.bound_index_buffer = params;
unsafe {
let vk = raw.device.pointers();
let cmd = raw.command_buffer.clone().take().unwrap();
vk.CmdBindIndexBuffer(cmd, self.raw_buffer, self.offset, self.index_type);
}
}));
self
}
}

View File

@ -9,9 +9,9 @@
use std::sync::Arc;
use command_buffer::RawCommandBufferPrototype;
use command_buffer::CommandsList;
use command_buffer::CommandsListSink;
use command_buffer::cb::AddCommand;
use command_buffer::cb::UnsafeCommandBufferBuilder;
use command_buffer::pool::CommandPool;
use device::Device;
use pipeline::ComputePipeline;
use pipeline::GraphicsPipeline;
@ -25,9 +25,7 @@ use vk;
/// > after it is executed. In other words, if the command is aware that the same pipeline is
/// > already bound, then it won't bind it again. This optimization is essential, as binding a
/// > pipeline has a non-negligible overhead.
pub struct CmdBindPipeline<L, P> where L: CommandsList {
// Parent commands list.
previous: L,
pub struct CmdBindPipeline<P> {
// The raw pipeline object to bind.
raw_pipeline: vk::Pipeline,
// The raw Vulkan enum representing the kind of pipeline.
@ -39,19 +37,18 @@ pub struct CmdBindPipeline<L, P> where L: CommandsList {
pipeline: P,
}
impl<L> CmdBindPipeline<L, ()> where L: CommandsList {
impl CmdBindPipeline<()> {
/// Builds a command that binds a compute pipeline to the compute pipeline bind point.
///
/// Use this command right before a compute dispatch.
#[inline]
pub fn bind_compute_pipeline<Pl>(previous: L, pipeline: Arc<ComputePipeline<Pl>>)
-> CmdBindPipeline<L, Arc<ComputePipeline<Pl>>>
pub fn bind_compute_pipeline<Pl>(pipeline: Arc<ComputePipeline<Pl>>)
-> CmdBindPipeline<Arc<ComputePipeline<Pl>>>
{
let raw_pipeline = pipeline.internal_object();
let device = pipeline.device().clone();
CmdBindPipeline {
previous: previous,
raw_pipeline: raw_pipeline,
pipeline_ty: vk::PIPELINE_BIND_POINT_COMPUTE,
device: device,
@ -63,14 +60,13 @@ impl<L> CmdBindPipeline<L, ()> where L: CommandsList {
///
/// Use this command right before a draw command.
#[inline]
pub fn bind_graphics_pipeline<V, Pl, R>(previous: L, pipeline: Arc<GraphicsPipeline<V, Pl, R>>)
-> CmdBindPipeline<L, Arc<GraphicsPipeline<V, Pl, R>>>
pub fn bind_graphics_pipeline<V, Pl, R>(pipeline: Arc<GraphicsPipeline<V, Pl, R>>)
-> CmdBindPipeline<Arc<GraphicsPipeline<V, Pl, R>>>
{
let raw_pipeline = pipeline.internal_object();
let device = pipeline.device().clone();
CmdBindPipeline {
previous: previous,
raw_pipeline: raw_pipeline,
pipeline_ty: vk::PIPELINE_BIND_POINT_GRAPHICS,
device: device,
@ -79,62 +75,54 @@ impl<L> CmdBindPipeline<L, ()> where L: CommandsList {
}
}
impl<L, P> CmdBindPipeline<L, P> where L: CommandsList {
impl<P> CmdBindPipeline<P> {
/// This disables the command but keeps it alive. All getters still return the same value, but
/// executing the command will not do anything.
#[inline]
pub fn disabled(mut self) -> CmdBindPipeline<P> {
self.raw_pipeline = 0;
self
}
/// Returns the device the pipeline is associated with.
#[inline]
pub fn device(&self) -> &Arc<Device> {
&self.device
}
}
unsafe impl<L, P> CommandsList for CmdBindPipeline<L, P> where L: CommandsList {
/// True if this is the graphics pipeline. False if the compute pipeline.
// TODO: should be an enum?
#[inline]
fn append<'a>(&'a self, builder: &mut CommandsListSink<'a>) {
self.previous.append(builder);
pub fn is_graphics(&self) -> bool {
self.pipeline_ty == vk::PIPELINE_BIND_POINT_GRAPHICS
}
assert_eq!(self.device.internal_object(), builder.device().internal_object());
builder.add_command(Box::new(move |raw: &mut RawCommandBufferPrototype| {
// Returning now if the pipeline object is already bound.
// Note that we need to perform this check after validating the device, otherwise the
// pipeline ID could match by mistake.
match self.pipeline_ty {
vk::PIPELINE_BIND_POINT_GRAPHICS => {
if raw.bound_graphics_pipeline == self.raw_pipeline {
return;
} else {
raw.bound_graphics_pipeline = self.raw_pipeline;
}
},
vk::PIPELINE_BIND_POINT_COMPUTE => {
if raw.bound_compute_pipeline == self.raw_pipeline {
return;
} else {
raw.bound_compute_pipeline = self.raw_pipeline;
}
},
_ => unreachable!()
}
// Binding for real.
unsafe {
let vk = raw.device.pointers();
let cmd = raw.command_buffer.clone().take().unwrap();
vk.CmdBindPipeline(cmd, self.pipeline_ty, self.raw_pipeline);
}
}));
/// Returns the pipeline object that will be bound.
///
/// # Safety
///
/// Must not be used to modify the pipeline.
#[inline]
pub unsafe fn pipeline(&self) -> &P {
&self.pipeline
}
}
// TODO:
/*unsafe impl<'a, L, B, D: ?Sized> CommandsListPossibleOutsideRenderPass
for CmdUnsyncedUpdate<'a, L, B, D>
where B: Buffer,
L: CommandsList,
D: Copy + 'static,
unsafe impl<'a, P, Pl> AddCommand<&'a CmdBindPipeline<Pl>> for UnsafeCommandBufferBuilder<P>
where P: CommandPool
{
type Out = UnsafeCommandBufferBuilder<P>;
#[inline]
fn is_outside_render_pass(&self) -> bool {
true
fn add(self, command: &'a CmdBindPipeline<Pl>) -> Self::Out {
if command.raw_pipeline != 0 {
unsafe {
let vk = self.device().pointers();
let cmd = self.internal_object();
vk.CmdBindPipeline(cmd, command.pipeline_ty, command.raw_pipeline);
}
}
self
}
}*/
}
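The blockquoted note above (skipping a `vkCmdBindPipeline` call when the same pipeline is already bound) is the job of the state-cache layer rather than of the raw builder shown here. As a rough illustration of the idea only, here is a small self-contained sketch in plain Rust; the `BindCache` type and the `u64` handles are made up for the example and are not vulkano API:

// Remember the last handle bound to each bind point and skip the call when it
// would be a no-op; this is what makes redundant binds cheap.
#[derive(Default)]
struct BindCache {
    graphics: u64,
    compute: u64,
}

impl BindCache {
    /// Returns true if the bind must actually be recorded.
    fn needs_bind(&mut self, graphics: bool, handle: u64) -> bool {
        let slot = if graphics { &mut self.graphics } else { &mut self.compute };
        if *slot == handle {
            false
        } else {
            *slot = handle;
            true
        }
    }
}

fn main() {
    let mut cache = BindCache::default();
    assert!(cache.needs_bind(true, 42));   // first bind: must be recorded
    assert!(!cache.needs_bind(true, 42));  // same pipeline again: skipped
    assert!(cache.needs_bind(false, 42));  // other bind point: recorded
}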

View File

@ -10,9 +10,9 @@
use std::sync::Arc;
use smallvec::SmallVec;
use command_buffer::RawCommandBufferPrototype;
use command_buffer::CommandsList;
use command_buffer::CommandsListSink;
use command_buffer::cb::AddCommand;
use command_buffer::cb::UnsafeCommandBufferBuilder;
use command_buffer::pool::CommandPool;
use device::Device;
use pipeline::vertex::Source;
use VulkanObject;
@ -20,9 +20,7 @@ use VulkanPointers;
use vk;
/// Wraps around a commands list and adds a command that binds vertex buffers at the end of it.
pub struct CmdBindVertexBuffers<L, B> where L: CommandsList {
// Parent commands list.
previous: L,
pub struct CmdBindVertexBuffers<B> {
// Raw handles of the buffers to bind.
raw_buffers: SmallVec<[vk::Buffer; 4]>,
// Raw offsets of the buffers to bind.
@ -33,10 +31,10 @@ pub struct CmdBindVertexBuffers<L, B> where L: CommandsList {
buffers: B,
}
impl<L, B> CmdBindVertexBuffers<L, B> where L: CommandsList {
impl<B> CmdBindVertexBuffers<B> {
/// Builds the command.
#[inline]
pub fn new<S>(previous: L, source_def: &S, buffers: B) -> CmdBindVertexBuffers<L, B>
pub fn new<S>(source_def: &S, buffers: B) -> CmdBindVertexBuffers<B>
where S: Source<B>
{
let (device, raw_buffers, offsets) = {
@ -53,7 +51,6 @@ impl<L, B> CmdBindVertexBuffers<L, B> where L: CommandsList {
// be any.
CmdBindVertexBuffers {
previous: previous,
raw_buffers: raw_buffers,
offsets: offsets,
device: device,
@ -62,24 +59,20 @@ impl<L, B> CmdBindVertexBuffers<L, B> where L: CommandsList {
}
}
unsafe impl<L, B> CommandsList for CmdBindVertexBuffers<L, B> where L: CommandsList {
unsafe impl<'a, P, B> AddCommand<&'a CmdBindVertexBuffers<B>> for UnsafeCommandBufferBuilder<P>
where P: CommandPool
{
type Out = UnsafeCommandBufferBuilder<P>;
#[inline]
fn append<'a>(&'a self, builder: &mut CommandsListSink<'a>) {
self.previous.append(builder);
fn add(self, command: &'a CmdBindVertexBuffers<B>) -> Self::Out {
unsafe {
let vk = self.device().pointers();
let cmd = self.internal_object();
vk.CmdBindVertexBuffers(cmd, 0, command.raw_buffers.len() as u32,
command.raw_buffers.as_ptr(), command.offsets.as_ptr());
}
assert_eq!(self.device.internal_object(), builder.device().internal_object());
debug_assert_eq!(self.raw_buffers.len(), self.offsets.len());
// FIXME: perform buffer transitions
builder.add_command(Box::new(move |raw: &mut RawCommandBufferPrototype| {
unsafe {
let vk = raw.device.pointers();
let cmd = raw.command_buffer.clone().take().unwrap();
// TODO: don't bind if not necessary
vk.CmdBindVertexBuffers(cmd, 0, self.raw_buffers.len() as u32,
self.raw_buffers.as_ptr(), self.offsets.as_ptr());
}
}));
self
}
}

View File

@ -11,9 +11,9 @@ use std::error;
use std::fmt;
use smallvec::SmallVec;
use command_buffer::RawCommandBufferPrototype;
use command_buffer::CommandsList;
use command_buffer::CommandsListSink;
use command_buffer::cb::AddCommand;
use command_buffer::cb::UnsafeCommandBufferBuilder;
use command_buffer::pool::CommandPool;
use format::FormatTy;
use image::Image;
use image::Layout;

View File

@ -9,17 +9,16 @@
use smallvec::SmallVec;
use command_buffer::RawCommandBufferPrototype;
use command_buffer::CommandsList;
use command_buffer::CommandsListSink;
use command_buffer::cb::AddCommand;
use command_buffer::cb::UnsafeCommandBufferBuilder;
use command_buffer::pool::CommandPool;
use VulkanObject;
use VulkanPointers;
use vk;
/// Wraps around a commands list and adds at the end of it a command that clears framebuffer
/// attachments.
pub struct CmdClearAttachments<L> {
// Parent commands list.
previous: L,
pub struct CmdClearAttachments {
// The attachments to clear.
attachments: SmallVec<[vk::ClearAttachment; 8]>,
// The rectangles to clear.
@ -28,31 +27,22 @@ pub struct CmdClearAttachments<L> {
// TODO: add constructor
unsafe impl<L> CommandsList for CmdClearAttachments<L>
where L: CommandsList
unsafe impl<'a, P> AddCommand<&'a CmdClearAttachments> for UnsafeCommandBufferBuilder<P>
where P: CommandPool
{
type Out = UnsafeCommandBufferBuilder<P>;
#[inline]
fn append<'a>(&'a self, builder: &mut CommandsListSink<'a>) {
self.previous.append(builder);
fn add(self, command: &'a CmdClearAttachments) -> Self::Out {
unsafe {
let vk = self.device().pointers();
let cmd = self.internal_object();
// According to the Vulkan specifications, the `vkCmdClearAttachments` command doesn't
// need any pipeline barrier.
// Since the thing that is cleared is an attachment of the framebuffer, there's no need to
// provide any additional form of synchronization.
if self.attachments.is_empty() || self.rects.is_empty() {
return;
vk.CmdClearAttachments(cmd, command.attachments.len() as u32,
command.attachments.as_ptr(), command.rects.len() as u32,
command.rects.as_ptr());
}
builder.add_command(Box::new(move |raw: &mut RawCommandBufferPrototype| {
unsafe {
let vk = raw.device.pointers();
let cmd = raw.command_buffer.clone().take().unwrap();
vk.CmdClearAttachments(cmd, self.attachments.len() as u32,
self.attachments.as_ptr(), self.rects.len() as u32,
self.rects.as_ptr());
}
}));
self
}
}

View File

@ -12,22 +12,16 @@ use std::error;
use std::fmt;
use buffer::Buffer;
use command_buffer::RawCommandBufferPrototype;
use command_buffer::CommandsList;
use command_buffer::CommandsListSink;
use sync::AccessFlagBits;
use sync::PipelineStages;
use command_buffer::cb::AddCommand;
use command_buffer::cb::UnsafeCommandBufferBuilder;
use command_buffer::pool::CommandPool;
use VulkanObject;
use VulkanPointers;
use vk;
/// Wraps around a commands list and adds at the end of it a command that copies from a buffer to
/// another.
pub struct CmdCopyBuffer<L, S, D>
where L: CommandsList, S: Buffer, D: Buffer
{
// Parent commands list.
previous: L,
pub struct CmdCopyBuffer<S, D> {
source: S,
source_raw: vk::Buffer,
destination: D,
@ -37,8 +31,8 @@ pub struct CmdCopyBuffer<L, S, D>
size: vk::DeviceSize,
}
impl<L, S, D> CmdCopyBuffer<L, S, D>
where L: CommandsList, S: Buffer, D: Buffer
impl<S, D> CmdCopyBuffer<S, D>
where S: Buffer, D: Buffer
{
/// Builds a new command.
///
@ -49,8 +43,8 @@ impl<L, S, D> CmdCopyBuffer<L, S, D>
///
/// - Panics if the source and destination were not created with the same device.
// FIXME: type safety
pub fn new(previous: L, source: S, destination: D)
-> Result<CmdCopyBuffer<L, S, D>, CmdCopyBufferError>
pub fn new(source: S, destination: D)
-> Result<CmdCopyBuffer<S, D>, CmdCopyBufferError>
{
// TODO:
//assert!(previous.is_outside_render_pass()); // TODO: error
@ -82,7 +76,6 @@ impl<L, S, D> CmdCopyBuffer<L, S, D>
}
Ok(CmdCopyBuffer {
previous: previous,
source: source,
source_raw: source_raw,
destination: destination,
@ -94,42 +87,29 @@ impl<L, S, D> CmdCopyBuffer<L, S, D>
}
}
unsafe impl<L, S, D> CommandsList for CmdCopyBuffer<L, S, D>
where L: CommandsList, S: Buffer, D: Buffer
unsafe impl<'a, P, S, D> AddCommand<&'a CmdCopyBuffer<S, D>> for UnsafeCommandBufferBuilder<P>
where S: Buffer,
D: Buffer,
P: CommandPool,
{
type Out = UnsafeCommandBufferBuilder<P>;
#[inline]
fn append<'a>(&'a self, builder: &mut CommandsListSink<'a>) {
self.previous.append(builder);
fn add(self, command: &'a CmdCopyBuffer<S, D>) -> Self::Out {
unsafe {
let vk = self.device().pointers();
let cmd = self.internal_object();
assert_eq!(self.source.inner().buffer.device().internal_object(),
builder.device().internal_object());
let region = vk::BufferCopy {
srcOffset: command.src_offset,
dstOffset: command.dst_offset,
size: command.size,
};
{
let stages = PipelineStages { transfer: true, .. PipelineStages::none() };
let access = AccessFlagBits { transfer_read: true, .. AccessFlagBits::none() };
builder.add_buffer_transition(&self.source, 0, self.size as usize, false,
stages, access);
}
{
let stages = PipelineStages { transfer: true, .. PipelineStages::none() };
let access = AccessFlagBits { transfer_write: true, .. AccessFlagBits::none() };
builder.add_buffer_transition(&self.destination, 0, self.size as usize, true,
stages, access);
vk.CmdCopyBuffer(cmd, command.source_raw, command.destination_raw, 1, &region);
}
builder.add_command(Box::new(move |raw: &mut RawCommandBufferPrototype| {
unsafe {
let vk = raw.device.pointers();
let cmd = raw.command_buffer.clone().take().unwrap();
let region = vk::BufferCopy {
srcOffset: self.src_offset,
dstOffset: self.dst_offset,
size: self.size,
};
vk.CmdCopyBuffer(cmd, self.source_raw, self.destination_raw, 1, &region);
}
}));
self
}
}

View File

@ -10,89 +10,74 @@
use std::sync::Arc;
use command_buffer::DynamicState;
use command_buffer::cb::AddCommand;
use command_buffer::cmd::CmdBindDescriptorSets;
use command_buffer::cmd::CmdBindPipeline;
use command_buffer::cmd::CmdBindVertexBuffers;
use command_buffer::cmd::CmdDrawRaw;
use command_buffer::cmd::CmdPushConstants;
use command_buffer::cmd::CmdSetState;
use command_buffer::RawCommandBufferPrototype;
use command_buffer::CommandsList;
use command_buffer::CommandsListSink;
use descriptor::descriptor_set::DescriptorSetsCollection;
use descriptor::PipelineLayoutRef;
use descriptor::descriptor_set::collection::TrackedDescriptorSetsCollection;
use pipeline::GraphicsPipeline;
use pipeline::vertex::Source;
use VulkanPointers;
/// Wraps around a commands list and adds a draw command at the end of it.
pub struct CmdDraw<L, V, Pv, Pl, Prp, S, Pc>
where L: CommandsList, Pl: PipelineLayoutRef, S: TrackedDescriptorSetsCollection
{
// Parent commands list.
previous: CmdBindVertexBuffers<
CmdPushConstants<
CmdBindDescriptorSets<
CmdSetState<
CmdBindPipeline<L, Arc<GraphicsPipeline<Pv, Pl, Prp>>>
>,
S, Arc<GraphicsPipeline<Pv, Pl, Prp>>
>,
Pc, Arc<GraphicsPipeline<Pv, Pl, Prp>>
>,
V
>,
// Parameters for vkCmdDraw.
vertex_count: u32,
instance_count: u32,
first_vertex: u32,
first_instance: u32,
pub struct CmdDraw<V, Pv, Pl, Prp, S, Pc> {
vertex_buffers: CmdBindVertexBuffers<V>,
push_constants: CmdPushConstants<Pc, Arc<GraphicsPipeline<Pv, Pl, Prp>>>,
descriptor_sets: CmdBindDescriptorSets<S, Arc<GraphicsPipeline<Pv, Pl, Prp>>>,
set_state: CmdSetState,
bind_pipeline: CmdBindPipeline<Arc<GraphicsPipeline<Pv, Pl, Prp>>>,
draw_raw: CmdDrawRaw,
}
impl<L, V, Pv, Pl, Prp, S, Pc> CmdDraw<L, V, Pv, Pl, Prp, S, Pc>
where L: CommandsList, Pl: PipelineLayoutRef, S: TrackedDescriptorSetsCollection
impl<V, Pv, Pl, Prp, S, Pc> CmdDraw<V, Pv, Pl, Prp, S, Pc>
where Pl: PipelineLayoutRef, S: DescriptorSetsCollection
{
/// See the documentation of the `draw` method.
pub fn new(previous: L, pipeline: Arc<GraphicsPipeline<Pv, Pl, Prp>>,
pub fn new(pipeline: Arc<GraphicsPipeline<Pv, Pl, Prp>>,
dynamic: DynamicState, vertices: V, sets: S, push_constants: Pc)
-> CmdDraw<L, V, Pv, Pl, Prp, S, Pc>
-> CmdDraw<V, Pv, Pl, Prp, S, Pc>
where Pv: Source<V>
{
let (_, vertex_count, instance_count) = pipeline.vertex_definition().decode(&vertices);
let previous = CmdBindPipeline::bind_graphics_pipeline(previous, pipeline.clone());
let device = previous.device().clone();
let previous = CmdSetState::new(previous, device, dynamic);
let previous = CmdBindDescriptorSets::new(previous, true, pipeline.clone(), sets).unwrap() /* TODO: error */;
let previous = CmdPushConstants::new(previous, pipeline.clone(), push_constants).unwrap() /* TODO: error */;
let previous = CmdBindVertexBuffers::new(previous, pipeline.vertex_definition(), vertices);
// TODO: check that dynamic state is not missing some elements required by the pipeline
let bind_pipeline = CmdBindPipeline::bind_graphics_pipeline(pipeline.clone());
let device = bind_pipeline.device().clone();
let set_state = CmdSetState::new(device, dynamic);
let descriptor_sets = CmdBindDescriptorSets::new(true, pipeline.clone(), sets).unwrap() /* TODO: error */;
let push_constants = CmdPushConstants::new(pipeline.clone(), push_constants).unwrap() /* TODO: error */;
let vertex_buffers = CmdBindVertexBuffers::new(pipeline.vertex_definition(), vertices);
let draw_raw = unsafe { CmdDrawRaw::new(vertex_count as u32, instance_count as u32, 0, 0) };
CmdDraw {
previous: previous,
vertex_count: vertex_count as u32,
instance_count: instance_count as u32,
first_vertex: 0,
first_instance: 0,
vertex_buffers: vertex_buffers,
push_constants: push_constants,
descriptor_sets: descriptor_sets,
set_state: set_state,
bind_pipeline: bind_pipeline,
draw_raw: draw_raw,
}
}
}
unsafe impl<L, V, Pv, Pl, Prp, S, Pc> CommandsList for CmdDraw<L, V, Pv, Pl, Prp, S, Pc>
where L: CommandsList, Pl: PipelineLayoutRef, S: TrackedDescriptorSetsCollection
unsafe impl<Cb, V, Pv, Pl, Prp, S, Pc, O, O1, O2, O3, O4, O5> AddCommand<CmdDraw<V, Pv, Pl, Prp, S, Pc>> for Cb
where Cb: AddCommand<CmdBindVertexBuffers<V>, Out = O1>,
O1: AddCommand<CmdPushConstants<Pc, Arc<GraphicsPipeline<Pv, Pl, Prp>>>, Out = O2>,
O2: AddCommand<CmdBindDescriptorSets<S, Arc<GraphicsPipeline<Pv, Pl, Prp>>>, Out = O3>,
O3: AddCommand<CmdSetState, Out = O4>,
O4: AddCommand<CmdBindPipeline<Arc<GraphicsPipeline<Pv, Pl, Prp>>>, Out = O5>,
O5: AddCommand<CmdDrawRaw, Out = O>
{
#[inline]
fn append<'a>(&'a self, builder: &mut CommandsListSink<'a>) {
self.previous.append(builder);
type Out = O;
builder.add_command(Box::new(move |raw: &mut RawCommandBufferPrototype| {
unsafe {
let vk = raw.device.pointers();
let cmd = raw.command_buffer.clone().take().unwrap();
vk.CmdDraw(cmd, self.vertex_count, self.instance_count, self.first_vertex,
self.first_instance);
}
}));
#[inline]
fn add(self, command: CmdDraw<V, Pv, Pl, Prp, S, Pc>) -> O {
self.add(command.vertex_buffers)
.add(command.push_constants)
.add(command.descriptor_sets)
.add(command.set_state)
.add(command.bind_pipeline)
.add(command.draw_raw)
}
}
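The `CmdDraw` implementation above never touches Vulkan directly: it simply forwards each of its stored sub-commands to the builder, relying on the builder implementing `AddCommand` for every part. A stripped-down sketch of that chaining pattern follows; the `Builder`, `CmdBind` and `CmdDrawRaw` types here are invented for the example and only mimic the shape of the vulkano traits:

// A composite command is added by adding its parts one after the other,
// threading the builder value through each `add` call.
trait AddCommand<C> {
    type Out;
    fn add(self, command: C) -> Self::Out;
}

struct Builder {
    log: Vec<String>,
}

struct CmdBind(&'static str);
struct CmdDrawRaw(u32);

impl AddCommand<CmdBind> for Builder {
    type Out = Builder;
    fn add(mut self, command: CmdBind) -> Builder {
        self.log.push(format!("bind {}", command.0));
        self
    }
}

impl AddCommand<CmdDrawRaw> for Builder {
    type Out = Builder;
    fn add(mut self, command: CmdDrawRaw) -> Builder {
        self.log.push(format!("draw {} vertices", command.0));
        self
    }
}

// The composite command, in the spirit of `CmdDraw` above.
struct CmdDraw {
    bind: CmdBind,
    draw: CmdDrawRaw,
}

impl<B, O1, O> AddCommand<CmdDraw> for B
    where B: AddCommand<CmdBind, Out = O1>,
          O1: AddCommand<CmdDrawRaw, Out = O>
{
    type Out = O;
    fn add(self, command: CmdDraw) -> O {
        self.add(command.bind).add(command.draw)
    }
}

fn main() {
    let builder = Builder { log: Vec::new() };
    let builder = builder.add(CmdDraw {
        bind: CmdBind("graphics pipeline"),
        draw: CmdDrawRaw(3),
    });
    println!("{:?}", builder.log);
}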

View File

@ -0,0 +1,55 @@
// Copyright (c) 2016 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use command_buffer::cb::AddCommand;
use command_buffer::cb::UnsafeCommandBufferBuilder;
use command_buffer::pool::CommandPool;
use VulkanObject;
use VulkanPointers;
pub struct CmdDrawIndexedRaw {
index_count: u32,
instance_count: u32,
first_vertex: u32,
vertex_offset: i32,
first_instance: u32,
}
impl CmdDrawIndexedRaw {
#[inline]
pub unsafe fn new(index_count: u32, instance_count: u32, first_vertex: u32,
vertex_offset: i32, first_instance: u32) -> CmdDrawIndexedRaw
{
CmdDrawIndexedRaw {
index_count: index_count,
instance_count: instance_count,
first_vertex: first_vertex,
vertex_offset: vertex_offset,
first_instance: first_instance,
}
}
}
unsafe impl<'a, P> AddCommand<&'a CmdDrawIndexedRaw> for UnsafeCommandBufferBuilder<P>
where P: CommandPool
{
type Out = UnsafeCommandBufferBuilder<P>;
#[inline]
fn add(self, command: &'a CmdDrawIndexedRaw) -> Self::Out {
unsafe {
let vk = self.device().pointers();
let cmd = self.internal_object();
vk.CmdDrawIndexed(cmd, command.index_count, command.instance_count,
command.first_vertex, command.vertex_offset, command.first_instance);
}
self
}
}

View File

@ -0,0 +1,59 @@
// Copyright (c) 2016 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use buffer::Buffer;
use command_buffer::cb::AddCommand;
use command_buffer::cb::UnsafeCommandBufferBuilder;
use command_buffer::pool::CommandPool;
use VulkanObject;
use VulkanPointers;
use vk;
pub struct CmdDrawIndirectRaw<B> {
buffer: B,
offset: vk::DeviceSize,
draw_count: u32,
stride: u32,
}
impl<B> CmdDrawIndirectRaw<B> where B: Buffer {
#[inline]
pub unsafe fn new(buffer: B, offset: usize, draw_count: u32) -> CmdDrawIndirectRaw<B> {
let real_offset = offset + buffer.inner().offset;
assert_eq!(real_offset % 4, 0);
// FIXME: all checks are missing here
CmdDrawIndirectRaw {
buffer: buffer,
offset: real_offset as vk::DeviceSize,
draw_count: draw_count,
stride: 16, // TODO:
}
}
}
unsafe impl<'a, B, P> AddCommand<&'a CmdDrawIndirectRaw<B>> for UnsafeCommandBufferBuilder<P>
where B: Buffer,
P: CommandPool
{
type Out = UnsafeCommandBufferBuilder<P>;
#[inline]
fn add(self, command: &'a CmdDrawIndirectRaw<B>) -> Self::Out {
unsafe {
let vk = self.device().pointers();
let cmd = self.internal_object();
vk.CmdDrawIndirect(cmd, command.buffer.inner().buffer.internal_object(),
command.offset, command.draw_count, command.stride);
}
self
}
}
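The only arithmetic in `CmdDrawIndirectRaw::new` above is folding the caller's offset into the buffer's own offset and checking alignment; the Vulkan spec requires indirect draw offsets to be a multiple of 4. A tiny sketch of just that check, with plain integers standing in for the buffer types:

// Combine the user offset with the sub-buffer's base offset and enforce the
// 4-byte alignment required for indirect draw parameters.
fn indirect_offset(user_offset: usize, buffer_base_offset: usize) -> u64 {
    let real_offset = user_offset + buffer_base_offset;
    assert_eq!(real_offset % 4, 0, "indirect draw offset must be a multiple of 4");
    real_offset as u64
}

fn main() {
    assert_eq!(indirect_offset(16, 4), 20);
}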

View File

@ -0,0 +1,53 @@
// Copyright (c) 2016 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use command_buffer::cb::AddCommand;
use command_buffer::cb::UnsafeCommandBufferBuilder;
use command_buffer::pool::CommandPool;
use VulkanObject;
use VulkanPointers;
pub struct CmdDrawRaw {
vertex_count: u32,
instance_count: u32,
first_vertex: u32,
first_instance: u32,
}
impl CmdDrawRaw {
#[inline]
pub unsafe fn new(vertex_count: u32, instance_count: u32, first_vertex: u32,
first_instance: u32) -> CmdDrawRaw
{
CmdDrawRaw {
vertex_count: vertex_count,
instance_count: instance_count,
first_vertex: first_vertex,
first_instance: first_instance,
}
}
}
unsafe impl<'a, P> AddCommand<&'a CmdDrawRaw> for UnsafeCommandBufferBuilder<P>
where P: CommandPool
{
type Out = UnsafeCommandBufferBuilder<P>;
#[inline]
fn add(self, command: &'a CmdDrawRaw) -> Self::Out {
unsafe {
let vk = self.device().pointers();
let cmd = self.internal_object();
vk.CmdDraw(cmd, command.vertex_count, command.instance_count, command.first_vertex,
command.first_instance);
}
self
}
}

View File

@ -1,25 +0,0 @@
// Copyright (c) 2016 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use command_buffer::CommandsList;
use command_buffer::CommandsListSink;
#[inline]
pub fn empty() -> EmptyCommandsList {
EmptyCommandsList
}
#[derive(Debug, Copy, Clone)]
pub struct EmptyCommandsList;
unsafe impl CommandsList for EmptyCommandsList {
#[inline]
fn append<'a>(&'a self, builder: &mut CommandsListSink<'a>) {
}
}

View File

@ -7,70 +7,38 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use std::error;
use std::fmt;
use command_buffer::RawCommandBufferPrototype;
use command_buffer::CommandsList;
use command_buffer::CommandsListSink;
use command_buffer::cb::AddCommand;
use command_buffer::cb::UnsafeCommandBufferBuilder;
use command_buffer::pool::CommandPool;
use VulkanObject;
use VulkanPointers;
/// Wraps around a commands list and adds to the end of it a command that ends the current render
/// pass.
#[derive(Debug, Copy, Clone)]
pub struct CmdEndRenderPass<L> where L: CommandsList {
// Parent commands list.
previous: L,
}
pub struct CmdEndRenderPass;
impl<L> CmdEndRenderPass<L> where L: CommandsList {
impl CmdEndRenderPass {
/// See the documentation of the `end_render_pass` method.
#[inline]
pub fn new(previous: L) -> Result<CmdEndRenderPass<L>, CmdEndRenderPassError> {
// TODO: check that we're in a render pass and that the next subpass is correct
Ok(CmdEndRenderPass {
previous: previous,
})
pub fn new() -> CmdEndRenderPass {
CmdEndRenderPass
}
}
unsafe impl<L> CommandsList for CmdEndRenderPass<L> where L: CommandsList {
unsafe impl<'a, P> AddCommand<&'a CmdEndRenderPass> for UnsafeCommandBufferBuilder<P>
where P: CommandPool
{
type Out = UnsafeCommandBufferBuilder<P>;
#[inline]
fn append<'a>(&'a self, builder: &mut CommandsListSink<'a>) {
self.previous.append(builder);
builder.add_command(Box::new(move |raw: &mut RawCommandBufferPrototype| {
unsafe {
let vk = raw.device.pointers();
let cmd = raw.command_buffer.clone().take().unwrap();
vk.CmdEndRenderPass(cmd);
}
}));
}
}
/// Error that can happen when creating a `CmdEndRenderPass`.
#[derive(Debug, Copy, Clone)]
pub enum CmdEndRenderPassError {
/// It's not possible to end the render pass before you went over all the subpasses.
SubpassesRemaining,
}
impl error::Error for CmdEndRenderPassError {
#[inline]
fn description(&self) -> &str {
match *self {
CmdEndRenderPassError::SubpassesRemaining => {
"it's not possible to end the render pass before you went over all the subpasses"
},
fn add(self, command: &'a CmdEndRenderPass) -> Self::Out {
unsafe {
let vk = self.device().pointers();
let cmd = self.internal_object();
vk.CmdEndRenderPass(cmd);
}
}
}
impl fmt::Display for CmdEndRenderPassError {
#[inline]
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(fmt, "{}", error::Error::description(self))
self
}
}

View File

@ -7,127 +7,55 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use std::sync::Arc;
use smallvec::SmallVec;
use buffer::Buffer;
use command_buffer::DynamicState;
use command_buffer::RawCommandBufferPrototype;
use command_buffer::SecondaryCommandBuffer;
use command_buffer::cmd::CommandsList;
use command_buffer::cmd::CommandsListSink;
use command_buffer::cmd::CommandsListSinkCaller;
use device::Device;
use image::Layout;
use image::Image;
use sync::AccessFlagBits;
use sync::PipelineStages;
use command_buffer::cb::AddCommand;
use command_buffer::cb::UnsafeCommandBufferBuilder;
use command_buffer::pool::CommandPool;
use VulkanObject;
use VulkanPointers;
use vk;
/// Wraps around a commands list and adds a command at the end of it that executes a secondary
/// command buffer.
pub struct CmdExecuteCommands<Cb, L> where Cb: SecondaryCommandBuffer, L: CommandsList {
// Parent commands list.
previous: L,
pub struct CmdExecuteCommands<Cb> {
// Raw list of command buffers to execute.
raw_list: SmallVec<[vk::CommandBuffer; 4]>,
// Command buffer to execute.
command_buffer: Cb,
}
impl<Cb, L> CmdExecuteCommands<Cb, L>
where Cb: SecondaryCommandBuffer, L: CommandsList
{
impl<Cb> CmdExecuteCommands<Cb> {
/// See the documentation of the `execute_commands` method.
#[inline]
pub fn new(previous: L, command_buffer: Cb) -> CmdExecuteCommands<Cb, L> {
// FIXME: most checks are missing
let raw_list = {
pub fn new(command_buffer: Cb) -> CmdExecuteCommands<Cb> {
unimplemented!() // TODO:
/*let raw_list = {
let mut l = SmallVec::new();
l.push(command_buffer.inner());
l
};
CmdExecuteCommands {
previous: previous,
raw_list: raw_list,
command_buffer: command_buffer,
}
}*/
}
}
// TODO: specialize the trait so that multiple calls to `execute` are grouped together?
unsafe impl<Cb, L> CommandsList for CmdExecuteCommands<Cb, L>
where Cb: SecondaryCommandBuffer, L: CommandsList
unsafe impl<'a, P, Cb> AddCommand<&'a CmdExecuteCommands<Cb>> for UnsafeCommandBufferBuilder<P>
where P: CommandPool
{
type Out = UnsafeCommandBufferBuilder<P>;
#[inline]
fn append<'a>(&'a self, builder: &mut CommandsListSink<'a>) {
self.previous.append(builder);
fn add(self, command: &'a CmdExecuteCommands<Cb>) -> Self::Out {
unsafe {
let vk = self.device().pointers();
let cmd = self.internal_object();
vk.CmdExecuteCommands(cmd, command.raw_list.len() as u32, command.raw_list.as_ptr());
}
assert_eq!(self.command_buffer.device().internal_object(),
builder.device().internal_object());
self.command_buffer.append(&mut FilterOutCommands(builder, self.command_buffer.device()));
builder.add_command(Box::new(move |raw: &mut RawCommandBufferPrototype| {
unsafe {
let vk = raw.device.pointers();
let cmd = raw.command_buffer.clone().take().unwrap();
vk.CmdExecuteCommands(cmd, self.raw_list.len() as u32, self.raw_list.as_ptr());
// vkCmdExecuteCommands resets the state of the command buffer.
raw.current_state = DynamicState::none();
raw.bound_graphics_pipeline = 0;
raw.bound_compute_pipeline = 0;
raw.bound_index_buffer = (0, 0, 0);
}
}));
}
}
struct FilterOutCommands<'a, 'c: 'a>(&'a mut CommandsListSink<'c>, &'a Arc<Device>);
impl<'a, 'c: 'a> CommandsListSink<'c> for FilterOutCommands<'a, 'c> {
#[inline]
fn device(&self) -> &Arc<Device> {
self.1
}
#[inline]
fn add_command(&mut self, _: Box<CommandsListSinkCaller<'c> + 'c>) {
}
// FIXME: this is wrong since the underlying impl will try to perform transitions that are
// performed by the secondary command buffer
#[inline]
fn add_buffer_transition(&mut self, buffer: &'c Buffer, offset: usize, size: usize,
write: bool, stages: PipelineStages, access: AccessFlagBits)
{
self.0.add_buffer_transition(buffer, offset, size, write, stages, access)
}
// FIXME: this is wrong since the underlying impl will try to perform transitions that are
// performed by the secondary command buffer
#[inline]
fn add_image_transition(&mut self, image: &'c Image, first_layer: u32, num_layers: u32,
first_mipmap: u32, num_mipmaps: u32, write: bool, layout: Layout,
stages: PipelineStages, access: AccessFlagBits)
{
self.0.add_image_transition(image, first_layer, num_layers, first_mipmap, num_mipmaps,
write, layout, stages, access)
}
#[inline]
fn add_image_transition_notification(&mut self, image: &'c Image, first_layer: u32,
num_layers: u32, first_mipmap: u32, num_mipmaps: u32,
layout: Layout, stages: PipelineStages,
access: AccessFlagBits)
{
self.0.add_image_transition_notification(image, first_layer, num_layers, first_mipmap,
num_mipmaps, layout, stages, access)
self
}
}

View File

@ -12,22 +12,14 @@ use std::fmt;
use buffer::Buffer;
use buffer::BufferInner;
use command_buffer::RawCommandBufferPrototype;
use command_buffer::cmd::CommandsListPossibleOutsideRenderPass;
use command_buffer::CommandsList;
use command_buffer::CommandsListSink;
use sync::AccessFlagBits;
use sync::PipelineStages;
use command_buffer::cb::AddCommand;
use command_buffer::cb::UnsafeCommandBufferBuilder;
use command_buffer::pool::CommandPool;
use VulkanObject;
use VulkanPointers;
use vk;
/// Wraps around a commands list and adds a fill buffer command at the end of it.
pub struct CmdFillBuffer<L, B>
where B: Buffer, L: CommandsList
{
// Parent commands list.
previous: L,
pub struct CmdFillBuffer<B> {
// The buffer to update.
buffer: B,
// Raw buffer handle.
@ -40,16 +32,12 @@ pub struct CmdFillBuffer<L, B>
data: u32,
}
impl<L, B> CmdFillBuffer<L, B>
where B: Buffer,
L: CommandsList + CommandsListPossibleOutsideRenderPass
impl<B> CmdFillBuffer<B>
where B: Buffer
{
/// Builds a command that writes data to a buffer.
pub fn new(previous: L, buffer: B, data: u32)
-> Result<CmdFillBuffer<L, B>, CmdFillBufferError>
{
assert!(previous.is_outside_render_pass()); // TODO: error
// TODO: not safe because of signalling NaNs
pub fn new(buffer: B, data: u32) -> Result<CmdFillBuffer<B>, CmdFillBufferError> {
let size = buffer.size();
let (buffer_handle, offset) = {
@ -64,7 +52,6 @@ impl<L, B> CmdFillBuffer<L, B>
};
Ok(CmdFillBuffer {
previous: previous,
buffer: buffer,
buffer_handle: buffer_handle,
offset: offset as vk::DeviceSize,
@ -74,31 +61,21 @@ impl<L, B> CmdFillBuffer<L, B>
}
}
unsafe impl<L, B> CommandsList for CmdFillBuffer<L, B>
where B: Buffer,
L: CommandsList,
unsafe impl<'a, P, B> AddCommand<&'a CmdFillBuffer<B>> for UnsafeCommandBufferBuilder<P>
where P: CommandPool
{
type Out = UnsafeCommandBufferBuilder<P>;
#[inline]
fn append<'a>(&'a self, builder: &mut CommandsListSink<'a>) {
self.previous.append(builder);
assert_eq!(self.buffer.inner().buffer.device().internal_object(),
builder.device().internal_object());
{
let stages = PipelineStages { transfer: true, .. PipelineStages::none() };
let access = AccessFlagBits { transfer_write: true, .. AccessFlagBits::none() };
builder.add_buffer_transition(&self.buffer, 0, self.buffer.size(), true,
stages, access);
fn add(self, command: &'a CmdFillBuffer<B>) -> Self::Out {
unsafe {
let vk = self.device().pointers();
let cmd = self.internal_object();
vk.CmdFillBuffer(cmd, command.buffer_handle, command.offset,
command.size, command.data);
}
builder.add_command(Box::new(move |raw: &mut RawCommandBufferPrototype| {
unsafe {
let vk = raw.device.pointers();
let cmd = raw.command_buffer.clone().take().unwrap();
vk.CmdFillBuffer(cmd, self.buffer_handle, self.offset, self.size, self.data);
}
}));
self
}
}

View File

@ -1,38 +0,0 @@
// Copyright (c) 2016 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use command_buffer::CommandsList;
use command_buffer::CommandsListSink;
/// Wraps around two commands lists and joins them together in one list.
#[derive(Debug, Copy, Clone)]
pub struct CommandsListJoin<A, B> where A: CommandsList, B: CommandsList {
// First commands list.
first: A,
// Second commands list.
second: B,
}
impl<A, B> CommandsListJoin<A, B> where A: CommandsList, B: CommandsList {
#[inline]
pub fn new(first: A, second: B) -> CommandsListJoin<A, B> {
CommandsListJoin {
first: first,
second: second,
}
}
}
unsafe impl<A, B> CommandsList for CommandsListJoin<A, B> where A: CommandsList, B: CommandsList {
#[inline]
fn append<'a>(&'a self, builder: &mut CommandsListSink<'a>) {
self.first.append(builder);
self.second.append(builder);
}
}

View File

@ -7,54 +7,29 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use std::sync::Arc;
use buffer::Buffer;
use buffer::TypedBuffer;
use command_buffer::cb::AutobarriersCommandBuffer;
use command_buffer::cb::CommandsListBuildPrimary;
use command_buffer::cb::CommandsListBuildPrimaryPool;
use command_buffer::pool::StandardCommandPool;
use command_buffer::DynamicState;
use command_buffer::RawCommandBufferPrototype;
use command_buffer::SecondaryCommandBuffer;
use descriptor::PipelineLayoutRef;
use descriptor::descriptor_set::collection::TrackedDescriptorSetsCollection;
use device::Device;
use framebuffer::FramebufferRef;
use framebuffer::RenderPass;
use framebuffer::RenderPassRef;
use framebuffer::RenderPassClearValues;
use image::Layout;
use image::Image;
use instance::QueueFamily;
use pipeline::ComputePipeline;
use pipeline::GraphicsPipeline;
use pipeline::input_assembly::Index;
use pipeline::vertex::Source;
use sync::AccessFlagBits;
use sync::PipelineStages;
use OomError;
//! All the commands used in the internals of vulkano.
pub use self::begin_render_pass::CmdBeginRenderPass;
pub use self::bind_index_buffer::CmdBindIndexBuffer;
pub use self::bind_descriptor_sets::{CmdBindDescriptorSets, CmdBindDescriptorSetsError};
pub use self::bind_pipeline::CmdBindPipeline;
pub use self::bind_vertex_buffers::CmdBindVertexBuffers;
pub use self::blit_image_unsynced::{BlitRegion, BlitRegionAspect};
pub use self::blit_image_unsynced::{CmdBlitImageUnsynced, CmdBlitImageUnsyncedError};
//pub use self::blit_image_unsynced::{BlitRegion, BlitRegionAspect};
//pub use self::blit_image_unsynced::{CmdBlitImageUnsynced, CmdBlitImageUnsyncedError};
pub use self::clear_attachments::CmdClearAttachments;
pub use self::copy_buffer::{CmdCopyBuffer, CmdCopyBufferError};
pub use self::dispatch::{CmdDispatch, CmdDispatchError};
pub use self::dispatch_indirect::{CmdDispatchIndirect, CmdDispatchIndirectError};
//pub use self::dispatch::{CmdDispatch, CmdDispatchError};
//pub use self::dispatch_indirect::{CmdDispatchIndirect, CmdDispatchIndirectError};
pub use self::draw::CmdDraw;
pub use self::draw_indexed::CmdDrawIndexed;
pub use self::empty::{empty, EmptyCommandsList};
pub use self::end_render_pass::{CmdEndRenderPass, CmdEndRenderPassError};
//pub use self::draw_indexed::CmdDrawIndexed;
pub use self::draw_indexed_raw::CmdDrawIndexedRaw;
pub use self::draw_indirect_raw::CmdDrawIndirectRaw;
pub use self::draw_raw::CmdDrawRaw;
pub use self::end_render_pass::CmdEndRenderPass;
pub use self::execute::CmdExecuteCommands;
pub use self::fill_buffer::{CmdFillBuffer, CmdFillBufferError};
pub use self::join::CommandsListJoin;
pub use self::next_subpass::{CmdNextSubpass, CmdNextSubpassError};
pub use self::next_subpass::CmdNextSubpass;
pub use self::pipeline_barrier::CmdPipelineBarrier;
pub use self::push_constants::{CmdPushConstants, CmdPushConstantsError};
pub use self::set_state::{CmdSetState};
pub use self::update_buffer::{CmdUpdateBuffer, CmdUpdateBufferError};
@ -64,327 +39,21 @@ mod bind_descriptor_sets;
mod bind_index_buffer;
mod bind_pipeline;
mod bind_vertex_buffers;
mod blit_image_unsynced;
//mod blit_image_unsynced;
mod clear_attachments;
mod copy_buffer;
mod dispatch;
mod dispatch_indirect;
//mod dispatch;
//mod dispatch_indirect;
mod draw;
mod draw_indexed;
mod empty;
//mod draw_indexed;
mod draw_indexed_raw;
mod draw_indirect_raw;
mod draw_raw;
mod end_render_pass;
mod execute;
mod fill_buffer;
mod join;
mod next_subpass;
mod pipeline_barrier;
mod push_constants;
mod set_state;
mod update_buffer;
/// A list of commands that can be turned into a command buffer.
///
/// This is just a naked list of commands. It holds buffers, images, etc. but the list of commands
/// itself is not a Vulkan object.
pub unsafe trait CommandsList {
/// Adds a command that writes the content of a buffer.
///
/// After this command is executed, the content of `buffer` will become `data`. If `data` is
/// smaller than `buffer`, then only the beginning of `buffer` will be modified and the rest
/// will be left untouched. If `buffer` is smaller than `data`, `buffer` will be entirely
/// written and no error is generated.
///
/// This command is limited to 64kB (65536 bytes) of data and should only be used for small
/// amounts of data. For large amounts of data, you are encouraged to write the data to a
/// buffer and use `copy_buffer` instead.
#[inline]
fn update_buffer<'a, B, D: ?Sized>(self, buffer: B, data: &'a D)
-> Result<CmdUpdateBuffer<'a, Self, B, D>, CmdUpdateBufferError>
where Self: Sized + CommandsListPossibleOutsideRenderPass, B: Buffer, D: Copy + 'static
{
CmdUpdateBuffer::new(self, buffer, data)
}
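As a plain-CPU illustration of the size rule just described (nothing vulkano-specific, just a byte slice): only the overlapping prefix is written, and neither a short `data` nor a short `buffer` is an error.

// Mirror of the documented `update_buffer` semantics on a byte slice: write
// min(buffer.len(), data.len()) bytes and leave the rest untouched.
fn cpu_update(buffer: &mut [u8], data: &[u8]) {
    let n = buffer.len().min(data.len());
    buffer[..n].copy_from_slice(&data[..n]);
}

fn main() {
    let mut buffer = [0u8; 8];
    cpu_update(&mut buffer, &[1, 2, 3]);   // data smaller: only the prefix changes
    assert_eq!(buffer, [1, 2, 3, 0, 0, 0, 0, 0]);

    let mut small = [0u8; 2];
    cpu_update(&mut small, &[9, 9, 9, 9]); // buffer smaller: written entirely
    assert_eq!(small, [9, 9]);
}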
/// Adds a command that copies the content of a buffer to another.
///
/// If `source` is smaller than `destination`, only the beginning of `destination` will be
/// modified.
#[inline]
fn copy_buffer<S, D>(self, source: S, destination: D)
-> Result<CmdCopyBuffer<Self, S, D>, CmdCopyBufferError>
where Self: Sized + CommandsListPossibleOutsideRenderPass,
S: Buffer, D: Buffer
{
CmdCopyBuffer::new(self, source, destination)
}
/// Adds a command that writes the content of a buffer.
///
/// This function is similar to the `memset` function in C. The `data` parameter is a number
/// that will be repeatedly written through the entire buffer.
///
/// > **Note**: This function is technically safe because buffers can only contain integers or
/// > floating point numbers, which are always valid whatever their memory representation is.
/// > But unless your buffer actually contains only 32-bit integers, you are encouraged to use
/// > this function only for zeroing the content of a buffer by passing `0` for the data.
#[inline]
fn fill_buffer<B>(self, buffer: B, data: u32)
-> Result<CmdFillBuffer<Self, B>, CmdFillBufferError>
where Self: Sized + CommandsListPossibleOutsideRenderPass, B: Buffer
{
CmdFillBuffer::new(self, buffer, data)
}
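The `memset` analogy can also be made concrete on the CPU side; a minimal sketch of what "repeatedly written" means for the 32-bit `data` value (illustrative only, with none of the real alignment or size handling):

// Fill a buffer of u32 words with one repeated value, in the spirit of
// vkCmdFillBuffer; passing 0 simply zeroes the buffer whatever its real type.
fn cpu_fill(buffer: &mut [u32], data: u32) {
    for word in buffer.iter_mut() {
        *word = data;
    }
}

fn main() {
    let mut words = [7u32; 4];
    cpu_fill(&mut words, 0);
    assert_eq!(words, [0, 0, 0, 0]);
}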
/// Adds a command that executes a secondary command buffer.
///
/// When you create a command buffer, you have the possibility to create either a primary
/// command buffer or a secondary command buffer. Secondary command buffers can't be executed
/// directly, but can be executed from a primary command buffer.
///
/// A secondary command buffer can't execute another secondary command buffer. The only way
/// you can use `execute` is to make a primary command buffer call a secondary command buffer.
#[inline]
fn execute_commands<Cb>(self, command_buffer: Cb) -> CmdExecuteCommands<Cb, Self>
where Self: Sized, Cb: SecondaryCommandBuffer
{
CmdExecuteCommands::new(self, command_buffer)
}
/// Adds a command that executes a compute shader.
///
/// The `dimensions` are the number of working groups to start. The GPU will execute the
/// compute shader `dimensions[0] * dimensions[1] * dimensions[2]` times.
///
/// The `pipeline` is the compute pipeline that will be executed, and the sets and push
/// constants will be accessible to all the invocations.
#[inline]
fn dispatch<Pl, S, Pc>(self, pipeline: Arc<ComputePipeline<Pl>>, sets: S,
dimensions: [u32; 3], push_constants: Pc)
-> Result<CmdDispatch<Self, Pl, S, Pc>, CmdDispatchError>
where Self: Sized + CommandsList, Pl: PipelineLayoutRef,
S: TrackedDescriptorSetsCollection
{
CmdDispatch::new(self, pipeline, sets, dimensions, push_constants)
}
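As a quick sanity check of the sentence above, the number of work groups launched is just the product of the three dimensions; the compute shader's local size (the 8x8x1 below is an arbitrary example, not something taken from this commit) then multiplies that again to give the number of invocations:

fn main() {
    let dimensions = [64u64, 64, 1];
    let work_groups: u64 = dimensions.iter().product();
    assert_eq!(work_groups, 4096);

    // With a local size of 8x8x1 declared in the shader, the total number of
    // invocations would be:
    let local_size = [8u64, 8, 1];
    let invocations = work_groups * local_size.iter().product::<u64>();
    assert_eq!(invocations, 262_144);
}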
/// Adds a command that starts a render pass.
///
/// If `secondary` is true, then you will only be able to add secondary command buffers while
/// you're inside the first subpass on the render pass. If `secondary` is false, you will only
/// be able to add inline draw commands and not secondary command buffers.
///
/// You must call this before you can add draw commands.
#[inline]
fn begin_render_pass<F, C>(self, framebuffer: F, secondary: bool, clear_values: C)
-> CmdBeginRenderPass<Self, Arc<RenderPass>, F>
where Self: Sized, F: FramebufferRef,
<<F as FramebufferRef>::RenderPass as RenderPassRef>::Desc: RenderPassClearValues<C>
{
CmdBeginRenderPass::new(self, framebuffer, secondary, clear_values)
}
/// Adds a command that jumps to the next subpass of the current render pass.
#[inline]
fn next_subpass(self, secondary: bool) -> Result<CmdNextSubpass<Self>, CmdNextSubpassError>
where Self: Sized
{
CmdNextSubpass::new(self, secondary)
}
/// Adds a command that ends the current render pass.
///
/// This must be called after you went through all the subpasses and before you can build
/// the command buffer or add further commands.
#[inline]
fn end_render_pass(self) -> Result<CmdEndRenderPass<Self>, CmdEndRenderPassError>
where Self: Sized
{
CmdEndRenderPass::new(self)
}
/// Adds a command that draws.
///
/// Can only be used from inside a render pass.
#[inline]
fn draw<Pv, Pl, Prp, S, Pc, V>(self, pipeline: Arc<GraphicsPipeline<Pv, Pl, Prp>>,
dynamic: DynamicState, vertices: V, sets: S,
push_constants: Pc)
-> CmdDraw<Self, V, Pv, Pl, Prp, S, Pc>
where Self: Sized + CommandsList,
Pl: PipelineLayoutRef,
S: TrackedDescriptorSetsCollection,
Pv: Source<V>
{
CmdDraw::new(self, pipeline, dynamic, vertices, sets, push_constants)
}
/// Adds a command that draws with an index buffer.
///
/// Can only be used from inside a render pass.
#[inline]
fn draw_indexed<Pv, Pl, Prp, S, Pc, V, Ib, I>(self, pipeline: Arc<GraphicsPipeline<Pv, Pl, Prp>>,
dynamic: DynamicState, vertices: V, indices: Ib,
sets: S, push_constants: Pc)
-> CmdDrawIndexed<Self, V, Ib, Pv, Pl, Prp, S, Pc>
where Self: Sized + CommandsList,
Pl: PipelineLayoutRef,
S: TrackedDescriptorSetsCollection,
Pv: Source<V>,
Ib: Buffer + TypedBuffer<Content = [I]>,
I: Index + 'static
{
CmdDrawIndexed::new(self, pipeline, dynamic, vertices, indices, sets, push_constants)
}
/// Appends another list at the end of this one.
#[inline]
fn join<L>(self, other: L) -> CommandsListJoin<Self, L> where Self: Sized, L: CommandsList {
CommandsListJoin::new(self, other)
}
/// Builds the list as a primary command buffer.
#[inline]
fn build_primary(self, device: &Arc<Device>, queue_family: QueueFamily)
-> Result<AutobarriersCommandBuffer<Self, Arc<StandardCommandPool>>, OomError>
where Self: Sized
{
self.build_primary_custom(device, queue_family)
}
/// Builds the list as a primary command buffer.
#[inline]
fn build_primary_custom<C>(self, device: &Arc<Device>, queue_family: QueueFamily)
-> Result<C, OomError>
where C: CommandsListBuildPrimary<Self>, Self: Sized
{
CommandsListBuildPrimary::build_primary(device, queue_family, self)
}
/// Builds the list as a primary command buffer and with the given pool.
#[inline]
fn build_primary_with_pool<C, P>(self, pool: P) -> Result<C, OomError>
where C: CommandsListBuildPrimaryPool<Self, P>, Self: Sized
{
CommandsListBuildPrimaryPool::build_primary_with_pool(pool, self)
}
/// Appends this list of commands at the end of a command buffer in construction.
///
/// The `CommandsListSink` typically represents a command buffer being constructed.
/// The `append` method must call the methods of that `CommandsListSink` in order to add
/// elements at the end of the command buffer being constructed. The `CommandsListSink` can
/// also typically be a filter around another `CommandsListSink`.
///
/// The lifetime of the `CommandsListSink` is the same as the lifetime of `&self`. This means
/// that the commands you pass to the sink can borrow `self`.
///
/// # Safety
///
/// It is important for safety that `append` always returns the same commands.
///
/// > **Note**: For example, in the case of secondary command buffers this function is called once
/// > when the secondary command buffer is created, and once again every time the secondary
/// > command buffer is used. All the calls must match in order for the behavior to be safe.
fn append<'a>(&'a self, builder: &mut CommandsListSink<'a>);
}
unsafe impl CommandsList for Box<CommandsList> {
#[inline]
fn append<'a>(&'a self, builder: &mut CommandsListSink<'a>) {
(**self).append(builder)
}
}
/// Output of the "append" method. The lifetime corresponds to a borrow of the commands list.
///
/// A `CommandsListSink` typically represents a command buffer being constructed. The various
/// methods add elements at the end of that command buffer.
pub trait CommandsListSink<'a> {
/// Returns the device of the sink. Used by the commands in the commands list to make sure that
/// their buffer, images, etc. belong to the same device as the sink.
fn device(&self) -> &Arc<Device>;
/// Requests that a command must be executed.
///
/// Note that the lifetime means that we hold a reference to the content of
/// the commands list in that closure.
fn add_command(&mut self, Box<CommandsListSinkCaller<'a> + 'a>);
/// Requests that a buffer must be transitioned to a given state.
///
/// The parameters are the buffer, and its offset and size, plus a `write` boolean that is
/// `true` if the buffer must be transitioned to a writable state or `false` if it must be
/// transitioned to a readable state.
fn add_buffer_transition(&mut self, buffer: &'a Buffer, offset: usize, size: usize,
write: bool, stages: PipelineStages, access: AccessFlagBits);
/// Requests that an image must be transitioned to a given state.
///
/// If necessary, you must transition the image to the `layout`.
fn add_image_transition(&mut self, image: &'a Image, first_layer: u32, num_layers: u32,
first_mipmap: u32, num_mipmaps: u32, write: bool, layout: Layout,
stages: PipelineStages, access: AccessFlagBits);
/// Notifies the sink that an image has been transitioned by one of the previous commands
/// added with `add_command`.
///
/// The sink doesn't need to perform any operation when this method is called, but should
/// modify its internal state in order to keep track of the state of that image.
fn add_image_transition_notification(&mut self, image: &'a Image, first_layer: u32,
num_layers: u32, first_mipmap: u32, num_mipmaps: u32,
layout: Layout, stages: PipelineStages,
access: AccessFlagBits);
}
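A reduced model of what a sink looks like on the consuming side: it stores boxed closures now and replays them later over some mutable recording state. Everything below is simplified and uses made-up types; the real trait also carries the buffer and image transition methods listed above:

// Simplified "sink": commands are collected as boxed closures that borrow from
// the commands list, then replayed over a mutable recording context.
struct Recording {
    commands: Vec<String>,
}

struct Sink<'a> {
    queued: Vec<Box<dyn FnOnce(&mut Recording) + 'a>>,
}

impl<'a> Sink<'a> {
    fn add_command(&mut self, cmd: Box<dyn FnOnce(&mut Recording) + 'a>) {
        self.queued.push(cmd);
    }

    fn replay(self, recording: &mut Recording) {
        for cmd in self.queued {
            cmd(recording);
        }
    }
}

fn main() {
    let name = String::from("draw");
    let mut sink = Sink { queued: Vec::new() };
    // The closure borrows `name`, matching the lifetime relationship between
    // the sink and the commands list it borrows from.
    sink.add_command(Box::new(|rec: &mut Recording| rec.commands.push(name.clone())));

    let mut recording = Recording { commands: Vec::new() };
    sink.replay(&mut recording);
    assert_eq!(recording.commands, vec!["draw"]);
}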
/// This trait is equivalent to `FnOnce(&mut RawCommandBufferPrototype<'a>)`. It is necessary
/// because Rust doesn't permit you to call a `Box<FnOnce>`.
///
/// > **Note**: This trait will most likely be removed if Rust fixes that problem with
/// > `Box<FnOnce>`.
pub trait CommandsListSinkCaller<'a> {
/// Consumes a `Box<CommandsListSinkCaller>` and call it on the parameter.
fn call(self: Box<Self>, &mut RawCommandBufferPrototype<'a>);
}
impl<'a, T> CommandsListSinkCaller<'a> for T
where T: FnOnce(&mut RawCommandBufferPrototype<'a>) -> () + 'a
{
fn call(self: Box<Self>, proto: &mut RawCommandBufferPrototype<'a>) {
self(proto);
}
}
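The trick described above is to move the call into a method that takes `self: Box<Self>`, which can be invoked even when calling a `Box<FnOnce>` directly is not possible. A standalone sketch of the same pattern (current Rust makes boxed `FnOnce` closures directly callable, so this is only needed on older compilers, exactly as the note says):

// `call` consumes the box, so the one-shot closure inside can still be run.
trait Caller {
    fn call(self: Box<Self>, out: &mut String);
}

impl<T> Caller for T
    where T: FnOnce(&mut String)
{
    fn call(self: Box<Self>, out: &mut String) {
        self(out)
    }
}

fn main() {
    let suffix = String::from(" buffer");
    let cmd: Box<dyn Caller> = Box::new(move |out: &mut String| out.push_str(&suffix));

    let mut recorded = String::from("command");
    cmd.call(&mut recorded);
    assert_eq!(recorded, "command buffer");
}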
/// Extension trait for both `CommandsList` and `CommandsListOutput` that indicates that we're
/// possibly outside a render pass.
///
/// In other words, if this trait is *not* implemented then we're guaranteed *not* to be outside
/// of a render pass. If it is implemented, then we maybe are but that's not sure.
pub unsafe trait CommandsListPossibleOutsideRenderPass {
/// Returns `true` if we're outside a render pass.
fn is_outside_render_pass(&self) -> bool;
}
/// Extension trait for both `CommandsList` and `CommandsListOutput` that indicates that we're
/// possibly inside a render pass.
///
/// In other words, if this trait is *not* implemented then we're guaranteed *not* to be inside
/// a render pass. If it is implemented, then we maybe are but that's not sure.
// TODO: make all return values optional, since we're possibly not in a render pass
pub unsafe trait CommandsListPossibleInsideRenderPass {
type RenderPassRef: RenderPassRef;
/// Returns the number of the subpass we're in. The value is 0-indexed, so immediately after
/// calling `begin_render_pass` the value will be `0`.
///
/// The value should always be strictly inferior to the number of subpasses in the render pass.
fn current_subpass_num(&self) -> u32;
/// If true, only secondary command buffers can be added inside the subpass. If false, only
/// inline draw commands can be added.
fn secondary_subpass(&self) -> bool;
/// Returns the description of the render pass we're in.
// TODO: return a trait object instead?
fn render_pass(&self) -> &Self::RenderPassRef;
//fn current_subpass(&self) -> Subpass<&Self::RenderPassRef>;
}
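A minimal sketch of how such a marker trait is meant to be used: an operation that is only valid outside a render pass can require the bound and then check the runtime flag, mirroring the `assert!(previous.is_outside_render_pass())` checks in the old constructors. The `Cmds` type and `fill_like_command` function are invented for the example:

// Marker trait in the spirit of `CommandsListPossibleOutsideRenderPass`.
trait PossiblyOutsideRenderPass {
    fn is_outside_render_pass(&self) -> bool;
}

struct Cmds {
    inside_render_pass: bool,
}

impl PossiblyOutsideRenderPass for Cmds {
    fn is_outside_render_pass(&self) -> bool {
        !self.inside_render_pass
    }
}

// A command that must be recorded outside of a render pass demands the bound,
// then still checks the flag because the trait only says "possibly outside".
fn fill_like_command<L: PossiblyOutsideRenderPass>(list: L) -> L {
    assert!(list.is_outside_render_pass());
    list
}

fn main() {
    let cmds = Cmds { inside_render_pass: false };
    let _cmds = fill_like_command(cmds);
}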

View File

@ -7,76 +7,45 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use std::error;
use std::fmt;
use command_buffer::RawCommandBufferPrototype;
use command_buffer::CommandsList;
use command_buffer::CommandsListSink;
use command_buffer::cb::AddCommand;
use command_buffer::cb::UnsafeCommandBufferBuilder;
use command_buffer::pool::CommandPool;
use VulkanObject;
use VulkanPointers;
use vk;
/// Wraps around a commands list and adds to the end of it a command that goes to the next subpass
/// of the current render pass.
#[derive(Debug, Copy, Clone)]
pub struct CmdNextSubpass<L> where L: CommandsList {
// Parent commands list.
previous: L,
pub struct CmdNextSubpass {
// The parameter for vkCmdNextSubpass.
contents: vk::SubpassContents,
}
impl<L> CmdNextSubpass<L> where L: CommandsList {
impl CmdNextSubpass {
/// See the documentation of the `next_subpass` method.
#[inline]
pub fn new(previous: L, secondary: bool) -> Result<CmdNextSubpass<L>, CmdNextSubpassError> {
// TODO: check that we're in a render pass and that the next subpass is correct
Ok(CmdNextSubpass {
previous: previous,
pub fn new(secondary: bool) -> CmdNextSubpass {
CmdNextSubpass {
contents: if secondary { vk::SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS }
else { vk::SUBPASS_CONTENTS_INLINE },
})
}
}
unsafe impl<L> CommandsList for CmdNextSubpass<L> where L: CommandsList {
#[inline]
fn append<'a>(&'a self, builder: &mut CommandsListSink<'a>) {
self.previous.append(builder);
builder.add_command(Box::new(move |raw: &mut RawCommandBufferPrototype| {
unsafe {
let vk = raw.device.pointers();
let cmd = raw.command_buffer.clone().take().unwrap();
vk.CmdNextSubpass(cmd, self.contents);
}
}));
}
}
/// Error that can happen when creating a `CmdNextSubpass`.
#[derive(Debug, Copy, Clone)]
pub enum CmdNextSubpassError {
/// It's not possible to go to the next subpass if none are remaining.
NoSubpassRemaining,
}
impl error::Error for CmdNextSubpassError {
#[inline]
fn description(&self) -> &str {
match *self {
CmdNextSubpassError::NoSubpassRemaining => {
"it's not possible to go to the next subpass if none are remaining"
},
else { vk::SUBPASS_CONTENTS_INLINE }
}
}
}
impl fmt::Display for CmdNextSubpassError {
unsafe impl<'a, P> AddCommand<&'a CmdNextSubpass> for UnsafeCommandBufferBuilder<P>
where P: CommandPool
{
type Out = UnsafeCommandBufferBuilder<P>;
#[inline]
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(fmt, "{}", error::Error::description(self))
fn add(self, command: &'a CmdNextSubpass) -> Self::Out {
unsafe {
let vk = self.device().pointers();
let cmd = self.internal_object();
vk.CmdNextSubpass(cmd, command.contents);
}
self
}
}

View File

@ -1,257 +1,266 @@
// Copyright (c) 2016 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use std::marker::PhantomData;
use std::ops::Range;
use std::ptr;
use std::u32;
use smallvec::SmallVec;
use buffer::Buffer;
use buffer::BufferInner;
use command_buffer::CommandsListSink;
use command_buffer::RawCommandBufferPrototype;
use image::Image;
use image::Layout;
use sync::AccessFlagBits;
use sync::PipelineStages;
use VulkanObject;
use VulkanPointers;
use vk;
/// Prototype for a pipeline barrier that's going to be added to a command buffer builder.
///
/// Note: we use a builder-like API here so that users can pass multiple buffers or images of
/// multiple different types. Doing so with a single function would be very tedious in terms of
/// API.
pub struct PipelineBarrierBuilder<'a> {
src_stage_mask: vk::PipelineStageFlags,
dst_stage_mask: vk::PipelineStageFlags,
dependency_flags: vk::DependencyFlags,
memory_barriers: SmallVec<[vk::MemoryBarrier; 2]>,
buffer_barriers: SmallVec<[vk::BufferMemoryBarrier; 8]>,
image_barriers: SmallVec<[vk::ImageMemoryBarrier; 8]>,
marker: PhantomData<&'a ()>,
}
impl<'a> PipelineBarrierBuilder<'a> {
#[inline]
pub fn new() -> PipelineBarrierBuilder<'a> {
PipelineBarrierBuilder {
src_stage_mask: 0,
dst_stage_mask: 0,
dependency_flags: vk::DEPENDENCY_BY_REGION_BIT,
memory_barriers: SmallVec::new(),
buffer_barriers: SmallVec::new(),
image_barriers: SmallVec::new(),
marker: PhantomData,
}
}
/// Returns true if no barrier or execution dependency has been added yet.
#[inline]
pub fn is_empty(&self) -> bool {
self.src_stage_mask == 0 || self.dst_stage_mask == 0
}
/// Adds a pipeline barrier to the command buffer.
///
/// This function itself is not unsafe, but creating a pipeline barrier builder is.
#[inline]
pub fn append_to(self, builder: &mut CommandsListSink<'a>) {
// If barrier is empty, don't do anything.
if self.src_stage_mask == 0 || self.dst_stage_mask == 0 {
debug_assert!(self.src_stage_mask == 0 && self.dst_stage_mask == 0);
debug_assert!(self.memory_barriers.is_empty());
debug_assert!(self.buffer_barriers.is_empty());
debug_assert!(self.image_barriers.is_empty());
return;
}
builder.add_command(Box::new(move |raw: &mut RawCommandBufferPrototype| {
unsafe {
let vk = raw.device.pointers();
let cmd = raw.command_buffer.clone().take().unwrap();
vk.CmdPipelineBarrier(cmd, self.src_stage_mask, self.dst_stage_mask,
self.dependency_flags, self.memory_barriers.len() as u32,
self.memory_barriers.as_ptr(),
self.buffer_barriers.len() as u32,
self.buffer_barriers.as_ptr(),
self.image_barriers.len() as u32,
self.image_barriers.as_ptr());
}
}));
}
/// Merges another pipeline barrier builder into this one.
#[inline]
pub fn merge(&mut self, other: PipelineBarrierBuilder<'a>) {
self.src_stage_mask |= other.src_stage_mask;
self.dst_stage_mask |= other.dst_stage_mask;
self.dependency_flags &= other.dependency_flags;
self.memory_barriers.extend(other.memory_barriers.into_iter());
self.buffer_barriers.extend(other.buffer_barriers.into_iter());
self.image_barriers.extend(other.image_barriers.into_iter());
}
/// Adds an execution dependency. This means that all the stages in `source` of the previous
/// commands must finish before any of the stages in `dest` of the following commands can start.
///
/// # Safety
///
/// - If the pipeline stages include geometry or tessellation stages, then the corresponding
/// features must have been enabled.
/// - There are certain rules regarding the pipeline barriers inside render passes.
///
#[inline]
pub unsafe fn add_execution_dependency(&mut self, source: PipelineStages, dest: PipelineStages,
by_region: bool)
{
if !by_region {
self.dependency_flags = 0;
}
self.src_stage_mask |= source.into();
self.dst_stage_mask |= dest.into();
}
/// Adds a memory barrier. This means that all the memory writes by the given source stages
/// for the given source accesses must be visible to the given dest stages for the given dest
/// accesses.
///
/// Also adds an execution dependency.
///
/// # Safety
///
/// - If the pipeline stages include geometry or tessellation stages, then the corresponding
/// features must have been enabled.
/// - There are certain rules regarding the pipeline barriers inside render passes.
///
pub unsafe fn add_memory_barrier(&mut self, source_stage: PipelineStages,
source_access: AccessFlagBits, dest_stage: PipelineStages,
dest_access: AccessFlagBits, by_region: bool)
{
self.add_execution_dependency(source_stage, dest_stage, by_region);
self.memory_barriers.push(vk::MemoryBarrier {
sType: vk::STRUCTURE_TYPE_MEMORY_BARRIER,
pNext: ptr::null(),
srcAccessMask: source_access.into(),
dstAccessMask: dest_access.into(),
});
}
/// Adds a buffer memory barrier. This means that all the memory writes to the given buffer by
/// the given source stages for the given source accesses must be visible to the given dest
/// stages for the given dest accesses.
///
/// Also adds an execution dependency.
///
/// Also allows transferring buffer ownership between queues.
///
/// # Safety
///
/// - If the pipeline stages include geometry or tessellation stages, then the corresponding
/// features must have been enabled.
/// - There are certain rules regarding the pipeline barriers inside render passes.
/// - The buffer must be alive for at least as long as the command buffer to which this barrier
/// is added.
/// - Queue ownership transfers must be correct.
///
pub unsafe fn add_buffer_memory_barrier<B: ?Sized>
(&mut self, buffer: &'a B, source_stage: PipelineStages,
source_access: AccessFlagBits, dest_stage: PipelineStages,
dest_access: AccessFlagBits, by_region: bool,
queue_transfer: Option<(u32, u32)>, offset: usize, size: usize)
where B: Buffer
{
self.add_execution_dependency(source_stage, dest_stage, by_region);
debug_assert!(size <= buffer.size());
let BufferInner { buffer, offset: org_offset } = buffer.inner();
let offset = offset + org_offset;
let (src_queue, dest_queue) = if let Some((src_queue, dest_queue)) = queue_transfer {
(src_queue, dest_queue)
} else {
(vk::QUEUE_FAMILY_IGNORED, vk::QUEUE_FAMILY_IGNORED)
};
self.buffer_barriers.push(vk::BufferMemoryBarrier {
sType: vk::STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
pNext: ptr::null(),
srcAccessMask: source_access.into(),
dstAccessMask: dest_access.into(),
srcQueueFamilyIndex: src_queue,
dstQueueFamilyIndex: dest_queue,
buffer: buffer.internal_object(),
offset: offset as vk::DeviceSize,
size: size as vk::DeviceSize,
});
}
/// Adds an image memory barrier. This is the equivalent of `add_buffer_memory_barrier` but
/// for images.
///
/// In addition to transferring image ownership between queues, it also allows changing the
/// layout of images.
///
/// # Safety
///
/// - If the pipeline stages include geometry or tessellation stages, then the corresponding
/// features must have been enabled.
/// - There are certain rules regarding the pipeline barriers inside render passes.
/// - The image must be alive for at least as long as the command buffer to which this barrier
/// is added.
/// - Queue ownership transfers must be correct.
/// - Image layout transitions must be correct.
/// - Access flags must be compatible with the image usage flags passed at image creation.
///
pub unsafe fn add_image_memory_barrier<I: ?Sized>(&mut self, image: &'a I, mipmaps: Range<u32>,
layers: Range<u32>, source_stage: PipelineStages, source_access: AccessFlagBits,
dest_stage: PipelineStages, dest_access: AccessFlagBits, by_region: bool,
queue_transfer: Option<(u32, u32)>, current_layout: Layout, new_layout: Layout)
where I: Image
{
self.add_execution_dependency(source_stage, dest_stage, by_region);
debug_assert!(mipmaps.start < mipmaps.end);
// TODO: debug_assert!(mipmaps.end <= image.mipmap_levels());
debug_assert!(layers.start < layers.end);
debug_assert!(layers.end <= image.dimensions().array_layers());
let (src_queue, dest_queue) = if let Some((src_queue, dest_queue)) = queue_transfer {
(src_queue, dest_queue)
} else {
(vk::QUEUE_FAMILY_IGNORED, vk::QUEUE_FAMILY_IGNORED)
};
self.image_barriers.push(vk::ImageMemoryBarrier {
sType: vk::STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
pNext: ptr::null(),
srcAccessMask: source_access.into(),
dstAccessMask: dest_access.into(),
oldLayout: current_layout as u32,
newLayout: new_layout as u32,
srcQueueFamilyIndex: src_queue,
dstQueueFamilyIndex: dest_queue,
image: image.inner().internal_object(),
subresourceRange: vk::ImageSubresourceRange {
aspectMask: 1 | 2 | 4 | 8, // FIXME: wrong
baseMipLevel: mipmaps.start,
levelCount: mipmaps.end - mipmaps.start,
baseArrayLayer: layers.start,
layerCount: layers.end - layers.start,
},
});
}
}
// Copyright (c) 2016 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use std::marker::PhantomData;
use std::ops::Range;
use std::ptr;
use std::u32;
use smallvec::SmallVec;
use buffer::Buffer;
use buffer::BufferInner;
use command_buffer::cb::AddCommand;
use command_buffer::cb::UnsafeCommandBufferBuilder;
use command_buffer::pool::CommandPool;
use image::Image;
use image::Layout;
use sync::AccessFlagBits;
use sync::PipelineStages;
use VulkanObject;
use VulkanPointers;
use vk;
/// Command that adds a pipeline barrier to a command buffer builder.
///
/// A pipeline barrier is a low-level system-ish command that is often necessary for safety. By
/// default all commands that you add to a command buffer can potentially run simultaneously.
/// Adding a pipeline barrier separates commands before the barrier from commands after the barrier
/// and prevents them from running simultaneously.
///
/// Please take a look at the Vulkan specifications for more information. Pipeline barriers are a
/// complex topic and explaining them in this documentation would be redundant.
///
/// > **Note**: We use a builder-like API here so that users can pass multiple buffers or images of
/// > multiple different types. Doing so with a single function would be very tedious in terms of
/// > API.
pub struct CmdPipelineBarrier<'a> {
src_stage_mask: vk::PipelineStageFlags,
dst_stage_mask: vk::PipelineStageFlags,
dependency_flags: vk::DependencyFlags,
memory_barriers: SmallVec<[vk::MemoryBarrier; 2]>,
buffer_barriers: SmallVec<[vk::BufferMemoryBarrier; 8]>,
image_barriers: SmallVec<[vk::ImageMemoryBarrier; 8]>,
marker: PhantomData<&'a ()>,
}
impl<'a> CmdPipelineBarrier<'a> {
/// Creates a new empty pipeline barrier command.
#[inline]
pub fn new() -> CmdPipelineBarrier<'a> {
CmdPipelineBarrier {
src_stage_mask: 0,
dst_stage_mask: 0,
dependency_flags: vk::DEPENDENCY_BY_REGION_BIT,
memory_barriers: SmallVec::new(),
buffer_barriers: SmallVec::new(),
image_barriers: SmallVec::new(),
marker: PhantomData,
}
}
/// Returns true if no barrier or execution dependency has been added yet.
#[inline]
pub fn is_empty(&self) -> bool {
self.src_stage_mask == 0 || self.dst_stage_mask == 0
}
/// Merges another pipeline barrier command into this one.
#[inline]
pub fn merge(&mut self, other: CmdPipelineBarrier<'a>) {
self.src_stage_mask |= other.src_stage_mask;
self.dst_stage_mask |= other.dst_stage_mask;
self.dependency_flags &= other.dependency_flags;
self.memory_barriers.extend(other.memory_barriers.into_iter());
self.buffer_barriers.extend(other.buffer_barriers.into_iter());
self.image_barriers.extend(other.image_barriers.into_iter());
}
/// Adds an execution dependency. This means that all the stages in `source` of the previous
/// commands must finish before any of the stages in `dest` of the following commands can start.
///
/// # Safety
///
/// - If the pipeline stages include geometry or tessellation stages, then the corresponding
/// features must have been enabled in the device.
/// - There are certain rules regarding the pipeline barriers inside render passes.
///
#[inline]
pub unsafe fn add_execution_dependency(&mut self, source: PipelineStages, dest: PipelineStages,
by_region: bool)
{
if !by_region {
self.dependency_flags = 0;
}
self.src_stage_mask |= source.into();
self.dst_stage_mask |= dest.into();
}
/// Adds a memory barrier. This means that all the memory writes by the given source stages
/// for the given source accesses must be visible to the given dest stages for the given dest
/// accesses.
///
/// Also adds an execution dependency similar to `add_execution_dependency`.
///
/// # Safety
///
/// - Same as `add_execution_dependency`.
///
pub unsafe fn add_memory_barrier(&mut self, source_stage: PipelineStages,
source_access: AccessFlagBits, dest_stage: PipelineStages,
dest_access: AccessFlagBits, by_region: bool)
{
self.add_execution_dependency(source_stage, dest_stage, by_region);
self.memory_barriers.push(vk::MemoryBarrier {
sType: vk::STRUCTURE_TYPE_MEMORY_BARRIER,
pNext: ptr::null(),
srcAccessMask: source_access.into(),
dstAccessMask: dest_access.into(),
});
}
/// Adds a buffer memory barrier. This means that all the memory writes to the given buffer by
/// the given source stages for the given source accesses must be visible to the given dest
/// stages for the given dest accesses.
///
/// Also adds an execution dependency similar to `add_execution_dependency`.
///
/// Also allows transferring buffer ownership between queues.
///
/// # Safety
///
/// - Same as `add_execution_dependency`.
/// - The buffer must be alive for at least as long as the command buffer to which this barrier
/// is added.
/// - Queue ownership transfers must be correct.
///
pub unsafe fn add_buffer_memory_barrier<B: ?Sized>
(&mut self, buffer: &'a B, source_stage: PipelineStages,
source_access: AccessFlagBits, dest_stage: PipelineStages,
dest_access: AccessFlagBits, by_region: bool,
queue_transfer: Option<(u32, u32)>, offset: usize, size: usize)
where B: Buffer
{
self.add_execution_dependency(source_stage, dest_stage, by_region);
debug_assert!(size <= buffer.size());
let BufferInner { buffer, offset: org_offset } = buffer.inner();
let offset = offset + org_offset;
let (src_queue, dest_queue) = if let Some((src_queue, dest_queue)) = queue_transfer {
(src_queue, dest_queue)
} else {
(vk::QUEUE_FAMILY_IGNORED, vk::QUEUE_FAMILY_IGNORED)
};
self.buffer_barriers.push(vk::BufferMemoryBarrier {
sType: vk::STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
pNext: ptr::null(),
srcAccessMask: source_access.into(),
dstAccessMask: dest_access.into(),
srcQueueFamilyIndex: src_queue,
dstQueueFamilyIndex: dest_queue,
buffer: buffer.internal_object(),
offset: offset as vk::DeviceSize,
size: size as vk::DeviceSize,
});
}
/// Adds an image memory barrier. This is the equivalent of `add_buffer_memory_barrier` but
/// for images.
///
/// In addition to transferring image ownership between queues, it also allows changing the
/// layout of images.
///
/// Also adds an execution dependency similar to `add_execution_dependency`.
///
/// # Safety
///
/// - Same as `add_execution_dependency`.
/// - The image must be alive for at least as long as the command buffer to which this barrier
/// is added.
/// - Queue ownership transfers must be correct.
/// - Image layout transitions must be correct.
/// - Access flags must be compatible with the image usage flags passed at image creation.
///
pub unsafe fn add_image_memory_barrier<I: ?Sized>(&mut self, image: &'a I, mipmaps: Range<u32>,
layers: Range<u32>, source_stage: PipelineStages, source_access: AccessFlagBits,
dest_stage: PipelineStages, dest_access: AccessFlagBits, by_region: bool,
queue_transfer: Option<(u32, u32)>, current_layout: Layout, new_layout: Layout)
where I: Image
{
self.add_execution_dependency(source_stage, dest_stage, by_region);
debug_assert!(mipmaps.start < mipmaps.end);
// TODO: debug_assert!(mipmaps.end <= image.mipmap_levels());
debug_assert!(layers.start < layers.end);
debug_assert!(layers.end <= image.dimensions().array_layers());
let (src_queue, dest_queue) = if let Some((src_queue, dest_queue)) = queue_transfer {
(src_queue, dest_queue)
} else {
(vk::QUEUE_FAMILY_IGNORED, vk::QUEUE_FAMILY_IGNORED)
};
self.image_barriers.push(vk::ImageMemoryBarrier {
sType: vk::STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
pNext: ptr::null(),
srcAccessMask: source_access.into(),
dstAccessMask: dest_access.into(),
oldLayout: current_layout as u32,
newLayout: new_layout as u32,
srcQueueFamilyIndex: src_queue,
dstQueueFamilyIndex: dest_queue,
image: image.inner().internal_object(),
subresourceRange: vk::ImageSubresourceRange {
aspectMask: 1 | 2 | 4 | 8, // FIXME: wrong
baseMipLevel: mipmaps.start,
levelCount: mipmaps.end - mipmaps.start,
baseArrayLayer: layers.start,
layerCount: layers.end - layers.start,
},
});
}
}
unsafe impl<'a, P> AddCommand<&'a CmdPipelineBarrier<'a>> for UnsafeCommandBufferBuilder<P>
where P: CommandPool
{
type Out = UnsafeCommandBufferBuilder<P>;
#[inline]
fn add(self, command: &'a CmdPipelineBarrier<'a>) -> Self::Out {
// If barrier is empty, don't do anything.
if command.src_stage_mask == 0 || command.dst_stage_mask == 0 {
debug_assert!(command.src_stage_mask == 0 && command.dst_stage_mask == 0);
debug_assert!(command.memory_barriers.is_empty());
debug_assert!(command.buffer_barriers.is_empty());
debug_assert!(command.image_barriers.is_empty());
return self;
}
unsafe {
let vk = self.device().pointers();
let cmd = self.internal_object();
vk.CmdPipelineBarrier(cmd, command.src_stage_mask, command.dst_stage_mask,
command.dependency_flags, command.memory_barriers.len() as u32,
command.memory_barriers.as_ptr(),
command.buffer_barriers.len() as u32,
command.buffer_barriers.as_ptr(),
command.image_barriers.len() as u32,
command.image_barriers.as_ptr());
}
self
}
}
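As a hedged sketch of how `CmdPipelineBarrier` composes with the unsafe builder (assuming an existing `builder: UnsafeCommandBufferBuilder<P>`; the stage and access flag construction follows the pattern used elsewhere in this commit):

let src_stages = PipelineStages { transfer: true, .. PipelineStages::none() };
let src_access = AccessFlagBits { transfer_write: true, .. AccessFlagBits::none() };
let dst_stages = PipelineStages { vertex_shader: true, .. PipelineStages::none() };
let dst_access = AccessFlagBits { shader_read: true, .. AccessFlagBits::none() };

let mut barrier = CmdPipelineBarrier::new();
unsafe {
    // A global memory barrier; this also records the execution dependency.
    barrier.add_memory_barrier(src_stages, src_access, dst_stages, dst_access, false);
}

// Adding an empty barrier is a no-op, so this check is optional.
let builder = if barrier.is_empty() { builder } else { builder.add(&barrier) };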

View File

@ -11,9 +11,9 @@ use std::error;
use std::fmt;
use std::sync::Arc;
use command_buffer::RawCommandBufferPrototype;
use command_buffer::CommandsList;
use command_buffer::CommandsListSink;
use command_buffer::cb::AddCommand;
use command_buffer::cb::UnsafeCommandBufferBuilder;
use command_buffer::pool::CommandPool;
use descriptor::pipeline_layout::PipelineLayoutRef;
use descriptor::pipeline_layout::PipelineLayoutPushConstantsCompatible;
use device::Device;
@ -21,9 +21,7 @@ use VulkanObject;
use VulkanPointers;
/// Wraps around a commands list and adds at the end of it a command that updates push constants.
pub struct CmdPushConstants<L, Pc, Pl> where L: CommandsList {
// Parent commands list.
previous: L,
pub struct CmdPushConstants<Pc, Pl> {
// The device of the pipeline object, so that we can compare it with the command buffer's
// device.
device: Arc<Device>,
@ -33,15 +31,15 @@ pub struct CmdPushConstants<L, Pc, Pl> where L: CommandsList {
pipeline_layout: Pl,
}
impl<L, Pc, Pl> CmdPushConstants<L, Pc, Pl>
where L: CommandsList, Pl: PipelineLayoutRef
impl<Pc, Pl> CmdPushConstants<Pc, Pl>
where Pl: PipelineLayoutRef
{
/// Builds the command.
///
/// Returns an error if the push constants are not compatible with the pipeline layout.
#[inline]
pub fn new(previous: L, pipeline_layout: Pl, push_constants: Pc)
-> Result<CmdPushConstants<L, Pc, Pl>, CmdPushConstantsError>
pub fn new(pipeline_layout: Pl, push_constants: Pc)
-> Result<CmdPushConstants<Pc, Pl>, CmdPushConstantsError>
{
if !PipelineLayoutPushConstantsCompatible::is_compatible(pipeline_layout.desc(), &push_constants) {
return Err(CmdPushConstantsError::IncompatibleData);
@ -50,7 +48,6 @@ impl<L, Pc, Pl> CmdPushConstants<L, Pc, Pl>
let device = pipeline_layout.device().clone();
Ok(CmdPushConstants {
previous: previous,
device: device,
push_constants: push_constants,
pipeline_layout: pipeline_layout,
@ -58,37 +55,36 @@ impl<L, Pc, Pl> CmdPushConstants<L, Pc, Pl>
}
}
unsafe impl<L, Pc, Pl> CommandsList for CmdPushConstants<L, Pc, Pl>
where L: CommandsList, Pl: PipelineLayoutRef
unsafe impl<'a, P, Pc, Pl> AddCommand<&'a CmdPushConstants<Pc, Pl>> for UnsafeCommandBufferBuilder<P>
where P: CommandPool,
Pl: PipelineLayoutRef
{
type Out = UnsafeCommandBufferBuilder<P>;
#[inline]
fn append<'a>(&'a self, builder: &mut CommandsListSink<'a>) {
self.previous.append(builder);
fn add(self, command: &'a CmdPushConstants<Pc, Pl>) -> Self::Out {
unsafe {
let vk = self.device().pointers();
let cmd = self.internal_object();
assert_eq!(self.device.internal_object(), builder.device().internal_object());
let data_raw = &command.push_constants as *const Pc as *const u8;
for num_range in 0 .. command.pipeline_layout.desc().num_push_constants_ranges() {
let range = match command.pipeline_layout.desc().push_constants_range(num_range) {
Some(r) => r,
None => continue
};
builder.add_command(Box::new(move |raw: &mut RawCommandBufferPrototype| {
unsafe {
let vk = raw.device.pointers();
let cmd = raw.command_buffer.clone().take().unwrap();
debug_assert_eq!(range.offset % 4, 0);
debug_assert_eq!(range.size % 4, 0);
let data_raw = &self.push_constants as *const Pc as *const u8;
for num_range in 0 .. self.pipeline_layout.desc().num_push_constants_ranges() {
let range = match self.pipeline_layout.desc().push_constants_range(num_range) {
Some(r) => r,
None => continue
};
debug_assert_eq!(range.offset % 4, 0);
debug_assert_eq!(range.size % 4, 0);
vk.CmdPushConstants(cmd, self.pipeline_layout.sys().internal_object(),
range.stages.into(), range.offset as u32, range.size as u32,
data_raw.offset(range.offset as isize) as *const _);
}
vk.CmdPushConstants(cmd, command.pipeline_layout.sys().internal_object(),
range.stages.into(), range.offset as u32, range.size as u32,
data_raw.offset(range.offset as isize) as *const _);
}
}));
}
self
}
}
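A hedged usage sketch for the reworked `CmdPushConstants`; the `pipeline_layout`, the `builder`, and the `MyPushConstants` struct are illustrative assumptions, not part of this commit:

// The push constant data should be plain-old-data matching the layout's ranges.
#[repr(C)]
#[derive(Copy, Clone)]
struct MyPushConstants {
    scale: f32,
    offset: [f32; 2],
}

let constants = MyPushConstants { scale: 1.0, offset: [0.0, 0.0] };
// Fails if the data is not compatible with the pipeline layout's push constant ranges.
let cmd = CmdPushConstants::new(pipeline_layout, constants).unwrap();
let builder = builder.add(&cmd);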

View File

@ -10,10 +10,10 @@
use std::sync::Arc;
use smallvec::SmallVec;
use command_buffer::RawCommandBufferPrototype;
use command_buffer::DynamicState;
use command_buffer::CommandsList;
use command_buffer::CommandsListSink;
use command_buffer::cb::AddCommand;
use command_buffer::cb::UnsafeCommandBufferBuilder;
use command_buffer::pool::CommandPool;
use device::Device;
use VulkanObject;
use VulkanPointers;
@ -23,67 +23,70 @@ use VulkanPointers;
///
/// Only the values that are `Some` are touched. Parameters that are `None` are left untouched.
/// A state is not modified if the same state is already current.
pub struct CmdSetState<L> where L: CommandsList {
// Parent commands list.
previous: L,
pub struct CmdSetState {
// The device.
device: Arc<Device>,
// The state to set.
dynamic_state: DynamicState,
}
impl<L> CmdSetState<L> where L: CommandsList {
impl CmdSetState {
/// Builds a command.
///
/// Since this command checks whether the dynamic state is supported by the device, you have
/// to pass the device as well when building the command.
// TODO: should check the limits and features of the device
pub fn new(previous: L, device: Arc<Device>, state: DynamicState) -> CmdSetState<L> {
pub fn new(device: Arc<Device>, state: DynamicState) -> CmdSetState {
CmdSetState {
previous: previous,
device: device,
dynamic_state: DynamicState {
// This constructor is explicitly laid out so that we don't forget to
// modify this code if we add a new member to `DynamicState`.
// modify the code of this module if we add a new member to `DynamicState`.
line_width: state.line_width,
viewports: state.viewports,
scissors: state.scissors,
},
}
}
}
unsafe impl<L> CommandsList for CmdSetState<L> where L: CommandsList {
#[inline]
fn append<'a>(&'a self, builder: &mut CommandsListSink<'a>) {
self.previous.append(builder);
pub fn device(&self) -> &Arc<Device> {
&self.device
}
assert_eq!(self.device.internal_object(), builder.device().internal_object());
builder.add_command(Box::new(move |raw: &mut RawCommandBufferPrototype| {
unsafe {
let vk = raw.device.pointers();
let cmd = raw.command_buffer.clone().take().unwrap();
if let Some(line_width) = self.dynamic_state.line_width {
if raw.current_state.line_width != Some(line_width) {
vk.CmdSetLineWidth(cmd, line_width);
raw.current_state.line_width = Some(line_width);
}
}
if let Some(ref viewports) = self.dynamic_state.viewports {
// TODO: cache state
let viewports = viewports.iter().map(|v| v.clone().into()).collect::<SmallVec<[_; 16]>>();
vk.CmdSetViewport(cmd, 0, viewports.len() as u32, viewports.as_ptr());
}
if let Some(ref scissors) = self.dynamic_state.scissors {
// TODO: cache state
let scissors = scissors.iter().map(|v| v.clone().into()).collect::<SmallVec<[_; 16]>>();
vk.CmdSetScissor(cmd, 0, scissors.len() as u32, scissors.as_ptr());
}
}
}));
/// Returns the state that is going to be set.
#[inline]
pub fn state(&self) -> &DynamicState {
&self.dynamic_state
}
}
unsafe impl<'a, P> AddCommand<&'a CmdSetState> for UnsafeCommandBufferBuilder<P>
where P: CommandPool
{
type Out = UnsafeCommandBufferBuilder<P>;
#[inline]
fn add(self, command: &'a CmdSetState) -> Self::Out {
unsafe {
let vk = self.device().pointers();
let cmd = self.internal_object();
if let Some(line_width) = command.dynamic_state.line_width {
vk.CmdSetLineWidth(cmd, line_width);
}
if let Some(ref viewports) = command.dynamic_state.viewports {
let viewports = viewports.iter().map(|v| v.clone().into()).collect::<SmallVec<[_; 16]>>();
vk.CmdSetViewport(cmd, 0, viewports.len() as u32, viewports.as_ptr());
}
if let Some(ref scissors) = command.dynamic_state.scissors {
let scissors = scissors.iter().map(|v| v.clone().into()).collect::<SmallVec<[_; 16]>>();
vk.CmdSetScissor(cmd, 0, scissors.len() as u32, scissors.as_ptr());
}
}
self
}
}
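A minimal sketch of setting dynamic state through the new free-standing command, assuming `device: Arc<Device>` and an unsafe `builder` already exist:

// Only the line width is overridden; viewports and scissors stay untouched.
let state = DynamicState { line_width: Some(2.0), .. DynamicState::none() };
let cmd = CmdSetState::new(device.clone(), state);
let builder = builder.add(&cmd);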

View File

@ -12,22 +12,17 @@ use std::fmt;
use buffer::Buffer;
use buffer::BufferInner;
use command_buffer::RawCommandBufferPrototype;
use command_buffer::cmd::CommandsListPossibleOutsideRenderPass;
use command_buffer::CommandsList;
use command_buffer::CommandsListSink;
use sync::AccessFlagBits;
use sync::PipelineStages;
use command_buffer::cb::AddCommand;
use command_buffer::cb::UnsafeCommandBufferBuilder;
use command_buffer::pool::CommandPool;
use VulkanObject;
use VulkanPointers;
use vk;
/// Wraps around a commands list and adds an update buffer command at the end of it.
pub struct CmdUpdateBuffer<'a, L, B, D: ?Sized>
where B: Buffer, L: CommandsList, D: 'static
/// A buffer update command.
pub struct CmdUpdateBuffer<'a, B, D: ?Sized>
where D: 'a
{
// Parent commands list.
previous: L,
// The buffer to update.
buffer: B,
// Raw buffer handle.
@ -40,9 +35,8 @@ pub struct CmdUpdateBuffer<'a, L, B, D: ?Sized>
data: &'a D,
}
impl<'a, L, B, D: ?Sized> CmdUpdateBuffer<'a, L, B, D>
impl<'a, B, D: ?Sized> CmdUpdateBuffer<'a, B, D>
where B: Buffer,
L: CommandsList + CommandsListPossibleOutsideRenderPass,
D: Copy + 'static,
{
/// Builds a command that writes data to a buffer.
@ -52,11 +46,7 @@ impl<'a, L, B, D: ?Sized> CmdUpdateBuffer<'a, L, B, D>
///
/// The size of the modification must not exceed 65536 bytes. The offset and size must be
/// multiples of four.
pub fn new(previous: L, buffer: B, data: &'a D)
-> Result<CmdUpdateBuffer<'a, L, B, D>, CmdUpdateBufferError>
{
assert!(previous.is_outside_render_pass()); // TODO: error
pub fn new(buffer: B, data: &'a D) -> Result<CmdUpdateBuffer<'a, B, D>, CmdUpdateBufferError> {
let size = buffer.size();
let (buffer_handle, offset) = {
@ -79,7 +69,6 @@ impl<'a, L, B, D: ?Sized> CmdUpdateBuffer<'a, L, B, D>
}
Ok(CmdUpdateBuffer {
previous: previous,
buffer: buffer,
buffer_handle: buffer_handle,
offset: offset as vk::DeviceSize,
@ -89,45 +78,23 @@ impl<'a, L, B, D: ?Sized> CmdUpdateBuffer<'a, L, B, D>
}
}
unsafe impl<'d, L, B, D: ?Sized> CommandsList for CmdUpdateBuffer<'d, L, B, D>
unsafe impl<'a, 'd, P, B, D: ?Sized> AddCommand<&'a CmdUpdateBuffer<'d, B, D>> for UnsafeCommandBufferBuilder<P>
where B: Buffer,
L: CommandsList,
D: Copy + 'static,
P: CommandPool,
{
type Out = UnsafeCommandBufferBuilder<P>;
#[inline]
fn append<'a>(&'a self, builder: &mut CommandsListSink<'a>) {
self.previous.append(builder);
assert_eq!(self.buffer.inner().buffer.device().internal_object(),
builder.device().internal_object());
{
let stages = PipelineStages { transfer: true, .. PipelineStages::none() };
let access = AccessFlagBits { transfer_write: true, .. AccessFlagBits::none() };
builder.add_buffer_transition(&self.buffer, 0, self.buffer.size(), true,
stages, access);
fn add(self, command: &'a CmdUpdateBuffer<'d, B, D>) -> Self::Out {
unsafe {
let vk = self.device().pointers();
let cmd = self.internal_object();
vk.CmdUpdateBuffer(cmd, command.buffer_handle, command.offset, command.size,
command.data as *const D as *const _);
}
builder.add_command(Box::new(move |raw: &mut RawCommandBufferPrototype| {
unsafe {
let vk = raw.device.pointers();
let cmd = raw.command_buffer.clone().take().unwrap();
vk.CmdUpdateBuffer(cmd, self.buffer_handle, self.offset, self.size,
self.data as *const D as *const _);
}
}));
}
}
unsafe impl<'a, L, B, D: ?Sized> CommandsListPossibleOutsideRenderPass
for CmdUpdateBuffer<'a, L, B, D>
where B: Buffer,
L: CommandsList,
D: Copy + 'static,
{
#[inline]
fn is_outside_render_pass(&self) -> bool {
true
self
}
}
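A hedged sketch of `CmdUpdateBuffer`, assuming `buffer` implements `Buffer` and is at least 16 bytes long and `builder` is an unsafe builder; as documented above, the data must stay within the 65536-byte limit and the offset and size must be multiples of four:

let data: [u32; 4] = [1, 2, 3, 4];
let cmd = CmdUpdateBuffer::new(buffer, &data).unwrap();
let builder = builder.add(&cmd);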

View File

@ -0,0 +1,112 @@
// Copyright (c) 2016 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use std::sync::Arc;
use buffer::Buffer;
use command_buffer::DynamicState;
use command_buffer::cb::AddCommand;
use command_buffer::cb::CommandBufferBuild;
use command_buffer::cmd;
use descriptor::descriptor_set::DescriptorSetsCollection;
use descriptor::PipelineLayoutRef;
use framebuffer::FramebufferRef;
use framebuffer::RenderPass;
use framebuffer::RenderPassClearValues;
use framebuffer::RenderPassRef;
use pipeline::GraphicsPipeline;
use pipeline::vertex::Source;
/// Helper trait that adds convenience methods for appending commands to a command buffer builder.
///
/// > **Note**: This trait is just a utility trait. Do not implement it yourself. Instead
/// > implement the `AddCommand` and `CommandBufferBuild` traits.
pub unsafe trait CommandBufferBuilder {
/// Adds a command that writes the content of a buffer.
///
/// This function is similar to the `memset` function in C. The `data` parameter is a number
/// that will be repeatedly written through the entire buffer.
///
/// > **Note**: This function is technically safe because buffers can only contain integers or
/// > floating point numbers, which are always valid whatever their memory representation is.
/// > But unless your buffer actually contains only 32-bits integers, you are encouraged to use
/// > this function only for zeroing the content of a buffer by passing `0` for the data.
// TODO: not safe because of signalling NaNs
#[inline]
fn fill_buffer<B, O>(self, buffer: B, data: u32) -> Result<O, cmd::CmdFillBufferError>
where Self: Sized + AddCommand<cmd::CmdFillBuffer<B>, Out = O>,
B: Buffer
{
let cmd = cmd::CmdFillBuffer::new(buffer, data)?;
Ok(self.add(cmd))
}
/// Adds a command that starts a render pass.
///
/// If `secondary` is true, then you will only be able to add secondary command buffers while
/// you're inside the first subpass of the render pass. If `secondary` is false, you will only
/// be able to add inline draw commands and not secondary command buffers.
///
/// You must call this before you can add draw commands.
#[inline]
fn begin_render_pass<F, C, O>(self, framebuffer: F, secondary: bool, clear_values: C)
-> O
where Self: Sized + AddCommand<cmd::CmdBeginRenderPass<Arc<RenderPass>, F>, Out = O>,
F: FramebufferRef,
<<F as FramebufferRef>::RenderPass as RenderPassRef>::Desc: RenderPassClearValues<C>
{
let cmd = cmd::CmdBeginRenderPass::new(framebuffer, secondary, clear_values);
self.add(cmd)
}
/// Adds a command that jumps to the next subpass of the current render pass.
#[inline]
fn next_subpass<O>(self, secondary: bool) -> O
where Self: Sized + AddCommand<cmd::CmdNextSubpass, Out = O>
{
let cmd = cmd::CmdNextSubpass::new(secondary);
self.add(cmd)
}
/// Adds a command that ends the current render pass.
///
/// This must be called after you went through all the subpasses and before you can build
/// the command buffer or add further commands.
#[inline]
fn end_render_pass<O>(self) -> O
where Self: Sized + AddCommand<cmd::CmdEndRenderPass, Out = O>
{
let cmd = cmd::CmdEndRenderPass::new();
self.add(cmd)
}
/// Adds a command that draws.
///
/// Can only be used from inside a render pass.
#[inline]
fn draw<Pv, Pl, Prp, S, Pc, V, O>(self, pipeline: Arc<GraphicsPipeline<Pv, Pl, Prp>>,
dynamic: DynamicState, vertices: V, sets: S,
push_constants: Pc) -> O
where Self: Sized + AddCommand<cmd::CmdDraw<V, Pv, Pl, Prp, S, Pc>, Out = O>,
Pl: PipelineLayoutRef,
S: DescriptorSetsCollection,
Pv: Source<V>
{
let cmd = cmd::CmdDraw::new(pipeline, dynamic, vertices, sets, push_constants);
self.add(cmd)
}
#[inline]
fn build<O>(self) -> O
where Self: Sized + CommandBufferBuild<Out = O>
{
CommandBufferBuild::build(self)
}
}
unsafe impl<T> CommandBufferBuilder for T {}
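Assuming the `AutoCommandBufferBuilder` from this commit implements `AddCommand` for `CmdFillBuffer` as well as `CommandBufferBuild`, the helper trait allows chains such as this sketch (`device`, `queue` and a `u32`-filled `buffer` are assumed to exist):

let cb = AutoCommandBufferBuilder::new(device.clone(), queue.family()).unwrap()
    // Zero the whole buffer, memset-style.
    .fill_buffer(buffer, 0).unwrap()
    .build();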

View File

@ -41,30 +41,22 @@
// API has several different command buffer wrappers, but they all use the same internal
// struct. The restrictions are enforced only in the public types.
pub use self::barrier::PipelineBarrierBuilder;
pub use self::cmd::empty;
pub use self::cmd::CommandsList;
pub use self::cmd::CommandsListSink;
pub use self::cmd::CommandsListSinkCaller;
pub use self::auto::AutoCommandBufferBuilder;
pub use self::submit::Submission;
pub use self::submit::Submit;
pub use self::submit::SubmitBuilder;
pub use self::submit::SubmitChain;
pub use self::helper_trait::CommandBufferBuilder;
use std::sync::Arc;
use std::marker::PhantomData;
use device::Device;
use pipeline::viewport::Viewport;
use pipeline::viewport::Scissor;
use vk;
pub mod cb;
pub mod cmd;
pub mod pool;
mod barrier;
mod auto;
mod helper_trait;
mod submit;
#[repr(C)]
@ -119,27 +111,3 @@ impl Default for DynamicState {
DynamicState::none()
}
}
pub unsafe trait SecondaryCommandBuffer {
// TODO: crappy API
fn inner(&self) -> vk::CommandBuffer;
// TODO: necessary only because inner() has a bad return type
fn device(&self) -> &Arc<Device>;
fn append<'a>(&'a self, builder: &mut CommandsListSink<'a>);
}
/// Opaque struct that contains a command buffer in construction. You cannot create a
/// `RawCommandBufferPrototype` yourself.
pub struct RawCommandBufferPrototype<'a> {
device: Arc<Device>,
command_buffer: Option<vk::CommandBuffer>,
current_state: DynamicState,
bound_graphics_pipeline: vk::Pipeline,
bound_compute_pipeline: vk::Pipeline,
bound_index_buffer: (vk::Buffer, vk::DeviceSize, vk::IndexType),
// Note: if fields are added here, don't forget to reset them
// when vkCmdExecuteCommands is called.
marker: PhantomData<&'a ()>,
}

View File

@ -15,7 +15,7 @@ use std::sync::Arc;
use std::time::Duration;
use smallvec::SmallVec;
use command_buffer::cb::UnsyncedCommandBuffer;
use command_buffer::cb::UnsafeCommandBuffer;
use command_buffer::pool::CommandPool;
use device::Device;
use device::Queue;
@ -247,8 +247,8 @@ impl<'a> SubmitBuilder<'a> {
/// that builds this `SubmitBuilder`. Consequently keeping the `Submit` object alive is enough
/// to guarantee that the command buffer is kept alive as well.
#[inline]
pub fn add_command_buffer<L, P>(self, command_buffer: &'a UnsyncedCommandBuffer<L, P>)
-> SubmitBuilder<'a>
pub fn add_command_buffer<P>(self, command_buffer: &'a UnsafeCommandBuffer<P>)
-> SubmitBuilder<'a>
where P: CommandPool
{
self.add_command_buffer_raw(command_buffer.internal_object())

View File

@ -7,11 +7,9 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use command_buffer::cmd::CommandsListSink;
use descriptor::descriptor::DescriptorDesc;
use descriptor::descriptor_set::DescriptorSet;
use descriptor::descriptor_set::DescriptorSetDesc;
use descriptor::descriptor_set::TrackedDescriptorSet;
use descriptor::descriptor_set::UnsafeDescriptorSet;
/// A collection of descriptor set objects.
@ -35,12 +33,6 @@ pub unsafe trait DescriptorSetsCollection {
fn descriptor(&self, set: usize, binding: usize) -> Option<DescriptorDesc>;
}
/// Extension trait for a descriptor sets collection so that it can be used with the standard
/// commands list interface.
pub unsafe trait TrackedDescriptorSetsCollection: DescriptorSetsCollection {
fn add_transition<'a>(&'a self, &mut CommandsListSink<'a>);
}
unsafe impl DescriptorSetsCollection for () {
#[inline]
fn num_sets(&self) -> usize {
@ -63,12 +55,6 @@ unsafe impl DescriptorSetsCollection for () {
}
}
unsafe impl TrackedDescriptorSetsCollection for () {
#[inline]
fn add_transition<'a>(&'a self, _: &mut CommandsListSink<'a>) {
}
}
unsafe impl<T> DescriptorSetsCollection for T
where T: DescriptorSet
{
@ -96,14 +82,6 @@ unsafe impl<T> DescriptorSetsCollection for T
}
}
// TODO: we can't be generic over the State because we get a conflicting implementation :-/
unsafe impl<T> TrackedDescriptorSetsCollection for T where T: TrackedDescriptorSet {
#[inline]
fn add_transition<'a>(&'a self, sink: &mut CommandsListSink<'a>) {
self.add_transition(sink);
}
}
macro_rules! impl_collection {
($first:ident $(, $others:ident)*) => (
unsafe impl<$first$(, $others)*> DescriptorSetsCollection for ($first, $($others),*)
@ -148,21 +126,6 @@ macro_rules! impl_collection {
}
}
unsafe impl<$first$(, $others)*> TrackedDescriptorSetsCollection for ($first, $($others),*)
where $first: TrackedDescriptorSet + DescriptorSetDesc /* TODO */
$(, $others: TrackedDescriptorSet + DescriptorSetDesc /* TODO */)*
{
#[inline]
fn add_transition<'a>(&'a self, sink: &mut CommandsListSink<'a>) {
#![allow(non_snake_case)]
let &(ref $first, $(ref $others),*) = self;
$first.add_transition(sink);
$(
$others.add_transition(sink);
)*
}
}
impl_collection!($($others),*);
);

View File

@ -37,11 +37,9 @@
use std::sync::Arc;
use command_buffer::cmd::CommandsListSink;
use descriptor::descriptor::DescriptorDesc;
pub use self::collection::DescriptorSetsCollection;
pub use self::collection::TrackedDescriptorSetsCollection;
pub use self::pool::DescriptorPool;
pub use self::pool::DescriptorPoolAlloc;
pub use self::pool::DescriptorPoolAllocError;
@ -110,25 +108,3 @@ unsafe impl<'a, T> DescriptorSetDesc for &'a T where T: 'a + DescriptorSetDesc {
(**self).desc()
}
}
// TODO: re-read docs
/// Extension trait for descriptor sets so that it can be used with the standard commands list
/// interface.
// TODO: is this used?
pub unsafe trait TrackedDescriptorSet: DescriptorSet {
fn add_transition<'a>(&'a self, &mut CommandsListSink<'a>);
}
unsafe impl<T> TrackedDescriptorSet for Arc<T> where T: TrackedDescriptorSet {
#[inline]
fn add_transition<'a>(&'a self, sink: &mut CommandsListSink<'a>) {
(**self).add_transition(sink);
}
}
unsafe impl<'r, T> TrackedDescriptorSet for &'r T where T: 'r + TrackedDescriptorSet {
#[inline]
fn add_transition<'a>(&'a self, sink: &mut CommandsListSink<'a>) {
(**self).add_transition(sink);
}
}

View File

@ -11,10 +11,8 @@ use std::sync::Arc;
use buffer::Buffer;
use buffer::BufferViewRef;
use command_buffer::cmd::CommandsListSink;
use descriptor::descriptor::DescriptorType;
use descriptor::descriptor_set::DescriptorSet;
use descriptor::descriptor_set::TrackedDescriptorSet;
use descriptor::descriptor_set::UnsafeDescriptorSetLayout;
use descriptor::descriptor_set::DescriptorPool;
use descriptor::descriptor_set::DescriptorPoolAlloc;
@ -65,16 +63,6 @@ unsafe impl<R, P> DescriptorSet for SimpleDescriptorSet<R, P> where P: Descripto
}
}
unsafe impl<R, P> TrackedDescriptorSet for SimpleDescriptorSet<R, P>
where R: SimpleDescriptorSetResourcesCollection,
P: DescriptorPool
{
#[inline]
fn add_transition<'a>(&'a self, sink: &mut CommandsListSink<'a>) {
self.resources.add_transition(sink)
}
}
/// Builds a descriptor set in the form of a `SimpleDescriptorSet` object.
// TODO: more doc
#[macro_export]
@ -217,6 +205,7 @@ unsafe impl<L, R, T> SimpleDescriptorSetBufferExt<L, R> for T
}
}
/*
/// Internal trait related to the `SimpleDescriptorSet` system.
pub unsafe trait SimpleDescriptorSetResourcesCollection {
#[inline]
@ -227,7 +216,7 @@ unsafe impl SimpleDescriptorSetResourcesCollection for () {
#[inline]
fn add_transition<'a>(&'a self, _: &mut CommandsListSink<'a>) {
}
}
}*/
/// Internal object related to the `SimpleDescriptorSet` system.
pub struct SimpleDescriptorSetBuf<B> {
@ -237,7 +226,7 @@ pub struct SimpleDescriptorSetBuf<B> {
access: AccessFlagBits,
}
unsafe impl<B> SimpleDescriptorSetResourcesCollection for SimpleDescriptorSetBuf<B>
/*unsafe impl<B> SimpleDescriptorSetResourcesCollection for SimpleDescriptorSetBuf<B>
where B: Buffer
{
#[inline]
@ -258,7 +247,7 @@ unsafe impl<B> SimpleDescriptorSetResourcesCollection for SimpleDescriptorSetBuf
sink.add_buffer_transition(&self.buffer, 0, self.buffer.size(), self.write, stages, access);
}
}
}*/
/// Internal object related to the `SimpleDescriptorSet` system.
pub struct SimpleDescriptorSetBufView<V> where V: BufferViewRef {
@ -268,7 +257,7 @@ pub struct SimpleDescriptorSetBufView<V> where V: BufferViewRef {
access: AccessFlagBits,
}
unsafe impl<V> SimpleDescriptorSetResourcesCollection for SimpleDescriptorSetBufView<V>
/*unsafe impl<V> SimpleDescriptorSetResourcesCollection for SimpleDescriptorSetBufView<V>
where V: BufferViewRef, V::Buffer: Buffer
{
#[inline]
@ -290,7 +279,7 @@ unsafe impl<V> SimpleDescriptorSetResourcesCollection for SimpleDescriptorSetBuf
sink.add_buffer_transition(self.view.view().buffer(), 0, self.view.view().buffer().size(),
self.write, stages, access);
}
}
}*/
/// Internal object related to the `SimpleDescriptorSet` system.
pub struct SimpleDescriptorSetImg<I> {
@ -305,7 +294,7 @@ pub struct SimpleDescriptorSetImg<I> {
access: AccessFlagBits,
}
unsafe impl<I> SimpleDescriptorSetResourcesCollection for SimpleDescriptorSetImg<I>
/*unsafe impl<I> SimpleDescriptorSetResourcesCollection for SimpleDescriptorSetImg<I>
where I: ImageView
{
#[inline]
@ -341,4 +330,4 @@ unsafe impl<A, B> SimpleDescriptorSetResourcesCollection for (A, B)
self.0.add_transition(sink);
self.1.add_transition(sink);
}
}
}*/

View File

@ -8,7 +8,6 @@
// according to those terms.
use std::cmp;
use command_buffer::cmd::CommandsListSink;
use image::ImageView;
use image::sys::UnsafeImageView;
//use sync::AccessFlagBits;
@ -27,8 +26,6 @@ pub unsafe trait AttachmentsList {
/// For example if one view is 256x256x2 and another one is 128x512x3, then this function
/// should return 128x256x2.
fn intersection_dimensions(&self) -> Option<[u32; 3]>;
fn add_transition<'a>(&'a self, sink: &mut CommandsListSink<'a>);
}
unsafe impl AttachmentsList for () {
@ -41,10 +38,6 @@ unsafe impl AttachmentsList for () {
fn intersection_dimensions(&self) -> Option<[u32; 3]> {
None
}
#[inline]
fn add_transition<'a>(&'a self, sink: &mut CommandsListSink<'a>) {
}
}
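For clarity, a small free-standing sketch of the element-wise minimum that `intersection_dimensions` is documented to compute (not part of this commit):

fn intersect_dims(a: [u32; 3], b: [u32; 3]) -> [u32; 3] {
    [a[0].min(b[0]), a[1].min(b[1]), a[2].min(b[2])]
}

// Matches the example in the doc comment above: 256x256x2 and 128x512x3 give 128x256x2.
assert_eq!(intersect_dims([256, 256, 2], [128, 512, 3]), [128, 256, 2]);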
macro_rules! impl_into_atch_list {
@ -91,30 +84,6 @@ macro_rules! impl_into_atch_list {
Some(dims)
}
#[inline]
fn add_transition<'a>(&'a self, sink: &mut CommandsListSink<'a>) {
// FIXME: implement
/*// TODO: "wrong" values
let stages = PipelineStages {
color_attachment_output: true,
late_fragment_tests: true,
.. PipelineStages::none()
};
let access = AccessFlagBits {
color_attachment_read: true,
color_attachment_write: true,
depth_stencil_attachment_read: true,
depth_stencil_attachment_write: true,
.. AccessFlagBits::none()
};
// FIXME: adjust layers & mipmaps with the view's parameters
sink.add_image_transition(self.first.parent(), 0, 1, 0, 1, true, Layout::General /* FIXME: wrong */,
stages, access);
self.rest.add_transition(sink);*/
}
}
impl_into_atch_list!($($rest),*);

View File

@ -14,7 +14,6 @@ use std::ptr;
use std::sync::Arc;
use smallvec::SmallVec;
use command_buffer::cmd::CommandsListSink;
use device::Device;
use framebuffer::AttachmentsList;
use framebuffer::FramebufferRef;
@ -159,13 +158,6 @@ impl<Rp, A> Framebuffer<Rp, A> {
pub fn render_pass(&self) -> &Rp {
&self.render_pass
}
#[inline]
pub fn add_transition<'a>(&'a self, sink: &mut CommandsListSink<'a>)
where A: AttachmentsList
{
self.resources.add_transition(sink);
}
}
unsafe impl<Rp, A> FramebufferRef for Framebuffer<Rp, A>