dst/dest inconsistency #51

This commit is contained in:
David AOL 2017-07-02 08:09:50 -05:00
parent 0e4e61bec0
commit b0faf2db95
20 changed files with 179 additions and 178 deletions

View File

@ -18,7 +18,7 @@ enum SourceKind {
pub fn derive(input: TokenStream) -> TokenStream {
let syn_item = syn::parse_macro_input(&input.to_string()).unwrap();
let src = {
let source_code = {
let mut iter = syn_item.attrs.iter().filter_map(|attr| {
match attr.value {
syn::MetaItem::NameValue(ref i, syn::Lit::Str(ref val, _)) if i == "src" => {
@ -40,7 +40,7 @@ pub fn derive(input: TokenStream) -> TokenStream {
}
match source {
SourceKind::Src(src) => src,
SourceKind::Src(source) => source,
SourceKind::Path(path) => {
let root = std::env::var("CARGO_MANIFEST_DIR").unwrap_or(".".into());
@ -78,7 +78,7 @@ pub fn derive(input: TokenStream) -> TokenStream {
_ => panic!("Unexpected shader type ; valid values: vertex, fragment, geometry, tess_ctrl, tess_eval, compute")
};
let spirv_data = match glsl_to_spirv::compile(&src, ty) {
let spirv_data = match glsl_to_spirv::compile(&source_code, ty) {
Ok(compiled) => compiled,
Err(message) => panic!("{}\nfailed to compile shader", message),
};

View File

@ -29,8 +29,8 @@ mod structs;
pub fn build_glsl_shaders<'a, I>(shaders: I)
where I: IntoIterator<Item = (&'a str, ShaderType)>
{
let dest = env::var("OUT_DIR").unwrap();
let dest = Path::new(&dest);
let destination = env::var("OUT_DIR").unwrap();
let destination = Path::new(&destination);
let shaders = shaders.into_iter().collect::<Vec<_>>();
for &(shader, _) in &shaders {
@ -50,8 +50,8 @@ pub fn build_glsl_shaders<'a, I>(shaders: I)
s
};
fs::create_dir_all(&dest.join("shaders").join(shader.parent().unwrap())).unwrap();
let mut file_output = File::create(&dest.join("shaders").join(shader))
fs::create_dir_all(&destination.join("shaders").join(shader.parent().unwrap())).unwrap();
let mut file_output = File::create(&destination.join("shaders").join(shader))
.expect("failed to open shader output");
let content = match glsl_to_spirv::compile(&shader_content, ty) {

View File

@ -113,9 +113,9 @@ impl<T: ?Sized> ImmutableBuffer<T> {
T: 'static + Send + Sync
{
unsafe {
// We automatically set `transfer_dest` to true in order to avoid annoying errors.
// We automatically set `transfer_destination` to true in order to avoid annoying errors.
let actual_usage = BufferUsage {
transfer_dest: true,
transfer_destination: true,
..usage
};
@ -453,7 +453,7 @@ mod tests {
queue.clone())
.unwrap();
let dest = CpuAccessibleBuffer::from_data(device.clone(),
let destination = CpuAccessibleBuffer::from_data(device.clone(),
BufferUsage::all(),
iter::once(queue.family()),
0)
@ -461,7 +461,7 @@ mod tests {
let _ = AutoCommandBufferBuilder::new(device.clone(), queue.family())
.unwrap()
.copy_buffer(buffer, dest.clone())
.copy_buffer(buffer, destination.clone())
.unwrap()
.build()
.unwrap()
@ -470,8 +470,8 @@ mod tests {
.then_signal_fence_and_flush()
.unwrap();
let dest_content = dest.read().unwrap();
assert_eq!(*dest_content, 12);
let destination_content = destination.read().unwrap();
assert_eq!(*destination_content, 12);
}
#[test]
@ -484,7 +484,7 @@ mod tests {
queue.clone())
.unwrap();
let dest = CpuAccessibleBuffer::from_iter(device.clone(),
let destination = CpuAccessibleBuffer::from_iter(device.clone(),
BufferUsage::all(),
iter::once(queue.family()),
(0 .. 512).map(|_| 0u32))
@ -492,7 +492,7 @@ mod tests {
let _ = AutoCommandBufferBuilder::new(device.clone(), queue.family())
.unwrap()
.copy_buffer(buffer, dest.clone())
.copy_buffer(buffer, destination.clone())
.unwrap()
.build()
.unwrap()
@ -501,8 +501,8 @@ mod tests {
.then_signal_fence_and_flush()
.unwrap();
let dest_content = dest.read().unwrap();
for (n, &v) in dest_content.iter().enumerate() {
let destination_content = destination.read().unwrap();
for (n, &v) in destination_content.iter().enumerate() {
assert_eq!(n * 2, v as usize);
}
}
@ -542,7 +542,7 @@ mod tests {
.unwrap()
};
let src = CpuAccessibleBuffer::from_data(device.clone(),
let source = CpuAccessibleBuffer::from_data(device.clone(),
BufferUsage::all(),
iter::once(queue.family()),
0)
@ -550,7 +550,7 @@ mod tests {
let _ = AutoCommandBufferBuilder::new(device.clone(), queue.family())
.unwrap()
.copy_buffer(src, buffer)
.copy_buffer(source, buffer)
.unwrap()
.build()
.unwrap()
@ -571,7 +571,7 @@ mod tests {
.unwrap()
};
let src = CpuAccessibleBuffer::from_data(device.clone(),
let source = CpuAccessibleBuffer::from_data(device.clone(),
BufferUsage::all(),
iter::once(queue.family()),
0)
@ -579,9 +579,9 @@ mod tests {
let _ = AutoCommandBufferBuilder::new(device.clone(), queue.family())
.unwrap()
.copy_buffer(src.clone(), init)
.copy_buffer(source.clone(), init)
.unwrap()
.copy_buffer(buffer, src.clone())
.copy_buffer(buffer, source.clone())
.unwrap()
.build()
.unwrap()
@ -603,7 +603,7 @@ mod tests {
.unwrap()
};
let src = CpuAccessibleBuffer::from_data(device.clone(),
let source = CpuAccessibleBuffer::from_data(device.clone(),
BufferUsage::all(),
iter::once(queue.family()),
0)
@ -611,14 +611,14 @@ mod tests {
let cb1 = AutoCommandBufferBuilder::new(device.clone(), queue.family())
.unwrap()
.copy_buffer(src.clone(), init)
.copy_buffer(source.clone(), init)
.unwrap()
.build()
.unwrap();
let cb2 = AutoCommandBufferBuilder::new(device.clone(), queue.family())
.unwrap()
.copy_buffer(buffer, src.clone())
.copy_buffer(buffer, source.clone())
.unwrap()
.build()
.unwrap();

View File

@ -205,12 +205,12 @@ impl UnsafeBuffer {
}
#[inline]
pub fn usage_transfer_src(&self) -> bool {
pub fn usage_transfer_source(&self) -> bool {
(self.usage & vk::BUFFER_USAGE_TRANSFER_SRC_BIT) != 0
}
#[inline]
pub fn usage_transfer_dest(&self) -> bool {
pub fn usage_transfer_destination(&self) -> bool {
(self.usage & vk::BUFFER_USAGE_TRANSFER_DST_BIT) != 0
}

View File

@ -19,7 +19,7 @@ use vk;
#[derive(Debug, Copy, Clone)]
pub struct BufferUsage {
pub transfer_source: bool,
pub transfer_dest: bool,
pub transfer_destination: bool,
pub uniform_texel_buffer: bool,
pub storage_texel_buffer: bool,
pub uniform_buffer: bool,
@ -35,7 +35,7 @@ impl BufferUsage {
pub fn none() -> BufferUsage {
BufferUsage {
transfer_source: false,
transfer_dest: false,
transfer_destination: false,
uniform_texel_buffer: false,
storage_texel_buffer: false,
uniform_buffer: false,
@ -51,7 +51,7 @@ impl BufferUsage {
pub fn all() -> BufferUsage {
BufferUsage {
transfer_source: true,
transfer_dest: true,
transfer_destination: true,
uniform_texel_buffer: true,
storage_texel_buffer: true,
uniform_buffer: true,
@ -71,11 +71,11 @@ impl BufferUsage {
}
}
/// Builds a `BufferUsage` with `transfer_dest` set to true and the rest to false.
/// Builds a `BufferUsage` with `transfer_destination` set to true and the rest to false.
#[inline]
pub fn transfer_dest() -> BufferUsage {
pub fn transfer_destination() -> BufferUsage {
BufferUsage {
transfer_dest: true,
transfer_destination: true,
..BufferUsage::none()
}
}
@ -89,13 +89,13 @@ impl BufferUsage {
}
}
/// Builds a `BufferUsage` with `vertex_buffer` and `transfer_dest` set to true and the rest
/// Builds a `BufferUsage` with `vertex_buffer` and `transfer_destination` set to true and the rest
/// to false.
#[inline]
pub fn vertex_buffer_transfer_dest() -> BufferUsage {
pub fn vertex_buffer_transfer_destination() -> BufferUsage {
BufferUsage {
vertex_buffer: true,
transfer_dest: true,
transfer_destination: true,
..BufferUsage::none()
}
}
@ -109,12 +109,12 @@ impl BufferUsage {
}
}
/// Builds a `BufferUsage` with `index_buffer` and `transfer_dest` set to true and the rest to false.
/// Builds a `BufferUsage` with `index_buffer` and `transfer_destination` set to true and the rest to false.
#[inline]
pub fn index_buffer_transfer_dest() -> BufferUsage {
pub fn index_buffer_transfer_destination() -> BufferUsage {
BufferUsage {
index_buffer: true,
transfer_dest: true,
transfer_destination: true,
..BufferUsage::none()
}
}
@ -128,13 +128,13 @@ impl BufferUsage {
}
}
/// Builds a `BufferUsage` with `uniform_buffer` and `transfer_dest` set to true and the rest
/// Builds a `BufferUsage` with `uniform_buffer` and `transfer_destination` set to true and the rest
/// to false.
#[inline]
pub fn uniform_buffer_transfer_dest() -> BufferUsage {
pub fn uniform_buffer_transfer_destination() -> BufferUsage {
BufferUsage {
uniform_buffer: true,
transfer_dest: true,
transfer_destination: true,
..BufferUsage::none()
}
}
@ -148,13 +148,13 @@ impl BufferUsage {
}
}
/// Builds a `BufferUsage` with `indirect_buffer` and `transfer_dest` set to true and the rest
/// Builds a `BufferUsage` with `indirect_buffer` and `transfer_destination` set to true and the rest
/// to false.
#[inline]
pub fn indirect_buffer_transfer_dest() -> BufferUsage {
pub fn indirect_buffer_transfer_destination() -> BufferUsage {
BufferUsage {
indirect_buffer: true,
transfer_dest: true,
transfer_destination: true,
..BufferUsage::none()
}
}
@ -167,7 +167,7 @@ impl BitOr for BufferUsage {
fn bitor(self, rhs: Self) -> Self {
BufferUsage {
transfer_source: self.transfer_source || rhs.transfer_source,
transfer_dest: self.transfer_dest || rhs.transfer_dest,
transfer_destination: self.transfer_destination || rhs.transfer_destination,
uniform_texel_buffer: self.uniform_texel_buffer || rhs.uniform_texel_buffer,
storage_texel_buffer: self.storage_texel_buffer || rhs.storage_texel_buffer,
uniform_buffer: self.uniform_buffer || rhs.uniform_buffer,
@ -186,7 +186,7 @@ pub fn usage_to_bits(usage: BufferUsage) -> vk::BufferUsageFlagBits {
if usage.transfer_source {
result |= vk::BUFFER_USAGE_TRANSFER_SRC_BIT;
}
if usage.transfer_dest {
if usage.transfer_destination {
result |= vk::BUFFER_USAGE_TRANSFER_DST_BIT;
}
if usage.uniform_texel_buffer {

View File

@ -160,34 +160,34 @@ impl<P> AutoCommandBufferBuilder<P> {
/// This command will copy from the source to the destination. If their size is not equal, then
/// the amount of data copied is equal to the smallest of the two.
#[inline]
pub fn copy_buffer<S, D, T>(mut self, src: S, dest: D) -> Result<Self, CopyBufferError>
pub fn copy_buffer<S, D, T>(mut self, source: S, destination: D) -> Result<Self, CopyBufferError>
where S: TypedBufferAccess<Content = T> + Send + Sync + 'static,
D: TypedBufferAccess<Content = T> + Send + Sync + 'static,
T: ?Sized,
{
unsafe {
self.ensure_outside_render_pass()?;
let infos = check_copy_buffer(self.device(), &src, &dest)?;
self.inner.copy_buffer(src, dest, iter::once((0, 0, infos.copy_size)))?;
let infos = check_copy_buffer(self.device(), &source, &destination)?;
self.inner.copy_buffer(source, destination, iter::once((0, 0, infos.copy_size)))?;
Ok(self)
}
}
/// Adds a command that copies from a buffer to an image.
pub fn copy_buffer_to_image<S, D>(self, src: S, dest: D)
pub fn copy_buffer_to_image<S, D>(self, source: S, destination: D)
-> Result<Self, CopyBufferToImageError>
where S: BufferAccess + Send + Sync + 'static,
D: ImageAccess + Send + Sync + 'static
{
self.ensure_outside_render_pass()?;
let dims = dest.dimensions().width_height_depth();
self.copy_buffer_to_image_dimensions(src, dest, [0, 0, 0], dims, 0, 1, 0)
let dims = destination.dimensions().width_height_depth();
self.copy_buffer_to_image_dimensions(source, destination, [0, 0, 0], dims, 0, 1, 0)
}
/// Adds a command that copies from a buffer to an image.
pub fn copy_buffer_to_image_dimensions<S, D>(
mut self, src: S, dest: D, offset: [u32; 3], size: [u32; 3], first_layer: u32,
mut self, source: S, destination: D, offset: [u32; 3], size: [u32; 3], first_layer: u32,
num_layers: u32, mipmap: u32) -> Result<Self, CopyBufferToImageError>
where S: BufferAccess + Send + Sync + 'static,
D: ImageAccess + Send + Sync + 'static
@ -202,7 +202,7 @@ impl<P> AutoCommandBufferBuilder<P> {
buffer_offset: 0,
buffer_row_length: 0,
buffer_image_height: 0,
image_aspect: if dest.has_color() {
image_aspect: if destination.has_color() {
UnsafeCommandBufferBuilderImageAspect {
color: true,
depth: false,
@ -218,8 +218,8 @@ impl<P> AutoCommandBufferBuilder<P> {
image_extent: size,
};
let size = src.size();
self.inner.copy_buffer_to_image(src, dest, ImageLayout::TransferDstOptimal, // TODO: let choose layout
let size = source.size();
self.inner.copy_buffer_to_image(source, destination, ImageLayout::TransferDstOptimal, // TODO: let choose layout
iter::once(copy))?;
Ok(self)
}
@ -474,7 +474,7 @@ unsafe impl<P> DeviceOwned for AutoCommandBufferBuilder<P> {
}
// Shortcut function to set the push constants.
unsafe fn push_constants<P, Pl, Pc>(dest: &mut SyncCommandBufferBuilder<P>, pipeline: Pl,
unsafe fn push_constants<P, Pl, Pc>(destination: &mut SyncCommandBufferBuilder<P>, pipeline: Pl,
push_constants: Pc)
where Pl: PipelineLayoutAbstract + Send + Sync + Clone + 'static
{
@ -491,7 +491,7 @@ unsafe fn push_constants<P, Pl, Pc>(dest: &mut SyncCommandBufferBuilder<P>, pipe
.offset(range.offset as isize),
range.size as usize);
dest.push_constants::<_, [u8]>(pipeline.clone(),
destination.push_constants::<_, [u8]>(pipeline.clone(),
range.stages,
range.offset as u32,
range.size as u32,
@ -500,26 +500,26 @@ unsafe fn push_constants<P, Pl, Pc>(dest: &mut SyncCommandBufferBuilder<P>, pipe
}
// Shortcut function to change the state of the pipeline.
unsafe fn set_state<P>(dest: &mut SyncCommandBufferBuilder<P>, dynamic: DynamicState) {
unsafe fn set_state<P>(destination: &mut SyncCommandBufferBuilder<P>, dynamic: DynamicState) {
if let Some(line_width) = dynamic.line_width {
dest.set_line_width(line_width);
destination.set_line_width(line_width);
}
if let Some(ref viewports) = dynamic.viewports {
dest.set_viewport(0, viewports.iter().cloned().collect::<Vec<_>>().into_iter()); // TODO: don't collect
destination.set_viewport(0, viewports.iter().cloned().collect::<Vec<_>>().into_iter()); // TODO: don't collect
}
if let Some(ref scissors) = dynamic.scissors {
dest.set_scissor(0, scissors.iter().cloned().collect::<Vec<_>>().into_iter()); // TODO: don't collect
destination.set_scissor(0, scissors.iter().cloned().collect::<Vec<_>>().into_iter()); // TODO: don't collect
}
}
// Shortcut function to bind vertex buffers.
unsafe fn vertex_buffers<P>(dest: &mut SyncCommandBufferBuilder<P>,
unsafe fn vertex_buffers<P>(destination: &mut SyncCommandBufferBuilder<P>,
vertex_buffers: Vec<Box<BufferAccess + Send + Sync>>)
-> Result<(), SyncCommandBufferBuilderError>
{
let mut binder = dest.bind_vertex_buffers();
let mut binder = destination.bind_vertex_buffers();
for vb in vertex_buffers {
binder.add(vb);
}
@ -527,13 +527,13 @@ unsafe fn vertex_buffers<P>(dest: &mut SyncCommandBufferBuilder<P>,
Ok(())
}
unsafe fn descriptor_sets<P, Pl, S>(dest: &mut SyncCommandBufferBuilder<P>, gfx: bool,
unsafe fn descriptor_sets<P, Pl, S>(destination: &mut SyncCommandBufferBuilder<P>, gfx: bool,
pipeline: Pl, sets: S)
-> Result<(), SyncCommandBufferBuilderError>
where Pl: PipelineLayoutAbstract + Send + Sync + Clone + 'static,
S: DescriptorSetsCollection
{
let mut sets_binder = dest.bind_descriptor_sets();
let mut sets_binder = destination.bind_descriptor_sets();
for set in sets.into_vec() {
sets_binder.add(set);

View File

@ -31,7 +31,7 @@ use vk;
#[derive(Debug)]
pub struct SubmitCommandBufferBuilder<'a> {
wait_semaphores: SmallVec<[vk::Semaphore; 16]>,
dest_stages: SmallVec<[vk::PipelineStageFlags; 8]>,
destination_stages: SmallVec<[vk::PipelineStageFlags; 8]>,
signal_semaphores: SmallVec<[vk::Semaphore; 16]>,
command_buffers: SmallVec<[vk::CommandBuffer; 4]>,
fence: vk::Fence,
@ -44,7 +44,7 @@ impl<'a> SubmitCommandBufferBuilder<'a> {
pub fn new() -> SubmitCommandBufferBuilder<'a> {
SubmitCommandBufferBuilder {
wait_semaphores: SmallVec::new(),
dest_stages: SmallVec::new(),
destination_stages: SmallVec::new(),
signal_semaphores: SmallVec::new(),
command_buffers: SmallVec::new(),
fence: 0,
@ -144,7 +144,7 @@ impl<'a> SubmitCommandBufferBuilder<'a> {
debug_assert!(Into::<vk::PipelineStageFlagBits>::into(stages) != 0);
// TODO: debug assert that the device supports the stages
self.wait_semaphores.push(semaphore.internal_object());
self.dest_stages.push(stages.into());
self.destination_stages.push(stages.into());
}
/// Adds a command buffer that is executed as part of this command.
@ -205,14 +205,14 @@ impl<'a> SubmitCommandBufferBuilder<'a> {
let vk = queue.device().pointers();
let queue = queue.internal_object_guard();
debug_assert_eq!(self.wait_semaphores.len(), self.dest_stages.len());
debug_assert_eq!(self.wait_semaphores.len(), self.destination_stages.len());
let batch = vk::SubmitInfo {
sType: vk::STRUCTURE_TYPE_SUBMIT_INFO,
pNext: ptr::null(),
waitSemaphoreCount: self.wait_semaphores.len() as u32,
pWaitSemaphores: self.wait_semaphores.as_ptr(),
pWaitDstStageMask: self.dest_stages.as_ptr(),
pWaitDstStageMask: self.destination_stages.as_ptr(),
commandBufferCount: self.command_buffers.len() as u32,
pCommandBuffers: self.command_buffers.as_ptr(),
signalSemaphoreCount: self.signal_semaphores.len() as u32,
@ -235,7 +235,7 @@ impl<'a> SubmitCommandBufferBuilder<'a> {
"Can't merge two queue submits that both have a fence");
self.wait_semaphores.extend(other.wait_semaphores);
self.dest_stages.extend(other.dest_stages); // TODO: meh? will be solved if we submit multiple batches
self.destination_stages.extend(other.destination_stages); // TODO: meh? will be solved if we submit multiple batches
self.signal_semaphores.extend(other.signal_semaphores);
self.command_buffers.extend(other.command_buffers);

View File

@ -898,7 +898,7 @@ impl<P> SyncCommandBufferBuilder<P> {
/// usage of the command anyway.
#[inline]
pub unsafe fn copy_buffer_to_image<S, D, R>(&mut self, source: S, destination: D,
dest_layout: ImageLayout, regions: R)
destination_layout: ImageLayout, regions: R)
-> Result<(), SyncCommandBufferBuilderError>
where S: BufferAccess + Send + Sync + 'static,
D: ImageAccess + Send + Sync + 'static,
@ -907,7 +907,7 @@ impl<P> SyncCommandBufferBuilder<P> {
struct Cmd<S, D, R> {
source: Option<S>,
destination: Option<D>,
dest_layout: ImageLayout,
destination_layout: ImageLayout,
regions: Option<R>,
}
@ -919,7 +919,7 @@ impl<P> SyncCommandBufferBuilder<P> {
unsafe fn send(&mut self, out: &mut UnsafeCommandBufferBuilder<P>) {
out.copy_buffer_to_image(self.source.as_ref().unwrap(),
self.destination.as_ref().unwrap(),
self.dest_layout,
self.destination_layout,
self.regions.take().unwrap());
}
@ -960,7 +960,7 @@ impl<P> SyncCommandBufferBuilder<P> {
self.commands.lock().unwrap().commands.push(Box::new(Cmd {
source: Some(source),
destination: Some(destination),
dest_layout,
destination_layout: destination_layout,
regions: Some(regions),
}));
self.prev_cmd_resource(KeyTy::Buffer,
@ -987,8 +987,8 @@ impl<P> SyncCommandBufferBuilder<P> {
transfer_write: true,
..AccessFlagBits::none()
},
dest_layout,
dest_layout)?;
destination_layout,
destination_layout)?;
Ok(())
}

View File

@ -521,11 +521,11 @@ impl<P> UnsafeCommandBufferBuilder<P> {
let source = source.inner();
debug_assert!(source.offset < source.buffer.size());
debug_assert!(source.buffer.usage_transfer_src());
debug_assert!(source.buffer.usage_transfer_source());
let destination = destination.inner();
debug_assert!(destination.offset < destination.buffer.size());
debug_assert!(destination.buffer.usage_transfer_dest());
debug_assert!(destination.buffer.usage_transfer_destination());
let regions: SmallVec<[_; 8]> = regions
.map(|(sr, de, sz)| {
@ -556,20 +556,20 @@ impl<P> UnsafeCommandBufferBuilder<P> {
/// usage of the command anyway.
#[inline]
pub unsafe fn copy_buffer_to_image<S, D, R>(&mut self, source: &S, destination: &D,
dest_layout: ImageLayout, regions: R)
destination_layout: ImageLayout, regions: R)
where S: ?Sized + BufferAccess,
D: ?Sized + ImageAccess,
R: Iterator<Item = UnsafeCommandBufferBuilderBufferImageCopy>
{
let source = source.inner();
debug_assert!(source.offset < source.buffer.size());
debug_assert!(source.buffer.usage_transfer_src());
debug_assert!(source.buffer.usage_transfer_source());
debug_assert_eq!(destination.samples(), 1);
let destination = destination.inner();
debug_assert!(destination.image.usage_transfer_dest());
debug_assert!(dest_layout == ImageLayout::General ||
dest_layout == ImageLayout::TransferDstOptimal);
debug_assert!(destination.image.usage_transfer_destination());
debug_assert!(destination_layout == ImageLayout::General ||
destination_layout == ImageLayout::TransferDstOptimal);
let regions: SmallVec<[_; 8]> = regions
.map(|copy| {
@ -609,7 +609,7 @@ impl<P> UnsafeCommandBufferBuilder<P> {
vk.CmdCopyBufferToImage(cmd,
source.buffer.internal_object(),
destination.image.internal_object(),
dest_layout as u32,
destination_layout as u32,
regions.len() as u32,
regions.as_ptr());
}
@ -756,7 +756,7 @@ impl<P> UnsafeCommandBufferBuilder<P> {
buffer: buffer_inner,
offset,
} = buffer.inner();
debug_assert!(buffer_inner.usage_transfer_dest());
debug_assert!(buffer_inner.usage_transfer_destination());
debug_assert_eq!(offset % 4, 0);
(buffer_inner.internal_object(), offset)
};
@ -991,7 +991,7 @@ impl<P> UnsafeCommandBufferBuilder<P> {
buffer: buffer_inner,
offset,
} = buffer.inner();
debug_assert!(buffer_inner.usage_transfer_dest());
debug_assert!(buffer_inner.usage_transfer_destination());
debug_assert_eq!(offset % 4, 0);
(buffer_inner.internal_object(), offset)
};
@ -1169,7 +1169,7 @@ impl UnsafeCommandBufferBuilderPipelineBarrier {
}
/// Adds an execution dependency. This means that all the stages in `source` of the previous
/// commands must finish before any of the stages in `dest` of the following commands can start.
/// commands must finish before any of the stages in `destination` of the following commands can start.
///
/// # Safety
///
@ -1179,21 +1179,21 @@ impl UnsafeCommandBufferBuilderPipelineBarrier {
///
#[inline]
pub unsafe fn add_execution_dependency(&mut self, source: PipelineStages,
dest: PipelineStages, by_region: bool) {
destination: PipelineStages, by_region: bool) {
if !by_region {
self.dependency_flags = 0;
}
debug_assert_ne!(source, PipelineStages::none());
debug_assert_ne!(dest, PipelineStages::none());
debug_assert_ne!(destination, PipelineStages::none());
self.src_stage_mask |= Into::<vk::PipelineStageFlags>::into(source);
self.dst_stage_mask |= Into::<vk::PipelineStageFlags>::into(dest);
self.dst_stage_mask |= Into::<vk::PipelineStageFlags>::into(destination);
}
/// Adds a memory barrier. This means that all the memory writes by the given source stages
/// for the given source accesses must be visible by the given dest stages for the given dest
/// accesses.
/// for the given source accesses must be visible by the given destination stages for the given
/// destination accesses.
///
/// Also adds an execution dependency similar to `add_execution_dependency`.
///
@ -1202,24 +1202,25 @@ impl UnsafeCommandBufferBuilderPipelineBarrier {
/// - Same as `add_execution_dependency`.
///
pub unsafe fn add_memory_barrier(&mut self, source_stage: PipelineStages,
source_access: AccessFlagBits, dest_stage: PipelineStages,
dest_access: AccessFlagBits, by_region: bool) {
source_access: AccessFlagBits,
destination_stage: PipelineStages,
destination_access: AccessFlagBits, by_region: bool) {
debug_assert!(source_access.is_compatible_with(&source_stage));
debug_assert!(dest_access.is_compatible_with(&dest_stage));
debug_assert!(destination_access.is_compatible_with(&destination_stage));
self.add_execution_dependency(source_stage, dest_stage, by_region);
self.add_execution_dependency(source_stage, destination_stage, by_region);
self.memory_barriers.push(vk::MemoryBarrier {
sType: vk::STRUCTURE_TYPE_MEMORY_BARRIER,
pNext: ptr::null(),
srcAccessMask: source_access.into(),
dstAccessMask: dest_access.into(),
dstAccessMask: destination_access.into(),
});
}
/// Adds a buffer memory barrier. This means that all the memory writes to the given buffer by
/// the given source stages for the given source accesses must be visible by the given dest
/// stages for the given dest accesses.
/// stages for the given destination accesses.
///
/// Also adds an execution dependency similar to `add_execution_dependency`.
///
@ -1234,16 +1235,16 @@ impl UnsafeCommandBufferBuilderPipelineBarrier {
///
pub unsafe fn add_buffer_memory_barrier<B>(&mut self, buffer: &B, source_stage: PipelineStages,
source_access: AccessFlagBits,
dest_stage: PipelineStages,
dest_access: AccessFlagBits, by_region: bool,
destination_stage: PipelineStages,
destination_access: AccessFlagBits, by_region: bool,
queue_transfer: Option<(u32, u32)>, offset: usize,
size: usize)
where B: ?Sized + BufferAccess
{
debug_assert!(source_access.is_compatible_with(&source_stage));
debug_assert!(dest_access.is_compatible_with(&dest_stage));
debug_assert!(destination_access.is_compatible_with(&destination_stage));
self.add_execution_dependency(source_stage, dest_stage, by_region);
self.add_execution_dependency(source_stage, destination_stage, by_region);
debug_assert!(size <= buffer.size());
let BufferInner {
@ -1262,7 +1263,7 @@ impl UnsafeCommandBufferBuilderPipelineBarrier {
sType: vk::STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
pNext: ptr::null(),
srcAccessMask: source_access.into(),
dstAccessMask: dest_access.into(),
dstAccessMask: destination_access.into(),
srcQueueFamilyIndex: src_queue,
dstQueueFamilyIndex: dest_queue,
buffer: buffer.internal_object(),
@ -1291,16 +1292,16 @@ impl UnsafeCommandBufferBuilderPipelineBarrier {
pub unsafe fn add_image_memory_barrier<I>(&mut self, image: &I, mipmaps: Range<u32>,
layers: Range<u32>, source_stage: PipelineStages,
source_access: AccessFlagBits,
dest_stage: PipelineStages,
dest_access: AccessFlagBits, by_region: bool,
destination_stage: PipelineStages,
destination_access: AccessFlagBits, by_region: bool,
queue_transfer: Option<(u32, u32)>,
current_layout: ImageLayout, new_layout: ImageLayout)
where I: ?Sized + ImageAccess
{
debug_assert!(source_access.is_compatible_with(&source_stage));
debug_assert!(dest_access.is_compatible_with(&dest_stage));
debug_assert!(destination_access.is_compatible_with(&destination_stage));
self.add_execution_dependency(source_stage, dest_stage, by_region);
self.add_execution_dependency(source_stage, destination_stage, by_region);
debug_assert_ne!(new_layout, ImageLayout::Undefined);
debug_assert_ne!(new_layout, ImageLayout::Preinitialized);
@ -1334,7 +1335,7 @@ impl UnsafeCommandBufferBuilderPipelineBarrier {
sType: vk::STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
pNext: ptr::null(),
srcAccessMask: source_access.into(),
dstAccessMask: dest_access.into(),
dstAccessMask: destination_access.into(),
oldLayout: current_layout as u32,
newLayout: new_layout as u32,
srcQueueFamilyIndex: src_queue,

View File

@ -33,11 +33,11 @@ pub fn check_copy_buffer<S, D, T>(device: &Device, source: &S, destination: &D)
assert_eq!(destination.inner().buffer.device().internal_object(),
device.internal_object());
if !source.inner().buffer.usage_transfer_src() {
if !source.inner().buffer.usage_transfer_source() {
return Err(CheckCopyBufferError::SourceMissingTransferUsage);
}
if !destination.inner().buffer.usage_transfer_dest() {
if !destination.inner().buffer.usage_transfer_destination() {
return Err(CheckCopyBufferError::DestinationMissingTransferUsage);
}

View File

@ -27,7 +27,7 @@ pub fn check_fill_buffer<B>(device: &Device, buffer: &B) -> Result<(), CheckFill
assert_eq!(buffer.inner().buffer.device().internal_object(),
device.internal_object());
if !buffer.inner().buffer.usage_transfer_dest() {
if !buffer.inner().buffer.usage_transfer_destination() {
return Err(CheckFillBufferError::BufferMissingUsage);
}

View File

@ -31,7 +31,7 @@ pub fn check_update_buffer<B, D>(device: &Device, buffer: &B, data: &D)
assert_eq!(buffer.inner().buffer.device().internal_object(),
device.internal_object());
if !buffer.inner().buffer.usage_transfer_dest() {
if !buffer.inner().buffer.usage_transfer_destination() {
return Err(CheckUpdateBufferError::BufferMissingUsage);
}
@ -108,7 +108,7 @@ mod tests {
#[test]
fn data_too_large() {
let (device, queue) = gfx_dev_and_queue!();
let buffer = CpuAccessibleBuffer::from_iter(device.clone(), BufferUsage::transfer_dest(),
let buffer = CpuAccessibleBuffer::from_iter(device.clone(), BufferUsage::transfer_destination(),
iter::once(queue.family()),
0 .. 65536).unwrap();
let data = (0 .. 65536).collect::<Vec<u32>>();
@ -122,7 +122,7 @@ mod tests {
#[test]
fn data_just_large_enough() {
let (device, queue) = gfx_dev_and_queue!();
let buffer = CpuAccessibleBuffer::from_iter(device.clone(), BufferUsage::transfer_dest(),
let buffer = CpuAccessibleBuffer::from_iter(device.clone(), BufferUsage::transfer_destination(),
iter::once(queue.family()),
0 .. 100000).unwrap();
let data = (0 .. 65536).collect::<Vec<u8>>();
@ -136,7 +136,7 @@ mod tests {
#[test]
fn wrong_alignment() {
let (device, queue) = gfx_dev_and_queue!();
let buffer = CpuAccessibleBuffer::from_iter(device.clone(), BufferUsage::transfer_dest(),
let buffer = CpuAccessibleBuffer::from_iter(device.clone(), BufferUsage::transfer_destination(),
iter::once(queue.family()),
0 .. 100).unwrap();
let data = (0 .. 30).collect::<Vec<u8>>();

View File

@ -518,18 +518,18 @@ pub struct LayoutPassDependencyDescription {
/// The pipeline stages that must be finished on the previous subpass before the destination
/// subpass can start.
pub src_stages: PipelineStages,
pub source_stages: PipelineStages,
/// The pipeline stages of the destination subpass that must wait for the source to be finished.
/// Stages that are earlier of the stages specified here can start before the source is
/// finished.
pub dst_stages: PipelineStages,
pub destination_stages: PipelineStages,
/// The way the source subpass accesses the attachments on which we depend.
pub src_access: AccessFlagBits,
pub source_access: AccessFlagBits,
/// The way the destination subpass accesses the attachments on which we depend.
pub dst_access: AccessFlagBits,
pub destination_access: AccessFlagBits,
/// If false, then the whole subpass must be finished for the next one to start. If true, then
/// the implementation can start the new subpass for some given pixels as long as the previous

View File

@ -236,10 +236,10 @@ macro_rules! ordered_passes_renderpass {
Some(LayoutPassDependencyDescription {
source_subpass: id,
destination_subpass: id + 1,
src_stages: PipelineStages { all_graphics: true, .. PipelineStages::none() }, // TODO: correct values
dst_stages: PipelineStages { all_graphics: true, .. PipelineStages::none() }, // TODO: correct values
src_access: AccessFlagBits::all(), // TODO: correct values
dst_access: AccessFlagBits::all(), // TODO: correct values
source_stages: PipelineStages { all_graphics: true, .. PipelineStages::none() }, // TODO: correct values
destination_stages: PipelineStages { all_graphics: true, .. PipelineStages::none() }, // TODO: correct values
source_access: AccessFlagBits::all(), // TODO: correct values
destination_access: AccessFlagBits::all(), // TODO: correct values
by_region: true, // TODO: correct values
})
}

View File

@ -305,10 +305,10 @@ impl<D> RenderPass<D>
vk::SubpassDependency {
srcSubpass: dependency.source_subpass as u32,
dstSubpass: dependency.destination_subpass as u32,
srcStageMask: dependency.src_stages.into(),
dstStageMask: dependency.dst_stages.into(),
srcAccessMask: dependency.src_access.into(),
dstAccessMask: dependency.dst_access.into(),
srcStageMask: dependency.source_stages.into(),
dstStageMask: dependency.destination_stages.into(),
srcAccessMask: dependency.source_access.into(),
dstAccessMask: dependency.destination_access.into(),
dependencyFlags: if dependency.by_region {
vk::DEPENDENCY_BY_REGION_BIT
} else {

View File

@ -74,7 +74,7 @@ impl<F> ImmutableImage<F> {
{
let usage = ImageUsage {
transfer_source: true, // for blits
transfer_dest: true,
transfer_destination: true,
sampled: true,
..ImageUsage::none()
};

View File

@ -84,7 +84,7 @@ impl<F> StorageImage<F> {
let usage = ImageUsage {
transfer_source: true,
transfer_dest: true,
transfer_destination: true,
sampled: true,
storage: true,
color_attachment: !is_depth,

View File

@ -157,7 +157,7 @@ impl UnsafeImage {
{
return Err(ImageCreationError::UnsupportedUsage);
}
if usage.transfer_dest &&
if usage.transfer_destination &&
(features & vk::FORMAT_FEATURE_TRANSFER_DST_BIT_KHR == 0)
{
return Err(ImageCreationError::UnsupportedUsage);
@ -724,12 +724,12 @@ impl UnsafeImage {
}
#[inline]
pub fn usage_transfer_src(&self) -> bool {
pub fn usage_transfer_source(&self) -> bool {
(self.usage & vk::IMAGE_USAGE_TRANSFER_SRC_BIT) != 0
}
#[inline]
pub fn usage_transfer_dest(&self) -> bool {
pub fn usage_transfer_destination(&self) -> bool {
(self.usage & vk::IMAGE_USAGE_TRANSFER_DST_BIT) != 0
}
@ -1015,12 +1015,12 @@ impl UnsafeImageView {
}
#[inline]
pub fn usage_transfer_src(&self) -> bool {
pub fn usage_transfer_source(&self) -> bool {
(self.usage & vk::IMAGE_USAGE_TRANSFER_SRC_BIT) != 0
}
#[inline]
pub fn usage_transfer_dest(&self) -> bool {
pub fn usage_transfer_destination(&self) -> bool {
(self.usage & vk::IMAGE_USAGE_TRANSFER_DST_BIT) != 0
}

View File

@ -23,7 +23,7 @@ pub struct ImageUsage {
pub transfer_source: bool,
/// Can be used as a destination for transfers. Includes blits.
pub transfer_dest: bool,
pub transfer_destination: bool,
/// Can be sampled from a shader.
pub sampled: bool,
@ -56,7 +56,7 @@ impl ImageUsage {
pub fn all() -> ImageUsage {
ImageUsage {
transfer_source: true,
transfer_dest: true,
transfer_destination: true,
sampled: true,
storage: true,
color_attachment: true,
@ -74,7 +74,7 @@ impl ImageUsage {
/// use vulkano::image::ImageUsage as ImageUsage;
///
/// let _usage = ImageUsage {
/// transfer_dest: true,
/// transfer_destination: true,
/// sampled: true,
/// .. ImageUsage::none()
/// };
@ -83,7 +83,7 @@ impl ImageUsage {
pub fn none() -> ImageUsage {
ImageUsage {
transfer_source: false,
transfer_dest: false,
transfer_destination: false,
sampled: false,
storage: false,
color_attachment: false,
@ -101,7 +101,7 @@ impl ImageUsage {
if self.transfer_source {
result |= vk::IMAGE_USAGE_TRANSFER_SRC_BIT;
}
if self.transfer_dest {
if self.transfer_destination {
result |= vk::IMAGE_USAGE_TRANSFER_DST_BIT;
}
if self.sampled {
@ -131,7 +131,7 @@ impl ImageUsage {
pub fn from_bits(val: u32) -> ImageUsage {
ImageUsage {
transfer_source: (val & vk::IMAGE_USAGE_TRANSFER_SRC_BIT) != 0,
transfer_dest: (val & vk::IMAGE_USAGE_TRANSFER_DST_BIT) != 0,
transfer_destination: (val & vk::IMAGE_USAGE_TRANSFER_DST_BIT) != 0,
sampled: (val & vk::IMAGE_USAGE_SAMPLED_BIT) != 0,
storage: (val & vk::IMAGE_USAGE_STORAGE_BIT) != 0,
color_attachment: (val & vk::IMAGE_USAGE_COLOR_ATTACHMENT_BIT) != 0,
@ -149,7 +149,7 @@ impl BitOr for ImageUsage {
fn bitor(self, rhs: Self) -> Self {
ImageUsage {
transfer_source: self.transfer_source || rhs.transfer_source,
transfer_dest: self.transfer_dest || rhs.transfer_dest,
transfer_destination: self.transfer_destination || rhs.transfer_destination,
sampled: self.sampled || rhs.sampled,
storage: self.storage || rhs.storage,
color_attachment: self.color_attachment || rhs.color_attachment,

View File

@ -80,12 +80,12 @@ pub struct AttachmentBlend {
pub enabled: bool,
pub color_op: BlendOp,
pub color_src: BlendFactor,
pub color_dst: BlendFactor,
pub color_source: BlendFactor,
pub color_destination: BlendFactor,
pub alpha_op: BlendOp,
pub alpha_src: BlendFactor,
pub alpha_dst: BlendFactor,
pub alpha_source: BlendFactor,
pub alpha_destination: BlendFactor,
pub mask_red: bool,
pub mask_green: bool,
@ -100,11 +100,11 @@ impl AttachmentBlend {
AttachmentBlend {
enabled: false,
color_op: BlendOp::Add,
color_src: BlendFactor::Zero,
color_dst: BlendFactor::One,
color_source: BlendFactor::Zero,
color_destination: BlendFactor::One,
alpha_op: BlendOp::Add,
alpha_src: BlendFactor::Zero,
alpha_dst: BlendFactor::One,
alpha_source: BlendFactor::Zero,
alpha_destination: BlendFactor::One,
mask_red: true,
mask_green: true,
mask_blue: true,
@ -119,11 +119,11 @@ impl AttachmentBlend {
AttachmentBlend {
enabled: true,
color_op: BlendOp::Add,
color_src: BlendFactor::Zero,
color_dst: BlendFactor::DstColor,
color_source: BlendFactor::Zero,
color_destination: BlendFactor::DstColor,
alpha_op: BlendOp::Add,
alpha_src: BlendFactor::Zero,
alpha_dst: BlendFactor::DstColor,
alpha_source: BlendFactor::Zero,
alpha_destination: BlendFactor::DstColor,
mask_red: true,
mask_green: true,
mask_blue: true,
@ -138,11 +138,11 @@ impl AttachmentBlend {
AttachmentBlend {
enabled: true,
color_op: BlendOp::Add,
color_src: BlendFactor::SrcAlpha,
color_dst: BlendFactor::OneMinusSrcAlpha,
color_source: BlendFactor::SrcAlpha,
color_destination: BlendFactor::OneMinusSrcAlpha,
alpha_op: BlendOp::Add,
alpha_src: BlendFactor::SrcAlpha,
alpha_dst: BlendFactor::OneMinusSrcAlpha,
alpha_source: BlendFactor::SrcAlpha,
alpha_destination: BlendFactor::OneMinusSrcAlpha,
mask_red: true,
mask_green: true,
mask_blue: true,
@ -157,11 +157,11 @@ impl Into<vk::PipelineColorBlendAttachmentState> for AttachmentBlend {
fn into(self) -> vk::PipelineColorBlendAttachmentState {
vk::PipelineColorBlendAttachmentState {
blendEnable: if self.enabled { vk::TRUE } else { vk::FALSE },
srcColorBlendFactor: self.color_src as u32,
dstColorBlendFactor: self.color_dst as u32,
srcColorBlendFactor: self.color_source as u32,
dstColorBlendFactor: self.color_destination as u32,
colorBlendOp: self.color_op as u32,
srcAlphaBlendFactor: self.alpha_src as u32,
dstAlphaBlendFactor: self.alpha_dst as u32,
srcAlphaBlendFactor: self.alpha_source as u32,
dstAlphaBlendFactor: self.alpha_destination as u32,
alphaBlendOp: self.alpha_op as u32,
colorWriteMask: {
let mut mask = 0;
@ -195,33 +195,33 @@ impl Into<vk::PipelineColorBlendAttachmentState> for AttachmentBlend {
pub enum LogicOp {
/// Returns `0`.
Clear = vk::LOGIC_OP_CLEAR,
/// Returns `src & dest`.
/// Returns `source & destination`.
And = vk::LOGIC_OP_AND,
/// Returns `src & !dest`.
/// Returns `source & !destination`.
AndReverse = vk::LOGIC_OP_AND_REVERSE,
/// Returns `src`.
/// Returns `source`.
Copy = vk::LOGIC_OP_COPY,
/// Returns `!src & dest`.
/// Returns `!source & destination`.
AndInverted = vk::LOGIC_OP_AND_INVERTED,
/// Returns `dest`.
/// Returns `destination`.
Noop = vk::LOGIC_OP_NO_OP,
/// Returns `src ^ dest`.
/// Returns `source ^ destination`.
Xor = vk::LOGIC_OP_XOR,
/// Returns `src | dest`.
/// Returns `source | destination`.
Or = vk::LOGIC_OP_OR,
/// Returns `!(src | dest)`.
/// Returns `!(source | destination)`.
Nor = vk::LOGIC_OP_NOR,
/// Returns `!(src ^ dest)`.
/// Returns `!(source ^ destination)`.
Equivalent = vk::LOGIC_OP_EQUIVALENT,
/// Returns `!dest`.
/// Returns `!destination`.
Invert = vk::LOGIC_OP_INVERT,
/// Returns `src | !dest.
/// Returns `source | !destination`.
OrReverse = vk::LOGIC_OP_OR_REVERSE,
/// Returns `!src`.
/// Returns `!source`.
CopyInverted = vk::LOGIC_OP_COPY_INVERTED,
/// Returns `!src | dest`.
/// Returns `!source | destination`.
OrInverted = vk::LOGIC_OP_OR_INVERTED,
/// Returns `!(src & dest)`.
/// Returns `!(source & destination)`.
Nand = vk::LOGIC_OP_NAND,
/// Returns `!0` (all bits set to 1).
Set = vk::LOGIC_OP_SET,