Migrate to using Ash as a base (#1593)

* Migrate to using Ash as a base

* Small change to test, to make it applicable to Vulkan 1.1
This commit is contained in:
Rua 2021-05-30 22:42:51 +02:00 committed by GitHub
parent 728fab92d1
commit f4b189792a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
101 changed files with 3864 additions and 3576 deletions

View File

@ -9,9 +9,9 @@
- **Breaking** Vulkano-shaders now checks if the device supports the shader's SPIR-V version, when loading the shader.
- **Breaking** (but unlikely) Vulkano-shaders now compiles to SPIR-V 1.0 by default. If your shader needs features only available in a higher version, you can specify the target version on the `shader!` macro with the new `vulkan_version: "major.minor"` and `spirv_version: "major.minor"` arguments.
- **Breaking** change to `ImageFormatProperties::sample_counts` field.
- The `sample_counts` field was originally represented as a u32 type; it is now represented by the `SampleCounts` struct type, which is a boolean collection of the supported sample counts.
- Added conversion functions between `SampleCountFlagBits` (a u32 type) and the `SampleCounts` type.
- **Breaking** Changes to how image sample counts are represented.
- Instead of an integer, functions with a parameter for number of image samples now take a value of `SampleCount`, an enum with variants named `SampleN`, where `N` is a power-of-two integer. It can be converted to a Vulkan `SampleCountFlags`, and from an integer with `try_from`.
- The `sample_counts` field was originally represented as a u32 type; it is now represented by the `SampleCounts` struct type, which is a boolean collection of the supported sample counts. It can be converted to and from a Vulkan `SampleCountFlags`.
- **Breaking** Changes to shader interfaces and pipeline layouts.
- The module `descriptor::pipeline_layout` has been renamed to `pipeline::layout`.
- The trait `ShaderInterfaceDef` has been replaced by a simple struct `ShaderInterface`, and its `elements` method returns a slice instead of an iterator. This means you no longer need to define a new type for a shader interface. The accompanying type `ShaderInterfaceDefEntry` has been renamed to `ShaderInterfaceEntry` to match. The `ShaderInterfaceDefMatch` trait and `EmptyShaderInterfaceDef` struct have been removed.
@ -21,6 +21,10 @@
- `ComputeEntryPoint` and `GraphicsEntryPoint` now take a value specifying the push constants descriptor, instead of having a type parameter. The corresponding associated type on `EntryPointAbstract` has been removed.
- The `GraphicsEntryPointAbstract` trait has been removed. `GraphicsPipelineBuilder` now takes a `GraphicsEntryPoint` object directly, and has lifetime parameters for the 5 shader types instead. `EntryPointDummy` is no longer needed and has been removed.
- **Breaking** The constructors of `Instance` now take an additional argument to specify the maximum API version.
- **Breaking** Vulkano is now built on top of [Ash](https://github.com/MaikKlein/ash) instead of vk-sys.
- The `EntryPoints`, `InstancePointers` and `DevicePointers` types from vk-sys have been replaced with a new module `fns` containing `EntryFunctions`, `InstanceFunctions` and `DeviceFunctions`. Rather than containing the functions directly, there is a member for each Vulkan version and extension, which is loaded from Ash.
- The functions to retrieve the function pointers have been renamed to `fns`.
- The `TYPE` associated constant has been removed from the `VulkanObject` trait. This is now provided by the Ash `Handle` trait, which the object returned by `internal_object` must implement.
- Added `DeviceExtensions::khr_spirv_1_4`, which allows SPIR-V 1.4 shaders in Vulkan 1.1.
- Added `FunctionPointers::api_version` to query the highest supported instance version.
- Added `Instance::api_version` and `Device::api_version` to return the actual supported Vulkan version. These may differ between instance and device, and be lower than what `FunctionPointers::api_version` and `PhysicalDevice::api_version` return.

View File

@ -39,7 +39,7 @@ fn main() {
// As with other examples, the first step is to create an instance.
let instance = Instance::new(
None,
Version::major_minor(1, 1),
Version::V1_1,
&InstanceExtensions::none(),
None,
)

View File

@ -51,7 +51,7 @@ vulkano::impl_vertex!(Vertex, position);
fn main() {
let required_extensions = vulkano_win::required_extensions();
let instance =
Instance::new(None, Version::major_minor(1, 1), &required_extensions, None).unwrap();
Instance::new(None, Version::V1_1, &required_extensions, None).unwrap();
let physical = PhysicalDevice::enumerate(&instance).next().unwrap();
println!(

View File

@ -56,7 +56,7 @@ fn main() {
let layers = vec!["VK_LAYER_KHRONOS_validation"];
// Important: pass the extension(s) and layer(s) when creating the vulkano instance
let instance = Instance::new(None, Version::major_minor(1, 1), &extensions, layers)
let instance = Instance::new(None, Version::V1_1, &extensions, layers)
.expect("failed to create Vulkan instance");
///////////////////////////////////////////////////////////////////////////////////////////////////////////////

View File

@ -52,7 +52,7 @@ fn main() {
let required_extensions = vulkano_win::required_extensions();
let instance =
Instance::new(None, Version::major_minor(1, 1), &required_extensions, None).unwrap();
Instance::new(None, Version::V1_1, &required_extensions, None).unwrap();
let physical = PhysicalDevice::enumerate(&instance).next().unwrap();
let event_loop = EventLoop::new();

View File

@ -30,13 +30,7 @@ use vulkano::sync::GpuFuture;
use vulkano::Version;
fn main() {
let instance = Instance::new(
None,
Version::major_minor(1, 1),
&InstanceExtensions::none(),
None,
)
.unwrap();
let instance = Instance::new(None, Version::V1_1, &InstanceExtensions::none(), None).unwrap();
let physical = PhysicalDevice::enumerate(&instance).next().unwrap();
let queue_family = physical

View File

@ -34,7 +34,7 @@ use vulkano::Version;
fn main() {
let instance = Instance::new(
None,
Version::major_minor(1, 1),
Version::V1_1,
&InstanceExtensions {
// This extension is required to obtain physical device metadata
// about the device workgroup size limits

View File

@ -42,7 +42,7 @@ fn main() {
let required_extensions = vulkano_win::required_extensions();
let instance =
Instance::new(None, Version::major_minor(1, 1), &required_extensions, None).unwrap();
Instance::new(None, Version::V1_1, &required_extensions, None).unwrap();
let physical = PhysicalDevice::enumerate(&instance).next().unwrap();
println!(
"Using device: {} (type: {:?})",

View File

@ -27,7 +27,7 @@ fn main() {
let instance = Instance::new(
None,
Version::major_minor(1, 1),
Version::V1_1,
&InstanceExtensions::none(),
None,
)

View File

@ -66,7 +66,7 @@ impl_vertex!(Vertex, position);
fn main() {
let required_extensions = vulkano_win::required_extensions();
let instance =
Instance::new(None, Version::major_minor(1, 1), &required_extensions, None).unwrap();
Instance::new(None, Version::V1_1, &required_extensions, None).unwrap();
let physical = PhysicalDevice::enumerate(&instance).next().unwrap();
println!(
"Using device: {} (type: {:?})",

View File

@ -63,8 +63,7 @@ impl_vertex!(InstanceData, position_offset, scale);
fn main() {
let required_extensions = vulkano_win::required_extensions();
let instance =
Instance::new(None, Version::major_minor(1, 1), &required_extensions, None).unwrap();
let instance = Instance::new(None, Version::V1_1, &required_extensions, None).unwrap();
let physical = PhysicalDevice::enumerate(&instance).next().unwrap();
println!(

View File

@ -77,7 +77,9 @@ use vulkano::command_buffer::{
use vulkano::device::{Device, DeviceExtensions};
use vulkano::format::ClearValue;
use vulkano::format::Format;
use vulkano::image::{view::ImageView, AttachmentImage, ImageDimensions, StorageImage};
use vulkano::image::{
view::ImageView, AttachmentImage, ImageDimensions, SampleCount, StorageImage,
};
use vulkano::instance::{Instance, PhysicalDevice};
use vulkano::pipeline::viewport::Viewport;
use vulkano::pipeline::GraphicsPipeline;
@ -89,7 +91,7 @@ fn main() {
// The usual Vulkan initialization.
let required_extensions = vulkano_win::required_extensions();
let instance =
Instance::new(None, Version::major_minor(1, 1), &required_extensions, None).unwrap();
Instance::new(None, Version::V1_1, &required_extensions, None).unwrap();
let physical = PhysicalDevice::enumerate(&instance).next().unwrap();
let queue_family = physical
.queue_families()
@ -112,7 +114,7 @@ fn main() {
AttachmentImage::transient_multisampled(
device.clone(),
[1024, 1024],
4,
SampleCount::Sample4,
Format::R8G8B8A8Unorm,
)
.unwrap(),

View File

@ -54,7 +54,7 @@ struct WindowSurface {
fn main() {
let required_extensions = vulkano_win::required_extensions();
let instance =
Instance::new(None, Version::major_minor(1, 1), &required_extensions, None).unwrap();
Instance::new(None, Version::V1_1, &required_extensions, None).unwrap();
let event_loop = EventLoop::new();
// A hashmap that contains all of our created windows and their resources

View File

@ -37,7 +37,7 @@ use winit::window::{Window, WindowBuilder};
fn main() {
let required_extensions = vulkano_win::required_extensions();
let instance =
Instance::new(None, Version::major_minor(1, 1), &required_extensions, None).unwrap();
Instance::new(None, Version::V1_1, &required_extensions, None).unwrap();
let physical = PhysicalDevice::enumerate(&instance).next().unwrap();
println!(

View File

@ -40,13 +40,7 @@ use vulkano::Version;
fn main() {
// As with other examples, the first step is to create an instance.
let instance = Instance::new(
None,
Version::major_minor(1, 1),
&InstanceExtensions::none(),
None,
)
.unwrap();
let instance = Instance::new(None, Version::V1_1, &InstanceExtensions::none(), None).unwrap();
// Choose which physical device to use.
let physical = PhysicalDevice::enumerate(&instance).next().unwrap();

View File

@ -21,13 +21,7 @@ use vulkano::sync::GpuFuture;
use vulkano::Version;
fn main() {
let instance = Instance::new(
None,
Version::major_minor(1, 1),
&InstanceExtensions::none(),
None,
)
.unwrap();
let instance = Instance::new(None, Version::V1_1, &InstanceExtensions::none(), None).unwrap();
let physical = PhysicalDevice::enumerate(&instance).next().unwrap();
let queue_family = physical
.queue_families()

View File

@ -65,8 +65,7 @@ vulkano::impl_vertex!(Vertex, position, color);
fn main() {
let required_extensions = vulkano_win::required_extensions();
let instance =
Instance::new(None, Version::major_minor(1, 1), &required_extensions, None).unwrap();
let instance = Instance::new(None, Version::V1_1, &required_extensions, None).unwrap();
let physical = vk::instance::PhysicalDevice::enumerate(&instance)
.next()
.unwrap();

View File

@ -25,7 +25,7 @@ use vulkano::Version;
fn main() {
let instance = Instance::new(
None,
Version::major_minor(1, 1),
Version::V1_1,
&InstanceExtensions::none(),
None,
)

View File

@ -21,13 +21,7 @@ use vulkano::sync::GpuFuture;
use vulkano::Version;
fn main() {
let instance = Instance::new(
None,
Version::major_minor(1, 1),
&InstanceExtensions::none(),
None,
)
.unwrap();
let instance = Instance::new(None, Version::V1_1, &InstanceExtensions::none(), None).unwrap();
let physical = PhysicalDevice::enumerate(&instance).next().unwrap();
let queue_family = physical
.queue_families()

View File

@ -44,8 +44,7 @@ fn main() {
// `triangle` example if you haven't done so yet.
let required_extensions = vulkano_win::required_extensions();
let instance =
Instance::new(None, Version::major_minor(1, 1), &required_extensions, None).unwrap();
let instance = Instance::new(None, Version::V1_1, &required_extensions, None).unwrap();
let physical = PhysicalDevice::enumerate(&instance).next().unwrap();
println!(
"Using device: {} (type: {:?})",

View File

@ -137,8 +137,7 @@ mod fs {
fn main() {
let required_extensions = vulkano_win::required_extensions();
let instance =
Instance::new(None, Version::major_minor(1, 1), &required_extensions, None).unwrap();
let instance = Instance::new(None, Version::V1_1, &required_extensions, None).unwrap();
let physical = PhysicalDevice::enumerate(&instance).next().unwrap();
println!(

View File

@ -49,8 +49,7 @@ fn main() {
let required_extensions = vulkano_win::required_extensions();
// Now creating the instance.
let instance =
Instance::new(None, Version::major_minor(1, 1), &required_extensions, None).unwrap();
let instance = Instance::new(None, Version::V1_1, &required_extensions, None).unwrap();
// We then choose which physical device to use.
//

View File

@ -225,14 +225,14 @@ where
(1, 0) => {}
(1, 1) | (1, 2) | (1, 3) => {
cap_checks.push(quote! {
if device.api_version() < Version::major_minor(1, 1) {
if device.api_version() < Version::V1_1 {
panic!("Device API version 1.1 required");
}
});
}
(1, 4) => {
cap_checks.push(quote! {
if device.api_version() < Version::major_minor(1, 2)
if device.api_version() < Version::V1_2
&& !device.loaded_extensions().khr_spirv_1_4 {
panic!("Device API version 1.2 or extension VK_KHR_spirv_1_4 required");
}
@ -240,7 +240,7 @@ where
}
(1, 5) => {
cap_checks.push(quote! {
if device.api_version() < Version::major_minor(1, 2) {
if device.api_version() < Version::V1_2 {
panic!("Device API version 1.2 required");
}
});

View File

@ -14,6 +14,7 @@ readme = "../README.md"
build = "build.rs"
[dependencies]
ash = "0.32"
crossbeam-queue = "0.3"
fnv = "1.0"
half = "1.7"
@ -21,4 +22,3 @@ lazy_static = "1.4"
parking_lot = { version = "0.11.1", features = ["send_guard"] }
shared_library = "0.1"
smallvec = "1.6"
vk-sys = { version = "0.6.1", path = "../vk-sys" }

View File

@ -24,7 +24,6 @@
//! sparse binding.
//! - Type safety.
use crate::buffer::BufferUsage;
use crate::check_errors;
use crate::device::Device;
use crate::device::DeviceOwned;
@ -32,23 +31,23 @@ use crate::memory::DeviceMemory;
use crate::memory::DeviceMemoryAllocError;
use crate::memory::MemoryRequirements;
use crate::sync::Sharing;
use crate::vk;
use crate::Error;
use crate::OomError;
use crate::VulkanObject;
use crate::{buffer::BufferUsage, Version};
use ash::vk::Handle;
use smallvec::SmallVec;
use std::error;
use std::fmt;
use std::hash::Hash;
use std::hash::Hasher;
use std::mem;
use std::mem::MaybeUninit;
use std::ptr;
use std::sync::Arc;
/// Data storage in a GPU-accessible location.
pub struct UnsafeBuffer {
buffer: vk::Buffer,
buffer: ash::vk::Buffer,
device: Arc<Device>,
size: usize,
usage: BufferUsage,
@ -74,7 +73,7 @@ impl UnsafeBuffer {
where
I: Iterator<Item = u32>,
{
let vk = device.pointers();
let fns = device.fns();
// Ensure we're not trying to create an empty buffer.
let size = if size == 0 {
@ -100,7 +99,7 @@ impl UnsafeBuffer {
sparse_level.into()
} else {
0
ash::vk::BufferCreateFlags::empty()
};
if usage.device_address
@ -108,7 +107,7 @@ impl UnsafeBuffer {
|| device.enabled_features().ext_buffer_device_address)
{
usage.device_address = false;
if vk::BufferUsageFlags::from(usage) == 0 {
if ash::vk::BufferUsageFlags::from(usage).is_empty() {
// return an error iff device_address was the only requested usage and the
// feature isn't enabled. Otherwise we'll hit that assert below.
// TODO: This is weird, why not just return an error always if the feature is not enabled?
@ -117,32 +116,33 @@ impl UnsafeBuffer {
}
}
let usage_bits = usage.into();
let usage_bits = ash::vk::BufferUsageFlags::from(usage);
// Checking for empty BufferUsage.
assert!(
usage_bits != 0,
!usage_bits.is_empty(),
"Can't create buffer with empty BufferUsage"
);
let buffer = {
let (sh_mode, sh_indices) = match sharing {
Sharing::Exclusive => (vk::SHARING_MODE_EXCLUSIVE, SmallVec::<[u32; 8]>::new()),
Sharing::Concurrent(ids) => (vk::SHARING_MODE_CONCURRENT, ids.collect()),
Sharing::Exclusive => {
(ash::vk::SharingMode::EXCLUSIVE, SmallVec::<[u32; 8]>::new())
}
Sharing::Concurrent(ids) => (ash::vk::SharingMode::CONCURRENT, ids.collect()),
};
let infos = vk::BufferCreateInfo {
sType: vk::STRUCTURE_TYPE_BUFFER_CREATE_INFO,
pNext: ptr::null(),
let infos = ash::vk::BufferCreateInfo {
flags,
size: size as u64,
usage: usage_bits,
sharingMode: sh_mode,
queueFamilyIndexCount: sh_indices.len() as u32,
pQueueFamilyIndices: sh_indices.as_ptr(),
sharing_mode: sh_mode,
queue_family_index_count: sh_indices.len() as u32,
p_queue_family_indices: sh_indices.as_ptr(),
..Default::default()
};
let mut output = MaybeUninit::uninit();
check_errors(vk.CreateBuffer(
check_errors(fns.v1_0.create_buffer(
device.internal_object(),
&infos,
ptr::null(),
@ -157,53 +157,62 @@ impl UnsafeBuffer {
al * (1 + (val - 1) / al)
}
let mut output = if device.loaded_extensions().khr_get_memory_requirements2 {
let infos = vk::BufferMemoryRequirementsInfo2KHR {
sType: vk::STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR,
pNext: ptr::null_mut(),
let mut output = if device.api_version() >= Version::V1_1
|| device.loaded_extensions().khr_get_memory_requirements2
{
let infos = ash::vk::BufferMemoryRequirementsInfo2 {
buffer: buffer,
..Default::default()
};
let mut output2 = if device.loaded_extensions().khr_dedicated_allocation {
Some(vk::MemoryDedicatedRequirementsKHR {
sType: vk::STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR,
pNext: ptr::null_mut(),
prefersDedicatedAllocation: mem::zeroed(),
requiresDedicatedAllocation: mem::zeroed(),
})
Some(ash::vk::MemoryDedicatedRequirementsKHR::default())
} else {
None
};
let mut output = vk::MemoryRequirements2KHR {
sType: vk::STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR,
pNext: output2
let mut output = ash::vk::MemoryRequirements2 {
p_next: output2
.as_mut()
.map(|o| o as *mut vk::MemoryDedicatedRequirementsKHR)
.map(|o| o as *mut ash::vk::MemoryDedicatedRequirementsKHR)
.unwrap_or(ptr::null_mut()) as *mut _,
memoryRequirements: mem::zeroed(),
..Default::default()
};
vk.GetBufferMemoryRequirements2KHR(device.internal_object(), &infos, &mut output);
debug_assert!(output.memoryRequirements.size >= size as u64);
debug_assert!(output.memoryRequirements.memoryTypeBits != 0);
if device.api_version() >= Version::V1_1 {
fns.v1_1.get_buffer_memory_requirements2(
device.internal_object(),
&infos,
&mut output,
);
} else {
fns.khr_get_memory_requirements2
.get_buffer_memory_requirements2_khr(
device.internal_object(),
&infos,
&mut output,
);
}
let mut out = MemoryRequirements::from(output.memoryRequirements);
debug_assert!(output.memory_requirements.size >= size as u64);
debug_assert!(output.memory_requirements.memory_type_bits != 0);
let mut out = MemoryRequirements::from(output.memory_requirements);
if let Some(output2) = output2 {
debug_assert_eq!(output2.requiresDedicatedAllocation, 0);
out.prefer_dedicated = output2.prefersDedicatedAllocation != 0;
debug_assert_eq!(output2.requires_dedicated_allocation, 0);
out.prefer_dedicated = output2.prefers_dedicated_allocation != 0;
}
out
} else {
let mut output: MaybeUninit<vk::MemoryRequirements> = MaybeUninit::uninit();
vk.GetBufferMemoryRequirements(
let mut output: MaybeUninit<ash::vk::MemoryRequirements> = MaybeUninit::uninit();
fns.v1_0.get_buffer_memory_requirements(
device.internal_object(),
buffer,
output.as_mut_ptr(),
);
let output = output.assume_init();
debug_assert!(output.size >= size as u64);
debug_assert!(output.memoryTypeBits != 0);
debug_assert!(output.memory_type_bits != 0);
MemoryRequirements::from(output)
};
@ -245,12 +254,12 @@ impl UnsafeBuffer {
/// Binds device memory to this buffer.
pub unsafe fn bind_memory(&self, memory: &DeviceMemory, offset: usize) -> Result<(), OomError> {
let vk = self.device.pointers();
let fns = self.device.fns();
// We check for correctness in debug mode.
debug_assert!({
let mut mem_reqs = MaybeUninit::uninit();
vk.GetBufferMemoryRequirements(
fns.v1_0.get_buffer_memory_requirements(
self.device.internal_object(),
self.buffer,
mem_reqs.as_mut_ptr(),
@ -259,7 +268,7 @@ impl UnsafeBuffer {
let mem_reqs = mem_reqs.assume_init();
mem_reqs.size <= (memory.size() - offset) as u64
&& (offset as u64 % mem_reqs.alignment) == 0
&& mem_reqs.memoryTypeBits & (1 << memory.memory_type().id()) != 0
&& mem_reqs.memory_type_bits & (1 << memory.memory_type().id()) != 0
});
// Check for alignment correctness.
@ -276,11 +285,11 @@ impl UnsafeBuffer {
}
}
check_errors(vk.BindBufferMemory(
check_errors(fns.v1_0.bind_buffer_memory(
self.device.internal_object(),
self.buffer,
memory.internal_object(),
offset as vk::DeviceSize,
offset as ash::vk::DeviceSize,
))?;
Ok(())
}
@ -300,17 +309,15 @@ impl UnsafeBuffer {
/// Returns a key unique to each `UnsafeBuffer`. Can be used for the `conflicts_key` method.
#[inline]
pub fn key(&self) -> u64 {
self.buffer
self.buffer.as_raw()
}
}
unsafe impl VulkanObject for UnsafeBuffer {
type Object = vk::Buffer;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_BUFFER;
type Object = ash::vk::Buffer;
#[inline]
fn internal_object(&self) -> vk::Buffer {
fn internal_object(&self) -> ash::vk::Buffer {
self.buffer
}
}
@ -333,8 +340,9 @@ impl Drop for UnsafeBuffer {
#[inline]
fn drop(&mut self) {
unsafe {
let vk = self.device.pointers();
vk.DestroyBuffer(self.device.internal_object(), self.buffer, ptr::null());
let fns = self.device.fns();
fns.v1_0
.destroy_buffer(self.device.internal_object(), self.buffer, ptr::null());
}
}
}
@ -373,15 +381,15 @@ impl SparseLevel {
}
}
impl From<SparseLevel> for vk::BufferCreateFlags {
impl From<SparseLevel> for ash::vk::BufferCreateFlags {
#[inline]
fn from(val: SparseLevel) -> Self {
let mut result = vk::BUFFER_CREATE_SPARSE_BINDING_BIT;
let mut result = ash::vk::BufferCreateFlags::SPARSE_BINDING;
if val.sparse_residency {
result |= vk::BUFFER_CREATE_SPARSE_RESIDENCY_BIT;
result |= ash::vk::BufferCreateFlags::SPARSE_RESIDENCY;
}
if val.sparse_aliased {
result |= vk::BUFFER_CREATE_SPARSE_ALIASED_BIT;
result |= ash::vk::BufferCreateFlags::SPARSE_ALIASED;
}
result
}

View File

@ -7,12 +7,6 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use std::hash::Hash;
use std::hash::Hasher;
use std::num::NonZeroU64;
use std::ops::Range;
use std::ptr;
use crate::buffer::sys::{DeviceAddressUsageNotEnabledError, UnsafeBuffer};
use crate::buffer::BufferSlice;
use crate::device::DeviceOwned;
@ -20,8 +14,11 @@ use crate::device::Queue;
use crate::image::ImageAccess;
use crate::memory::Content;
use crate::sync::AccessError;
use crate::{vk, SafeDeref, VulkanObject};
use crate::{SafeDeref, VulkanObject};
use std::hash::Hash;
use std::hash::Hasher;
use std::num::NonZeroU64;
use std::ops::Range;
/// Trait for objects that represent a way for the GPU to have access to a buffer or a slice of a
/// buffer.
@ -161,14 +158,14 @@ pub unsafe trait BufferAccess: DeviceOwned {
let dev = self.device();
unsafe {
let info = vk::BufferDeviceAddressInfo {
sType: vk::STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,
pNext: ptr::null(),
let info = ash::vk::BufferDeviceAddressInfo {
buffer: inner.buffer.internal_object(),
..Default::default()
};
let ptr = dev
.pointers()
.GetBufferDeviceAddressEXT(dev.internal_object(), &info);
.fns()
.ext_buffer_device_address
.get_buffer_device_address_ext(dev.internal_object(), &info);
if ptr == 0 {
panic!("got null ptr from a valid GetBufferDeviceAddressEXT call");

View File

@ -7,7 +7,6 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use crate::vk;
use std::ops::BitOr;
/// Describes how a buffer is going to be used. This is **not** just an optimization.
@ -174,38 +173,38 @@ impl BufferUsage {
}
}
impl From<BufferUsage> for vk::BufferUsageFlags {
impl From<BufferUsage> for ash::vk::BufferUsageFlags {
fn from(val: BufferUsage) -> Self {
let mut result = 0;
let mut result = ash::vk::BufferUsageFlags::empty();
if val.transfer_source {
result |= vk::BUFFER_USAGE_TRANSFER_SRC_BIT;
result |= ash::vk::BufferUsageFlags::TRANSFER_SRC;
}
if val.transfer_destination {
result |= vk::BUFFER_USAGE_TRANSFER_DST_BIT;
result |= ash::vk::BufferUsageFlags::TRANSFER_DST;
}
if val.uniform_texel_buffer {
result |= vk::BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT;
result |= ash::vk::BufferUsageFlags::UNIFORM_TEXEL_BUFFER;
}
if val.storage_texel_buffer {
result |= vk::BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
result |= ash::vk::BufferUsageFlags::STORAGE_TEXEL_BUFFER;
}
if val.uniform_buffer {
result |= vk::BUFFER_USAGE_UNIFORM_BUFFER_BIT;
result |= ash::vk::BufferUsageFlags::UNIFORM_BUFFER;
}
if val.storage_buffer {
result |= vk::BUFFER_USAGE_STORAGE_BUFFER_BIT;
result |= ash::vk::BufferUsageFlags::STORAGE_BUFFER;
}
if val.index_buffer {
result |= vk::BUFFER_USAGE_INDEX_BUFFER_BIT;
result |= ash::vk::BufferUsageFlags::INDEX_BUFFER;
}
if val.vertex_buffer {
result |= vk::BUFFER_USAGE_VERTEX_BUFFER_BIT;
result |= ash::vk::BufferUsageFlags::VERTEX_BUFFER;
}
if val.indirect_buffer {
result |= vk::BUFFER_USAGE_INDIRECT_BUFFER_BIT;
result |= ash::vk::BufferUsageFlags::INDIRECT_BUFFER;
}
if val.device_address {
result |= vk::BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT;
result |= ash::vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS;
}
result
}

View File

@ -44,7 +44,6 @@ use crate::device::Device;
use crate::device::DeviceOwned;
use crate::format::Format;
use crate::format::Pixel;
use crate::vk;
use crate::Error;
use crate::OomError;
use crate::SafeDeref;
@ -61,7 +60,7 @@ pub struct BufferView<B>
where
B: BufferAccess,
{
view: vk::BufferView,
view: ash::vk::BufferView,
buffer: B,
atomic_accesses: bool,
}
@ -123,41 +122,40 @@ where
}
let format_props = {
let vk_i = device.instance().pointers();
let fns_i = device.instance().fns();
let mut output = MaybeUninit::uninit();
vk_i.GetPhysicalDeviceFormatProperties(
fns_i.v1_0.get_physical_device_format_properties(
device.physical_device().internal_object(),
format as u32,
format.into(),
output.as_mut_ptr(),
);
output.assume_init().bufferFeatures
output.assume_init().buffer_features
};
if buffer.usage().uniform_texel_buffer {
if (format_props & vk::FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT) == 0 {
if (format_props & ash::vk::FormatFeatureFlags::UNIFORM_TEXEL_BUFFER).is_empty() {
return Err(BufferViewCreationError::UnsupportedFormat);
}
}
if buffer.usage().storage_texel_buffer {
if (format_props & vk::FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT) == 0 {
if (format_props & ash::vk::FormatFeatureFlags::STORAGE_TEXEL_BUFFER).is_empty() {
return Err(BufferViewCreationError::UnsupportedFormat);
}
}
let infos = vk::BufferViewCreateInfo {
sType: vk::STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
pNext: ptr::null(),
flags: 0, // reserved,
let infos = ash::vk::BufferViewCreateInfo {
flags: ash::vk::BufferViewCreateFlags::empty(),
buffer: buffer.internal_object(),
format: format as u32,
format: format.into(),
offset: offset as u64,
range: size as u64,
..Default::default()
};
let vk = device.pointers();
let fns = device.fns();
let mut output = MaybeUninit::uninit();
check_errors(vk.CreateBufferView(
check_errors(fns.v1_0.create_buffer_view(
device.internal_object(),
&infos,
ptr::null(),
@ -169,8 +167,9 @@ where
Ok(BufferView {
view,
buffer: org_buffer,
atomic_accesses: (format_props & vk::FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT)
!= 0,
atomic_accesses: !(format_props
& ash::vk::FormatFeatureFlags::STORAGE_TEXEL_BUFFER_ATOMIC)
.is_empty(),
})
}
@ -203,12 +202,10 @@ unsafe impl<B> VulkanObject for BufferView<B>
where
B: BufferAccess,
{
type Object = vk::BufferView;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_BUFFER_VIEW;
type Object = ash::vk::BufferView;
#[inline]
fn internal_object(&self) -> vk::BufferView {
fn internal_object(&self) -> ash::vk::BufferView {
self.view
}
}
@ -242,8 +239,8 @@ where
#[inline]
fn drop(&mut self) {
unsafe {
let vk = self.buffer.inner().buffer.device().pointers();
vk.DestroyBufferView(
let fns = self.buffer.inner().buffer.device().fns();
fns.v1_0.destroy_buffer_view(
self.buffer.inner().buffer.device().internal_object(),
self.view,
ptr::null(),

View File

@ -110,7 +110,7 @@ pub struct AutoCommandBufferBuilder<L, P = StandardCommandPoolBuilder> {
render_pass_state: Option<RenderPassState>,
// If any queries are active, this hashmap contains their state.
query_state: FnvHashMap<vk::QueryType, QueryState>,
query_state: FnvHashMap<ash::vk::QueryType, QueryState>,
_data: PhantomData<L>,
}
@ -119,12 +119,12 @@ pub struct AutoCommandBufferBuilder<L, P = StandardCommandPoolBuilder> {
struct RenderPassState {
subpass: (Arc<RenderPass>, u32),
contents: SubpassContents,
framebuffer: vk::Framebuffer, // Always null for secondary command buffers
framebuffer: ash::vk::Framebuffer, // Always null for secondary command buffers
}
// The state of an active query.
struct QueryState {
query_pool: vk::QueryPool,
query_pool: ash::vk::QueryPool,
query: u32,
ty: QueryType,
flags: QueryControlFlags,
@ -290,7 +290,7 @@ impl<L> AutoCommandBufferBuilder<L, StandardCommandPoolBuilder> {
let render_pass_state = RenderPassState {
subpass: (subpass.render_pass().clone(), subpass.index()),
contents: SubpassContents::Inline,
framebuffer: 0, // Only needed for primary command buffers
framebuffer: ash::vk::Framebuffer::null(), // Only needed for primary command buffers
};
(Some(render_pass), Some(render_pass_state))
}
@ -1956,8 +1956,8 @@ where
match state.ty {
QueryType::Occlusion => match command_buffer.inheritance().occlusion_query {
Some(inherited_flags) => {
let inherited_flags = vk::QueryControlFlags::from(inherited_flags);
let state_flags = vk::QueryControlFlags::from(state.flags);
let inherited_flags = ash::vk::QueryControlFlags::from(inherited_flags);
let state_flags = ash::vk::QueryControlFlags::from(state.flags);
if inherited_flags & state_flags != state_flags {
return Err(AutoCommandBufferBuilderContextError::QueryNotInherited);
@ -1967,8 +1967,9 @@ where
},
QueryType::PipelineStatistics(state_flags) => {
let inherited_flags = command_buffer.inheritance().query_statistics_flags;
let inherited_flags = vk::QueryPipelineStatisticFlags::from(inherited_flags);
let state_flags = vk::QueryPipelineStatisticFlags::from(state_flags);
let inherited_flags =
ash::vk::QueryPipelineStatisticFlags::from(inherited_flags);
let state_flags = ash::vk::QueryPipelineStatisticFlags::from(state_flags);
if inherited_flags & state_flags != state_flags {
return Err(AutoCommandBufferBuilderContextError::QueryNotInherited);

View File

@ -187,12 +187,19 @@ impl Default for DynamicState {
/// Describes what a subpass in a command buffer will contain.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[repr(u32)]
#[repr(i32)]
pub enum SubpassContents {
/// The subpass will only directly contain commands.
Inline = vk::SUBPASS_CONTENTS_INLINE,
Inline = ash::vk::SubpassContents::INLINE.as_raw(),
/// The subpass will only contain secondary command buffers invocations.
SecondaryCommandBuffers = vk::SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS,
SecondaryCommandBuffers = ash::vk::SubpassContents::SECONDARY_COMMAND_BUFFERS.as_raw(),
}
impl From<SubpassContents> for ash::vk::SubpassContents {
#[inline]
fn from(val: SubpassContents) -> Self {
Self::from_raw(val as i32)
}
}
/// Determines the kind of command buffer to create.
@ -279,29 +286,26 @@ impl CommandBufferLevel<Framebuffer<()>> {
/// The safest option is `SimultaneousUse`, but it may be slower than the other two.
// NOTE: The ordering is important: the variants are listed from least to most permissive!
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
#[repr(u32)]
pub enum CommandBufferUsage {
/// The command buffer can only be submitted once before being destroyed. Any further submit is
/// forbidden. This makes it possible for the implementation to perform additional
/// optimizations.
OneTimeSubmit,
OneTimeSubmit = ash::vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT.as_raw(),
/// The command buffer can be used multiple times, but must not execute or record more than once
/// simultaneously. In other words, it is as if executing the command buffer borrows it mutably.
MultipleSubmit,
MultipleSubmit = 0,
/// The command buffer can be executed multiple times in parallel on different queues.
/// If it's a secondary command buffer, it can be recorded to multiple primary command buffers
/// at once.
SimultaneousUse,
SimultaneousUse = ash::vk::CommandBufferUsageFlags::SIMULTANEOUS_USE.as_raw(),
}
impl From<CommandBufferUsage> for vk::CommandBufferUsageFlags {
impl From<CommandBufferUsage> for ash::vk::CommandBufferUsageFlags {
#[inline]
fn from(val: CommandBufferUsage) -> Self {
match val {
CommandBufferUsage::OneTimeSubmit => vk::COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
CommandBufferUsage::MultipleSubmit => 0,
CommandBufferUsage::SimultaneousUse => vk::COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT,
}
Self::from_raw(val as u32)
}
}

View File

@ -7,6 +7,14 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use crate::check_errors;
use crate::device::Device;
use crate::device::DeviceOwned;
use crate::instance::QueueFamily;
use crate::Error;
use crate::OomError;
use crate::Version;
use crate::VulkanObject;
use smallvec::SmallVec;
use std::error;
use std::fmt;
@ -16,16 +24,6 @@ use std::ptr;
use std::sync::Arc;
use std::vec::IntoIter as VecIntoIter;
use crate::instance::QueueFamily;
use crate::check_errors;
use crate::device::Device;
use crate::device::DeviceOwned;
use crate::vk;
use crate::Error;
use crate::OomError;
use crate::VulkanObject;
/// Low-level implementation of a command pool.
///
/// A command pool is always tied to a specific queue family. Command buffers allocated from a pool
@ -35,7 +33,7 @@ use crate::VulkanObject;
/// safe. In other words, you can only use a pool from one thread at a time.
#[derive(Debug)]
pub struct UnsafeCommandPool {
pool: vk::CommandPool,
pool: ash::vk::CommandPool,
device: Arc<Device>,
// Index of the associated queue family in the physical device.
@ -74,32 +72,31 @@ impl UnsafeCommandPool {
"Device doesn't match physical device when creating a command pool"
);
let vk = device.pointers();
let fns = device.fns();
let flags = {
let flag1 = if transient {
vk::COMMAND_POOL_CREATE_TRANSIENT_BIT
ash::vk::CommandPoolCreateFlags::TRANSIENT
} else {
0
ash::vk::CommandPoolCreateFlags::empty()
};
let flag2 = if reset_cb {
vk::COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT
ash::vk::CommandPoolCreateFlags::RESET_COMMAND_BUFFER
} else {
0
ash::vk::CommandPoolCreateFlags::empty()
};
flag1 | flag2
};
let pool = unsafe {
let infos = vk::CommandPoolCreateInfo {
sType: vk::STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
pNext: ptr::null(),
let infos = ash::vk::CommandPoolCreateInfo {
flags: flags,
queueFamilyIndex: queue_family.id(),
queue_family_index: queue_family.id(),
..Default::default()
};
let mut output = MaybeUninit::uninit();
check_errors(vk.CreateCommandPool(
check_errors(fns.v1_0.create_command_pool(
device.internal_object(),
&infos,
ptr::null(),
@ -127,13 +124,16 @@ impl UnsafeCommandPool {
///
pub unsafe fn reset(&self, release_resources: bool) -> Result<(), OomError> {
let flags = if release_resources {
vk::COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT
ash::vk::CommandPoolResetFlags::RELEASE_RESOURCES
} else {
0
ash::vk::CommandPoolResetFlags::empty()
};
let vk = self.device.pointers();
check_errors(vk.ResetCommandPool(self.device.internal_object(), self.pool, flags))?;
let fns = self.device.fns();
check_errors(
fns.v1_0
.reset_command_pool(self.device.internal_object(), self.pool, flags),
)?;
Ok(())
}
@ -148,16 +148,28 @@ impl UnsafeCommandPool {
/// simply ignore any possible error.
pub fn trim(&self) -> Result<(), CommandPoolTrimError> {
unsafe {
if !self.device.loaded_extensions().khr_maintenance1 {
if !(self.device.api_version() >= Version::V1_1
|| self.device.loaded_extensions().khr_maintenance1)
{
return Err(CommandPoolTrimError::Maintenance1ExtensionNotEnabled);
}
let vk = self.device.pointers();
vk.TrimCommandPoolKHR(
self.device.internal_object(),
self.pool,
0, /* reserved */
);
let fns = self.device.fns();
if self.device.api_version() >= Version::V1_1 {
fns.v1_1.trim_command_pool(
self.device.internal_object(),
self.pool,
ash::vk::CommandPoolTrimFlags::empty(),
);
} else {
fns.khr_maintenance1.trim_command_pool_khr(
self.device.internal_object(),
self.pool,
ash::vk::CommandPoolTrimFlagsKHR::empty(),
);
}
Ok(())
}
}
@ -178,22 +190,21 @@ impl UnsafeCommandPool {
});
}
let infos = vk::CommandBufferAllocateInfo {
sType: vk::STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
pNext: ptr::null(),
commandPool: self.pool,
let infos = ash::vk::CommandBufferAllocateInfo {
command_pool: self.pool,
level: if secondary {
vk::COMMAND_BUFFER_LEVEL_SECONDARY
ash::vk::CommandBufferLevel::SECONDARY
} else {
vk::COMMAND_BUFFER_LEVEL_PRIMARY
ash::vk::CommandBufferLevel::PRIMARY
},
commandBufferCount: count as u32,
command_buffer_count: count as u32,
..Default::default()
};
unsafe {
let vk = self.device.pointers();
let fns = self.device.fns();
let mut out = Vec::with_capacity(count);
check_errors(vk.AllocateCommandBuffers(
check_errors(fns.v1_0.allocate_command_buffers(
self.device.internal_object(),
&infos,
out.as_mut_ptr(),
@ -220,8 +231,8 @@ impl UnsafeCommandPool {
{
let command_buffers: SmallVec<[_; 4]> =
command_buffers.map(|cb| cb.command_buffer).collect();
let vk = self.device.pointers();
vk.FreeCommandBuffers(
let fns = self.device.fns();
fns.v1_0.free_command_buffers(
self.device.internal_object(),
self.pool,
command_buffers.len() as u32,
@ -247,12 +258,10 @@ unsafe impl DeviceOwned for UnsafeCommandPool {
}
unsafe impl VulkanObject for UnsafeCommandPool {
type Object = vk::CommandPool;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_COMMAND_POOL;
type Object = ash::vk::CommandPool;
#[inline]
fn internal_object(&self) -> vk::CommandPool {
fn internal_object(&self) -> ash::vk::CommandPool {
self.pool
}
}
@ -261,15 +270,16 @@ impl Drop for UnsafeCommandPool {
#[inline]
fn drop(&mut self) {
unsafe {
let vk = self.device.pointers();
vk.DestroyCommandPool(self.device.internal_object(), self.pool, ptr::null());
let fns = self.device.fns();
fns.v1_0
.destroy_command_pool(self.device.internal_object(), self.pool, ptr::null());
}
}
}
/// Opaque type that represents a command buffer allocated from a pool.
pub struct UnsafeCommandPoolAlloc {
command_buffer: vk::CommandBuffer,
command_buffer: ash::vk::CommandBuffer,
device: Arc<Device>,
}
@ -281,12 +291,10 @@ unsafe impl DeviceOwned for UnsafeCommandPoolAlloc {
}
unsafe impl VulkanObject for UnsafeCommandPoolAlloc {
type Object = vk::CommandBuffer;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_COMMAND_BUFFER;
type Object = ash::vk::CommandBuffer;
#[inline]
fn internal_object(&self) -> vk::CommandBuffer {
fn internal_object(&self) -> ash::vk::CommandBuffer {
self.command_buffer
}
}
@ -295,7 +303,7 @@ unsafe impl VulkanObject for UnsafeCommandPoolAlloc {
#[derive(Debug)]
pub struct UnsafeCommandPoolAllocIter {
device: Arc<Device>,
list: VecIntoIter<vk::CommandBuffer>,
list: VecIntoIter<ash::vk::CommandBuffer>,
}
impl Iterator for UnsafeCommandPoolAllocIter {
@ -354,6 +362,7 @@ impl From<Error> for CommandPoolTrimError {
mod tests {
use crate::command_buffer::pool::CommandPoolTrimError;
use crate::command_buffer::pool::UnsafeCommandPool;
use crate::Version;
#[test]
fn basic_create() {
@ -384,11 +393,18 @@ mod tests {
#[test]
fn check_maintenance_when_trim() {
let (device, queue) = gfx_dev_and_queue!();
let pool = UnsafeCommandPool::new(device, queue.family(), false, false).unwrap();
let pool = UnsafeCommandPool::new(device.clone(), queue.family(), false, false).unwrap();
match pool.trim() {
Err(CommandPoolTrimError::Maintenance1ExtensionNotEnabled) => (),
_ => panic!(),
if device.api_version() >= Version::V1_1 {
match pool.trim() {
Err(CommandPoolTrimError::Maintenance1ExtensionNotEnabled) => panic!(),
_ => (),
}
} else {
match pool.trim() {
Err(CommandPoolTrimError::Maintenance1ExtensionNotEnabled) => (),
_ => panic!(),
}
}
}

View File

@ -13,7 +13,6 @@ use crate::descriptor::DescriptorSet;
use crate::pipeline::input_assembly::IndexType;
use crate::pipeline::ComputePipelineAbstract;
use crate::pipeline::GraphicsPipelineAbstract;
use crate::vk;
use crate::VulkanObject;
use smallvec::SmallVec;
use std::ops::Range;
@ -27,24 +26,24 @@ pub struct StateCacher {
// The dynamic state to synchronize with `CmdSetState`.
dynamic_state: DynamicState,
// The compute pipeline currently bound. 0 if nothing bound.
compute_pipeline: vk::Pipeline,
compute_pipeline: ash::vk::Pipeline,
// The graphics pipeline currently bound. 0 if nothing bound.
graphics_pipeline: vk::Pipeline,
graphics_pipeline: ash::vk::Pipeline,
// The descriptor sets for the compute pipeline.
compute_descriptor_sets: SmallVec<[(vk::DescriptorSet, SmallVec<[u32; 32]>); 12]>,
compute_descriptor_sets: SmallVec<[(ash::vk::DescriptorSet, SmallVec<[u32; 32]>); 12]>,
// The descriptor sets for the graphics pipeline.
graphics_descriptor_sets: SmallVec<[(vk::DescriptorSet, SmallVec<[u32; 32]>); 12]>,
graphics_descriptor_sets: SmallVec<[(ash::vk::DescriptorSet, SmallVec<[u32; 32]>); 12]>,
// If the user starts comparing descriptor sets, but drops the helper struct in the middle of
// the processing then we will end up in a weird state. This bool is true when we start
// comparing sets, and is set to false when we end up comparing. If it was true when we start
// comparing, we know that something bad happened and we flush the cache.
poisoned_descriptor_sets: bool,
// The vertex buffers currently bound.
vertex_buffers: SmallVec<[(vk::Buffer, vk::DeviceSize); 12]>,
vertex_buffers: SmallVec<[(ash::vk::Buffer, ash::vk::DeviceSize); 12]>,
// Same as `poisoned_descriptor_sets` but for vertex buffers.
poisoned_vertex_buffers: bool,
// The index buffer, offset, and index type currently bound. `None` if nothing bound.
index_buffer: Option<(vk::Buffer, usize, IndexType)>,
index_buffer: Option<(ash::vk::Buffer, usize, IndexType)>,
}
/// Outcome of an operation.
@ -62,8 +61,8 @@ impl StateCacher {
pub fn new() -> StateCacher {
StateCacher {
dynamic_state: DynamicState::none(),
compute_pipeline: 0,
graphics_pipeline: 0,
compute_pipeline: ash::vk::Pipeline::null(),
graphics_pipeline: ash::vk::Pipeline::null(),
compute_descriptor_sets: SmallVec::new(),
graphics_descriptor_sets: SmallVec::new(),
poisoned_descriptor_sets: false,
@ -78,8 +77,8 @@ impl StateCacher {
#[inline]
pub fn invalidate(&mut self) {
self.dynamic_state = DynamicState::none();
self.compute_pipeline = 0;
self.graphics_pipeline = 0;
self.compute_pipeline = ash::vk::Pipeline::null();
self.graphics_pipeline = ash::vk::Pipeline::null();
self.compute_descriptor_sets = SmallVec::new();
self.graphics_descriptor_sets = SmallVec::new();
self.vertex_buffers = SmallVec::new();
@ -241,7 +240,7 @@ pub struct StateCacherDescriptorSets<'s> {
// Reference to the parent's `poisoned_descriptor_sets`.
poisoned: &'s mut bool,
// Reference to the descriptor sets list to compare to.
state: &'s mut SmallVec<[(vk::DescriptorSet, SmallVec<[u32; 32]>); 12]>,
state: &'s mut SmallVec<[(ash::vk::DescriptorSet, SmallVec<[u32; 32]>); 12]>,
// Next offset within the list to compare to.
offset: usize,
// Contains the return value of `compare`.
@ -295,7 +294,7 @@ pub struct StateCacherVertexBuffers<'s> {
// Reference to the parent's `poisoned_vertex_buffers`.
poisoned: &'s mut bool,
// Reference to the vertex buffers list to compare to.
state: &'s mut SmallVec<[(vk::Buffer, vk::DeviceSize); 12]>,
state: &'s mut SmallVec<[(ash::vk::Buffer, ash::vk::DeviceSize); 12]>,
// Next offset within the list to compare to.
offset: usize,
// Contains the offset of the first vertex buffer that differs.
@ -314,7 +313,7 @@ impl<'s> StateCacherVertexBuffers<'s> {
let raw = {
let inner = buffer.inner();
let raw = inner.buffer.internal_object();
let offset = inner.offset as vk::DeviceSize;
let offset = inner.offset as ash::vk::DeviceSize;
(raw, offset)
};

View File

@ -7,25 +7,21 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use smallvec::SmallVec;
use std::error;
use std::fmt;
use std::marker::PhantomData;
use std::ptr;
use crate::buffer::sys::UnsafeBuffer;
use crate::check_errors;
use crate::device::Queue;
use crate::image::sys::UnsafeImage;
use crate::memory::DeviceMemory;
use crate::sync::Fence;
use crate::sync::Semaphore;
use crate::check_errors;
use crate::vk;
use crate::Error;
use crate::OomError;
use crate::SynchronizedVulkanObject;
use crate::VulkanObject;
use smallvec::SmallVec;
use std::error;
use std::fmt;
use std::marker::PhantomData;
// TODO: correctly implement Debug on all the structs of this module
@ -33,7 +29,7 @@ use crate::VulkanObject;
// TODO: example here
pub struct SubmitBindSparseBuilder<'a> {
infos: SmallVec<[SubmitBindSparseBatchBuilder<'a>; 1]>,
fence: vk::Fence,
fence: ash::vk::Fence,
}
impl<'a> SubmitBindSparseBuilder<'a> {
@ -42,7 +38,7 @@ impl<'a> SubmitBindSparseBuilder<'a> {
pub fn new() -> SubmitBindSparseBuilder<'a> {
SubmitBindSparseBuilder {
infos: SmallVec::new(),
fence: 0,
fence: ash::vk::Fence::null(),
}
}
@ -76,7 +72,7 @@ impl<'a> SubmitBindSparseBuilder<'a> {
/// ```
#[inline]
pub fn has_fence(&self) -> bool {
self.fence != 0
self.fence != ash::vk::Fence::null()
}
/// Adds an operation that signals a fence after this submission ends.
@ -130,7 +126,7 @@ impl<'a> SubmitBindSparseBuilder<'a> {
&mut self,
other: SubmitBindSparseBuilder<'a>,
) -> Result<(), SubmitBindSparseBuilder<'a>> {
if self.fence != 0 && other.fence != 0 {
if self.fence != ash::vk::Fence::null() && other.fence != ash::vk::Fence::null() {
return Err(other);
}
@ -143,7 +139,7 @@ impl<'a> SubmitBindSparseBuilder<'a> {
unsafe {
debug_assert!(queue.family().supports_sparse_binding());
let vk = queue.device().pointers();
let fns = queue.device().fns();
let queue = queue.internal_object_guard();
// We start by storing all the `VkSparseBufferMemoryBindInfo`s of the whole command
@ -152,10 +148,10 @@ impl<'a> SubmitBindSparseBuilder<'a> {
.infos
.iter()
.flat_map(|infos| infos.buffer_binds.iter())
.map(|buf_bind| vk::SparseBufferMemoryBindInfo {
.map(|buf_bind| ash::vk::SparseBufferMemoryBindInfo {
buffer: buf_bind.buffer,
bindCount: buf_bind.binds.len() as u32,
pBinds: buf_bind.binds.as_ptr(),
bind_count: buf_bind.binds.len() as u32,
p_binds: buf_bind.binds.as_ptr(),
})
.collect();
@ -164,10 +160,10 @@ impl<'a> SubmitBindSparseBuilder<'a> {
.infos
.iter()
.flat_map(|infos| infos.image_opaque_binds.iter())
.map(|img_bind| vk::SparseImageOpaqueMemoryBindInfo {
.map(|img_bind| ash::vk::SparseImageOpaqueMemoryBindInfo {
image: img_bind.image,
bindCount: img_bind.binds.len() as u32,
pBinds: img_bind.binds.as_ptr(),
bind_count: img_bind.binds.len() as u32,
p_binds: img_bind.binds.as_ptr(),
})
.collect();
@ -176,10 +172,10 @@ impl<'a> SubmitBindSparseBuilder<'a> {
.infos
.iter()
.flat_map(|infos| infos.image_binds.iter())
.map(|img_bind| vk::SparseImageMemoryBindInfo {
.map(|img_bind| ash::vk::SparseImageMemoryBindInfo {
image: img_bind.image,
bindCount: img_bind.binds.len() as u32,
pBinds: img_bind.binds.as_ptr(),
bind_count: img_bind.binds.len() as u32,
p_binds: img_bind.binds.as_ptr(),
})
.collect();
@ -194,20 +190,18 @@ impl<'a> SubmitBindSparseBuilder<'a> {
let mut next_image_bind = 0;
for builder in self.infos.iter() {
bs_infos.push(vk::BindSparseInfo {
sType: vk::STRUCTURE_TYPE_BIND_SPARSE_INFO,
pNext: ptr::null(),
waitSemaphoreCount: builder.wait_semaphores.len() as u32,
pWaitSemaphores: builder.wait_semaphores.as_ptr(),
bufferBindCount: builder.buffer_binds.len() as u32,
pBufferBinds: if next_buffer_bind != 0 {
bs_infos.push(ash::vk::BindSparseInfo {
wait_semaphore_count: builder.wait_semaphores.len() as u32,
p_wait_semaphores: builder.wait_semaphores.as_ptr(),
buffer_bind_count: builder.buffer_binds.len() as u32,
p_buffer_binds: if next_buffer_bind != 0 {
// We need that `if` because `.as_ptr().offset(0)` is technically UB.
buffer_binds_storage.as_ptr().offset(next_buffer_bind)
} else {
buffer_binds_storage.as_ptr()
},
imageOpaqueBindCount: builder.image_opaque_binds.len() as u32,
pImageOpaqueBinds: if next_image_opaque_bind != 0 {
image_opaque_bind_count: builder.image_opaque_binds.len() as u32,
p_image_opaque_binds: if next_image_opaque_bind != 0 {
// We need that `if` because `.as_ptr().offset(0)` is technically UB.
image_opaque_binds_storage
.as_ptr()
@ -215,15 +209,16 @@ impl<'a> SubmitBindSparseBuilder<'a> {
} else {
image_opaque_binds_storage.as_ptr()
},
imageBindCount: builder.image_binds.len() as u32,
pImageBinds: if next_image_bind != 0 {
image_bind_count: builder.image_binds.len() as u32,
p_image_binds: if next_image_bind != 0 {
// We need that `if` because `.as_ptr().offset(0)` is technically UB.
image_binds_storage.as_ptr().offset(next_image_bind)
} else {
image_binds_storage.as_ptr()
},
signalSemaphoreCount: builder.signal_semaphores.len() as u32,
pSignalSemaphores: builder.signal_semaphores.as_ptr(),
signal_semaphore_count: builder.signal_semaphores.len() as u32,
p_signal_semaphores: builder.signal_semaphores.as_ptr(),
..Default::default()
});
next_buffer_bind += builder.buffer_binds.len() as isize;
@ -243,7 +238,7 @@ impl<'a> SubmitBindSparseBuilder<'a> {
};
// Finally executing the command.
check_errors(vk.QueueBindSparse(
check_errors(fns.v1_0.queue_bind_sparse(
*queue,
bs_infos.len() as u32,
bs_infos.as_ptr(),
@ -263,11 +258,11 @@ impl<'a> fmt::Debug for SubmitBindSparseBuilder<'a> {
/// A single batch of a sparse bind operation.
pub struct SubmitBindSparseBatchBuilder<'a> {
wait_semaphores: SmallVec<[vk::Semaphore; 8]>,
wait_semaphores: SmallVec<[ash::vk::Semaphore; 8]>,
buffer_binds: SmallVec<[SubmitBindSparseBufferBindBuilder<'a>; 2]>,
image_opaque_binds: SmallVec<[SubmitBindSparseImageOpaqueBindBuilder<'a>; 2]>,
image_binds: SmallVec<[SubmitBindSparseImageBindBuilder<'a>; 2]>,
signal_semaphores: SmallVec<[vk::Semaphore; 8]>,
signal_semaphores: SmallVec<[ash::vk::Semaphore; 8]>,
marker: PhantomData<&'a ()>,
}
@ -349,8 +344,8 @@ impl<'a> SubmitBindSparseBatchBuilder<'a> {
}
pub struct SubmitBindSparseBufferBindBuilder<'a> {
buffer: vk::Buffer,
binds: SmallVec<[vk::SparseMemoryBind; 1]>,
buffer: ash::vk::Buffer,
binds: SmallVec<[ash::vk::SparseMemoryBind; 1]>,
marker: PhantomData<&'a ()>,
}
@ -374,29 +369,29 @@ impl<'a> SubmitBindSparseBufferBindBuilder<'a> {
memory: &DeviceMemory,
memory_offset: usize,
) {
self.binds.push(vk::SparseMemoryBind {
resourceOffset: offset as vk::DeviceSize,
size: size as vk::DeviceSize,
self.binds.push(ash::vk::SparseMemoryBind {
resource_offset: offset as ash::vk::DeviceSize,
size: size as ash::vk::DeviceSize,
memory: memory.internal_object(),
memoryOffset: memory_offset as vk::DeviceSize,
flags: 0, // Flags are only relevant for images.
memory_offset: memory_offset as ash::vk::DeviceSize,
flags: ash::vk::SparseMemoryBindFlags::empty(), // Flags are only relevant for images.
});
}
pub unsafe fn add_unbind(&mut self, offset: usize, size: usize) {
self.binds.push(vk::SparseMemoryBind {
resourceOffset: offset as vk::DeviceSize,
size: size as vk::DeviceSize,
memory: 0,
memoryOffset: 0,
flags: 0,
self.binds.push(ash::vk::SparseMemoryBind {
resource_offset: offset as ash::vk::DeviceSize,
size: size as ash::vk::DeviceSize,
memory: ash::vk::DeviceMemory::null(),
memory_offset: 0,
flags: ash::vk::SparseMemoryBindFlags::empty(),
});
}
}
pub struct SubmitBindSparseImageOpaqueBindBuilder<'a> {
image: vk::Image,
binds: SmallVec<[vk::SparseMemoryBind; 1]>,
image: ash::vk::Image,
binds: SmallVec<[ash::vk::SparseMemoryBind; 1]>,
marker: PhantomData<&'a ()>,
}
@ -421,33 +416,33 @@ impl<'a> SubmitBindSparseImageOpaqueBindBuilder<'a> {
memory_offset: usize,
bind_metadata: bool,
) {
self.binds.push(vk::SparseMemoryBind {
resourceOffset: offset as vk::DeviceSize,
size: size as vk::DeviceSize,
self.binds.push(ash::vk::SparseMemoryBind {
resource_offset: offset as ash::vk::DeviceSize,
size: size as ash::vk::DeviceSize,
memory: memory.internal_object(),
memoryOffset: memory_offset as vk::DeviceSize,
memory_offset: memory_offset as ash::vk::DeviceSize,
flags: if bind_metadata {
vk::SPARSE_MEMORY_BIND_METADATA_BIT
ash::vk::SparseMemoryBindFlags::METADATA
} else {
0
ash::vk::SparseMemoryBindFlags::empty()
},
});
}
pub unsafe fn add_unbind(&mut self, offset: usize, size: usize) {
self.binds.push(vk::SparseMemoryBind {
resourceOffset: offset as vk::DeviceSize,
size: size as vk::DeviceSize,
memory: 0,
memoryOffset: 0,
flags: 0, // TODO: is that relevant?
self.binds.push(ash::vk::SparseMemoryBind {
resource_offset: offset as ash::vk::DeviceSize,
size: size as ash::vk::DeviceSize,
memory: ash::vk::DeviceMemory::null(),
memory_offset: 0,
flags: ash::vk::SparseMemoryBindFlags::empty(), // TODO: is that relevant?
});
}
}
pub struct SubmitBindSparseImageBindBuilder<'a> {
image: vk::Image,
binds: SmallVec<[vk::SparseImageMemoryBind; 1]>,
image: ash::vk::Image,
binds: SmallVec<[ash::vk::SparseImageMemoryBind; 1]>,
marker: PhantomData<&'a ()>,
}

View File

@ -20,7 +20,6 @@ use crate::swapchain::Swapchain;
use crate::sync::Semaphore;
use crate::check_errors;
use crate::vk;
use crate::Error;
use crate::OomError;
use crate::SynchronizedVulkanObject;
@ -29,11 +28,11 @@ use crate::VulkanObject;
/// Prototype for a submission that presents a swapchain on the screen.
// TODO: example here
pub struct SubmitPresentBuilder<'a> {
wait_semaphores: SmallVec<[vk::Semaphore; 8]>,
swapchains: SmallVec<[vk::SwapchainKHR; 4]>,
wait_semaphores: SmallVec<[ash::vk::Semaphore; 8]>,
swapchains: SmallVec<[ash::vk::SwapchainKHR; 4]>,
image_indices: SmallVec<[u32; 4]>,
present_regions: SmallVec<[vk::PresentRegionKHR; 4]>,
rect_layers: SmallVec<[vk::RectLayerKHR; 4]>,
present_regions: SmallVec<[ash::vk::PresentRegionKHR; 4]>,
rect_layers: SmallVec<[ash::vk::RectLayerKHR; 4]>,
marker: PhantomData<&'a ()>,
}
@ -108,15 +107,15 @@ impl<'a> SubmitPresentBuilder<'a> {
for rectangle in &present_region.rectangles {
self.rect_layers.push(rectangle.into());
}
vk::PresentRegionKHR {
rectangleCount: present_region.rectangles.len() as u32,
ash::vk::PresentRegionKHR {
rectangle_count: present_region.rectangles.len() as u32,
// Set this to null for now; in submit fill it with self.rect_layers
pRectangles: ptr::null(),
p_rectangles: ptr::null(),
}
}
None => vk::PresentRegionKHR {
rectangleCount: 0,
pRectangles: ptr::null(),
None => ash::vk::PresentRegionKHR {
rectangle_count: 0,
p_rectangles: ptr::null(),
},
};
self.present_regions.push(vk_present_region);
@ -146,40 +145,39 @@ impl<'a> SubmitPresentBuilder<'a> {
debug_assert_eq!(self.swapchains.len(), self.present_regions.len());
let mut current_index = 0;
for present_region in &mut self.present_regions {
present_region.pRectangles = self.rect_layers[current_index..].as_ptr();
current_index += present_region.rectangleCount as usize;
present_region.p_rectangles = self.rect_layers[current_index..].as_ptr();
current_index += present_region.rectangle_count as usize;
}
Some(vk::PresentRegionsKHR {
sType: vk::STRUCTURE_TYPE_PRESENT_REGIONS_KHR,
pNext: ptr::null(),
swapchainCount: self.present_regions.len() as u32,
pRegions: self.present_regions.as_ptr(),
Some(ash::vk::PresentRegionsKHR {
swapchain_count: self.present_regions.len() as u32,
p_regions: self.present_regions.as_ptr(),
..Default::default()
})
} else {
None
}
};
let mut results = vec![vk::SUCCESS; self.swapchains.len()];
let mut results = vec![ash::vk::Result::SUCCESS; self.swapchains.len()];
let vk = queue.device().pointers();
let fns = queue.device().fns();
let queue = queue.internal_object_guard();
let infos = vk::PresentInfoKHR {
sType: vk::STRUCTURE_TYPE_PRESENT_INFO_KHR,
pNext: present_regions
let infos = ash::vk::PresentInfoKHR {
p_next: present_regions
.as_ref()
.map(|pr| pr as *const vk::PresentRegionsKHR as *const _)
.map(|pr| pr as *const ash::vk::PresentRegionsKHR as *const _)
.unwrap_or(ptr::null()),
waitSemaphoreCount: self.wait_semaphores.len() as u32,
pWaitSemaphores: self.wait_semaphores.as_ptr(),
swapchainCount: self.swapchains.len() as u32,
pSwapchains: self.swapchains.as_ptr(),
pImageIndices: self.image_indices.as_ptr(),
pResults: results.as_mut_ptr(),
wait_semaphore_count: self.wait_semaphores.len() as u32,
p_wait_semaphores: self.wait_semaphores.as_ptr(),
swapchain_count: self.swapchains.len() as u32,
p_swapchains: self.swapchains.as_ptr(),
p_image_indices: self.image_indices.as_ptr(),
p_results: results.as_mut_ptr(),
..Default::default()
};
check_errors(vk.QueuePresentKHR(*queue, &infos))?;
check_errors(fns.khr_swapchain.queue_present_khr(*queue, &infos))?;
for result in results {
check_errors(result)?;

View File

@ -7,34 +7,30 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use smallvec::SmallVec;
use std::error;
use std::fmt;
use std::marker::PhantomData;
use std::ptr;
use crate::check_errors;
use crate::command_buffer::sys::UnsafeCommandBuffer;
use crate::device::Queue;
use crate::sync::Fence;
use crate::sync::PipelineStages;
use crate::sync::Semaphore;
use crate::check_errors;
use crate::vk;
use crate::Error;
use crate::OomError;
use crate::SynchronizedVulkanObject;
use crate::VulkanObject;
use smallvec::SmallVec;
use std::error;
use std::fmt;
use std::marker::PhantomData;
/// Prototype for a submission that executes command buffers.
// TODO: example here
#[derive(Debug)]
pub struct SubmitCommandBufferBuilder<'a> {
wait_semaphores: SmallVec<[vk::Semaphore; 16]>,
destination_stages: SmallVec<[vk::PipelineStageFlags; 8]>,
signal_semaphores: SmallVec<[vk::Semaphore; 16]>,
command_buffers: SmallVec<[vk::CommandBuffer; 4]>,
fence: vk::Fence,
wait_semaphores: SmallVec<[ash::vk::Semaphore; 16]>,
destination_stages: SmallVec<[ash::vk::PipelineStageFlags; 8]>,
signal_semaphores: SmallVec<[ash::vk::Semaphore; 16]>,
command_buffers: SmallVec<[ash::vk::CommandBuffer; 4]>,
fence: ash::vk::Fence,
marker: PhantomData<&'a ()>,
}
@ -47,7 +43,7 @@ impl<'a> SubmitCommandBufferBuilder<'a> {
destination_stages: SmallVec::new(),
signal_semaphores: SmallVec::new(),
command_buffers: SmallVec::new(),
fence: 0,
fence: ash::vk::Fence::null(),
marker: PhantomData,
}
}
@ -72,7 +68,7 @@ impl<'a> SubmitCommandBufferBuilder<'a> {
/// ```
#[inline]
pub fn has_fence(&self) -> bool {
self.fence != 0
self.fence != ash::vk::Fence::null()
}
/// Adds an operation that signals a fence after this submission ends.
@ -141,7 +137,7 @@ impl<'a> SubmitCommandBufferBuilder<'a> {
///
#[inline]
pub unsafe fn add_wait_semaphore(&mut self, semaphore: &'a Semaphore, stages: PipelineStages) {
debug_assert!(vk::PipelineStageFlags::from(stages) != 0);
debug_assert!(!ash::vk::PipelineStageFlags::from(stages).is_empty());
// TODO: debug assert that the device supports the stages
self.wait_semaphores.push(semaphore.internal_object());
self.destination_stages.push(stages.into());
@ -202,24 +198,23 @@ impl<'a> SubmitCommandBufferBuilder<'a> {
///
pub fn submit(self, queue: &Queue) -> Result<(), SubmitCommandBufferError> {
unsafe {
let vk = queue.device().pointers();
let fns = queue.device().fns();
let queue = queue.internal_object_guard();
debug_assert_eq!(self.wait_semaphores.len(), self.destination_stages.len());
let batch = vk::SubmitInfo {
sType: vk::STRUCTURE_TYPE_SUBMIT_INFO,
pNext: ptr::null(),
waitSemaphoreCount: self.wait_semaphores.len() as u32,
pWaitSemaphores: self.wait_semaphores.as_ptr(),
pWaitDstStageMask: self.destination_stages.as_ptr(),
commandBufferCount: self.command_buffers.len() as u32,
pCommandBuffers: self.command_buffers.as_ptr(),
signalSemaphoreCount: self.signal_semaphores.len() as u32,
pSignalSemaphores: self.signal_semaphores.as_ptr(),
let batch = ash::vk::SubmitInfo {
wait_semaphore_count: self.wait_semaphores.len() as u32,
p_wait_semaphores: self.wait_semaphores.as_ptr(),
p_wait_dst_stage_mask: self.destination_stages.as_ptr(),
command_buffer_count: self.command_buffers.len() as u32,
p_command_buffers: self.command_buffers.as_ptr(),
signal_semaphore_count: self.signal_semaphores.len() as u32,
p_signal_semaphores: self.signal_semaphores.as_ptr(),
..Default::default()
};
check_errors(vk.QueueSubmit(*queue, 1, &batch, self.fence))?;
check_errors(fns.v1_0.queue_submit(*queue, 1, &batch, self.fence))?;
Ok(())
}
}
@ -232,7 +227,7 @@ impl<'a> SubmitCommandBufferBuilder<'a> {
// TODO: create multiple batches instead
pub fn merge(mut self, other: Self) -> Self {
assert!(
self.fence == 0 || other.fence == 0,
self.fence == ash::vk::Fence::null() || other.fence == ash::vk::Fence::null(),
"Can't merge two queue submits that both have a fence"
);
@ -241,7 +236,7 @@ impl<'a> SubmitCommandBufferBuilder<'a> {
self.signal_semaphores.extend(other.signal_semaphores);
self.command_buffers.extend(other.command_buffers);
if self.fence == 0 {
if self.fence == ash::vk::Fence::null() {
self.fence = other.fence;
}

File diff suppressed because it is too large Load Diff

View File

@ -7,15 +7,15 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use std::error;
use std::fmt;
use crate::device::Device;
use crate::format::FormatTy;
use crate::image::ImageAccess;
use crate::image::ImageDimensions;
use crate::image::SampleCount;
use crate::sampler::Filter;
use crate::VulkanObject;
use std::error;
use std::fmt;
/// Checks whether a blit image command is valid.
///
@ -72,7 +72,7 @@ where
return Err(CheckBlitImageError::DestinationFormatNotSupported);
}
if source.samples() != 1 || destination.samples() != 1 {
if source.samples() != SampleCount::Sample1 || destination.samples() != SampleCount::Sample1 {
return Err(CheckBlitImageError::UnexpectedMultisampled);
}

View File

@ -7,9 +7,6 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use std::error;
use std::fmt;
use crate::buffer::TypedBufferAccess;
use crate::device::Device;
use crate::device::DeviceOwned;
@ -17,7 +14,10 @@ use crate::format::Format;
use crate::format::IncompatiblePixelsType;
use crate::format::Pixel;
use crate::image::ImageAccess;
use crate::image::SampleCount;
use crate::VulkanObject;
use std::error;
use std::fmt;
/// Type of operation to check.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
@ -80,7 +80,7 @@ where
}
}
if image.samples() != 1 {
if image.samples() != SampleCount::Sample1 {
return Err(CheckCopyBufferImageError::UnexpectedMultisampled);
}

View File

@ -43,7 +43,7 @@ where
// TODO: The sum of offset and the address of the range of VkDeviceMemory object that is
// backing buffer, must be a multiple of the type indicated by indexType
// TODO: fullDrawIndexUint32 feature
// TODO: full_draw_index_uint32 feature
Ok(CheckIndexBuffer {
num_indices: buffer.len(),

View File

@ -45,7 +45,6 @@ use crate::format::Format;
use crate::image::view::ImageViewType;
use crate::sync::AccessFlags;
use crate::sync::PipelineStages;
use crate::vk;
use std::cmp;
use std::error;
use std::fmt;
@ -535,19 +534,26 @@ pub struct DescriptorBufferDesc {
///
/// This is mostly the same as a `DescriptorDescTy` but with less precise information.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[repr(u32)]
#[repr(i32)]
pub enum DescriptorType {
Sampler = vk::DESCRIPTOR_TYPE_SAMPLER,
CombinedImageSampler = vk::DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
SampledImage = vk::DESCRIPTOR_TYPE_SAMPLED_IMAGE,
StorageImage = vk::DESCRIPTOR_TYPE_STORAGE_IMAGE,
UniformTexelBuffer = vk::DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
StorageTexelBuffer = vk::DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
UniformBuffer = vk::DESCRIPTOR_TYPE_UNIFORM_BUFFER,
StorageBuffer = vk::DESCRIPTOR_TYPE_STORAGE_BUFFER,
UniformBufferDynamic = vk::DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
StorageBufferDynamic = vk::DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC,
InputAttachment = vk::DESCRIPTOR_TYPE_INPUT_ATTACHMENT,
Sampler = ash::vk::DescriptorType::SAMPLER.as_raw(),
CombinedImageSampler = ash::vk::DescriptorType::COMBINED_IMAGE_SAMPLER.as_raw(),
SampledImage = ash::vk::DescriptorType::SAMPLED_IMAGE.as_raw(),
StorageImage = ash::vk::DescriptorType::STORAGE_IMAGE.as_raw(),
UniformTexelBuffer = ash::vk::DescriptorType::UNIFORM_TEXEL_BUFFER.as_raw(),
StorageTexelBuffer = ash::vk::DescriptorType::STORAGE_TEXEL_BUFFER.as_raw(),
UniformBuffer = ash::vk::DescriptorType::UNIFORM_BUFFER.as_raw(),
StorageBuffer = ash::vk::DescriptorType::STORAGE_BUFFER.as_raw(),
UniformBufferDynamic = ash::vk::DescriptorType::UNIFORM_BUFFER_DYNAMIC.as_raw(),
StorageBufferDynamic = ash::vk::DescriptorType::STORAGE_BUFFER_DYNAMIC.as_raw(),
InputAttachment = ash::vk::DescriptorType::INPUT_ATTACHMENT.as_raw(),
}
impl From<DescriptorType> for ash::vk::DescriptorType {
#[inline]
fn from(val: DescriptorType) -> Self {
Self::from_raw(val as i32)
}
}
/// Error when checking whether a descriptor is a superset of another one.
@ -746,27 +752,27 @@ impl ShaderStages {
}
}
impl From<ShaderStages> for vk::ShaderStageFlags {
impl From<ShaderStages> for ash::vk::ShaderStageFlags {
#[inline]
fn from(val: ShaderStages) -> vk::ShaderStageFlags {
let mut result = 0;
fn from(val: ShaderStages) -> ash::vk::ShaderStageFlags {
let mut result = ash::vk::ShaderStageFlags::empty();
if val.vertex {
result |= vk::SHADER_STAGE_VERTEX_BIT;
result |= ash::vk::ShaderStageFlags::VERTEX;
}
if val.tessellation_control {
result |= vk::SHADER_STAGE_TESSELLATION_CONTROL_BIT;
result |= ash::vk::ShaderStageFlags::TESSELLATION_CONTROL;
}
if val.tessellation_evaluation {
result |= vk::SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
result |= ash::vk::ShaderStageFlags::TESSELLATION_EVALUATION;
}
if val.geometry {
result |= vk::SHADER_STAGE_GEOMETRY_BIT;
result |= ash::vk::ShaderStageFlags::GEOMETRY;
}
if val.fragment {
result |= vk::SHADER_STAGE_FRAGMENT_BIT;
result |= ash::vk::ShaderStageFlags::FRAGMENT;
}
if val.compute {
result |= vk::SHADER_STAGE_COMPUTE_BIT;
result |= ash::vk::ShaderStageFlags::COMPUTE;
}
result
}

View File

@ -7,12 +7,6 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use std::error;
use std::fmt;
use std::hash::Hash;
use std::hash::Hasher;
use std::sync::Arc;
use crate::buffer::BufferAccess;
use crate::buffer::BufferViewRef;
use crate::descriptor::descriptor::DescriptorDesc;
@ -33,9 +27,15 @@ use crate::device::Device;
use crate::device::DeviceOwned;
use crate::format::Format;
use crate::image::view::ImageViewAbstract;
use crate::image::SampleCount;
use crate::sampler::Sampler;
use crate::OomError;
use crate::VulkanObject;
use std::error;
use std::fmt;
use std::hash::Hash;
use std::hash::Hasher;
use std::sync::Arc;
/// An immutable descriptor set that is expected to be long-lived.
///
@ -674,9 +674,9 @@ impl<R> PersistentDescriptorSetBuilderArray<R> {
return Err(PersistentDescriptorSetError::NotIdentitySwizzled);
}
if multisampled && image_view.image().samples() == 1 {
if multisampled && image_view.image().samples() == SampleCount::Sample1 {
return Err(PersistentDescriptorSetError::ExpectedMultisampled);
} else if !multisampled && image_view.image().samples() != 1 {
} else if !multisampled && image_view.image().samples() != SampleCount::Sample1 {
return Err(PersistentDescriptorSetError::UnexpectedMultisampled);
}
@ -906,9 +906,9 @@ where
}
}
if desc.multisampled && image_view.image().samples() == 1 {
if desc.multisampled && image_view.image().samples() == SampleCount::Sample1 {
return Err(PersistentDescriptorSetError::ExpectedMultisampled);
} else if !desc.multisampled && image_view.image().samples() != 1 {
} else if !desc.multisampled && image_view.image().samples() != SampleCount::Sample1 {
return Err(PersistentDescriptorSetError::UnexpectedMultisampled);
}

View File

@ -7,6 +7,18 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use crate::buffer::BufferAccess;
use crate::buffer::BufferInner;
use crate::buffer::BufferView;
use crate::check_errors;
use crate::descriptor::descriptor::DescriptorType;
use crate::descriptor::descriptor_set::UnsafeDescriptorSetLayout;
use crate::device::Device;
use crate::device::DeviceOwned;
use crate::image::view::ImageViewAbstract;
use crate::sampler::Sampler;
use crate::OomError;
use crate::VulkanObject;
use smallvec::SmallVec;
use std::cmp;
use std::error;
@ -17,21 +29,6 @@ use std::ptr;
use std::sync::Arc;
use std::vec::IntoIter as VecIntoIter;
use crate::buffer::BufferAccess;
use crate::buffer::BufferInner;
use crate::buffer::BufferView;
use crate::descriptor::descriptor::DescriptorType;
use crate::descriptor::descriptor_set::UnsafeDescriptorSetLayout;
use crate::device::Device;
use crate::device::DeviceOwned;
use crate::image::view::ImageViewAbstract;
use crate::sampler::Sampler;
use crate::check_errors;
use crate::vk;
use crate::OomError;
use crate::VulkanObject;
/// A pool from which descriptor sets can be allocated.
///
/// Since the destructor of `Alloc` must free the descriptor set, this trait is usually implemented
@ -232,7 +229,7 @@ descriptors_count! {
/// A pool has a maximum number of descriptor sets and a maximum number of descriptors (one value
/// per descriptor type) it can allocate.
pub struct UnsafeDescriptorPool {
pool: vk::DescriptorPool,
pool: ash::vk::DescriptorPool,
device: Arc<Device>,
}
@ -256,7 +253,7 @@ impl UnsafeDescriptorPool {
max_sets: u32,
free_descriptor_set_bit: bool,
) -> Result<UnsafeDescriptorPool, OomError> {
let vk = device.pointers();
let fns = device.fns();
assert_ne!(max_sets, 0, "The maximum number of sets can't be 0");
@ -265,40 +262,40 @@ impl UnsafeDescriptorPool {
macro_rules! elem {
($field:ident, $ty:expr) => {
if count.$field >= 1 {
pool_sizes.push(vk::DescriptorPoolSize {
pool_sizes.push(ash::vk::DescriptorPoolSize {
ty: $ty,
descriptorCount: count.$field,
descriptor_count: count.$field,
});
}
};
}
elem!(uniform_buffer, vk::DESCRIPTOR_TYPE_UNIFORM_BUFFER);
elem!(storage_buffer, vk::DESCRIPTOR_TYPE_STORAGE_BUFFER);
elem!(uniform_buffer, ash::vk::DescriptorType::UNIFORM_BUFFER);
elem!(storage_buffer, ash::vk::DescriptorType::STORAGE_BUFFER);
elem!(
uniform_buffer_dynamic,
vk::DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
ash::vk::DescriptorType::UNIFORM_BUFFER_DYNAMIC
);
elem!(
storage_buffer_dynamic,
vk::DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
ash::vk::DescriptorType::STORAGE_BUFFER_DYNAMIC
);
elem!(
uniform_texel_buffer,
vk::DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
ash::vk::DescriptorType::UNIFORM_TEXEL_BUFFER
);
elem!(
storage_texel_buffer,
vk::DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
ash::vk::DescriptorType::STORAGE_TEXEL_BUFFER
);
elem!(sampled_image, vk::DESCRIPTOR_TYPE_SAMPLED_IMAGE);
elem!(storage_image, vk::DESCRIPTOR_TYPE_STORAGE_IMAGE);
elem!(sampler, vk::DESCRIPTOR_TYPE_SAMPLER);
elem!(sampled_image, ash::vk::DescriptorType::SAMPLED_IMAGE);
elem!(storage_image, ash::vk::DescriptorType::STORAGE_IMAGE);
elem!(sampler, ash::vk::DescriptorType::SAMPLER);
elem!(
combined_image_sampler,
vk::DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
ash::vk::DescriptorType::COMBINED_IMAGE_SAMPLER
);
elem!(input_attachment, vk::DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
elem!(input_attachment, ash::vk::DescriptorType::INPUT_ATTACHMENT);
assert!(
!pool_sizes.is_empty(),
@ -306,21 +303,20 @@ impl UnsafeDescriptorPool {
);
let pool = unsafe {
let infos = vk::DescriptorPoolCreateInfo {
sType: vk::STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
pNext: ptr::null(),
let infos = ash::vk::DescriptorPoolCreateInfo {
flags: if free_descriptor_set_bit {
vk::DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT
ash::vk::DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET
} else {
0
ash::vk::DescriptorPoolCreateFlags::empty()
},
maxSets: max_sets,
poolSizeCount: pool_sizes.len() as u32,
pPoolSizes: pool_sizes.as_ptr(),
max_sets: max_sets,
pool_size_count: pool_sizes.len() as u32,
p_pool_sizes: pool_sizes.as_ptr(),
..Default::default()
};
let mut output = MaybeUninit::uninit();
check_errors(vk.CreateDescriptorPool(
check_errors(fns.v1_0.create_descriptor_pool(
device.internal_object(),
&infos,
ptr::null(),
@ -381,7 +377,7 @@ impl UnsafeDescriptorPool {
// Actual implementation of `alloc`. Separated so that it is not inlined.
unsafe fn alloc_impl(
&mut self,
layouts: &SmallVec<[vk::DescriptorSetLayout; 8]>,
layouts: &SmallVec<[ash::vk::DescriptorSetLayout; 8]>,
) -> Result<UnsafeDescriptorPoolAllocIter, DescriptorPoolAllocError> {
let num = layouts.len();
@ -391,34 +387,36 @@ impl UnsafeDescriptorPool {
});
}
let infos = vk::DescriptorSetAllocateInfo {
sType: vk::STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
pNext: ptr::null(),
descriptorPool: self.pool,
descriptorSetCount: layouts.len() as u32,
pSetLayouts: layouts.as_ptr(),
let infos = ash::vk::DescriptorSetAllocateInfo {
descriptor_pool: self.pool,
descriptor_set_count: layouts.len() as u32,
p_set_layouts: layouts.as_ptr(),
..Default::default()
};
let mut output = Vec::with_capacity(num);
let vk = self.device.pointers();
let ret =
vk.AllocateDescriptorSets(self.device.internal_object(), &infos, output.as_mut_ptr());
let fns = self.device.fns();
let ret = fns.v1_0.allocate_descriptor_sets(
self.device.internal_object(),
&infos,
output.as_mut_ptr(),
);
// According to the specs, because `VK_ERROR_FRAGMENTED_POOL` was added after version
// 1.0 of Vulkan, any negative return value except out-of-memory errors must be
// considered as a fragmented pool error.
match ret {
vk::ERROR_OUT_OF_HOST_MEMORY => {
ash::vk::Result::ERROR_OUT_OF_HOST_MEMORY => {
return Err(DescriptorPoolAllocError::OutOfHostMemory);
}
vk::ERROR_OUT_OF_DEVICE_MEMORY => {
ash::vk::Result::ERROR_OUT_OF_DEVICE_MEMORY => {
return Err(DescriptorPoolAllocError::OutOfDeviceMemory);
}
vk::ERROR_OUT_OF_POOL_MEMORY_KHR => {
ash::vk::Result::ERROR_OUT_OF_POOL_MEMORY_KHR => {
return Err(DescriptorPoolAllocError::OutOfPoolMemory);
}
c if (c as i32) < 0 => {
c if c.as_raw() < 0 => {
return Err(DescriptorPoolAllocError::FragmentedPool);
}
_ => (),
@ -459,10 +457,10 @@ impl UnsafeDescriptorPool {
// Actual implementation of `free`. Separated so that it is not inlined.
unsafe fn free_impl(
&mut self,
sets: &SmallVec<[vk::DescriptorSet; 8]>,
sets: &SmallVec<[ash::vk::DescriptorSet; 8]>,
) -> Result<(), OomError> {
let vk = self.device.pointers();
check_errors(vk.FreeDescriptorSets(
let fns = self.device.fns();
check_errors(fns.v1_0.free_descriptor_sets(
self.device.internal_object(),
self.pool,
sets.len() as u32,
@ -475,11 +473,11 @@ impl UnsafeDescriptorPool {
///
/// This destroys all descriptor sets and empties the pool.
pub unsafe fn reset(&mut self) -> Result<(), OomError> {
let vk = self.device.pointers();
check_errors(vk.ResetDescriptorPool(
let fns = self.device.fns();
check_errors(fns.v1_0.reset_descriptor_pool(
self.device.internal_object(),
self.pool,
0, /* reserved flags */
ash::vk::DescriptorPoolResetFlags::empty(),
))?;
Ok(())
}
@ -505,8 +503,9 @@ impl Drop for UnsafeDescriptorPool {
#[inline]
fn drop(&mut self) {
unsafe {
let vk = self.device.pointers();
vk.DestroyDescriptorPool(self.device.internal_object(), self.pool, ptr::null());
let fns = self.device.fns();
fns.v1_0
.destroy_descriptor_pool(self.device.internal_object(), self.pool, ptr::null());
}
}
}
@ -551,7 +550,7 @@ impl fmt::Display for DescriptorPoolAllocError {
/// Iterator to the descriptor sets allocated from an unsafe descriptor pool.
#[derive(Debug)]
pub struct UnsafeDescriptorPoolAllocIter {
sets: VecIntoIter<vk::DescriptorSet>,
sets: VecIntoIter<ash::vk::DescriptorSet>,
}
impl Iterator for UnsafeDescriptorPoolAllocIter {
@ -576,7 +575,7 @@ impl ExactSizeIterator for UnsafeDescriptorPoolAllocIter {}
/// doesn't hold the pool or the device it is associated to.
/// Instead it is an object meant to be used with the `UnsafeDescriptorPool`.
pub struct UnsafeDescriptorSet {
set: vk::DescriptorSet,
set: ash::vk::DescriptorSet,
}
impl UnsafeDescriptorSet {
@ -604,7 +603,7 @@ impl UnsafeDescriptorSet {
where
I: Iterator<Item = DescriptorWrite>,
{
let vk = device.pointers();
let fns = device.fns();
// In this function, we build 4 arrays: one array of image descriptors (image_descriptors),
// one for buffer descriptors (buffer_descriptors), one for buffer view descriptors
@ -635,17 +634,16 @@ impl UnsafeDescriptorSet {
// The whole struct thats written here is valid, except for pImageInfo, pBufferInfo
// and pTexelBufferView which are placeholder values.
raw_writes.push(vk::WriteDescriptorSet {
sType: vk::STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
pNext: ptr::null(),
dstSet: self.set,
dstBinding: indiv_write.binding,
dstArrayElement: indiv_write.first_array_element,
descriptorCount: indiv_write.inner.len() as u32,
descriptorType: indiv_write.ty() as u32,
pImageInfo: ptr::null(),
pBufferInfo: ptr::null(),
pTexelBufferView: ptr::null(),
raw_writes.push(ash::vk::WriteDescriptorSet {
dst_set: self.set,
dst_binding: indiv_write.binding,
dst_array_element: indiv_write.first_array_element,
descriptor_count: indiv_write.inner.len() as u32,
descriptor_type: indiv_write.ty().into(),
p_image_info: ptr::null(),
p_buffer_info: ptr::null(),
p_texel_buffer_view: ptr::null(),
..Default::default()
});
match indiv_write.inner[0] {
@ -678,7 +676,7 @@ impl UnsafeDescriptorSet {
match *elem {
DescriptorWriteInner::UniformBuffer(buffer, offset, size)
| DescriptorWriteInner::DynamicUniformBuffer(buffer, offset, size) => {
buffer_descriptors.push(vk::DescriptorBufferInfo {
buffer_descriptors.push(ash::vk::DescriptorBufferInfo {
buffer,
offset: offset as u64,
range: size as u64,
@ -686,45 +684,45 @@ impl UnsafeDescriptorSet {
}
DescriptorWriteInner::StorageBuffer(buffer, offset, size)
| DescriptorWriteInner::DynamicStorageBuffer(buffer, offset, size) => {
buffer_descriptors.push(vk::DescriptorBufferInfo {
buffer_descriptors.push(ash::vk::DescriptorBufferInfo {
buffer,
offset: offset as u64,
range: size as u64,
});
}
DescriptorWriteInner::Sampler(sampler) => {
image_descriptors.push(vk::DescriptorImageInfo {
image_descriptors.push(ash::vk::DescriptorImageInfo {
sampler,
imageView: 0,
imageLayout: 0,
image_view: ash::vk::ImageView::null(),
image_layout: ash::vk::ImageLayout::UNDEFINED,
});
}
DescriptorWriteInner::CombinedImageSampler(sampler, view, layout) => {
image_descriptors.push(vk::DescriptorImageInfo {
image_descriptors.push(ash::vk::DescriptorImageInfo {
sampler,
imageView: view,
imageLayout: layout,
image_view: view,
image_layout: layout,
});
}
DescriptorWriteInner::StorageImage(view, layout) => {
image_descriptors.push(vk::DescriptorImageInfo {
sampler: 0,
imageView: view,
imageLayout: layout,
image_descriptors.push(ash::vk::DescriptorImageInfo {
sampler: ash::vk::Sampler::null(),
image_view: view,
image_layout: layout,
});
}
DescriptorWriteInner::SampledImage(view, layout) => {
image_descriptors.push(vk::DescriptorImageInfo {
sampler: 0,
imageView: view,
imageLayout: layout,
image_descriptors.push(ash::vk::DescriptorImageInfo {
sampler: ash::vk::Sampler::null(),
image_view: view,
image_layout: layout,
});
}
DescriptorWriteInner::InputAttachment(view, layout) => {
image_descriptors.push(vk::DescriptorImageInfo {
sampler: 0,
imageView: view,
imageLayout: layout,
image_descriptors.push(ash::vk::DescriptorImageInfo {
sampler: ash::vk::Sampler::null(),
image_view: view,
image_layout: layout,
});
}
DescriptorWriteInner::UniformTexelBuffer(view)
@ -738,17 +736,17 @@ impl UnsafeDescriptorSet {
// Now that `image_descriptors`, `buffer_descriptors` and `buffer_views_descriptors` are
// entirely filled and will never move again, we can fill the pointers in `raw_writes`.
for (i, write) in raw_writes.iter_mut().enumerate() {
write.pImageInfo = match raw_writes_img_infos[i] {
write.p_image_info = match raw_writes_img_infos[i] {
Some(off) => image_descriptors.as_ptr().offset(off as isize),
None => ptr::null(),
};
write.pBufferInfo = match raw_writes_buf_infos[i] {
write.p_buffer_info = match raw_writes_buf_infos[i] {
Some(off) => buffer_descriptors.as_ptr().offset(off as isize),
None => ptr::null(),
};
write.pTexelBufferView = match raw_writes_buf_view_infos[i] {
write.p_texel_buffer_view = match raw_writes_buf_view_infos[i] {
Some(off) => buffer_views_descriptors.as_ptr().offset(off as isize),
None => ptr::null(),
};
@ -757,7 +755,7 @@ impl UnsafeDescriptorSet {
// It is forbidden to call `vkUpdateDescriptorSets` with 0 writes, so we need to perform
// this emptiness check.
if !raw_writes.is_empty() {
vk.UpdateDescriptorSets(
fns.v1_0.update_descriptor_sets(
device.internal_object(),
raw_writes.len() as u32,
raw_writes.as_ptr(),
@ -769,12 +767,10 @@ impl UnsafeDescriptorSet {
}
unsafe impl VulkanObject for UnsafeDescriptorSet {
type Object = vk::DescriptorSet;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_DESCRIPTOR_SET;
type Object = ash::vk::DescriptorSet;
#[inline]
fn internal_object(&self) -> vk::DescriptorSet {
fn internal_object(&self) -> ash::vk::DescriptorSet {
self.set
}
}
@ -798,17 +794,17 @@ pub struct DescriptorWrite {
#[derive(Debug, Clone)]
enum DescriptorWriteInner {
Sampler(vk::Sampler),
StorageImage(vk::ImageView, vk::ImageLayout),
SampledImage(vk::ImageView, vk::ImageLayout),
CombinedImageSampler(vk::Sampler, vk::ImageView, vk::ImageLayout),
UniformTexelBuffer(vk::BufferView),
StorageTexelBuffer(vk::BufferView),
UniformBuffer(vk::Buffer, usize, usize),
StorageBuffer(vk::Buffer, usize, usize),
DynamicUniformBuffer(vk::Buffer, usize, usize),
DynamicStorageBuffer(vk::Buffer, usize, usize),
InputAttachment(vk::ImageView, vk::ImageLayout),
Sampler(ash::vk::Sampler),
StorageImage(ash::vk::ImageView, ash::vk::ImageLayout),
SampledImage(ash::vk::ImageView, ash::vk::ImageLayout),
CombinedImageSampler(ash::vk::Sampler, ash::vk::ImageView, ash::vk::ImageLayout),
UniformTexelBuffer(ash::vk::BufferView),
StorageTexelBuffer(ash::vk::BufferView),
UniformBuffer(ash::vk::Buffer, usize, usize),
StorageBuffer(ash::vk::Buffer, usize, usize),
DynamicUniformBuffer(ash::vk::Buffer, usize, usize),
DynamicStorageBuffer(ash::vk::Buffer, usize, usize),
InputAttachment(ash::vk::ImageView, ash::vk::ImageLayout),
}
macro_rules! smallvec {
@ -834,8 +830,10 @@ impl DescriptorWrite {
binding,
first_array_element: array_element,
inner: smallvec!({
let layout = layouts.storage_image as u32;
DescriptorWriteInner::StorageImage(image_view.inner().internal_object(), layout)
DescriptorWriteInner::StorageImage(
image_view.inner().internal_object(),
layouts.storage_image.into(),
)
}),
}
}
@ -863,8 +861,10 @@ impl DescriptorWrite {
binding,
first_array_element: array_element,
inner: smallvec!({
let layout = layouts.sampled_image as u32;
DescriptorWriteInner::SampledImage(image_view.inner().internal_object(), layout)
DescriptorWriteInner::SampledImage(
image_view.inner().internal_object(),
layouts.sampled_image.into(),
)
}),
}
}
@ -888,11 +888,10 @@ impl DescriptorWrite {
binding,
first_array_element: array_element,
inner: smallvec!({
let layout = layouts.combined_image_sampler as u32;
DescriptorWriteInner::CombinedImageSampler(
sampler.internal_object(),
image_view.inner().internal_object(),
layout,
layouts.combined_image_sampler.into(),
)
}),
}
@ -1100,8 +1099,10 @@ impl DescriptorWrite {
binding,
first_array_element: array_element,
inner: smallvec!({
let layout = layouts.input_attachment as u32;
DescriptorWriteInner::InputAttachment(image_view.inner().internal_object(), layout)
DescriptorWriteInner::InputAttachment(
image_view.inner().internal_object(),
layouts.input_attachment.into(),
)
}),
}
}

View File

@ -7,22 +7,19 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use smallvec::SmallVec;
use std::fmt;
use std::mem::MaybeUninit;
use std::ptr;
use std::sync::Arc;
use crate::check_errors;
use crate::vk;
use crate::OomError;
use crate::VulkanObject;
use crate::descriptor::descriptor::DescriptorDesc;
use crate::descriptor::descriptor_set::DescriptorSetDesc;
use crate::descriptor::descriptor_set::DescriptorsCount;
use crate::device::Device;
use crate::device::DeviceOwned;
use crate::OomError;
use crate::VulkanObject;
use smallvec::SmallVec;
use std::fmt;
use std::mem::MaybeUninit;
use std::ptr;
use std::sync::Arc;
/// Describes to the Vulkan implementation the layout of all descriptors within a descriptor set.
///
@ -31,7 +28,7 @@ use crate::device::DeviceOwned;
/// confusions.
pub struct UnsafeDescriptorSetLayout {
// The layout.
layout: vk::DescriptorSetLayout,
layout: ash::vk::DescriptorSetLayout,
// The device this layout belongs to.
device: Arc<Device>,
// Descriptors.
@ -71,12 +68,12 @@ impl UnsafeDescriptorSetLayout {
let ty = desc.ty.ty();
descriptors_count.add_num(ty, desc.array_count);
Some(vk::DescriptorSetLayoutBinding {
Some(ash::vk::DescriptorSetLayoutBinding {
binding: binding as u32,
descriptorType: ty as u32,
descriptorCount: desc.array_count,
stageFlags: desc.stages.into(),
pImmutableSamplers: ptr::null(), // FIXME: not yet implemented
descriptor_type: ty.into(),
descriptor_count: desc.array_count,
stage_flags: desc.stages.into(),
p_immutable_samplers: ptr::null(), // FIXME: not yet implemented
})
})
.collect::<SmallVec<[_; 32]>>();
@ -84,17 +81,16 @@ impl UnsafeDescriptorSetLayout {
// Note that it seems legal to have no descriptor at all in the set.
let layout = unsafe {
let infos = vk::DescriptorSetLayoutCreateInfo {
sType: vk::STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
pNext: ptr::null(),
flags: 0, // reserved
bindingCount: bindings.len() as u32,
pBindings: bindings.as_ptr(),
let infos = ash::vk::DescriptorSetLayoutCreateInfo {
flags: ash::vk::DescriptorSetLayoutCreateFlags::empty(),
binding_count: bindings.len() as u32,
p_bindings: bindings.as_ptr(),
..Default::default()
};
let mut output = MaybeUninit::uninit();
let vk = device.pointers();
check_errors(vk.CreateDescriptorSetLayout(
let fns = device.fns();
check_errors(fns.v1_0.create_descriptor_set_layout(
device.internal_object(),
&infos,
ptr::null(),
@ -146,12 +142,10 @@ impl fmt::Debug for UnsafeDescriptorSetLayout {
}
unsafe impl VulkanObject for UnsafeDescriptorSetLayout {
type Object = vk::DescriptorSetLayout;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT;
type Object = ash::vk::DescriptorSetLayout;
#[inline]
fn internal_object(&self) -> vk::DescriptorSetLayout {
fn internal_object(&self) -> ash::vk::DescriptorSetLayout {
self.layout
}
}
@ -160,8 +154,12 @@ impl Drop for UnsafeDescriptorSetLayout {
#[inline]
fn drop(&mut self) {
unsafe {
let vk = self.device.pointers();
vk.DestroyDescriptorSetLayout(self.device.internal_object(), self.layout, ptr::null());
let fns = self.device.fns();
fns.v1_0.destroy_descriptor_set_layout(
self.device.internal_object(),
self.layout,
ptr::null(),
);
}
}
}

View File

@ -17,7 +17,6 @@ use std::str;
use crate::check_errors;
use crate::extensions::SupportedExtensionsError;
use crate::instance::PhysicalDevice;
use crate::vk;
use crate::VulkanObject;
macro_rules! device_extensions {
@ -30,22 +29,22 @@ macro_rules! device_extensions {
impl $rawname {
/// See the docs of supported_by_device().
pub fn supported_by_device_raw(physical_device: PhysicalDevice) -> Result<Self, SupportedExtensionsError> {
let vk = physical_device.instance().pointers();
let fns = physical_device.instance().fns();
let properties: Vec<vk::ExtensionProperties> = unsafe {
let properties: Vec<ash::vk::ExtensionProperties> = unsafe {
let mut num = 0;
check_errors(vk.EnumerateDeviceExtensionProperties(
check_errors(fns.v1_0.enumerate_device_extension_properties(
physical_device.internal_object(), ptr::null(), &mut num, ptr::null_mut()
))?;
let mut properties = Vec::with_capacity(num as usize);
check_errors(vk.EnumerateDeviceExtensionProperties(
check_errors(fns.v1_0.enumerate_device_extension_properties(
physical_device.internal_object(), ptr::null(), &mut num, properties.as_mut_ptr()
))?;
properties.set_len(num as usize);
properties
};
Ok($rawname(properties.iter().map(|x| unsafe { CStr::from_ptr(x.extensionName.as_ptr()) }.to_owned()).collect()))
Ok($rawname(properties.iter().map(|x| unsafe { CStr::from_ptr(x.extension_name.as_ptr()) }.to_owned()).collect()))
}
/// Returns an `Extensions` object with extensions supported by the `PhysicalDevice`.
@ -61,16 +60,16 @@ macro_rules! device_extensions {
impl $sname {
/// See the docs of supported_by_device().
pub fn supported_by_device_raw(physical_device: PhysicalDevice) -> Result<Self, SupportedExtensionsError> {
let vk = physical_device.instance().pointers();
let fns = physical_device.instance().fns();
let properties: Vec<vk::ExtensionProperties> = unsafe {
let properties: Vec<ash::vk::ExtensionProperties> = unsafe {
let mut num = 0;
check_errors(vk.EnumerateDeviceExtensionProperties(
check_errors(fns.v1_0.enumerate_device_extension_properties(
physical_device.internal_object(), ptr::null(), &mut num, ptr::null_mut()
))?;
let mut properties = Vec::with_capacity(num as usize);
check_errors(vk.EnumerateDeviceExtensionProperties(
check_errors(fns.v1_0.enumerate_device_extension_properties(
physical_device.internal_object(), ptr::null(), &mut num, properties.as_mut_ptr()
))?;
properties.set_len(num as usize);
@ -79,7 +78,7 @@ macro_rules! device_extensions {
let mut extensions = $sname::none();
for property in properties {
let name = unsafe { CStr::from_ptr(property.extensionName.as_ptr()) };
let name = unsafe { CStr::from_ptr(property.extension_name.as_ptr()) };
$(
// TODO: Check specVersion?
if name.to_bytes() == &$s[..] {

View File

@ -7,7 +7,6 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use crate::vk;
use crate::Version;
use std::marker::PhantomPinned;
use std::ptr::addr_of_mut;
@ -103,7 +102,7 @@ macro_rules! features {
fn from(features: &Features) -> Self {
let mut features_ffi = FeaturesFfi::default();
$(
$(features_ffi.$ffi_struct.$($ffi_field).+ |= features.$member as vk::Bool32;)*
$(features_ffi.$ffi_struct.$($ffi_field).+ |= features.$member as ash::vk::Bool32;)*
)+
features_ffi
}
@ -121,174 +120,181 @@ macro_rules! features {
}
features! {
{robust_buffer_access => vulkan_1_0.features.robustBufferAccess},
{full_draw_index_uint32 => vulkan_1_0.features.fullDrawIndexUint32},
{image_cube_array => vulkan_1_0.features.imageCubeArray},
{independent_blend => vulkan_1_0.features.independentBlend},
{geometry_shader => vulkan_1_0.features.geometryShader},
{tessellation_shader => vulkan_1_0.features.tessellationShader},
{sample_rate_shading => vulkan_1_0.features.sampleRateShading},
{dual_src_blend => vulkan_1_0.features.dualSrcBlend},
{logic_op => vulkan_1_0.features.logicOp},
{multi_draw_indirect => vulkan_1_0.features.multiDrawIndirect},
{draw_indirect_first_instance => vulkan_1_0.features.drawIndirectFirstInstance},
{depth_clamp => vulkan_1_0.features.depthClamp},
{depth_bias_clamp => vulkan_1_0.features.depthBiasClamp},
{fill_mode_non_solid => vulkan_1_0.features.fillModeNonSolid},
{depth_bounds => vulkan_1_0.features.depthBounds},
{wide_lines => vulkan_1_0.features.wideLines},
{large_points => vulkan_1_0.features.largePoints},
{alpha_to_one => vulkan_1_0.features.alphaToOne},
{multi_viewport => vulkan_1_0.features.multiViewport},
{sampler_anisotropy => vulkan_1_0.features.samplerAnisotropy},
{texture_compression_etc2 => vulkan_1_0.features.textureCompressionETC2},
{texture_compression_astc_ldr => vulkan_1_0.features.textureCompressionASTC_LDR},
{texture_compression_bc => vulkan_1_0.features.textureCompressionBC},
{occlusion_query_precise => vulkan_1_0.features.occlusionQueryPrecise},
{pipeline_statistics_query => vulkan_1_0.features.pipelineStatisticsQuery},
{vertex_pipeline_stores_and_atomics => vulkan_1_0.features.vertexPipelineStoresAndAtomics},
{fragment_stores_and_atomics => vulkan_1_0.features.fragmentStoresAndAtomics},
{shader_tessellation_and_geometry_point_size => vulkan_1_0.features.shaderTessellationAndGeometryPointSize},
{shader_image_gather_extended => vulkan_1_0.features.shaderImageGatherExtended},
{shader_storage_image_extended_formats => vulkan_1_0.features.shaderStorageImageExtendedFormats},
{shader_storage_image_multisample => vulkan_1_0.features.shaderStorageImageMultisample},
{shader_storage_image_read_without_format => vulkan_1_0.features.shaderStorageImageReadWithoutFormat},
{shader_storage_image_write_without_format => vulkan_1_0.features.shaderStorageImageWriteWithoutFormat},
{shader_uniform_buffer_array_dynamic_indexing => vulkan_1_0.features.shaderUniformBufferArrayDynamicIndexing},
{shader_sampled_image_array_dynamic_indexing => vulkan_1_0.features.shaderSampledImageArrayDynamicIndexing},
{shader_storage_buffer_array_dynamic_indexing => vulkan_1_0.features.shaderStorageBufferArrayDynamicIndexing},
{shader_storage_image_array_dynamic_indexing => vulkan_1_0.features.shaderStorageImageArrayDynamicIndexing},
{shader_clip_distance => vulkan_1_0.features.shaderClipDistance},
{shader_cull_distance => vulkan_1_0.features.shaderCullDistance},
{shader_float64 => vulkan_1_0.features.shaderFloat64},
{shader_int64 => vulkan_1_0.features.shaderInt64},
{shader_int16 => vulkan_1_0.features.shaderInt16},
{shader_resource_residency => vulkan_1_0.features.shaderResourceResidency},
{shader_resource_min_lod => vulkan_1_0.features.shaderResourceMinLod},
{sparse_binding => vulkan_1_0.features.sparseBinding},
{sparse_residency_buffer => vulkan_1_0.features.sparseResidencyBuffer},
{sparse_residency_image2d => vulkan_1_0.features.sparseResidencyImage2D},
{sparse_residency_image3d => vulkan_1_0.features.sparseResidencyImage3D},
{sparse_residency2_samples => vulkan_1_0.features.sparseResidency2Samples},
{sparse_residency4_samples => vulkan_1_0.features.sparseResidency4Samples},
{sparse_residency8_samples => vulkan_1_0.features.sparseResidency8Samples},
{sparse_residency16_samples => vulkan_1_0.features.sparseResidency16Samples},
{sparse_residency_aliased => vulkan_1_0.features.sparseResidencyAliased},
{variable_multisample_rate => vulkan_1_0.features.variableMultisampleRate},
{inherited_queries => vulkan_1_0.features.inheritedQueries},
// Vulkan 1.0
{robust_buffer_access => vulkan_1_0.features.robust_buffer_access},
{full_draw_index_uint32 => vulkan_1_0.features.full_draw_index_uint32},
{image_cube_array => vulkan_1_0.features.image_cube_array},
{independent_blend => vulkan_1_0.features.independent_blend},
{geometry_shader => vulkan_1_0.features.geometry_shader},
{tessellation_shader => vulkan_1_0.features.tessellation_shader},
{sample_rate_shading => vulkan_1_0.features.sample_rate_shading},
{dual_src_blend => vulkan_1_0.features.dual_src_blend},
{logic_op => vulkan_1_0.features.logic_op},
{multi_draw_indirect => vulkan_1_0.features.multi_draw_indirect},
{draw_indirect_first_instance => vulkan_1_0.features.draw_indirect_first_instance},
{depth_clamp => vulkan_1_0.features.depth_clamp},
{depth_bias_clamp => vulkan_1_0.features.depth_bias_clamp},
{fill_mode_non_solid => vulkan_1_0.features.fill_mode_non_solid},
{depth_bounds => vulkan_1_0.features.depth_bounds},
{wide_lines => vulkan_1_0.features.wide_lines},
{large_points => vulkan_1_0.features.large_points},
{alpha_to_one => vulkan_1_0.features.alpha_to_one},
{multi_viewport => vulkan_1_0.features.multi_viewport},
{sampler_anisotropy => vulkan_1_0.features.sampler_anisotropy},
{texture_compression_etc2 => vulkan_1_0.features.texture_compression_etc2},
{texture_compression_astc_ldr => vulkan_1_0.features.texture_compression_astc_ldr},
{texture_compression_bc => vulkan_1_0.features.texture_compression_bc},
{occlusion_query_precise => vulkan_1_0.features.occlusion_query_precise},
{pipeline_statistics_query => vulkan_1_0.features.pipeline_statistics_query},
{vertex_pipeline_stores_and_atomics => vulkan_1_0.features.vertex_pipeline_stores_and_atomics},
{fragment_stores_and_atomics => vulkan_1_0.features.fragment_stores_and_atomics},
{shader_tessellation_and_geometry_point_size => vulkan_1_0.features.shader_tessellation_and_geometry_point_size},
{shader_image_gather_extended => vulkan_1_0.features.shader_image_gather_extended},
{shader_storage_image_extended_formats => vulkan_1_0.features.shader_storage_image_extended_formats},
{shader_storage_image_multisample => vulkan_1_0.features.shader_storage_image_multisample},
{shader_storage_image_read_without_format => vulkan_1_0.features.shader_storage_image_read_without_format},
{shader_storage_image_write_without_format => vulkan_1_0.features.shader_storage_image_write_without_format},
{shader_uniform_buffer_array_dynamic_indexing => vulkan_1_0.features.shader_uniform_buffer_array_dynamic_indexing},
{shader_sampled_image_array_dynamic_indexing => vulkan_1_0.features.shader_sampled_image_array_dynamic_indexing},
{shader_storage_buffer_array_dynamic_indexing => vulkan_1_0.features.shader_storage_buffer_array_dynamic_indexing},
{shader_storage_image_array_dynamic_indexing => vulkan_1_0.features.shader_storage_image_array_dynamic_indexing},
{shader_clip_distance => vulkan_1_0.features.shader_clip_distance},
{shader_cull_distance => vulkan_1_0.features.shader_cull_distance},
{shader_float64 => vulkan_1_0.features.shader_float64},
{shader_int64 => vulkan_1_0.features.shader_int64},
{shader_int16 => vulkan_1_0.features.shader_int16},
{shader_resource_residency => vulkan_1_0.features.shader_resource_residency},
{shader_resource_min_lod => vulkan_1_0.features.shader_resource_min_lod},
{sparse_binding => vulkan_1_0.features.sparse_binding},
{sparse_residency_buffer => vulkan_1_0.features.sparse_residency_buffer},
{sparse_residency_image2d => vulkan_1_0.features.sparse_residency_image2_d},
{sparse_residency_image3d => vulkan_1_0.features.sparse_residency_image3_d},
{sparse_residency2_samples => vulkan_1_0.features.sparse_residency2_samples},
{sparse_residency4_samples => vulkan_1_0.features.sparse_residency4_samples},
{sparse_residency8_samples => vulkan_1_0.features.sparse_residency8_samples},
{sparse_residency16_samples => vulkan_1_0.features.sparse_residency16_samples},
{sparse_residency_aliased => vulkan_1_0.features.sparse_residency_aliased},
{variable_multisample_rate => vulkan_1_0.features.variable_multisample_rate},
{inherited_queries => vulkan_1_0.features.inherited_queries},
{storage_buffer_16bit => vulkan_1_1.storageBuffer16BitAccess | khr_16bit_storage.storageBuffer16BitAccess},
{storage_uniform_16bit => vulkan_1_1.uniformAndStorageBuffer16BitAccess | khr_16bit_storage.uniformAndStorageBuffer16BitAccess},
{storage_push_constant_16bit => vulkan_1_1.storagePushConstant16 | khr_16bit_storage.storagePushConstant16},
{storage_input_output_16bit => vulkan_1_1.storageInputOutput16 | khr_16bit_storage.storageInputOutput16},
// Vulkan 1.1
{storage_buffer_16bit => vulkan_1_1.storage_buffer16_bit_access | khr_16bit_storage.storage_buffer16_bit_access},
{storage_uniform_16bit => vulkan_1_1.uniform_and_storage_buffer16_bit_access | khr_16bit_storage.uniform_and_storage_buffer16_bit_access},
{storage_push_constant_16bit => vulkan_1_1.storage_push_constant16 | khr_16bit_storage.storage_push_constant16},
{storage_input_output_16bit => vulkan_1_1.storage_input_output16 | khr_16bit_storage.storage_input_output16},
{multiview => vulkan_1_1.multiview | khr_multiview.multiview},
{multiview_geometry_shader => vulkan_1_1.multiviewGeometryShader | khr_multiview.multiviewGeometryShader},
{multiview_tessellation_shader => vulkan_1_1.multiviewTessellationShader | khr_multiview.multiviewTessellationShader},
{variable_pointers_storage_buffer => vulkan_1_1.variablePointersStorageBuffer | khr_variable_pointers.variablePointersStorageBuffer},
{variable_pointers => vulkan_1_1.variablePointers | khr_variable_pointers.variablePointers},
{protected_memory => vulkan_1_1.protectedMemory | protected_memory.protectedMemory},
{sampler_ycbcr_conversion => vulkan_1_1.samplerYcbcrConversion | khr_sampler_ycbcr_conversion.samplerYcbcrConversion},
{shader_draw_parameters => vulkan_1_1.shaderDrawParameters | shader_draw_parameters.shaderDrawParameters},
{multiview_geometry_shader => vulkan_1_1.multiview_geometry_shader | khr_multiview.multiview_geometry_shader},
{multiview_tessellation_shader => vulkan_1_1.multiview_tessellation_shader | khr_multiview.multiview_tessellation_shader},
{variable_pointers_storage_buffer => vulkan_1_1.variable_pointers_storage_buffer | khr_variable_pointers.variable_pointers_storage_buffer},
{variable_pointers => vulkan_1_1.variable_pointers | khr_variable_pointers.variable_pointers},
{protected_memory => vulkan_1_1.protected_memory | protected_memory.protected_memory},
{sampler_ycbcr_conversion => vulkan_1_1.sampler_ycbcr_conversion | khr_sampler_ycbcr_conversion.sampler_ycbcr_conversion},
{shader_draw_parameters => vulkan_1_1.shader_draw_parameters | shader_draw_parameters.shader_draw_parameters},
{sampler_mirror_clamp_to_edge => vulkan_1_2.samplerMirrorClampToEdge},
{draw_indirect_count => vulkan_1_2.drawIndirectCount},
{storage_buffer_8bit => vulkan_1_2.storageBuffer8BitAccess | khr_8bit_storage.storageBuffer8BitAccess},
{storage_uniform_8bit => vulkan_1_2.uniformAndStorageBuffer8BitAccess | khr_8bit_storage.uniformAndStorageBuffer8BitAccess},
{storage_push_constant_8bit => vulkan_1_2.storagePushConstant8 | khr_8bit_storage.storagePushConstant8},
{shader_buffer_int64_atomics => vulkan_1_2.shaderBufferInt64Atomics | khr_shader_atomic_int64.shaderBufferInt64Atomics},
{shader_shared_int64_atomics => vulkan_1_2.shaderSharedInt64Atomics | khr_shader_atomic_int64.shaderSharedInt64Atomics},
{shader_float16 => vulkan_1_2.shaderFloat16 | khr_shader_float16_int8.shaderFloat16},
{shader_int8 => vulkan_1_2.shaderInt8 | khr_shader_float16_int8.shaderInt8},
{descriptor_indexing => vulkan_1_2.descriptorIndexing},
{shader_input_attachment_array_dynamic_indexing => vulkan_1_2.shaderInputAttachmentArrayDynamicIndexing | ext_descriptor_indexing.shaderInputAttachmentArrayDynamicIndexing},
{shader_uniform_texel_buffer_array_dynamic_indexing => vulkan_1_2.shaderUniformTexelBufferArrayDynamicIndexing | ext_descriptor_indexing.shaderUniformTexelBufferArrayDynamicIndexing},
{shader_storage_texel_buffer_array_dynamic_indexing => vulkan_1_2.shaderStorageTexelBufferArrayDynamicIndexing | ext_descriptor_indexing.shaderStorageTexelBufferArrayDynamicIndexing},
{shader_uniform_buffer_array_non_uniform_indexing => vulkan_1_2.shaderUniformBufferArrayNonUniformIndexing | ext_descriptor_indexing.shaderUniformBufferArrayNonUniformIndexing},
{shader_sampled_image_array_non_uniform_indexing => vulkan_1_2.shaderSampledImageArrayNonUniformIndexing | ext_descriptor_indexing.shaderSampledImageArrayNonUniformIndexing},
{shader_storage_buffer_array_non_uniform_indexing => vulkan_1_2.shaderStorageBufferArrayNonUniformIndexing | ext_descriptor_indexing.shaderStorageBufferArrayNonUniformIndexing},
{shader_storage_image_array_non_uniform_indexing => vulkan_1_2.shaderStorageImageArrayNonUniformIndexing | ext_descriptor_indexing.shaderStorageImageArrayNonUniformIndexing},
{shader_input_attachment_array_non_uniform_indexing => vulkan_1_2.shaderInputAttachmentArrayNonUniformIndexing | ext_descriptor_indexing.shaderInputAttachmentArrayNonUniformIndexing},
{shader_uniform_texel_buffer_array_non_uniform_indexing => vulkan_1_2.shaderUniformTexelBufferArrayNonUniformIndexing | ext_descriptor_indexing.shaderUniformTexelBufferArrayNonUniformIndexing},
{shader_storage_texel_buffer_array_non_uniform_indexing => vulkan_1_2.shaderStorageTexelBufferArrayNonUniformIndexing | ext_descriptor_indexing.shaderStorageTexelBufferArrayNonUniformIndexing},
{descriptor_binding_uniform_buffer_update_after_bind => vulkan_1_2.descriptorBindingUniformBufferUpdateAfterBind | ext_descriptor_indexing.descriptorBindingUniformBufferUpdateAfterBind},
{descriptor_binding_sampled_image_update_after_bind => vulkan_1_2.descriptorBindingSampledImageUpdateAfterBind | ext_descriptor_indexing.descriptorBindingSampledImageUpdateAfterBind},
{descriptor_binding_storage_image_update_after_bind => vulkan_1_2.descriptorBindingStorageImageUpdateAfterBind | ext_descriptor_indexing.descriptorBindingStorageImageUpdateAfterBind},
{descriptor_binding_storage_buffer_update_after_bind => vulkan_1_2.descriptorBindingStorageBufferUpdateAfterBind | ext_descriptor_indexing.descriptorBindingStorageBufferUpdateAfterBind},
{descriptor_binding_uniform_texel_buffer_update_after_bind => vulkan_1_2.descriptorBindingUniformTexelBufferUpdateAfterBind | ext_descriptor_indexing.descriptorBindingUniformTexelBufferUpdateAfterBind},
{descriptor_binding_storage_texel_buffer_update_after_bind => vulkan_1_2.descriptorBindingStorageTexelBufferUpdateAfterBind | ext_descriptor_indexing.descriptorBindingStorageTexelBufferUpdateAfterBind},
{descriptor_binding_update_unused_while_pending => vulkan_1_2.descriptorBindingUpdateUnusedWhilePending | ext_descriptor_indexing.descriptorBindingUpdateUnusedWhilePending},
{descriptor_binding_partially_bound => vulkan_1_2.descriptorBindingPartiallyBound | ext_descriptor_indexing.descriptorBindingPartiallyBound},
{descriptor_binding_variable_descriptor_count => vulkan_1_2.descriptorBindingVariableDescriptorCount | ext_descriptor_indexing.descriptorBindingVariableDescriptorCount},
{runtime_descriptor_array => vulkan_1_2.runtimeDescriptorArray | ext_descriptor_indexing.runtimeDescriptorArray},
{sampler_filter_minmax => vulkan_1_2.samplerFilterMinmax},
{scalar_block_layout => vulkan_1_2.scalarBlockLayout | ext_scalar_block_layout.scalarBlockLayout},
{imageless_framebuffer => vulkan_1_2.imagelessFramebuffer | khr_imageless_framebuffer.imagelessFramebuffer},
{uniform_buffer_standard_layout => vulkan_1_2.uniformBufferStandardLayout | khr_uniform_buffer_standard_layout.uniformBufferStandardLayout},
{shader_subgroup_extended_types => vulkan_1_2.shaderSubgroupExtendedTypes | khr_shader_subgroup_extended_types.shaderSubgroupExtendedTypes},
{separate_depth_stencil_layouts => vulkan_1_2.separateDepthStencilLayouts | khr_separate_depth_stencil_layouts.separateDepthStencilLayouts},
{host_query_reset => vulkan_1_2.hostQueryReset | ext_host_query_reset.hostQueryReset},
{timeline_semaphore => vulkan_1_2.timelineSemaphore | khr_timeline_semaphore.timelineSemaphore},
{buffer_device_address => vulkan_1_2.bufferDeviceAddress | khr_buffer_device_address.bufferDeviceAddress},
{buffer_device_address_capture_replay => vulkan_1_2.bufferDeviceAddressCaptureReplay | khr_buffer_device_address.bufferDeviceAddressCaptureReplay},
{buffer_device_address_multi_device => vulkan_1_2.bufferDeviceAddressMultiDevice | khr_buffer_device_address.bufferDeviceAddressMultiDevice},
{vulkan_memory_model => vulkan_1_2.vulkanMemoryModel | khr_vulkan_memory_model.vulkanMemoryModel},
{vulkan_memory_model_device_scope => vulkan_1_2.vulkanMemoryModelDeviceScope | khr_vulkan_memory_model.vulkanMemoryModelDeviceScope},
{vulkan_memory_model_availability_visibility_chains => vulkan_1_2.vulkanMemoryModelAvailabilityVisibilityChains | khr_vulkan_memory_model.vulkanMemoryModelAvailabilityVisibilityChains},
{shader_output_viewport_index => vulkan_1_2.shaderOutputViewportIndex},
{shader_output_layer => vulkan_1_2.shaderOutputLayer},
{subgroup_broadcast_dynamic_id => vulkan_1_2.subgroupBroadcastDynamicId},
// Vulkan 1.2
{sampler_mirror_clamp_to_edge => vulkan_1_2.sampler_mirror_clamp_to_edge},
{draw_indirect_count => vulkan_1_2.draw_indirect_count},
{storage_buffer_8bit => vulkan_1_2.storage_buffer8_bit_access | khr_8bit_storage.storage_buffer8_bit_access},
{storage_uniform_8bit => vulkan_1_2.uniform_and_storage_buffer8_bit_access | khr_8bit_storage.uniform_and_storage_buffer8_bit_access},
{storage_push_constant_8bit => vulkan_1_2.storage_push_constant8 | khr_8bit_storage.storage_push_constant8},
{shader_buffer_int64_atomics => vulkan_1_2.shader_buffer_int64_atomics | khr_shader_atomic_int64.shader_buffer_int64_atomics},
{shader_shared_int64_atomics => vulkan_1_2.shader_shared_int64_atomics | khr_shader_atomic_int64.shader_shared_int64_atomics},
{shader_float16 => vulkan_1_2.shader_float16 | khr_shader_float16_int8.shader_float16},
{shader_int8 => vulkan_1_2.shader_int8 | khr_shader_float16_int8.shader_int8},
{descriptor_indexing => vulkan_1_2.descriptor_indexing},
{shader_input_attachment_array_dynamic_indexing => vulkan_1_2.shader_input_attachment_array_dynamic_indexing | ext_descriptor_indexing.shader_input_attachment_array_dynamic_indexing},
{shader_uniform_texel_buffer_array_dynamic_indexing => vulkan_1_2.shader_uniform_texel_buffer_array_dynamic_indexing | ext_descriptor_indexing.shader_uniform_texel_buffer_array_dynamic_indexing},
{shader_storage_texel_buffer_array_dynamic_indexing => vulkan_1_2.shader_storage_texel_buffer_array_dynamic_indexing | ext_descriptor_indexing.shader_storage_texel_buffer_array_dynamic_indexing},
{shader_uniform_buffer_array_non_uniform_indexing => vulkan_1_2.shader_uniform_buffer_array_non_uniform_indexing | ext_descriptor_indexing.shader_uniform_buffer_array_non_uniform_indexing},
{shader_sampled_image_array_non_uniform_indexing => vulkan_1_2.shader_sampled_image_array_non_uniform_indexing | ext_descriptor_indexing.shader_sampled_image_array_non_uniform_indexing},
{shader_storage_buffer_array_non_uniform_indexing => vulkan_1_2.shader_storage_buffer_array_non_uniform_indexing | ext_descriptor_indexing.shader_storage_buffer_array_non_uniform_indexing},
{shader_storage_image_array_non_uniform_indexing => vulkan_1_2.shader_storage_image_array_non_uniform_indexing | ext_descriptor_indexing.shader_storage_image_array_non_uniform_indexing},
{shader_input_attachment_array_non_uniform_indexing => vulkan_1_2.shader_input_attachment_array_non_uniform_indexing | ext_descriptor_indexing.shader_input_attachment_array_non_uniform_indexing},
{shader_uniform_texel_buffer_array_non_uniform_indexing => vulkan_1_2.shader_uniform_texel_buffer_array_non_uniform_indexing | ext_descriptor_indexing.shader_uniform_texel_buffer_array_non_uniform_indexing},
{shader_storage_texel_buffer_array_non_uniform_indexing => vulkan_1_2.shader_storage_texel_buffer_array_non_uniform_indexing | ext_descriptor_indexing.shader_storage_texel_buffer_array_non_uniform_indexing},
{descriptor_binding_uniform_buffer_update_after_bind => vulkan_1_2.descriptor_binding_uniform_buffer_update_after_bind | ext_descriptor_indexing.descriptor_binding_uniform_buffer_update_after_bind},
{descriptor_binding_sampled_image_update_after_bind => vulkan_1_2.descriptor_binding_sampled_image_update_after_bind | ext_descriptor_indexing.descriptor_binding_sampled_image_update_after_bind},
{descriptor_binding_storage_image_update_after_bind => vulkan_1_2.descriptor_binding_storage_image_update_after_bind | ext_descriptor_indexing.descriptor_binding_storage_image_update_after_bind},
{descriptor_binding_storage_buffer_update_after_bind => vulkan_1_2.descriptor_binding_storage_buffer_update_after_bind | ext_descriptor_indexing.descriptor_binding_storage_buffer_update_after_bind},
{descriptor_binding_uniform_texel_buffer_update_after_bind => vulkan_1_2.descriptor_binding_uniform_texel_buffer_update_after_bind | ext_descriptor_indexing.descriptor_binding_uniform_texel_buffer_update_after_bind},
{descriptor_binding_storage_texel_buffer_update_after_bind => vulkan_1_2.descriptor_binding_storage_texel_buffer_update_after_bind | ext_descriptor_indexing.descriptor_binding_storage_texel_buffer_update_after_bind},
{descriptor_binding_update_unused_while_pending => vulkan_1_2.descriptor_binding_update_unused_while_pending | ext_descriptor_indexing.descriptor_binding_update_unused_while_pending},
{descriptor_binding_partially_bound => vulkan_1_2.descriptor_binding_partially_bound | ext_descriptor_indexing.descriptor_binding_partially_bound},
{descriptor_binding_variable_descriptor_count => vulkan_1_2.descriptor_binding_variable_descriptor_count | ext_descriptor_indexing.descriptor_binding_variable_descriptor_count},
{runtime_descriptor_array => vulkan_1_2.runtime_descriptor_array | ext_descriptor_indexing.runtime_descriptor_array},
{sampler_filter_minmax => vulkan_1_2.sampler_filter_minmax},
{scalar_block_layout => vulkan_1_2.scalar_block_layout | ext_scalar_block_layout.scalar_block_layout},
{imageless_framebuffer => vulkan_1_2.imageless_framebuffer | khr_imageless_framebuffer.imageless_framebuffer},
{uniform_buffer_standard_layout => vulkan_1_2.uniform_buffer_standard_layout | khr_uniform_buffer_standard_layout.uniform_buffer_standard_layout},
{shader_subgroup_extended_types => vulkan_1_2.shader_subgroup_extended_types | khr_shader_subgroup_extended_types.shader_subgroup_extended_types},
{separate_depth_stencil_layouts => vulkan_1_2.separate_depth_stencil_layouts | khr_separate_depth_stencil_layouts.separate_depth_stencil_layouts},
{host_query_reset => vulkan_1_2.host_query_reset | ext_host_query_reset.host_query_reset},
{timeline_semaphore => vulkan_1_2.timeline_semaphore | khr_timeline_semaphore.timeline_semaphore},
{buffer_device_address => vulkan_1_2.buffer_device_address | khr_buffer_device_address.buffer_device_address},
{buffer_device_address_capture_replay => vulkan_1_2.buffer_device_address_capture_replay | khr_buffer_device_address.buffer_device_address_capture_replay},
{buffer_device_address_multi_device => vulkan_1_2.buffer_device_address_multi_device | khr_buffer_device_address.buffer_device_address_multi_device},
{vulkan_memory_model => vulkan_1_2.vulkan_memory_model | khr_vulkan_memory_model.vulkan_memory_model},
{vulkan_memory_model_device_scope => vulkan_1_2.vulkan_memory_model_device_scope | khr_vulkan_memory_model.vulkan_memory_model_device_scope},
{vulkan_memory_model_availability_visibility_chains => vulkan_1_2.vulkan_memory_model_availability_visibility_chains | khr_vulkan_memory_model.vulkan_memory_model_availability_visibility_chains},
{shader_output_viewport_index => vulkan_1_2.shader_output_viewport_index},
{shader_output_layer => vulkan_1_2.shader_output_layer},
{subgroup_broadcast_dynamic_id => vulkan_1_2.subgroup_broadcast_dynamic_id},
{ext_buffer_device_address => ext_buffer_address.bufferDeviceAddress},
{ext_buffer_device_address_capture_replay => ext_buffer_address.bufferDeviceAddressCaptureReplay},
{ext_buffer_device_address_multi_device => ext_buffer_address.bufferDeviceAddressMultiDevice},
// Extensions
{ext_buffer_device_address => ext_buffer_address.buffer_device_address},
{ext_buffer_device_address_capture_replay => ext_buffer_address.buffer_device_address_capture_replay},
{ext_buffer_device_address_multi_device => ext_buffer_address.buffer_device_address_multi_device},
}
#[derive(Default)]
pub(crate) struct FeaturesFfi {
_pinned: PhantomPinned,
vulkan_1_0: vk::PhysicalDeviceFeatures2KHR,
vulkan_1_1: vk::PhysicalDeviceVulkan11Features,
vulkan_1_2: vk::PhysicalDeviceVulkan12Features,
vulkan_1_0: ash::vk::PhysicalDeviceFeatures2KHR,
vulkan_1_1: ash::vk::PhysicalDeviceVulkan11Features,
vulkan_1_2: ash::vk::PhysicalDeviceVulkan12Features,
protected_memory: vk::PhysicalDeviceProtectedMemoryFeatures,
shader_draw_parameters: vk::PhysicalDeviceShaderDrawParametersFeatures,
protected_memory: ash::vk::PhysicalDeviceProtectedMemoryFeatures,
shader_draw_parameters: ash::vk::PhysicalDeviceShaderDrawParametersFeatures,
khr_16bit_storage: vk::PhysicalDevice16BitStorageFeaturesKHR,
khr_8bit_storage: vk::PhysicalDevice8BitStorageFeaturesKHR,
khr_buffer_device_address: vk::PhysicalDeviceBufferDeviceAddressFeaturesKHR,
khr_imageless_framebuffer: vk::PhysicalDeviceImagelessFramebufferFeaturesKHR,
khr_multiview: vk::PhysicalDeviceMultiviewFeaturesKHR,
khr_sampler_ycbcr_conversion: vk::PhysicalDeviceSamplerYcbcrConversionFeaturesKHR,
khr_separate_depth_stencil_layouts: vk::PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR,
khr_shader_atomic_int64: vk::PhysicalDeviceShaderAtomicInt64FeaturesKHR,
khr_shader_float16_int8: vk::PhysicalDeviceShaderFloat16Int8FeaturesKHR,
khr_shader_subgroup_extended_types: vk::PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR,
khr_timeline_semaphore: vk::PhysicalDeviceTimelineSemaphoreFeaturesKHR,
khr_uniform_buffer_standard_layout: vk::PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR,
khr_variable_pointers: vk::PhysicalDeviceVariablePointersFeaturesKHR,
khr_vulkan_memory_model: vk::PhysicalDeviceVulkanMemoryModelFeaturesKHR,
khr_16bit_storage: ash::vk::PhysicalDevice16BitStorageFeaturesKHR,
khr_8bit_storage: ash::vk::PhysicalDevice8BitStorageFeaturesKHR,
khr_buffer_device_address: ash::vk::PhysicalDeviceBufferDeviceAddressFeaturesKHR,
khr_imageless_framebuffer: ash::vk::PhysicalDeviceImagelessFramebufferFeaturesKHR,
khr_multiview: ash::vk::PhysicalDeviceMultiviewFeaturesKHR,
khr_sampler_ycbcr_conversion: ash::vk::PhysicalDeviceSamplerYcbcrConversionFeaturesKHR,
khr_separate_depth_stencil_layouts:
ash::vk::PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR,
khr_shader_atomic_int64: ash::vk::PhysicalDeviceShaderAtomicInt64FeaturesKHR,
khr_shader_float16_int8: ash::vk::PhysicalDeviceShaderFloat16Int8FeaturesKHR,
khr_shader_subgroup_extended_types:
ash::vk::PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR,
khr_timeline_semaphore: ash::vk::PhysicalDeviceTimelineSemaphoreFeaturesKHR,
khr_uniform_buffer_standard_layout:
ash::vk::PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR,
khr_variable_pointers: ash::vk::PhysicalDeviceVariablePointersFeaturesKHR,
khr_vulkan_memory_model: ash::vk::PhysicalDeviceVulkanMemoryModelFeaturesKHR,
ext_buffer_address: vk::PhysicalDeviceBufferAddressFeaturesEXT,
ext_descriptor_indexing: vk::PhysicalDeviceDescriptorIndexingFeaturesEXT,
ext_host_query_reset: vk::PhysicalDeviceHostQueryResetFeaturesEXT,
ext_scalar_block_layout: vk::PhysicalDeviceScalarBlockLayoutFeaturesEXT,
ext_buffer_address: ash::vk::PhysicalDeviceBufferAddressFeaturesEXT,
ext_descriptor_indexing: ash::vk::PhysicalDeviceDescriptorIndexingFeaturesEXT,
ext_host_query_reset: ash::vk::PhysicalDeviceHostQueryResetFeaturesEXT,
ext_scalar_block_layout: ash::vk::PhysicalDeviceScalarBlockLayoutFeaturesEXT,
}
macro_rules! push_struct {
($self:ident, $struct:ident) => {
$self.$struct.pNext = $self.vulkan_1_0.pNext;
$self.vulkan_1_0.pNext = addr_of_mut!($self.$struct) as _;
$self.$struct.p_next = $self.vulkan_1_0.p_next;
$self.vulkan_1_0.p_next = addr_of_mut!($self.$struct) as _;
};
}
impl FeaturesFfi {
pub(crate) fn make_chain(&mut self, api_version: Version) {
if api_version >= Version::major_minor(1, 2) {
if api_version >= Version::V1_2 {
push_struct!(self, vulkan_1_1);
push_struct!(self, vulkan_1_2);
} else {
if api_version >= Version::major_minor(1, 1) {
if api_version >= Version::V1_1 {
push_struct!(self, protected_memory);
push_struct!(self, shader_draw_parameters);
}
@ -315,11 +321,11 @@ impl FeaturesFfi {
push_struct!(self, ext_buffer_address);
}
pub(crate) fn head_as_ref(&self) -> &vk::PhysicalDeviceFeatures2KHR {
pub(crate) fn head_as_ref(&self) -> &ash::vk::PhysicalDeviceFeatures2KHR {
&self.vulkan_1_0
}
pub(crate) fn head_as_mut(&mut self) -> &mut vk::PhysicalDeviceFeatures2KHR {
pub(crate) fn head_as_mut(&mut self) -> &mut ash::vk::PhysicalDeviceFeatures2KHR {
&mut self.vulkan_1_0
}
}

View File

@ -24,7 +24,7 @@
//! use vulkano::Version;
//!
//! // Creating the instance. See the documentation of the `instance` module.
//! let instance = match Instance::new(None, Version::major_minor(1, 1), &InstanceExtensions::none(), None) {
//! let instance = match Instance::new(None, Version::V1_1, &InstanceExtensions::none(), None) {
//! Ok(i) => i,
//! Err(err) => panic!("Couldn't build instance: {:?}", err)
//! };
@ -97,6 +97,7 @@ pub(crate) use self::features::FeaturesFfi;
use crate::check_errors;
use crate::command_buffer::pool::StandardCommandPool;
use crate::descriptor::descriptor_set::StdDescriptorPool;
use crate::fns::DeviceFunctions;
use crate::format::Format;
use crate::image::ImageCreateFlags;
use crate::image::ImageFormatProperties;
@ -107,13 +108,12 @@ use crate::instance::Instance;
use crate::instance::PhysicalDevice;
use crate::instance::QueueFamily;
use crate::memory::pool::StdMemoryPool;
use crate::vk;
use crate::Error;
use crate::OomError;
use crate::SynchronizedVulkanObject;
use crate::Version;
use crate::VulkanHandle;
use crate::VulkanObject;
use ash::vk::Handle;
use fnv::FnvHasher;
use smallvec::SmallVec;
use std::collections::hash_map::Entry;
@ -124,6 +124,7 @@ use std::fmt;
use std::hash::BuildHasherDefault;
use std::hash::Hash;
use std::hash::Hasher;
use std::mem;
use std::mem::MaybeUninit;
use std::ops::Deref;
use std::ptr;
@ -139,13 +140,13 @@ mod features;
pub struct Device {
instance: Arc<Instance>,
physical_device: usize,
device: vk::Device,
device: ash::vk::Device,
// The highest version that is supported for this device.
// This is the minimum of Instance::max_api_version and PhysicalDevice::api_version.
api_version: Version,
vk: vk::DevicePointers,
fns: DeviceFunctions,
standard_pool: Mutex<Weak<StdMemoryPool>>,
standard_descriptor_pool: Mutex<Weak<StdDescriptorPool>>,
standard_command_pools:
@ -154,9 +155,9 @@ pub struct Device {
extensions: DeviceExtensions,
active_queue_families: SmallVec<[u32; 8]>,
allocation_count: Mutex<u32>,
fence_pool: Mutex<Vec<vk::Fence>>,
semaphore_pool: Mutex<Vec<vk::Semaphore>>,
event_pool: Mutex<Vec<vk::Event>>,
fence_pool: Mutex<Vec<ash::vk::Fence>>,
semaphore_pool: Mutex<Vec<ash::vk::Semaphore>>,
event_pool: Mutex<Vec<ash::vk::Event>>,
}
// The `StandardCommandPool` type doesn't implement Send/Sync, so we have to manually reimplement
@ -196,7 +197,7 @@ impl Device {
Ext: Into<RawDeviceExtensions>,
{
let instance = phys.instance();
let vk_i = instance.pointers();
let fns_i = instance.fns();
let max_api_version = instance.max_api_version();
let api_version = std::cmp::min(max_api_version, phys.api_version());
@ -268,29 +269,28 @@ impl Device {
// turning `queues` into an array of `vkDeviceQueueCreateInfo` suitable for Vulkan
let queues = queues
.iter()
.map(|&(queue_id, ref priorities)| {
vk::DeviceQueueCreateInfo {
sType: vk::STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
pNext: ptr::null(),
flags: 0, // reserved
queueFamilyIndex: queue_id,
queueCount: priorities.len() as u32,
pQueuePriorities: priorities.as_ptr(),
}
})
.map(
|&(queue_id, ref priorities)| ash::vk::DeviceQueueCreateInfo {
flags: ash::vk::DeviceQueueCreateFlags::empty(),
queue_family_index: queue_id,
queue_count: priorities.len() as u32,
p_queue_priorities: priorities.as_ptr(),
..Default::default()
},
)
.collect::<SmallVec<[_; 16]>>();
// TODO: The plan regarding `robustBufferAccess` is to check the shaders' code to see
// TODO: The plan regarding `robust_buffer_access` is to check the shaders' code to see
// if they can possibly perform out-of-bounds reads and writes. If the user tries
// to use a shader that can perform out-of-bounds operations without having
// `robustBufferAccess` enabled, an error is returned.
// `robust_buffer_access` enabled, an error is returned.
//
// However for the moment this verification isn't performed. In order to be safe,
// we always enable the `robustBufferAccess` feature as it is guaranteed to be
// we always enable the `robust_buffer_access` feature as it is guaranteed to be
// supported everywhere.
//
// The only alternative (while waiting for shaders introspection to work) is to
// make all shaders depend on `robustBufferAccess`. But since usually the
// make all shaders depend on `robust_buffer_access`. But since usually the
// majority of shaders don't need this feature, it would be very annoying to have
// to enable it manually when you don't need it.
//
@ -304,29 +304,29 @@ impl Device {
.loaded_extensions()
.khr_get_physical_device_properties2;
let infos = vk::DeviceCreateInfo {
sType: vk::STRUCTURE_TYPE_DEVICE_CREATE_INFO,
pNext: if has_khr_get_physical_device_properties2 {
let infos = ash::vk::DeviceCreateInfo {
p_next: if has_khr_get_physical_device_properties2 {
features_ffi.head_as_ref() as *const _ as _
} else {
ptr::null()
},
flags: 0, // reserved
queueCreateInfoCount: queues.len() as u32,
pQueueCreateInfos: queues.as_ptr(),
enabledLayerCount: layers_ptr.len() as u32,
ppEnabledLayerNames: layers_ptr.as_ptr(),
enabledExtensionCount: extensions_list.len() as u32,
ppEnabledExtensionNames: extensions_list.as_ptr(),
pEnabledFeatures: if has_khr_get_physical_device_properties2 {
flags: ash::vk::DeviceCreateFlags::empty(),
queue_create_info_count: queues.len() as u32,
p_queue_create_infos: queues.as_ptr(),
enabled_layer_count: layers_ptr.len() as u32,
pp_enabled_layer_names: layers_ptr.as_ptr(),
enabled_extension_count: extensions_list.len() as u32,
pp_enabled_extension_names: extensions_list.as_ptr(),
p_enabled_features: if has_khr_get_physical_device_properties2 {
ptr::null()
} else {
&features_ffi.head_as_ref().features
},
..Default::default()
};
let mut output = MaybeUninit::uninit();
check_errors(vk_i.CreateDevice(
check_errors(fns_i.v1_0.create_device(
phys.internal_object(),
&infos,
ptr::null(),
@ -336,8 +336,8 @@ impl Device {
};
// loading the function pointers of the newly-created device
let vk = vk::DevicePointers::load(|name| unsafe {
vk_i.GetDeviceProcAddr(device, name.as_ptr()) as *const _
let fns = DeviceFunctions::load(|name| unsafe {
mem::transmute(fns_i.v1_0.get_device_proc_addr(device, name.as_ptr()))
});
let mut active_queue_families: SmallVec<[u32; 8]> = SmallVec::new();
@ -355,7 +355,7 @@ impl Device {
physical_device: phys.index(),
device: device,
api_version,
vk: vk,
fns,
standard_pool: Mutex::new(Weak::new()),
standard_descriptor_pool: Mutex::new(Weak::new()),
standard_command_pools: Mutex::new(Default::default()),
@ -392,10 +392,10 @@ impl Device {
self.api_version
}
/// Grants access to the pointers to the Vulkan functions of the device.
/// Grants access to the Vulkan functions of the device.
#[inline]
pub fn pointers(&self) -> &vk::DevicePointers {
&self.vk
pub fn fns(&self) -> &DeviceFunctions {
&self.fns
}
/// Waits until all work on this device has finished. You should never need to call
@ -410,7 +410,7 @@ impl Device {
/// while this function is waiting.
///
pub unsafe fn wait(&self) -> Result<(), OomError> {
check_errors(self.vk.DeviceWaitIdle(self.device))?;
check_errors(self.fns.v1_0.device_wait_idle(self.device))?;
Ok(())
}
@ -521,15 +521,15 @@ impl Device {
&self.allocation_count
}
pub(crate) fn fence_pool(&self) -> &Mutex<Vec<vk::Fence>> {
pub(crate) fn fence_pool(&self) -> &Mutex<Vec<ash::vk::Fence>> {
&self.fence_pool
}
pub(crate) fn semaphore_pool(&self) -> &Mutex<Vec<vk::Semaphore>> {
pub(crate) fn semaphore_pool(&self) -> &Mutex<Vec<ash::vk::Semaphore>> {
&self.semaphore_pool
}
pub(crate) fn event_pool(&self) -> &Mutex<Vec<vk::Event>> {
pub(crate) fn event_pool(&self) -> &Mutex<Vec<ash::vk::Event>> {
&self.event_pool
}
@ -543,7 +543,9 @@ impl Device {
name: &CStr,
) -> Result<(), OomError> {
assert!(object.device().internal_object() == self.internal_object());
unsafe { self.set_object_name_raw(T::TYPE, object.internal_object().value(), name) }
unsafe {
self.set_object_name_raw(T::Object::TYPE, object.internal_object().as_raw(), name)
}
}
/// Assigns a human-readable name to `object` for debugging purposes.
@ -552,18 +554,22 @@ impl Device {
/// `object` must be a Vulkan handle owned by this device, and its type must be accurately described by `ty`.
pub unsafe fn set_object_name_raw(
&self,
ty: vk::ObjectType,
ty: ash::vk::ObjectType,
object: u64,
name: &CStr,
) -> Result<(), OomError> {
let info = vk::DebugUtilsObjectNameInfoEXT {
sType: vk::STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT,
pNext: ptr::null(),
objectType: ty,
objectHandle: object,
pObjectName: name.as_ptr(),
let info = ash::vk::DebugUtilsObjectNameInfoEXT {
object_type: ty,
object_handle: object,
p_object_name: name.as_ptr(),
..Default::default()
};
check_errors(self.vk.SetDebugUtilsObjectNameEXT(self.device, &info))?;
check_errors(
self.instance
.fns()
.ext_debug_utils
.set_debug_utils_object_name_ext(self.device, &info),
)?;
Ok(())
}
@ -578,13 +584,13 @@ impl Device {
usage: ImageUsage,
create_flags: ImageCreateFlags,
) -> Result<ImageFormatProperties, String> {
let vk_i = self.instance().pointers();
let fns_i = self.instance().fns();
let mut output = MaybeUninit::uninit();
let physical_device = self.physical_device().internal_object();
unsafe {
let r = vk_i.GetPhysicalDeviceImageFormatProperties(
let r = fns_i.v1_0.get_physical_device_image_format_properties(
physical_device,
format as u32,
format.into(),
ty.into(),
tiling.into(),
usage.into(),
@ -613,12 +619,10 @@ impl fmt::Debug for Device {
}
unsafe impl VulkanObject for Device {
type Object = vk::Device;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_DEVICE;
type Object = ash::vk::Device;
#[inline]
fn internal_object(&self) -> vk::Device {
fn internal_object(&self) -> ash::vk::Device {
self.device
}
}
@ -628,15 +632,21 @@ impl Drop for Device {
fn drop(&mut self) {
unsafe {
for &raw_fence in self.fence_pool.lock().unwrap().iter() {
self.vk.DestroyFence(self.device, raw_fence, ptr::null());
self.fns
.v1_0
.destroy_fence(self.device, raw_fence, ptr::null());
}
for &raw_sem in self.semaphore_pool.lock().unwrap().iter() {
self.vk.DestroySemaphore(self.device, raw_sem, ptr::null());
self.fns
.v1_0
.destroy_semaphore(self.device, raw_sem, ptr::null());
}
for &raw_event in self.event_pool.lock().unwrap().iter() {
self.vk.DestroyEvent(self.device, raw_event, ptr::null());
self.fns
.v1_0
.destroy_event(self.device, raw_event, ptr::null());
}
self.vk.DestroyDevice(self.device, ptr::null());
self.fns.v1_0.destroy_device(self.device, ptr::null());
}
}
}
@ -706,9 +716,12 @@ impl Iterator for QueuesIter {
self.next_queue += 1;
let mut output = MaybeUninit::uninit();
self.device
.vk
.GetDeviceQueue(self.device.device, family, id, output.as_mut_ptr());
self.device.fns.v1_0.get_device_queue(
self.device.device,
family,
id,
output.as_mut_ptr(),
);
Some(Arc::new(Queue {
queue: Mutex::new(output.assume_init()),
@ -809,7 +822,7 @@ impl From<Error> for DeviceCreationError {
// TODO: should use internal synchronization?
#[derive(Debug)]
pub struct Queue {
queue: Mutex<vk::Queue>,
queue: Mutex<ash::vk::Queue>,
device: Arc<Device>,
family: u32,
id: u32, // id within family
@ -851,9 +864,9 @@ impl Queue {
#[inline]
pub fn wait(&self) -> Result<(), OomError> {
unsafe {
let vk = self.device.pointers();
let fns = self.device.fns();
let queue = self.queue.lock().unwrap();
check_errors(vk.QueueWaitIdle(*queue))?;
check_errors(fns.v1_0.queue_wait_idle(*queue))?;
Ok(())
}
}
@ -874,10 +887,10 @@ unsafe impl DeviceOwned for Queue {
}
unsafe impl SynchronizedVulkanObject for Queue {
type Object = vk::Queue;
type Object = ash::vk::Queue;
#[inline]
fn internal_object_guard(&self) -> MutexGuard<vk::Queue> {
fn internal_object_guard(&self) -> MutexGuard<ash::vk::Queue> {
self.queue.lock().unwrap()
}
}

82
vulkano/src/fns.rs Normal file
View File

@ -0,0 +1,82 @@
// Copyright (c) 2021 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// https://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or https://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use ash::vk::*;
use std::ffi::CStr;
use std::os::raw::c_void;
// Generates a struct that bundles one ash function-pointer table per listed
// member (`member_name => ash::vk::TableType`), plus a `load` constructor that
// fills in every table through a caller-supplied symbol-lookup closure.
// Used below to build the entry-, instance- and device-level tables.
macro_rules! fns {
    ($struct_name:ident, { $($member:ident => $fn_struct:ident,)+ }) => {
        pub struct $struct_name {
            $(
                // One loaded function-pointer table per core version/extension.
                pub $member: $fn_struct,
            )+
        }
        impl $struct_name {
            // `load_fn` maps a null-terminated Vulkan function name to its
            // address (typically a closure over `vkGetInstanceProcAddr` or
            // `vkGetDeviceProcAddr`). Each ash-generated `*Fn::load` invokes
            // it once per function in its table.
            // NOTE(review): ash's `load` does not report missing symbols here —
            // presumably unavailable functions get placeholder pointers that
            // must not be called; confirm against the ash documentation.
            pub fn load<F>(mut load_fn: F) -> $struct_name
                where F: FnMut(&CStr) -> *const c_void
            {
                $struct_name {
                    $(
                        $member: ash::vk::$fn_struct::load(&mut load_fn),
                    )+
                }
            }
        }
    };
}
// TODO: would be nice if these could be generated automatically from Vulkano's list of extensions

// Function-pointer tables for the core entry-level (pre-instance) commands of
// each Vulkan core version.
fns!(EntryFunctions, {
    v1_0 => EntryFnV1_0,
    v1_1 => EntryFnV1_1,
    v1_2 => EntryFnV1_2,
});
// Function-pointer tables for instance-level commands: the core versions plus
// every instance extension Vulkano supports (surface/display platforms, debug
// utils). Entries marked `// Promoted` were folded into a later core version
// but are still loadable under their extension name.
fns!(InstanceFunctions, {
    v1_0 => InstanceFnV1_0,
    v1_1 => InstanceFnV1_1,
    v1_2 => InstanceFnV1_2,

    khr_android_surface => KhrAndroidSurfaceFn,
    khr_display => KhrDisplayFn,
    khr_get_physical_device_properties2 => KhrGetPhysicalDeviceProperties2Fn, // Promoted
    khr_surface => KhrSurfaceFn,
    khr_wayland_surface => KhrWaylandSurfaceFn,
    khr_win32_surface => KhrWin32SurfaceFn,
    khr_xcb_surface => KhrXcbSurfaceFn,
    khr_xlib_surface => KhrXlibSurfaceFn,

    // This is an instance extension, so it should be loaded with `vkGetInstanceProcAddr`, despite
    // having device-level functions. This is an unfortunate exception in the spec that even causes
    // the LunarG people headaches:
    // https://github.com/KhronosGroup/Vulkan-Loader/issues/116#issuecomment-580982393
    ext_debug_utils => ExtDebugUtilsFn,

    mvk_ios_surface => MvkIosSurfaceFn,
    mvk_macos_surface => MvkMacosSurfaceFn,
    nn_vi_surface => NnViSurfaceFn,
});
// Function-pointer tables for device-level commands: the core versions plus
// the device extensions Vulkano supports. Entries marked `// Promoted` were
// folded into a later core version but remain loadable under their extension
// name.
fns!(DeviceFunctions, {
    v1_0 => DeviceFnV1_0,
    v1_1 => DeviceFnV1_1,
    v1_2 => DeviceFnV1_2,

    khr_external_memory_fd => KhrExternalMemoryFdFn,
    khr_get_memory_requirements2 => KhrGetMemoryRequirements2Fn, // Promoted
    khr_maintenance1 => KhrMaintenance1Fn, // Promoted
    khr_swapchain => KhrSwapchainFn,

    ext_buffer_device_address => ExtBufferDeviceAddressFn,
    ext_full_screen_exclusive => ExtFullScreenExclusiveFn,
});

View File

@ -94,7 +94,6 @@
use crate::image::ImageAspects;
use crate::instance::PhysicalDevice;
use crate::vk;
use crate::VulkanObject;
use half::f16;
use std::convert::TryFrom;
@ -106,11 +105,11 @@ macro_rules! formats {
($($name:ident => { vk: $vk:ident, bdim: $bdim:expr, size: $sz:expr, ty: $f_ty:ident$(, planes: $planes:expr)?},)+) => (
/// An enumeration of all the possible formats.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(u32)]
#[repr(i32)]
#[allow(missing_docs)]
#[allow(non_camel_case_types)]
pub enum Format {
$($name = vk::$vk,)+
$($name = ash::vk::Format::$vk.as_raw(),)+
}
impl Format {
@ -168,216 +167,216 @@ macro_rules! formats {
}
}
impl TryFrom<vk::Format> for Format {
impl TryFrom<ash::vk::Format> for Format {
type Error = ();
#[inline]
fn try_from(val: vk::Format) -> Result<Format, ()> {
fn try_from(val: ash::vk::Format) -> Result<Format, ()> {
match val {
$(
vk::$vk => Ok(Format::$name),
ash::vk::Format::$vk => Ok(Format::$name),
)+
_ => Err(()),
}
}
}
impl From<Format> for vk::Format {
impl From<Format> for ash::vk::Format {
#[inline]
fn from(val: Format) -> Self {
val as vk::Format
ash::vk::Format::from_raw(val as i32)
}
}
);
}
formats! {
R4G4UnormPack8 => {vk: FORMAT_R4G4_UNORM_PACK8, bdim: (1, 1), size: Some(1), ty: Float},
R4G4B4A4UnormPack16 => {vk: FORMAT_R4G4B4A4_UNORM_PACK16, bdim: (1, 1), size: Some(2), ty: Float},
B4G4R4A4UnormPack16 => {vk: FORMAT_B4G4R4A4_UNORM_PACK16, bdim: (1, 1), size: Some(2), ty: Float},
R5G6B5UnormPack16 => {vk: FORMAT_R5G6B5_UNORM_PACK16, bdim: (1, 1), size: Some(2), ty: Float},
B5G6R5UnormPack16 => {vk: FORMAT_B5G6R5_UNORM_PACK16, bdim: (1, 1), size: Some(2), ty: Float},
R5G5B5A1UnormPack16 => {vk: FORMAT_R5G5B5A1_UNORM_PACK16, bdim: (1, 1), size: Some(2), ty: Float},
B5G5R5A1UnormPack16 => {vk: FORMAT_B5G5R5A1_UNORM_PACK16, bdim: (1, 1), size: Some(2), ty: Float},
A1R5G5B5UnormPack16 => {vk: FORMAT_A1R5G5B5_UNORM_PACK16, bdim: (1, 1), size: Some(2), ty: Float},
R8Unorm => {vk: FORMAT_R8_UNORM, bdim: (1, 1), size: Some(1), ty: Float},
R8Snorm => {vk: FORMAT_R8_SNORM, bdim: (1, 1), size: Some(1), ty: Float},
R8Uscaled => {vk: FORMAT_R8_USCALED, bdim: (1, 1), size: Some(1), ty: Float},
R8Sscaled => {vk: FORMAT_R8_SSCALED, bdim: (1, 1), size: Some(1), ty: Float},
R8Uint => {vk: FORMAT_R8_UINT, bdim: (1, 1), size: Some(1), ty: Uint},
R8Sint => {vk: FORMAT_R8_SINT, bdim: (1, 1), size: Some(1), ty: Sint},
R8Srgb => {vk: FORMAT_R8_SRGB, bdim: (1, 1), size: Some(1), ty: Float},
R8G8Unorm => {vk: FORMAT_R8G8_UNORM, bdim: (1, 1), size: Some(2), ty: Float},
R8G8Snorm => {vk: FORMAT_R8G8_SNORM, bdim: (1, 1), size: Some(2), ty: Float},
R8G8Uscaled => {vk: FORMAT_R8G8_USCALED, bdim: (1, 1), size: Some(2), ty: Float},
R8G8Sscaled => {vk: FORMAT_R8G8_SSCALED, bdim: (1, 1), size: Some(2), ty: Float},
R8G8Uint => {vk: FORMAT_R8G8_UINT, bdim: (1, 1), size: Some(2), ty: Uint},
R8G8Sint => {vk: FORMAT_R8G8_SINT, bdim: (1, 1), size: Some(2), ty: Sint},
R8G8Srgb => {vk: FORMAT_R8G8_SRGB, bdim: (1, 1), size: Some(2), ty: Float},
R8G8B8Unorm => {vk: FORMAT_R8G8B8_UNORM, bdim: (1, 1), size: Some(3), ty: Float},
R8G8B8Snorm => {vk: FORMAT_R8G8B8_SNORM, bdim: (1, 1), size: Some(3), ty: Float},
R8G8B8Uscaled => {vk: FORMAT_R8G8B8_USCALED, bdim: (1, 1), size: Some(3), ty: Float},
R8G8B8Sscaled => {vk: FORMAT_R8G8B8_SSCALED, bdim: (1, 1), size: Some(3), ty: Float},
R8G8B8Uint => {vk: FORMAT_R8G8B8_UINT, bdim: (1, 1), size: Some(3), ty: Uint},
R8G8B8Sint => {vk: FORMAT_R8G8B8_SINT, bdim: (1, 1), size: Some(3), ty: Sint},
R8G8B8Srgb => {vk: FORMAT_R8G8B8_SRGB, bdim: (1, 1), size: Some(3), ty: Float},
B8G8R8Unorm => {vk: FORMAT_B8G8R8_UNORM, bdim: (1, 1), size: Some(3), ty: Float},
B8G8R8Snorm => {vk: FORMAT_B8G8R8_SNORM, bdim: (1, 1), size: Some(3), ty: Float},
B8G8R8Uscaled => {vk: FORMAT_B8G8R8_USCALED, bdim: (1, 1), size: Some(3), ty: Float},
B8G8R8Sscaled => {vk: FORMAT_B8G8R8_SSCALED, bdim: (1, 1), size: Some(3), ty: Float},
B8G8R8Uint => {vk: FORMAT_B8G8R8_UINT, bdim: (1, 1), size: Some(3), ty: Uint},
B8G8R8Sint => {vk: FORMAT_B8G8R8_SINT, bdim: (1, 1), size: Some(3), ty: Sint},
B8G8R8Srgb => {vk: FORMAT_B8G8R8_SRGB, bdim: (1, 1), size: Some(3), ty: Float},
R8G8B8A8Unorm => {vk: FORMAT_R8G8B8A8_UNORM, bdim: (1, 1), size: Some(4), ty: Float},
R8G8B8A8Snorm => {vk: FORMAT_R8G8B8A8_SNORM, bdim: (1, 1), size: Some(4), ty: Float},
R8G8B8A8Uscaled => {vk: FORMAT_R8G8B8A8_USCALED, bdim: (1, 1), size: Some(4), ty: Float},
R8G8B8A8Sscaled => {vk: FORMAT_R8G8B8A8_SSCALED, bdim: (1, 1), size: Some(4), ty: Float},
R8G8B8A8Uint => {vk: FORMAT_R8G8B8A8_UINT, bdim: (1, 1), size: Some(4), ty: Uint},
R8G8B8A8Sint => {vk: FORMAT_R8G8B8A8_SINT, bdim: (1, 1), size: Some(4), ty: Sint},
R8G8B8A8Srgb => {vk: FORMAT_R8G8B8A8_SRGB, bdim: (1, 1), size: Some(4), ty: Float},
B8G8R8A8Unorm => {vk: FORMAT_B8G8R8A8_UNORM, bdim: (1, 1), size: Some(4), ty: Float},
B8G8R8A8Snorm => {vk: FORMAT_B8G8R8A8_SNORM, bdim: (1, 1), size: Some(4), ty: Float},
B8G8R8A8Uscaled => {vk: FORMAT_B8G8R8A8_USCALED, bdim: (1, 1), size: Some(4), ty: Float},
B8G8R8A8Sscaled => {vk: FORMAT_B8G8R8A8_SSCALED, bdim: (1, 1), size: Some(4), ty: Float},
B8G8R8A8Uint => {vk: FORMAT_B8G8R8A8_UINT, bdim: (1, 1), size: Some(4), ty: Uint},
B8G8R8A8Sint => {vk: FORMAT_B8G8R8A8_SINT, bdim: (1, 1), size: Some(4), ty: Sint},
B8G8R8A8Srgb => {vk: FORMAT_B8G8R8A8_SRGB, bdim: (1, 1), size: Some(4), ty: Float},
A8B8G8R8UnormPack32 => {vk: FORMAT_A8B8G8R8_UNORM_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
A8B8G8R8SnormPack32 => {vk: FORMAT_A8B8G8R8_SNORM_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
A8B8G8R8UscaledPack32 => {vk: FORMAT_A8B8G8R8_USCALED_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
A8B8G8R8SscaledPack32 => {vk: FORMAT_A8B8G8R8_SSCALED_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
A8B8G8R8UintPack32 => {vk: FORMAT_A8B8G8R8_UINT_PACK32, bdim: (1, 1), size: Some(4), ty: Uint},
A8B8G8R8SintPack32 => {vk: FORMAT_A8B8G8R8_SINT_PACK32, bdim: (1, 1), size: Some(4), ty: Sint},
A8B8G8R8SrgbPack32 => {vk: FORMAT_A8B8G8R8_SRGB_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
A2R10G10B10UnormPack32 => {vk: FORMAT_A2R10G10B10_UNORM_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
A2R10G10B10SnormPack32 => {vk: FORMAT_A2R10G10B10_SNORM_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
A2R10G10B10UscaledPack32 => {vk: FORMAT_A2R10G10B10_USCALED_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
A2R10G10B10SscaledPack32 => {vk: FORMAT_A2R10G10B10_SSCALED_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
A2R10G10B10UintPack32 => {vk: FORMAT_A2R10G10B10_UINT_PACK32, bdim: (1, 1), size: Some(4), ty: Uint},
A2R10G10B10SintPack32 => {vk: FORMAT_A2R10G10B10_SINT_PACK32, bdim: (1, 1), size: Some(4), ty: Sint},
A2B10G10R10UnormPack32 => {vk: FORMAT_A2B10G10R10_UNORM_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
A2B10G10R10SnormPack32 => {vk: FORMAT_A2B10G10R10_SNORM_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
A2B10G10R10UscaledPack32 => {vk: FORMAT_A2B10G10R10_USCALED_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
A2B10G10R10SscaledPack32 => {vk: FORMAT_A2B10G10R10_SSCALED_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
A2B10G10R10UintPack32 => {vk: FORMAT_A2B10G10R10_UINT_PACK32, bdim: (1, 1), size: Some(4), ty: Uint},
A2B10G10R10SintPack32 => {vk: FORMAT_A2B10G10R10_SINT_PACK32, bdim: (1, 1), size: Some(4), ty: Sint},
R16Unorm => {vk: FORMAT_R16_UNORM, bdim: (1, 1), size: Some(2), ty: Float},
R16Snorm => {vk: FORMAT_R16_SNORM, bdim: (1, 1), size: Some(2), ty: Float},
R16Uscaled => {vk: FORMAT_R16_USCALED, bdim: (1, 1), size: Some(2), ty: Float},
R16Sscaled => {vk: FORMAT_R16_SSCALED, bdim: (1, 1), size: Some(2), ty: Float},
R16Uint => {vk: FORMAT_R16_UINT, bdim: (1, 1), size: Some(2), ty: Uint},
R16Sint => {vk: FORMAT_R16_SINT, bdim: (1, 1), size: Some(2), ty: Sint},
R16Sfloat => {vk: FORMAT_R16_SFLOAT, bdim: (1, 1), size: Some(2), ty: Float},
R16G16Unorm => {vk: FORMAT_R16G16_UNORM, bdim: (1, 1), size: Some(4), ty: Float},
R16G16Snorm => {vk: FORMAT_R16G16_SNORM, bdim: (1, 1), size: Some(4), ty: Float},
R16G16Uscaled => {vk: FORMAT_R16G16_USCALED, bdim: (1, 1), size: Some(4), ty: Float},
R16G16Sscaled => {vk: FORMAT_R16G16_SSCALED, bdim: (1, 1), size: Some(4), ty: Float},
R16G16Uint => {vk: FORMAT_R16G16_UINT, bdim: (1, 1), size: Some(4), ty: Uint},
R16G16Sint => {vk: FORMAT_R16G16_SINT, bdim: (1, 1), size: Some(4), ty: Sint},
R16G16Sfloat => {vk: FORMAT_R16G16_SFLOAT, bdim: (1, 1), size: Some(4), ty: Float},
R16G16B16Unorm => {vk: FORMAT_R16G16B16_UNORM, bdim: (1, 1), size: Some(6), ty: Float},
R16G16B16Snorm => {vk: FORMAT_R16G16B16_SNORM, bdim: (1, 1), size: Some(6), ty: Float},
R16G16B16Uscaled => {vk: FORMAT_R16G16B16_USCALED, bdim: (1, 1), size: Some(6), ty: Float},
R16G16B16Sscaled => {vk: FORMAT_R16G16B16_SSCALED, bdim: (1, 1), size: Some(6), ty: Float},
R16G16B16Uint => {vk: FORMAT_R16G16B16_UINT, bdim: (1, 1), size: Some(6), ty: Uint},
R16G16B16Sint => {vk: FORMAT_R16G16B16_SINT, bdim: (1, 1), size: Some(6), ty: Sint},
R16G16B16Sfloat => {vk: FORMAT_R16G16B16_SFLOAT, bdim: (1, 1), size: Some(6), ty: Float},
R16G16B16A16Unorm => {vk: FORMAT_R16G16B16A16_UNORM, bdim: (1, 1), size: Some(8), ty: Float},
R16G16B16A16Snorm => {vk: FORMAT_R16G16B16A16_SNORM, bdim: (1, 1), size: Some(8), ty: Float},
R16G16B16A16Uscaled => {vk: FORMAT_R16G16B16A16_USCALED, bdim: (1, 1), size: Some(8), ty: Float},
R16G16B16A16Sscaled => {vk: FORMAT_R16G16B16A16_SSCALED, bdim: (1, 1), size: Some(8), ty: Float},
R16G16B16A16Uint => {vk: FORMAT_R16G16B16A16_UINT, bdim: (1, 1), size: Some(8), ty: Uint},
R16G16B16A16Sint => {vk: FORMAT_R16G16B16A16_SINT, bdim: (1, 1), size: Some(8), ty: Sint},
R16G16B16A16Sfloat => {vk: FORMAT_R16G16B16A16_SFLOAT, bdim: (1, 1), size: Some(8), ty: Float},
R32Uint => {vk: FORMAT_R32_UINT, bdim: (1, 1), size: Some(4), ty: Uint},
R32Sint => {vk: FORMAT_R32_SINT, bdim: (1, 1), size: Some(4), ty: Sint},
R32Sfloat => {vk: FORMAT_R32_SFLOAT, bdim: (1, 1), size: Some(4), ty: Float},
R32G32Uint => {vk: FORMAT_R32G32_UINT, bdim: (1, 1), size: Some(8), ty: Uint},
R32G32Sint => {vk: FORMAT_R32G32_SINT, bdim: (1, 1), size: Some(8), ty: Sint},
R32G32Sfloat => {vk: FORMAT_R32G32_SFLOAT, bdim: (1, 1), size: Some(8), ty: Float},
R32G32B32Uint => {vk: FORMAT_R32G32B32_UINT, bdim: (1, 1), size: Some(12), ty: Uint},
R32G32B32Sint => {vk: FORMAT_R32G32B32_SINT, bdim: (1, 1), size: Some(12), ty: Sint},
R32G32B32Sfloat => {vk: FORMAT_R32G32B32_SFLOAT, bdim: (1, 1), size: Some(12), ty: Float},
R32G32B32A32Uint => {vk: FORMAT_R32G32B32A32_UINT, bdim: (1, 1), size: Some(16), ty: Uint},
R32G32B32A32Sint => {vk: FORMAT_R32G32B32A32_SINT, bdim: (1, 1), size: Some(16), ty: Sint},
R32G32B32A32Sfloat => {vk: FORMAT_R32G32B32A32_SFLOAT, bdim: (1, 1), size: Some(16), ty: Float},
R64Uint => {vk: FORMAT_R64_UINT, bdim: (1, 1), size: Some(8), ty: Uint},
R64Sint => {vk: FORMAT_R64_SINT, bdim: (1, 1), size: Some(8), ty: Sint},
R64Sfloat => {vk: FORMAT_R64_SFLOAT, bdim: (1, 1), size: Some(8), ty: Float},
R64G64Uint => {vk: FORMAT_R64G64_UINT, bdim: (1, 1), size: Some(16), ty: Uint},
R64G64Sint => {vk: FORMAT_R64G64_SINT, bdim: (1, 1), size: Some(16), ty: Sint},
R64G64Sfloat => {vk: FORMAT_R64G64_SFLOAT, bdim: (1, 1), size: Some(16), ty: Float},
R64G64B64Uint => {vk: FORMAT_R64G64B64_UINT, bdim: (1, 1), size: Some(24), ty: Uint},
R64G64B64Sint => {vk: FORMAT_R64G64B64_SINT, bdim: (1, 1), size: Some(24), ty: Sint},
R64G64B64Sfloat => {vk: FORMAT_R64G64B64_SFLOAT, bdim: (1, 1), size: Some(24), ty: Float},
R64G64B64A64Uint => {vk: FORMAT_R64G64B64A64_UINT, bdim: (1, 1), size: Some(32), ty: Uint},
R64G64B64A64Sint => {vk: FORMAT_R64G64B64A64_SINT, bdim: (1, 1), size: Some(32), ty: Sint},
R64G64B64A64Sfloat => {vk: FORMAT_R64G64B64A64_SFLOAT, bdim: (1, 1), size: Some(32), ty: Float},
B10G11R11UfloatPack32 => {vk: FORMAT_B10G11R11_UFLOAT_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
E5B9G9R9UfloatPack32 => {vk: FORMAT_E5B9G9R9_UFLOAT_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
D16Unorm => {vk: FORMAT_D16_UNORM, bdim: (1, 1), size: Some(2), ty: Depth},
X8_D24UnormPack32 => {vk: FORMAT_X8_D24_UNORM_PACK32, bdim: (1, 1), size: Some(4), ty: Depth},
D32Sfloat => {vk: FORMAT_D32_SFLOAT, bdim: (1, 1), size: Some(4), ty: Depth},
S8Uint => {vk: FORMAT_S8_UINT, bdim: (1, 1), size: Some(1), ty: Stencil},
D16Unorm_S8Uint => {vk: FORMAT_D16_UNORM_S8_UINT, bdim: (1, 1), size: None, ty: DepthStencil},
D24Unorm_S8Uint => {vk: FORMAT_D24_UNORM_S8_UINT, bdim: (1, 1), size: None, ty: DepthStencil},
D32Sfloat_S8Uint => {vk: FORMAT_D32_SFLOAT_S8_UINT, bdim: (1, 1), size: None, ty: DepthStencil},
BC1_RGBUnormBlock => {vk: FORMAT_BC1_RGB_UNORM_BLOCK, bdim: (4, 4), size: Some(8), ty: Compressed},
BC1_RGBSrgbBlock => {vk: FORMAT_BC1_RGB_SRGB_BLOCK, bdim: (4, 4), size: Some(8), ty: Compressed},
BC1_RGBAUnormBlock => {vk: FORMAT_BC1_RGBA_UNORM_BLOCK, bdim: (4, 4), size: Some(8), ty: Compressed},
BC1_RGBASrgbBlock => {vk: FORMAT_BC1_RGBA_SRGB_BLOCK, bdim: (4, 4), size: Some(8), ty: Compressed},
BC2UnormBlock => {vk: FORMAT_BC2_UNORM_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
BC2SrgbBlock => {vk: FORMAT_BC2_SRGB_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
BC3UnormBlock => {vk: FORMAT_BC3_UNORM_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
BC3SrgbBlock => {vk: FORMAT_BC3_SRGB_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
BC4UnormBlock => {vk: FORMAT_BC4_UNORM_BLOCK, bdim: (4, 4), size: Some(8), ty: Compressed},
BC4SnormBlock => {vk: FORMAT_BC4_SNORM_BLOCK, bdim: (4, 4), size: Some(8), ty: Compressed},
BC5UnormBlock => {vk: FORMAT_BC5_UNORM_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
BC5SnormBlock => {vk: FORMAT_BC5_SNORM_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
BC6HUfloatBlock => {vk: FORMAT_BC6H_UFLOAT_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
BC6HSfloatBlock => {vk: FORMAT_BC6H_SFLOAT_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
BC7UnormBlock => {vk: FORMAT_BC7_UNORM_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
BC7SrgbBlock => {vk: FORMAT_BC7_SRGB_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
ETC2_R8G8B8UnormBlock => {vk: FORMAT_ETC2_R8G8B8_UNORM_BLOCK, bdim: (4, 4), size: Some(8), ty: Compressed},
ETC2_R8G8B8SrgbBlock => {vk: FORMAT_ETC2_R8G8B8_SRGB_BLOCK, bdim: (4, 4), size: Some(8), ty: Compressed},
ETC2_R8G8B8A1UnormBlock => {vk: FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK, bdim: (4, 4), size: Some(8), ty: Compressed},
ETC2_R8G8B8A1SrgbBlock => {vk: FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK, bdim: (4, 4), size: Some(8), ty: Compressed},
ETC2_R8G8B8A8UnormBlock => {vk: FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
ETC2_R8G8B8A8SrgbBlock => {vk: FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
EAC_R11UnormBlock => {vk: FORMAT_EAC_R11_UNORM_BLOCK, bdim: (4, 4), size: Some(8), ty: Compressed},
EAC_R11SnormBlock => {vk: FORMAT_EAC_R11_SNORM_BLOCK, bdim: (4, 4), size: Some(8), ty: Compressed},
EAC_R11G11UnormBlock => {vk: FORMAT_EAC_R11G11_UNORM_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
EAC_R11G11SnormBlock => {vk: FORMAT_EAC_R11G11_SNORM_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
ASTC_4x4UnormBlock => {vk: FORMAT_ASTC_4x4_UNORM_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
ASTC_4x4SrgbBlock => {vk: FORMAT_ASTC_4x4_SRGB_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
ASTC_5x4UnormBlock => {vk: FORMAT_ASTC_5x4_UNORM_BLOCK, bdim: (5, 4), size: Some(16), ty: Compressed},
ASTC_5x4SrgbBlock => {vk: FORMAT_ASTC_5x4_SRGB_BLOCK, bdim: (5, 4), size: Some(16), ty: Compressed},
ASTC_5x5UnormBlock => {vk: FORMAT_ASTC_5x5_UNORM_BLOCK, bdim: (5, 5), size: Some(16), ty: Compressed},
ASTC_5x5SrgbBlock => {vk: FORMAT_ASTC_5x5_SRGB_BLOCK, bdim: (5, 5), size: Some(16), ty: Compressed},
ASTC_6x5UnormBlock => {vk: FORMAT_ASTC_6x5_UNORM_BLOCK, bdim: (6, 5), size: Some(16), ty: Compressed},
ASTC_6x5SrgbBlock => {vk: FORMAT_ASTC_6x5_SRGB_BLOCK, bdim: (6, 5), size: Some(16), ty: Compressed},
ASTC_6x6UnormBlock => {vk: FORMAT_ASTC_6x6_UNORM_BLOCK, bdim: (6, 6), size: Some(16), ty: Compressed},
ASTC_6x6SrgbBlock => {vk: FORMAT_ASTC_6x6_SRGB_BLOCK, bdim: (6, 6), size: Some(16), ty: Compressed},
ASTC_8x5UnormBlock => {vk: FORMAT_ASTC_8x5_UNORM_BLOCK, bdim: (8, 5), size: Some(16), ty: Compressed},
ASTC_8x5SrgbBlock => {vk: FORMAT_ASTC_8x5_SRGB_BLOCK, bdim: (8, 5), size: Some(16), ty: Compressed},
ASTC_8x6UnormBlock => {vk: FORMAT_ASTC_8x6_UNORM_BLOCK, bdim: (8, 6), size: Some(16), ty: Compressed},
ASTC_8x6SrgbBlock => {vk: FORMAT_ASTC_8x6_SRGB_BLOCK, bdim: (8, 6), size: Some(16), ty: Compressed},
ASTC_8x8UnormBlock => {vk: FORMAT_ASTC_8x8_UNORM_BLOCK, bdim: (8, 8), size: Some(16), ty: Compressed},
ASTC_8x8SrgbBlock => {vk: FORMAT_ASTC_8x8_SRGB_BLOCK, bdim: (8, 8), size: Some(16), ty: Compressed},
ASTC_10x5UnormBlock => {vk: FORMAT_ASTC_10x5_UNORM_BLOCK, bdim: (10, 5), size: Some(16), ty: Compressed},
ASTC_10x5SrgbBlock => {vk: FORMAT_ASTC_10x5_SRGB_BLOCK, bdim: (10, 5), size: Some(16), ty: Compressed},
ASTC_10x6UnormBlock => {vk: FORMAT_ASTC_10x6_UNORM_BLOCK, bdim: (10, 6), size: Some(16), ty: Compressed},
ASTC_10x6SrgbBlock => {vk: FORMAT_ASTC_10x6_SRGB_BLOCK, bdim: (10, 6), size: Some(16), ty: Compressed},
ASTC_10x8UnormBlock => {vk: FORMAT_ASTC_10x8_UNORM_BLOCK, bdim: (10, 8), size: Some(16), ty: Compressed},
ASTC_10x8SrgbBlock => {vk: FORMAT_ASTC_10x8_SRGB_BLOCK, bdim: (10, 8), size: Some(16), ty: Compressed},
ASTC_10x10UnormBlock => {vk: FORMAT_ASTC_10x10_UNORM_BLOCK, bdim: (10, 10), size: Some(16), ty: Compressed},
ASTC_10x10SrgbBlock => {vk: FORMAT_ASTC_10x10_SRGB_BLOCK, bdim: (10, 10), size: Some(16), ty: Compressed},
ASTC_12x10UnormBlock => {vk: FORMAT_ASTC_12x10_UNORM_BLOCK, bdim: (12, 10), size: Some(16), ty: Compressed},
ASTC_12x10SrgbBlock => {vk: FORMAT_ASTC_12x10_SRGB_BLOCK, bdim: (12, 10), size: Some(16), ty: Compressed},
ASTC_12x12UnormBlock => {vk: FORMAT_ASTC_12x12_UNORM_BLOCK, bdim: (12, 12), size: Some(16), ty: Compressed},
ASTC_12x12SrgbBlock => {vk: FORMAT_ASTC_12x12_SRGB_BLOCK, bdim: (12, 12), size: Some(16), ty: Compressed},
G8B8R8_3PLANE420Unorm => {vk: FORMAT_G8_B8_R8_3PLANE_420_UNORM, bdim: (1, 1), size: None, ty: Ycbcr, planes: 3},
G8B8R8_2PLANE420Unorm => {vk: FORMAT_G8_B8R8_2PLANE_420_UNORM, bdim: (1, 1), size: None, ty: Ycbcr, planes: 2},
R4G4UnormPack8 => {vk: R4G4_UNORM_PACK8, bdim: (1, 1), size: Some(1), ty: Float},
R4G4B4A4UnormPack16 => {vk: R4G4B4A4_UNORM_PACK16, bdim: (1, 1), size: Some(2), ty: Float},
B4G4R4A4UnormPack16 => {vk: B4G4R4A4_UNORM_PACK16, bdim: (1, 1), size: Some(2), ty: Float},
R5G6B5UnormPack16 => {vk: R5G6B5_UNORM_PACK16, bdim: (1, 1), size: Some(2), ty: Float},
B5G6R5UnormPack16 => {vk: B5G6R5_UNORM_PACK16, bdim: (1, 1), size: Some(2), ty: Float},
R5G5B5A1UnormPack16 => {vk: R5G5B5A1_UNORM_PACK16, bdim: (1, 1), size: Some(2), ty: Float},
B5G5R5A1UnormPack16 => {vk: B5G5R5A1_UNORM_PACK16, bdim: (1, 1), size: Some(2), ty: Float},
A1R5G5B5UnormPack16 => {vk: A1R5G5B5_UNORM_PACK16, bdim: (1, 1), size: Some(2), ty: Float},
R8Unorm => {vk: R8_UNORM, bdim: (1, 1), size: Some(1), ty: Float},
R8Snorm => {vk: R8_SNORM, bdim: (1, 1), size: Some(1), ty: Float},
R8Uscaled => {vk: R8_USCALED, bdim: (1, 1), size: Some(1), ty: Float},
R8Sscaled => {vk: R8_SSCALED, bdim: (1, 1), size: Some(1), ty: Float},
R8Uint => {vk: R8_UINT, bdim: (1, 1), size: Some(1), ty: Uint},
R8Sint => {vk: R8_SINT, bdim: (1, 1), size: Some(1), ty: Sint},
R8Srgb => {vk: R8_SRGB, bdim: (1, 1), size: Some(1), ty: Float},
R8G8Unorm => {vk: R8G8_UNORM, bdim: (1, 1), size: Some(2), ty: Float},
R8G8Snorm => {vk: R8G8_SNORM, bdim: (1, 1), size: Some(2), ty: Float},
R8G8Uscaled => {vk: R8G8_USCALED, bdim: (1, 1), size: Some(2), ty: Float},
R8G8Sscaled => {vk: R8G8_SSCALED, bdim: (1, 1), size: Some(2), ty: Float},
R8G8Uint => {vk: R8G8_UINT, bdim: (1, 1), size: Some(2), ty: Uint},
R8G8Sint => {vk: R8G8_SINT, bdim: (1, 1), size: Some(2), ty: Sint},
R8G8Srgb => {vk: R8G8_SRGB, bdim: (1, 1), size: Some(2), ty: Float},
R8G8B8Unorm => {vk: R8G8B8_UNORM, bdim: (1, 1), size: Some(3), ty: Float},
R8G8B8Snorm => {vk: R8G8B8_SNORM, bdim: (1, 1), size: Some(3), ty: Float},
R8G8B8Uscaled => {vk: R8G8B8_USCALED, bdim: (1, 1), size: Some(3), ty: Float},
R8G8B8Sscaled => {vk: R8G8B8_SSCALED, bdim: (1, 1), size: Some(3), ty: Float},
R8G8B8Uint => {vk: R8G8B8_UINT, bdim: (1, 1), size: Some(3), ty: Uint},
R8G8B8Sint => {vk: R8G8B8_SINT, bdim: (1, 1), size: Some(3), ty: Sint},
R8G8B8Srgb => {vk: R8G8B8_SRGB, bdim: (1, 1), size: Some(3), ty: Float},
B8G8R8Unorm => {vk: B8G8R8_UNORM, bdim: (1, 1), size: Some(3), ty: Float},
B8G8R8Snorm => {vk: B8G8R8_SNORM, bdim: (1, 1), size: Some(3), ty: Float},
B8G8R8Uscaled => {vk: B8G8R8_USCALED, bdim: (1, 1), size: Some(3), ty: Float},
B8G8R8Sscaled => {vk: B8G8R8_SSCALED, bdim: (1, 1), size: Some(3), ty: Float},
B8G8R8Uint => {vk: B8G8R8_UINT, bdim: (1, 1), size: Some(3), ty: Uint},
B8G8R8Sint => {vk: B8G8R8_SINT, bdim: (1, 1), size: Some(3), ty: Sint},
B8G8R8Srgb => {vk: B8G8R8_SRGB, bdim: (1, 1), size: Some(3), ty: Float},
R8G8B8A8Unorm => {vk: R8G8B8A8_UNORM, bdim: (1, 1), size: Some(4), ty: Float},
R8G8B8A8Snorm => {vk: R8G8B8A8_SNORM, bdim: (1, 1), size: Some(4), ty: Float},
R8G8B8A8Uscaled => {vk: R8G8B8A8_USCALED, bdim: (1, 1), size: Some(4), ty: Float},
R8G8B8A8Sscaled => {vk: R8G8B8A8_SSCALED, bdim: (1, 1), size: Some(4), ty: Float},
R8G8B8A8Uint => {vk: R8G8B8A8_UINT, bdim: (1, 1), size: Some(4), ty: Uint},
R8G8B8A8Sint => {vk: R8G8B8A8_SINT, bdim: (1, 1), size: Some(4), ty: Sint},
R8G8B8A8Srgb => {vk: R8G8B8A8_SRGB, bdim: (1, 1), size: Some(4), ty: Float},
B8G8R8A8Unorm => {vk: B8G8R8A8_UNORM, bdim: (1, 1), size: Some(4), ty: Float},
B8G8R8A8Snorm => {vk: B8G8R8A8_SNORM, bdim: (1, 1), size: Some(4), ty: Float},
B8G8R8A8Uscaled => {vk: B8G8R8A8_USCALED, bdim: (1, 1), size: Some(4), ty: Float},
B8G8R8A8Sscaled => {vk: B8G8R8A8_SSCALED, bdim: (1, 1), size: Some(4), ty: Float},
B8G8R8A8Uint => {vk: B8G8R8A8_UINT, bdim: (1, 1), size: Some(4), ty: Uint},
B8G8R8A8Sint => {vk: B8G8R8A8_SINT, bdim: (1, 1), size: Some(4), ty: Sint},
B8G8R8A8Srgb => {vk: B8G8R8A8_SRGB, bdim: (1, 1), size: Some(4), ty: Float},
A8B8G8R8UnormPack32 => {vk: A8B8G8R8_UNORM_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
A8B8G8R8SnormPack32 => {vk: A8B8G8R8_SNORM_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
A8B8G8R8UscaledPack32 => {vk: A8B8G8R8_USCALED_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
A8B8G8R8SscaledPack32 => {vk: A8B8G8R8_SSCALED_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
A8B8G8R8UintPack32 => {vk: A8B8G8R8_UINT_PACK32, bdim: (1, 1), size: Some(4), ty: Uint},
A8B8G8R8SintPack32 => {vk: A8B8G8R8_SINT_PACK32, bdim: (1, 1), size: Some(4), ty: Sint},
A8B8G8R8SrgbPack32 => {vk: A8B8G8R8_SRGB_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
A2R10G10B10UnormPack32 => {vk: A2R10G10B10_UNORM_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
A2R10G10B10SnormPack32 => {vk: A2R10G10B10_SNORM_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
A2R10G10B10UscaledPack32 => {vk: A2R10G10B10_USCALED_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
A2R10G10B10SscaledPack32 => {vk: A2R10G10B10_SSCALED_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
A2R10G10B10UintPack32 => {vk: A2R10G10B10_UINT_PACK32, bdim: (1, 1), size: Some(4), ty: Uint},
A2R10G10B10SintPack32 => {vk: A2R10G10B10_SINT_PACK32, bdim: (1, 1), size: Some(4), ty: Sint},
A2B10G10R10UnormPack32 => {vk: A2B10G10R10_UNORM_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
A2B10G10R10SnormPack32 => {vk: A2B10G10R10_SNORM_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
A2B10G10R10UscaledPack32 => {vk: A2B10G10R10_USCALED_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
A2B10G10R10SscaledPack32 => {vk: A2B10G10R10_SSCALED_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
A2B10G10R10UintPack32 => {vk: A2B10G10R10_UINT_PACK32, bdim: (1, 1), size: Some(4), ty: Uint},
A2B10G10R10SintPack32 => {vk: A2B10G10R10_SINT_PACK32, bdim: (1, 1), size: Some(4), ty: Sint},
R16Unorm => {vk: R16_UNORM, bdim: (1, 1), size: Some(2), ty: Float},
R16Snorm => {vk: R16_SNORM, bdim: (1, 1), size: Some(2), ty: Float},
R16Uscaled => {vk: R16_USCALED, bdim: (1, 1), size: Some(2), ty: Float},
R16Sscaled => {vk: R16_SSCALED, bdim: (1, 1), size: Some(2), ty: Float},
R16Uint => {vk: R16_UINT, bdim: (1, 1), size: Some(2), ty: Uint},
R16Sint => {vk: R16_SINT, bdim: (1, 1), size: Some(2), ty: Sint},
R16Sfloat => {vk: R16_SFLOAT, bdim: (1, 1), size: Some(2), ty: Float},
R16G16Unorm => {vk: R16G16_UNORM, bdim: (1, 1), size: Some(4), ty: Float},
R16G16Snorm => {vk: R16G16_SNORM, bdim: (1, 1), size: Some(4), ty: Float},
R16G16Uscaled => {vk: R16G16_USCALED, bdim: (1, 1), size: Some(4), ty: Float},
R16G16Sscaled => {vk: R16G16_SSCALED, bdim: (1, 1), size: Some(4), ty: Float},
R16G16Uint => {vk: R16G16_UINT, bdim: (1, 1), size: Some(4), ty: Uint},
R16G16Sint => {vk: R16G16_SINT, bdim: (1, 1), size: Some(4), ty: Sint},
R16G16Sfloat => {vk: R16G16_SFLOAT, bdim: (1, 1), size: Some(4), ty: Float},
R16G16B16Unorm => {vk: R16G16B16_UNORM, bdim: (1, 1), size: Some(6), ty: Float},
R16G16B16Snorm => {vk: R16G16B16_SNORM, bdim: (1, 1), size: Some(6), ty: Float},
R16G16B16Uscaled => {vk: R16G16B16_USCALED, bdim: (1, 1), size: Some(6), ty: Float},
R16G16B16Sscaled => {vk: R16G16B16_SSCALED, bdim: (1, 1), size: Some(6), ty: Float},
R16G16B16Uint => {vk: R16G16B16_UINT, bdim: (1, 1), size: Some(6), ty: Uint},
R16G16B16Sint => {vk: R16G16B16_SINT, bdim: (1, 1), size: Some(6), ty: Sint},
R16G16B16Sfloat => {vk: R16G16B16_SFLOAT, bdim: (1, 1), size: Some(6), ty: Float},
R16G16B16A16Unorm => {vk: R16G16B16A16_UNORM, bdim: (1, 1), size: Some(8), ty: Float},
R16G16B16A16Snorm => {vk: R16G16B16A16_SNORM, bdim: (1, 1), size: Some(8), ty: Float},
R16G16B16A16Uscaled => {vk: R16G16B16A16_USCALED, bdim: (1, 1), size: Some(8), ty: Float},
R16G16B16A16Sscaled => {vk: R16G16B16A16_SSCALED, bdim: (1, 1), size: Some(8), ty: Float},
R16G16B16A16Uint => {vk: R16G16B16A16_UINT, bdim: (1, 1), size: Some(8), ty: Uint},
R16G16B16A16Sint => {vk: R16G16B16A16_SINT, bdim: (1, 1), size: Some(8), ty: Sint},
R16G16B16A16Sfloat => {vk: R16G16B16A16_SFLOAT, bdim: (1, 1), size: Some(8), ty: Float},
R32Uint => {vk: R32_UINT, bdim: (1, 1), size: Some(4), ty: Uint},
R32Sint => {vk: R32_SINT, bdim: (1, 1), size: Some(4), ty: Sint},
R32Sfloat => {vk: R32_SFLOAT, bdim: (1, 1), size: Some(4), ty: Float},
R32G32Uint => {vk: R32G32_UINT, bdim: (1, 1), size: Some(8), ty: Uint},
R32G32Sint => {vk: R32G32_SINT, bdim: (1, 1), size: Some(8), ty: Sint},
R32G32Sfloat => {vk: R32G32_SFLOAT, bdim: (1, 1), size: Some(8), ty: Float},
R32G32B32Uint => {vk: R32G32B32_UINT, bdim: (1, 1), size: Some(12), ty: Uint},
R32G32B32Sint => {vk: R32G32B32_SINT, bdim: (1, 1), size: Some(12), ty: Sint},
R32G32B32Sfloat => {vk: R32G32B32_SFLOAT, bdim: (1, 1), size: Some(12), ty: Float},
R32G32B32A32Uint => {vk: R32G32B32A32_UINT, bdim: (1, 1), size: Some(16), ty: Uint},
R32G32B32A32Sint => {vk: R32G32B32A32_SINT, bdim: (1, 1), size: Some(16), ty: Sint},
R32G32B32A32Sfloat => {vk: R32G32B32A32_SFLOAT, bdim: (1, 1), size: Some(16), ty: Float},
R64Uint => {vk: R64_UINT, bdim: (1, 1), size: Some(8), ty: Uint},
R64Sint => {vk: R64_SINT, bdim: (1, 1), size: Some(8), ty: Sint},
R64Sfloat => {vk: R64_SFLOAT, bdim: (1, 1), size: Some(8), ty: Float},
R64G64Uint => {vk: R64G64_UINT, bdim: (1, 1), size: Some(16), ty: Uint},
R64G64Sint => {vk: R64G64_SINT, bdim: (1, 1), size: Some(16), ty: Sint},
R64G64Sfloat => {vk: R64G64_SFLOAT, bdim: (1, 1), size: Some(16), ty: Float},
R64G64B64Uint => {vk: R64G64B64_UINT, bdim: (1, 1), size: Some(24), ty: Uint},
R64G64B64Sint => {vk: R64G64B64_SINT, bdim: (1, 1), size: Some(24), ty: Sint},
R64G64B64Sfloat => {vk: R64G64B64_SFLOAT, bdim: (1, 1), size: Some(24), ty: Float},
R64G64B64A64Uint => {vk: R64G64B64A64_UINT, bdim: (1, 1), size: Some(32), ty: Uint},
R64G64B64A64Sint => {vk: R64G64B64A64_SINT, bdim: (1, 1), size: Some(32), ty: Sint},
R64G64B64A64Sfloat => {vk: R64G64B64A64_SFLOAT, bdim: (1, 1), size: Some(32), ty: Float},
B10G11R11UfloatPack32 => {vk: B10G11R11_UFLOAT_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
E5B9G9R9UfloatPack32 => {vk: E5B9G9R9_UFLOAT_PACK32, bdim: (1, 1), size: Some(4), ty: Float},
D16Unorm => {vk: D16_UNORM, bdim: (1, 1), size: Some(2), ty: Depth},
X8_D24UnormPack32 => {vk: X8_D24_UNORM_PACK32, bdim: (1, 1), size: Some(4), ty: Depth},
D32Sfloat => {vk: D32_SFLOAT, bdim: (1, 1), size: Some(4), ty: Depth},
S8Uint => {vk: S8_UINT, bdim: (1, 1), size: Some(1), ty: Stencil},
D16Unorm_S8Uint => {vk: D16_UNORM_S8_UINT, bdim: (1, 1), size: None, ty: DepthStencil},
D24Unorm_S8Uint => {vk: D24_UNORM_S8_UINT, bdim: (1, 1), size: None, ty: DepthStencil},
D32Sfloat_S8Uint => {vk: D32_SFLOAT_S8_UINT, bdim: (1, 1), size: None, ty: DepthStencil},
BC1_RGBUnormBlock => {vk: BC1_RGB_UNORM_BLOCK, bdim: (4, 4), size: Some(8), ty: Compressed},
BC1_RGBSrgbBlock => {vk: BC1_RGB_SRGB_BLOCK, bdim: (4, 4), size: Some(8), ty: Compressed},
BC1_RGBAUnormBlock => {vk: BC1_RGBA_UNORM_BLOCK, bdim: (4, 4), size: Some(8), ty: Compressed},
BC1_RGBASrgbBlock => {vk: BC1_RGBA_SRGB_BLOCK, bdim: (4, 4), size: Some(8), ty: Compressed},
BC2UnormBlock => {vk: BC2_UNORM_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
BC2SrgbBlock => {vk: BC2_SRGB_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
BC3UnormBlock => {vk: BC3_UNORM_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
BC3SrgbBlock => {vk: BC3_SRGB_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
BC4UnormBlock => {vk: BC4_UNORM_BLOCK, bdim: (4, 4), size: Some(8), ty: Compressed},
BC4SnormBlock => {vk: BC4_SNORM_BLOCK, bdim: (4, 4), size: Some(8), ty: Compressed},
BC5UnormBlock => {vk: BC5_UNORM_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
BC5SnormBlock => {vk: BC5_SNORM_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
BC6HUfloatBlock => {vk: BC6H_UFLOAT_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
BC6HSfloatBlock => {vk: BC6H_SFLOAT_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
BC7UnormBlock => {vk: BC7_UNORM_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
BC7SrgbBlock => {vk: BC7_SRGB_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
ETC2_R8G8B8UnormBlock => {vk: ETC2_R8G8B8_UNORM_BLOCK, bdim: (4, 4), size: Some(8), ty: Compressed},
ETC2_R8G8B8SrgbBlock => {vk: ETC2_R8G8B8_SRGB_BLOCK, bdim: (4, 4), size: Some(8), ty: Compressed},
ETC2_R8G8B8A1UnormBlock => {vk: ETC2_R8G8B8A1_UNORM_BLOCK, bdim: (4, 4), size: Some(8), ty: Compressed},
ETC2_R8G8B8A1SrgbBlock => {vk: ETC2_R8G8B8A1_SRGB_BLOCK, bdim: (4, 4), size: Some(8), ty: Compressed},
ETC2_R8G8B8A8UnormBlock => {vk: ETC2_R8G8B8A8_UNORM_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
ETC2_R8G8B8A8SrgbBlock => {vk: ETC2_R8G8B8A8_SRGB_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
EAC_R11UnormBlock => {vk: EAC_R11_UNORM_BLOCK, bdim: (4, 4), size: Some(8), ty: Compressed},
EAC_R11SnormBlock => {vk: EAC_R11_SNORM_BLOCK, bdim: (4, 4), size: Some(8), ty: Compressed},
EAC_R11G11UnormBlock => {vk: EAC_R11G11_UNORM_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
EAC_R11G11SnormBlock => {vk: EAC_R11G11_SNORM_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
ASTC_4x4UnormBlock => {vk: ASTC_4X4_UNORM_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
ASTC_4x4SrgbBlock => {vk: ASTC_4X4_SRGB_BLOCK, bdim: (4, 4), size: Some(16), ty: Compressed},
ASTC_5x4UnormBlock => {vk: ASTC_5X4_UNORM_BLOCK, bdim: (5, 4), size: Some(16), ty: Compressed},
ASTC_5x4SrgbBlock => {vk: ASTC_5X4_SRGB_BLOCK, bdim: (5, 4), size: Some(16), ty: Compressed},
ASTC_5x5UnormBlock => {vk: ASTC_5X5_UNORM_BLOCK, bdim: (5, 5), size: Some(16), ty: Compressed},
ASTC_5x5SrgbBlock => {vk: ASTC_5X5_SRGB_BLOCK, bdim: (5, 5), size: Some(16), ty: Compressed},
ASTC_6x5UnormBlock => {vk: ASTC_6X5_UNORM_BLOCK, bdim: (6, 5), size: Some(16), ty: Compressed},
ASTC_6x5SrgbBlock => {vk: ASTC_6X5_SRGB_BLOCK, bdim: (6, 5), size: Some(16), ty: Compressed},
ASTC_6x6UnormBlock => {vk: ASTC_6X6_UNORM_BLOCK, bdim: (6, 6), size: Some(16), ty: Compressed},
ASTC_6x6SrgbBlock => {vk: ASTC_6X6_SRGB_BLOCK, bdim: (6, 6), size: Some(16), ty: Compressed},
ASTC_8x5UnormBlock => {vk: ASTC_8X5_UNORM_BLOCK, bdim: (8, 5), size: Some(16), ty: Compressed},
ASTC_8x5SrgbBlock => {vk: ASTC_8X5_SRGB_BLOCK, bdim: (8, 5), size: Some(16), ty: Compressed},
ASTC_8x6UnormBlock => {vk: ASTC_8X6_UNORM_BLOCK, bdim: (8, 6), size: Some(16), ty: Compressed},
ASTC_8x6SrgbBlock => {vk: ASTC_8X6_SRGB_BLOCK, bdim: (8, 6), size: Some(16), ty: Compressed},
ASTC_8x8UnormBlock => {vk: ASTC_8X8_UNORM_BLOCK, bdim: (8, 8), size: Some(16), ty: Compressed},
ASTC_8x8SrgbBlock => {vk: ASTC_8X8_SRGB_BLOCK, bdim: (8, 8), size: Some(16), ty: Compressed},
ASTC_10x5UnormBlock => {vk: ASTC_10X5_UNORM_BLOCK, bdim: (10, 5), size: Some(16), ty: Compressed},
ASTC_10x5SrgbBlock => {vk: ASTC_10X5_SRGB_BLOCK, bdim: (10, 5), size: Some(16), ty: Compressed},
ASTC_10x6UnormBlock => {vk: ASTC_10X6_UNORM_BLOCK, bdim: (10, 6), size: Some(16), ty: Compressed},
ASTC_10x6SrgbBlock => {vk: ASTC_10X6_SRGB_BLOCK, bdim: (10, 6), size: Some(16), ty: Compressed},
ASTC_10x8UnormBlock => {vk: ASTC_10X8_UNORM_BLOCK, bdim: (10, 8), size: Some(16), ty: Compressed},
ASTC_10x8SrgbBlock => {vk: ASTC_10X8_SRGB_BLOCK, bdim: (10, 8), size: Some(16), ty: Compressed},
ASTC_10x10UnormBlock => {vk: ASTC_10X10_UNORM_BLOCK, bdim: (10, 10), size: Some(16), ty: Compressed},
ASTC_10x10SrgbBlock => {vk: ASTC_10X10_SRGB_BLOCK, bdim: (10, 10), size: Some(16), ty: Compressed},
ASTC_12x10UnormBlock => {vk: ASTC_12X10_UNORM_BLOCK, bdim: (12, 10), size: Some(16), ty: Compressed},
ASTC_12x10SrgbBlock => {vk: ASTC_12X10_SRGB_BLOCK, bdim: (12, 10), size: Some(16), ty: Compressed},
ASTC_12x12UnormBlock => {vk: ASTC_12X12_UNORM_BLOCK, bdim: (12, 12), size: Some(16), ty: Compressed},
ASTC_12x12SrgbBlock => {vk: ASTC_12X12_SRGB_BLOCK, bdim: (12, 12), size: Some(16), ty: Compressed},
G8B8R8_3PLANE420Unorm => {vk: G8_B8_R8_3PLANE_420_UNORM, bdim: (1, 1), size: None, ty: Ycbcr, planes: 3},
G8B8R8_2PLANE420Unorm => {vk: G8_B8R8_2PLANE_420_UNORM, bdim: (1, 1), size: None, ty: Ycbcr, planes: 2},
}
impl Format {
@ -404,9 +403,9 @@ impl Format {
#[inline]
pub fn properties(&self, physical_device: PhysicalDevice) -> FormatProperties {
let vk_properties = unsafe {
let vk_i = physical_device.instance().pointers();
let fns_i = physical_device.instance().fns();
let mut output = MaybeUninit::uninit();
vk_i.GetPhysicalDeviceFormatProperties(
fns_i.v1_0.get_physical_device_format_properties(
physical_device.internal_object(),
(*self).into(),
output.as_mut_ptr(),
@ -415,9 +414,9 @@ impl Format {
};
FormatProperties {
linear_tiling_features: vk_properties.linearTilingFeatures.into(),
optimal_tiling_features: vk_properties.optimalTilingFeatures.into(),
buffer_features: vk_properties.bufferFeatures.into(),
linear_tiling_features: vk_properties.linear_tiling_features.into(),
optimal_tiling_features: vk_properties.optimal_tiling_features.into(),
buffer_features: vk_properties.buffer_features.into(),
}
}
@ -723,37 +722,37 @@ pub struct FormatFeatures {
pub ext_fragment_density_map: bool,
}
impl From<vk::FormatFeatureFlags> for FormatFeatures {
impl From<ash::vk::FormatFeatureFlags> for FormatFeatures {
#[inline]
#[rustfmt::skip]
fn from(val: vk::FormatFeatureFlags) -> FormatFeatures {
fn from(val: ash::vk::FormatFeatureFlags) -> FormatFeatures {
FormatFeatures {
sampled_image: (val & vk::FORMAT_FEATURE_SAMPLED_IMAGE_BIT) != 0,
storage_image: (val & vk::FORMAT_FEATURE_STORAGE_IMAGE_BIT) != 0,
storage_image_atomic: (val & vk::FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT) != 0,
uniform_texel_buffer: (val & vk::FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT) != 0,
storage_texel_buffer: (val & vk::FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT) != 0,
storage_texel_buffer_atomic: (val & vk::FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT) != 0,
vertex_buffer: (val & vk::FORMAT_FEATURE_VERTEX_BUFFER_BIT) != 0,
color_attachment: (val & vk::FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) != 0,
color_attachment_blend: (val & vk::FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT) != 0,
depth_stencil_attachment: (val & vk::FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) != 0,
blit_src: (val & vk::FORMAT_FEATURE_BLIT_SRC_BIT) != 0,
blit_dst: (val & vk::FORMAT_FEATURE_BLIT_DST_BIT) != 0,
sampled_image_filter_linear: (val & vk::FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT) != 0,
transfer_src: (val & vk::FORMAT_FEATURE_TRANSFER_SRC_BIT) != 0,
transfer_dst: (val & vk::FORMAT_FEATURE_TRANSFER_DST_BIT) != 0,
midpoint_chroma_samples: (val & vk::FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT) != 0,
sampled_image_ycbcr_conversion_linear_filter: (val & vk::FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT) != 0,
sampled_image_ycbcr_conversion_separate_reconstruction_filter: (val & vk::FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT) != 0,
sampled_image_ycbcr_conversion_chroma_reconstruction_explicit: (val & vk::FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT) != 0,
sampled_image_ycbcr_conversion_chroma_reconstruction_explicit_forceable: (val & vk::FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT) != 0,
disjoint: (val & vk::FORMAT_FEATURE_DISJOINT_BIT) != 0,
cosited_chroma_samples: (val & vk::FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT) != 0,
sampled_image_filter_minmax: (val & vk::FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT) != 0,
img_sampled_image_filter_cubic: (val & vk::FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG) != 0,
khr_acceleration_structure_vertex_buffer: (val & vk::FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR) != 0,
ext_fragment_density_map: (val & vk::FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT) != 0,
sampled_image: !(val & ash::vk::FormatFeatureFlags::SAMPLED_IMAGE).is_empty(),
storage_image: !(val & ash::vk::FormatFeatureFlags::STORAGE_IMAGE).is_empty(),
storage_image_atomic: !(val & ash::vk::FormatFeatureFlags::STORAGE_IMAGE_ATOMIC).is_empty(),
uniform_texel_buffer: !(val & ash::vk::FormatFeatureFlags::UNIFORM_TEXEL_BUFFER).is_empty(),
storage_texel_buffer: !(val & ash::vk::FormatFeatureFlags::STORAGE_TEXEL_BUFFER).is_empty(),
storage_texel_buffer_atomic: !(val & ash::vk::FormatFeatureFlags::STORAGE_TEXEL_BUFFER_ATOMIC).is_empty(),
vertex_buffer: !(val & ash::vk::FormatFeatureFlags::VERTEX_BUFFER).is_empty(),
color_attachment: !(val & ash::vk::FormatFeatureFlags::COLOR_ATTACHMENT).is_empty(),
color_attachment_blend: !(val & ash::vk::FormatFeatureFlags::COLOR_ATTACHMENT_BLEND).is_empty(),
depth_stencil_attachment: !(val & ash::vk::FormatFeatureFlags::DEPTH_STENCIL_ATTACHMENT).is_empty(),
blit_src: !(val & ash::vk::FormatFeatureFlags::BLIT_SRC).is_empty(),
blit_dst: !(val & ash::vk::FormatFeatureFlags::BLIT_DST).is_empty(),
sampled_image_filter_linear: !(val & ash::vk::FormatFeatureFlags::SAMPLED_IMAGE_FILTER_LINEAR).is_empty(),
transfer_src: !(val & ash::vk::FormatFeatureFlags::TRANSFER_SRC).is_empty(),
transfer_dst: !(val & ash::vk::FormatFeatureFlags::TRANSFER_DST).is_empty(),
midpoint_chroma_samples: !(val & ash::vk::FormatFeatureFlags::MIDPOINT_CHROMA_SAMPLES).is_empty(),
sampled_image_ycbcr_conversion_linear_filter: !(val & ash::vk::FormatFeatureFlags::SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER).is_empty(),
sampled_image_ycbcr_conversion_separate_reconstruction_filter: !(val & ash::vk::FormatFeatureFlags::SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER).is_empty(),
sampled_image_ycbcr_conversion_chroma_reconstruction_explicit: !(val & ash::vk::FormatFeatureFlags::SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT).is_empty(),
sampled_image_ycbcr_conversion_chroma_reconstruction_explicit_forceable: !(val & ash::vk::FormatFeatureFlags::SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE).is_empty(),
disjoint: !(val & ash::vk::FormatFeatureFlags::DISJOINT).is_empty(),
cosited_chroma_samples: !(val & ash::vk::FormatFeatureFlags::COSITED_CHROMA_SAMPLES).is_empty(),
sampled_image_filter_minmax: !(val & ash::vk::FormatFeatureFlags::SAMPLED_IMAGE_FILTER_MINMAX).is_empty(),
img_sampled_image_filter_cubic: !(val & ash::vk::FormatFeatureFlags::SAMPLED_IMAGE_FILTER_CUBIC_IMG).is_empty(),
khr_acceleration_structure_vertex_buffer: !(val & ash::vk::FormatFeatureFlags::ACCELERATION_STRUCTURE_VERTEX_BUFFER_KHR).is_empty(),
ext_fragment_density_map: !(val & ash::vk::FormatFeatureFlags::FRAGMENT_DENSITY_MAP_EXT).is_empty(),
}
}
}

View File

@ -7,7 +7,6 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use crate::vk;
use std::ops::BitOr;
/// An individual data type within an image.
@ -16,22 +15,22 @@ use std::ops::BitOr;
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
#[repr(u32)]
pub enum ImageAspect {
Color = vk::IMAGE_ASPECT_COLOR_BIT,
Depth = vk::IMAGE_ASPECT_DEPTH_BIT,
Stencil = vk::IMAGE_ASPECT_STENCIL_BIT,
Metadata = vk::IMAGE_ASPECT_METADATA_BIT,
Plane0 = vk::IMAGE_ASPECT_PLANE_0_BIT,
Plane1 = vk::IMAGE_ASPECT_PLANE_1_BIT,
Plane2 = vk::IMAGE_ASPECT_PLANE_2_BIT,
MemoryPlane0 = vk::IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT,
MemoryPlane1 = vk::IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT,
MemoryPlane2 = vk::IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT,
Color = ash::vk::ImageAspectFlags::COLOR.as_raw(),
Depth = ash::vk::ImageAspectFlags::DEPTH.as_raw(),
Stencil = ash::vk::ImageAspectFlags::STENCIL.as_raw(),
Metadata = ash::vk::ImageAspectFlags::METADATA.as_raw(),
Plane0 = ash::vk::ImageAspectFlags::PLANE_0.as_raw(),
Plane1 = ash::vk::ImageAspectFlags::PLANE_1.as_raw(),
Plane2 = ash::vk::ImageAspectFlags::PLANE_2.as_raw(),
MemoryPlane0 = ash::vk::ImageAspectFlags::MEMORY_PLANE_0_EXT.as_raw(),
MemoryPlane1 = ash::vk::ImageAspectFlags::MEMORY_PLANE_1_EXT.as_raw(),
MemoryPlane2 = ash::vk::ImageAspectFlags::MEMORY_PLANE_2_EXT.as_raw(),
}
impl From<ImageAspect> for vk::ImageAspectFlags {
impl From<ImageAspect> for ash::vk::ImageAspectFlags {
#[inline]
fn from(value: ImageAspect) -> vk::ImageAspectFlags {
value as u32
fn from(val: ImageAspect) -> Self {
Self::from_raw(val as u32)
}
}
@ -89,58 +88,58 @@ impl BitOr for ImageAspects {
}
}
impl From<ImageAspects> for vk::ImageAspectFlags {
impl From<ImageAspects> for ash::vk::ImageAspectFlags {
#[inline]
fn from(value: ImageAspects) -> vk::ImageAspectFlags {
let mut result = 0;
fn from(value: ImageAspects) -> ash::vk::ImageAspectFlags {
let mut result = ash::vk::ImageAspectFlags::empty();
if value.color {
result |= vk::IMAGE_ASPECT_COLOR_BIT;
result |= ash::vk::ImageAspectFlags::COLOR;
}
if value.depth {
result |= vk::IMAGE_ASPECT_DEPTH_BIT;
result |= ash::vk::ImageAspectFlags::DEPTH;
}
if value.stencil {
result |= vk::IMAGE_ASPECT_STENCIL_BIT;
result |= ash::vk::ImageAspectFlags::STENCIL;
}
if value.metadata {
result |= vk::IMAGE_ASPECT_METADATA_BIT;
result |= ash::vk::ImageAspectFlags::METADATA;
}
if value.plane0 {
result |= vk::IMAGE_ASPECT_PLANE_0_BIT;
result |= ash::vk::ImageAspectFlags::PLANE_0;
}
if value.plane1 {
result |= vk::IMAGE_ASPECT_PLANE_1_BIT;
result |= ash::vk::ImageAspectFlags::PLANE_1;
}
if value.plane2 {
result |= vk::IMAGE_ASPECT_PLANE_2_BIT;
result |= ash::vk::ImageAspectFlags::PLANE_2;
}
if value.memory_plane0 {
result |= vk::IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT;
result |= ash::vk::ImageAspectFlags::MEMORY_PLANE_0_EXT;
}
if value.memory_plane1 {
result |= vk::IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT;
result |= ash::vk::ImageAspectFlags::MEMORY_PLANE_1_EXT;
}
if value.memory_plane2 {
result |= vk::IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT
result |= ash::vk::ImageAspectFlags::MEMORY_PLANE_2_EXT
}
result
}
}
impl From<vk::ImageAspectFlags> for ImageAspects {
impl From<ash::vk::ImageAspectFlags> for ImageAspects {
#[inline]
fn from(val: vk::ImageAspectFlags) -> ImageAspects {
fn from(val: ash::vk::ImageAspectFlags) -> ImageAspects {
ImageAspects {
color: (val & vk::IMAGE_ASPECT_COLOR_BIT) != 0,
depth: (val & vk::IMAGE_ASPECT_DEPTH_BIT) != 0,
stencil: (val & vk::IMAGE_ASPECT_STENCIL_BIT) != 0,
metadata: (val & vk::IMAGE_ASPECT_METADATA_BIT) != 0,
plane0: (val & vk::IMAGE_ASPECT_PLANE_0_BIT) != 0,
plane1: (val & vk::IMAGE_ASPECT_PLANE_1_BIT) != 0,
plane2: (val & vk::IMAGE_ASPECT_PLANE_2_BIT) != 0,
memory_plane0: (val & vk::IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT) != 0,
memory_plane1: (val & vk::IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT) != 0,
memory_plane2: (val & vk::IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT) != 0,
color: !(val & ash::vk::ImageAspectFlags::COLOR).is_empty(),
depth: !(val & ash::vk::ImageAspectFlags::DEPTH).is_empty(),
stencil: !(val & ash::vk::ImageAspectFlags::STENCIL).is_empty(),
metadata: !(val & ash::vk::ImageAspectFlags::METADATA).is_empty(),
plane0: !(val & ash::vk::ImageAspectFlags::PLANE_0).is_empty(),
plane1: !(val & ash::vk::ImageAspectFlags::PLANE_1).is_empty(),
plane2: !(val & ash::vk::ImageAspectFlags::PLANE_2).is_empty(),
memory_plane0: !(val & ash::vk::ImageAspectFlags::MEMORY_PLANE_0_EXT).is_empty(),
memory_plane1: !(val & ash::vk::ImageAspectFlags::MEMORY_PLANE_1_EXT).is_empty(),
memory_plane2: !(val & ash::vk::ImageAspectFlags::MEMORY_PLANE_2_EXT).is_empty(),
}
}
}

View File

@ -23,6 +23,7 @@ use crate::image::ImageDimensions;
use crate::image::ImageInner;
use crate::image::ImageLayout;
use crate::image::ImageUsage;
use crate::image::SampleCount;
use crate::memory::pool::AllocFromRequirementsFilter;
use crate::memory::pool::AllocLayout;
use crate::memory::pool::MappingRequirement;
@ -104,7 +105,13 @@ impl AttachmentImage {
dimensions: [u32; 2],
format: Format,
) -> Result<Arc<AttachmentImage>, ImageCreationError> {
AttachmentImage::new_impl(device, dimensions, format, ImageUsage::none(), 1)
AttachmentImage::new_impl(
device,
dimensions,
format,
ImageUsage::none(),
SampleCount::Sample1,
)
}
/// Same as `new`, but creates an image that can be used as an input attachment.
@ -121,7 +128,7 @@ impl AttachmentImage {
..ImageUsage::none()
};
AttachmentImage::new_impl(device, dimensions, format, base_usage, 1)
AttachmentImage::new_impl(device, dimensions, format, base_usage, SampleCount::Sample1)
}
/// Same as `new`, but creates a multisampled image.
@ -132,7 +139,7 @@ impl AttachmentImage {
pub fn multisampled(
device: Arc<Device>,
dimensions: [u32; 2],
samples: u32,
samples: SampleCount,
format: Format,
) -> Result<Arc<AttachmentImage>, ImageCreationError> {
AttachmentImage::new_impl(device, dimensions, format, ImageUsage::none(), samples)
@ -145,7 +152,7 @@ impl AttachmentImage {
pub fn multisampled_input_attachment(
device: Arc<Device>,
dimensions: [u32; 2],
samples: u32,
samples: SampleCount,
format: Format,
) -> Result<Arc<AttachmentImage>, ImageCreationError> {
let base_usage = ImageUsage {
@ -168,7 +175,7 @@ impl AttachmentImage {
format: Format,
usage: ImageUsage,
) -> Result<Arc<AttachmentImage>, ImageCreationError> {
AttachmentImage::new_impl(device, dimensions, format, usage, 1)
AttachmentImage::new_impl(device, dimensions, format, usage, SampleCount::Sample1)
}
/// Same as `with_usage`, but creates a multisampled image.
@ -181,7 +188,7 @@ impl AttachmentImage {
pub fn multisampled_with_usage(
device: Arc<Device>,
dimensions: [u32; 2],
samples: u32,
samples: SampleCount,
format: Format,
usage: ImageUsage,
) -> Result<Arc<AttachmentImage>, ImageCreationError> {
@ -202,7 +209,7 @@ impl AttachmentImage {
..ImageUsage::none()
};
AttachmentImage::new_impl(device, dimensions, format, base_usage, 1)
AttachmentImage::new_impl(device, dimensions, format, base_usage, SampleCount::Sample1)
}
/// Same as `sampled`, except that the image can be used as an input attachment.
@ -220,7 +227,7 @@ impl AttachmentImage {
..ImageUsage::none()
};
AttachmentImage::new_impl(device, dimensions, format, base_usage, 1)
AttachmentImage::new_impl(device, dimensions, format, base_usage, SampleCount::Sample1)
}
/// Same as `sampled`, but creates a multisampled image.
@ -233,7 +240,7 @@ impl AttachmentImage {
pub fn sampled_multisampled(
device: Arc<Device>,
dimensions: [u32; 2],
samples: u32,
samples: SampleCount,
format: Format,
) -> Result<Arc<AttachmentImage>, ImageCreationError> {
let base_usage = ImageUsage {
@ -252,7 +259,7 @@ impl AttachmentImage {
pub fn sampled_multisampled_input_attachment(
device: Arc<Device>,
dimensions: [u32; 2],
samples: u32,
samples: SampleCount,
format: Format,
) -> Result<Arc<AttachmentImage>, ImageCreationError> {
let base_usage = ImageUsage {
@ -281,7 +288,7 @@ impl AttachmentImage {
..ImageUsage::none()
};
AttachmentImage::new_impl(device, dimensions, format, base_usage, 1)
AttachmentImage::new_impl(device, dimensions, format, base_usage, SampleCount::Sample1)
}
/// Same as `transient`, except that the image can be used as an input attachment.
@ -299,7 +306,7 @@ impl AttachmentImage {
..ImageUsage::none()
};
AttachmentImage::new_impl(device, dimensions, format, base_usage, 1)
AttachmentImage::new_impl(device, dimensions, format, base_usage, SampleCount::Sample1)
}
/// Same as `transient`, but creates a multisampled image.
@ -312,7 +319,7 @@ impl AttachmentImage {
pub fn transient_multisampled(
device: Arc<Device>,
dimensions: [u32; 2],
samples: u32,
samples: SampleCount,
format: Format,
) -> Result<Arc<AttachmentImage>, ImageCreationError> {
let base_usage = ImageUsage {
@ -331,7 +338,7 @@ impl AttachmentImage {
pub fn transient_multisampled_input_attachment(
device: Arc<Device>,
dimensions: [u32; 2],
samples: u32,
samples: SampleCount,
format: Format,
) -> Result<Arc<AttachmentImage>, ImageCreationError> {
let base_usage = ImageUsage {
@ -349,7 +356,7 @@ impl AttachmentImage {
dimensions: [u32; 2],
format: Format,
base_usage: ImageUsage,
samples: u32,
samples: SampleCount,
) -> Result<Arc<AttachmentImage>, ImageCreationError> {
// TODO: check dimensions against the max_framebuffer_width/height/layers limits

View File

@ -31,6 +31,7 @@ use crate::image::ImageInner;
use crate::image::ImageLayout;
use crate::image::ImageUsage;
use crate::image::MipmapsCount;
use crate::image::SampleCount;
use crate::instance::QueueFamily;
use crate::memory::pool::AllocFromRequirementsFilter;
use crate::memory::pool::AllocLayout;
@ -267,7 +268,7 @@ impl ImmutableImage {
format,
flags,
dimensions,
1,
SampleCount::Sample1,
mipmaps,
sharing,
false,

View File

@ -7,8 +7,6 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use crate::vk;
/// Layout of an image.
///
/// > **Note**: In vulkano, image layouts are mostly a low-level detail. You can ignore them,
@ -27,18 +25,25 @@ use crate::vk;
/// Transitioning between layouts can only be done through a GPU-side operation that is part of
/// a command buffer.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[repr(u32)]
#[repr(i32)]
pub enum ImageLayout {
Undefined = vk::IMAGE_LAYOUT_UNDEFINED,
General = vk::IMAGE_LAYOUT_GENERAL,
ColorAttachmentOptimal = vk::IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
DepthStencilAttachmentOptimal = vk::IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
DepthStencilReadOnlyOptimal = vk::IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
ShaderReadOnlyOptimal = vk::IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
TransferSrcOptimal = vk::IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
TransferDstOptimal = vk::IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
Preinitialized = vk::IMAGE_LAYOUT_PREINITIALIZED,
PresentSrc = vk::IMAGE_LAYOUT_PRESENT_SRC_KHR,
Undefined = ash::vk::ImageLayout::UNDEFINED.as_raw(),
General = ash::vk::ImageLayout::GENERAL.as_raw(),
ColorAttachmentOptimal = ash::vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL.as_raw(),
DepthStencilAttachmentOptimal = ash::vk::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL.as_raw(),
DepthStencilReadOnlyOptimal = ash::vk::ImageLayout::DEPTH_STENCIL_READ_ONLY_OPTIMAL.as_raw(),
ShaderReadOnlyOptimal = ash::vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL.as_raw(),
TransferSrcOptimal = ash::vk::ImageLayout::TRANSFER_SRC_OPTIMAL.as_raw(),
TransferDstOptimal = ash::vk::ImageLayout::TRANSFER_DST_OPTIMAL.as_raw(),
Preinitialized = ash::vk::ImageLayout::PREINITIALIZED.as_raw(),
PresentSrc = ash::vk::ImageLayout::PRESENT_SRC_KHR.as_raw(),
}
impl From<ImageLayout> for ash::vk::ImageLayout {
    /// Converts to the raw Vulkan layout value.
    #[inline]
    fn from(layout: ImageLayout) -> Self {
        // Each `ImageLayout` discriminant mirrors Vulkan's own value,
        // so the conversion is a plain integer cast wrapped back up.
        ash::vk::ImageLayout::from_raw(layout as i32)
    }
}
/// The set of layouts to use for an image when used in descriptor of various kinds.

View File

@ -73,6 +73,43 @@ pub mod traits;
mod usage;
pub mod view;
/// The number of samples per texel of an image.
///
/// Each variant's discriminant is the raw value of the matching single-bit
/// Vulkan `VkSampleCountFlagBits` flag, so conversion to
/// `ash::vk::SampleCountFlags` is a free cast.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u32)]
pub enum SampleCount {
    /// 1 sample per texel (no multisampling).
    Sample1 = ash::vk::SampleCountFlags::TYPE_1.as_raw(),
    /// 2 samples per texel.
    Sample2 = ash::vk::SampleCountFlags::TYPE_2.as_raw(),
    /// 4 samples per texel.
    Sample4 = ash::vk::SampleCountFlags::TYPE_4.as_raw(),
    /// 8 samples per texel.
    Sample8 = ash::vk::SampleCountFlags::TYPE_8.as_raw(),
    /// 16 samples per texel.
    Sample16 = ash::vk::SampleCountFlags::TYPE_16.as_raw(),
    /// 32 samples per texel.
    Sample32 = ash::vk::SampleCountFlags::TYPE_32.as_raw(),
    /// 64 samples per texel.
    Sample64 = ash::vk::SampleCountFlags::TYPE_64.as_raw(),
}
impl From<SampleCount> for ash::vk::SampleCountFlags {
    /// Converts the sample count into the corresponding single-bit flag value.
    #[inline]
    fn from(sample_count: SampleCount) -> Self {
        // The enum discriminant is already the raw Vulkan bit for this count.
        ash::vk::SampleCountFlags::from_raw(sample_count as u32)
    }
}
impl TryFrom<u32> for SampleCount {
    type Error = ();

    /// Maps a plain integer sample count onto the matching enum variant.
    ///
    /// Only the power-of-two values defined by Vulkan (1 through 64) are
    /// accepted; any other value yields `Err(())`.
    #[inline]
    fn try_from(count: u32) -> Result<Self, Self::Error> {
        Ok(match count {
            1 => Self::Sample1,
            2 => Self::Sample2,
            4 => Self::Sample4,
            8 => Self::Sample8,
            16 => Self::Sample16,
            32 => Self::Sample32,
            64 => Self::Sample64,
            _ => return Err(()),
        })
    }
}
/// Specifies which sample counts are supported for an image used in storage operations.
#[derive(Debug, Copy, Clone)]
pub struct SampleCounts {
@ -92,44 +129,44 @@ pub struct SampleCounts {
pub sample64: bool,
}
impl From<vk::SampleCountFlags> for SampleCounts {
fn from(sample_counts: vk::SampleCountFlags) -> SampleCounts {
impl From<ash::vk::SampleCountFlags> for SampleCounts {
fn from(sample_counts: ash::vk::SampleCountFlags) -> SampleCounts {
SampleCounts {
sample1: (sample_counts & vk::SAMPLE_COUNT_1_BIT) != 0,
sample2: (sample_counts & vk::SAMPLE_COUNT_2_BIT) != 0,
sample4: (sample_counts & vk::SAMPLE_COUNT_4_BIT) != 0,
sample8: (sample_counts & vk::SAMPLE_COUNT_8_BIT) != 0,
sample16: (sample_counts & vk::SAMPLE_COUNT_16_BIT) != 0,
sample32: (sample_counts & vk::SAMPLE_COUNT_32_BIT) != 0,
sample64: (sample_counts & vk::SAMPLE_COUNT_64_BIT) != 0,
sample1: !(sample_counts & ash::vk::SampleCountFlags::TYPE_1).is_empty(),
sample2: !(sample_counts & ash::vk::SampleCountFlags::TYPE_2).is_empty(),
sample4: !(sample_counts & ash::vk::SampleCountFlags::TYPE_4).is_empty(),
sample8: !(sample_counts & ash::vk::SampleCountFlags::TYPE_8).is_empty(),
sample16: !(sample_counts & ash::vk::SampleCountFlags::TYPE_16).is_empty(),
sample32: !(sample_counts & ash::vk::SampleCountFlags::TYPE_32).is_empty(),
sample64: !(sample_counts & ash::vk::SampleCountFlags::TYPE_64).is_empty(),
}
}
}
impl From<SampleCounts> for vk::SampleCountFlags {
fn from(val: SampleCounts) -> vk::SampleCountFlags {
let mut sample_counts = vk::SampleCountFlags::default();
impl From<SampleCounts> for ash::vk::SampleCountFlags {
fn from(val: SampleCounts) -> ash::vk::SampleCountFlags {
let mut sample_counts = ash::vk::SampleCountFlags::default();
if val.sample1 {
sample_counts |= vk::SAMPLE_COUNT_1_BIT;
sample_counts |= ash::vk::SampleCountFlags::TYPE_1;
}
if val.sample2 {
sample_counts |= vk::SAMPLE_COUNT_2_BIT;
sample_counts |= ash::vk::SampleCountFlags::TYPE_2;
}
if val.sample4 {
sample_counts |= vk::SAMPLE_COUNT_4_BIT;
sample_counts |= ash::vk::SampleCountFlags::TYPE_4;
}
if val.sample8 {
sample_counts |= vk::SAMPLE_COUNT_8_BIT;
sample_counts |= ash::vk::SampleCountFlags::TYPE_8;
}
if val.sample16 {
sample_counts |= vk::SAMPLE_COUNT_16_BIT;
sample_counts |= ash::vk::SampleCountFlags::TYPE_16;
}
if val.sample32 {
sample_counts |= vk::SAMPLE_COUNT_32_BIT;
sample_counts |= ash::vk::SampleCountFlags::TYPE_32;
}
if val.sample64 {
sample_counts |= vk::SAMPLE_COUNT_64_BIT;
sample_counts |= ash::vk::SampleCountFlags::TYPE_64;
}
sample_counts
@ -171,23 +208,23 @@ pub enum Extent {
E3D([u32; 3]),
}
impl From<vk::Extent2D> for Extent {
fn from(extent: vk::Extent2D) -> Self {
impl From<ash::vk::Extent2D> for Extent {
fn from(extent: ash::vk::Extent2D) -> Self {
Extent::E2D([extent.width, extent.height])
}
}
impl From<vk::Extent3D> for Extent {
fn from(extent: vk::Extent3D) -> Self {
impl From<ash::vk::Extent3D> for Extent {
fn from(extent: ash::vk::Extent3D) -> Self {
Extent::E3D([extent.width, extent.height, extent.depth])
}
}
impl TryFrom<Extent> for vk::Extent2D {
impl TryFrom<Extent> for ash::vk::Extent2D {
type Error = ();
fn try_from(extent: Extent) -> Result<Self, Self::Error> {
match extent {
Extent::E2D(a) => Ok(vk::Extent2D {
Extent::E2D(a) => Ok(ash::vk::Extent2D {
width: a[0],
height: a[1],
}),
@ -196,12 +233,12 @@ impl TryFrom<Extent> for vk::Extent2D {
}
}
impl TryFrom<Extent> for vk::Extent3D {
impl TryFrom<Extent> for ash::vk::Extent3D {
type Error = ();
fn try_from(extent: Extent) -> Result<Self, Self::Error> {
match extent {
Extent::E3D(a) => Ok(vk::Extent3D {
Extent::E3D(a) => Ok(ash::vk::Extent3D {
width: a[0],
height: a[1],
depth: a[2],
@ -220,14 +257,14 @@ pub struct ImageFormatProperties {
pub max_resource_size: usize,
}
impl From<vk::ImageFormatProperties> for ImageFormatProperties {
fn from(props: vk::ImageFormatProperties) -> Self {
impl From<ash::vk::ImageFormatProperties> for ImageFormatProperties {
fn from(props: ash::vk::ImageFormatProperties) -> Self {
Self {
max_extent: props.maxExtent.into(),
max_mip_levels: props.maxMipLevels.into(),
max_array_layers: props.maxArrayLayers,
sample_counts: props.sampleCounts.into(),
max_resource_size: props.maxResourceSize as usize,
max_extent: props.max_extent.into(),
max_mip_levels: props.max_mip_levels.into(),
max_array_layers: props.max_array_layers,
sample_counts: props.sample_counts.into(),
max_resource_size: props.max_resource_size as usize,
}
}
}
@ -259,59 +296,54 @@ impl ImageCreateFlags {
}
}
impl From<ImageCreateFlags> for vk::ImageCreateFlags {
impl From<ImageCreateFlags> for ash::vk::ImageCreateFlags {
fn from(flags: ImageCreateFlags) -> Self {
let mut vk_flags = Self::default();
if flags.sparse_binding {
vk_flags |= vk::IMAGE_CREATE_SPARSE_BINDING_BIT
vk_flags |= ash::vk::ImageCreateFlags::SPARSE_BINDING
};
if flags.sparse_residency {
vk_flags |= vk::IMAGE_CREATE_SPARSE_RESIDENCY_BIT
vk_flags |= ash::vk::ImageCreateFlags::SPARSE_RESIDENCY
};
if flags.sparse_aliased {
vk_flags |= vk::IMAGE_CREATE_SPARSE_ALIASED_BIT
vk_flags |= ash::vk::ImageCreateFlags::SPARSE_ALIASED
};
if flags.mutable_format {
vk_flags |= vk::IMAGE_CREATE_MUTABLE_FORMAT_BIT
vk_flags |= ash::vk::ImageCreateFlags::MUTABLE_FORMAT
};
if flags.cube_compatible {
vk_flags |= vk::IMAGE_CREATE_CUBE_COMPATIBLE_BIT
vk_flags |= ash::vk::ImageCreateFlags::CUBE_COMPATIBLE
};
if flags.array_2d_compatible {
vk_flags |= vk::IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR
vk_flags |= ash::vk::ImageCreateFlags::TYPE_2D_ARRAY_COMPATIBLE_KHR
};
vk_flags
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[repr(i32)]
pub enum ImageType {
Dim1d,
Dim2d,
Dim3d,
Dim1d = ash::vk::ImageType::TYPE_1D.as_raw(),
Dim2d = ash::vk::ImageType::TYPE_2D.as_raw(),
Dim3d = ash::vk::ImageType::TYPE_3D.as_raw(),
}
impl From<ImageType> for vk::ImageType {
fn from(image_type: ImageType) -> Self {
match image_type {
ImageType::Dim1d => vk::IMAGE_TYPE_1D,
ImageType::Dim2d => vk::IMAGE_TYPE_2D,
ImageType::Dim3d => vk::IMAGE_TYPE_3D,
}
impl From<ImageType> for ash::vk::ImageType {
fn from(val: ImageType) -> Self {
ash::vk::ImageType::from_raw(val as i32)
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[repr(i32)]
pub enum ImageTiling {
Optimal,
Linear,
Optimal = ash::vk::ImageTiling::OPTIMAL.as_raw(),
Linear = ash::vk::ImageTiling::LINEAR.as_raw(),
}
impl From<ImageTiling> for vk::ImageTiling {
fn from(image_tiling: ImageTiling) -> Self {
match image_tiling {
ImageTiling::Optimal => vk::IMAGE_TILING_OPTIMAL,
ImageTiling::Linear => vk::IMAGE_TILING_LINEAR,
}
impl From<ImageTiling> for ash::vk::ImageTiling {
fn from(val: ImageTiling) -> Self {
ash::vk::ImageTiling::from_raw(val as i32)
}
}

View File

@ -23,6 +23,7 @@ use crate::image::ImageDimensions;
use crate::image::ImageInner;
use crate::image::ImageLayout;
use crate::image::ImageUsage;
use crate::image::SampleCount;
use crate::instance::QueueFamily;
use crate::memory::pool::AllocFromRequirementsFilter;
use crate::memory::pool::AllocLayout;
@ -132,7 +133,7 @@ impl StorageImage {
format,
flags,
dimensions,
1,
SampleCount::Sample1,
1,
sharing,
false,

View File

@ -23,20 +23,21 @@ use crate::image::ImageCreateFlags;
use crate::image::ImageDimensions;
use crate::image::ImageUsage;
use crate::image::MipmapsCount;
use crate::image::SampleCount;
use crate::memory::DeviceMemory;
use crate::memory::DeviceMemoryAllocError;
use crate::memory::MemoryRequirements;
use crate::sync::Sharing;
use crate::vk;
use crate::Error;
use crate::OomError;
use crate::Version;
use crate::VulkanObject;
use ash::vk::Handle;
use smallvec::SmallVec;
use std::error;
use std::fmt;
use std::hash::Hash;
use std::hash::Hasher;
use std::mem;
use std::mem::MaybeUninit;
use std::ops::Range;
use std::ptr;
@ -56,14 +57,14 @@ use std::sync::Arc;
/// - The image layout must be manually enforced and transitioned.
///
pub struct UnsafeImage {
image: vk::Image,
image: ash::vk::Image,
device: Arc<Device>,
usage: ImageUsage,
format: Format,
flags: ImageCreateFlags,
dimensions: ImageDimensions,
samples: u32,
samples: SampleCount,
mipmaps: u32,
// Features that are supported for this particular format.
@ -90,7 +91,7 @@ impl UnsafeImage {
format: Format,
flags: ImageCreateFlags,
dimensions: ImageDimensions,
num_samples: u32,
num_samples: SampleCount,
mipmaps: Mi,
sharing: Sharing<I>,
linear_tiling: bool,
@ -101,8 +102,8 @@ impl UnsafeImage {
I: Iterator<Item = u32>,
{
let sharing = match sharing {
Sharing::Exclusive => (vk::SHARING_MODE_EXCLUSIVE, SmallVec::<[u32; 8]>::new()),
Sharing::Concurrent(ids) => (vk::SHARING_MODE_CONCURRENT, ids.collect()),
Sharing::Exclusive => (ash::vk::SharingMode::EXCLUSIVE, SmallVec::<[u32; 8]>::new()),
Sharing::Concurrent(ids) => (ash::vk::SharingMode::CONCURRENT, ids.collect()),
};
UnsafeImage::new_impl(
@ -126,9 +127,9 @@ impl UnsafeImage {
format: Format,
flags: ImageCreateFlags,
dimensions: ImageDimensions,
num_samples: u32,
num_samples: SampleCount,
mipmaps: MipmapsCount,
(sh_mode, sh_indices): (vk::SharingMode, SmallVec<[u32; 8]>),
(sh_mode, sh_indices): (ash::vk::SharingMode, SmallVec<[u32; 8]>),
linear_tiling: bool,
preinitialized_layout: bool,
) -> Result<(UnsafeImage, MemoryRequirements), ImageCreationError> {
@ -142,8 +143,8 @@ impl UnsafeImage {
unimplemented!();
}
let vk = device.pointers();
let vk_i = device.instance().pointers();
let fns = device.fns();
let fns_i = device.instance().fns();
// Checking if image usage conforms to what is supported.
let format_features = {
@ -176,7 +177,8 @@ impl UnsafeImage {
{
return Err(ImageCreationError::UnsupportedUsage);
}
if device.loaded_extensions().khr_maintenance1 {
if device.api_version() >= Version::V1_1 || device.loaded_extensions().khr_maintenance1
{
if usage.transfer_source && !features.transfer_src {
return Err(ImageCreationError::UnsupportedUsage);
}
@ -240,61 +242,57 @@ impl UnsafeImage {
};
// Checking whether the number of samples is supported.
if num_samples == 0 {
return Err(ImageCreationError::UnsupportedSamplesCount {
obtained: num_samples,
});
} else if !num_samples.is_power_of_two() {
return Err(ImageCreationError::UnsupportedSamplesCount {
obtained: num_samples,
});
} else {
let mut supported_samples = 0x7f; // all bits up to VK_SAMPLE_COUNT_64_BIT
let mut supported_samples = ash::vk::SampleCountFlags::from_raw(0x7f); // all bits up to VK_SAMPLE_COUNT_64_BIT
if usage.sampled {
match format.ty() {
FormatTy::Float | FormatTy::Compressed => {
supported_samples &= device
.physical_device()
.limits()
.sampled_image_color_sample_counts();
}
FormatTy::Uint | FormatTy::Sint => {
supported_samples &= device
.physical_device()
.limits()
.sampled_image_integer_sample_counts();
}
FormatTy::Depth => {
supported_samples &= device
.physical_device()
.limits()
.sampled_image_depth_sample_counts();
}
FormatTy::Stencil => {
supported_samples &= device
.physical_device()
.limits()
.sampled_image_stencil_sample_counts();
}
FormatTy::DepthStencil => {
supported_samples &= device
.physical_device()
.limits()
.sampled_image_depth_sample_counts();
supported_samples &= device
.physical_device()
.limits()
.sampled_image_stencil_sample_counts();
}
FormatTy::Ycbcr => {
/*
* VUID-VkImageCreateInfo-format-02562: If the image format is one of
* those formats requiring sampler ycbcr conversion, samples *must* be
* VK_SAMPLE_COUNT_1_BIT
*/
supported_samples &= vk::SAMPLE_COUNT_1_BIT;
}
if usage.sampled {
match format.ty() {
FormatTy::Float | FormatTy::Compressed => {
supported_samples &= device
.physical_device()
.limits()
.sampled_image_color_sample_counts()
.into();
}
FormatTy::Uint | FormatTy::Sint => {
supported_samples &= device
.physical_device()
.limits()
.sampled_image_integer_sample_counts()
.into();
}
FormatTy::Depth => {
supported_samples &= device
.physical_device()
.limits()
.sampled_image_depth_sample_counts()
.into();
}
FormatTy::Stencil => {
supported_samples &= device
.physical_device()
.limits()
.sampled_image_stencil_sample_counts()
.into();
}
FormatTy::DepthStencil => {
supported_samples &= device
.physical_device()
.limits()
.sampled_image_depth_sample_counts()
.into();
supported_samples &= device
.physical_device()
.limits()
.sampled_image_stencil_sample_counts()
.into();
}
FormatTy::Ycbcr => {
/*
* VUID-VkImageCreateInfo-format-02562: If the image format is one of
* those formats requiring sampler ycbcr conversion, samples *must* be
* VK_SAMPLE_COUNT_1_BIT
*/
supported_samples &= ash::vk::SampleCountFlags::TYPE_1;
}
}
@ -302,7 +300,8 @@ impl UnsafeImage {
supported_samples &= device
.physical_device()
.limits()
.storage_image_sample_counts();
.storage_image_sample_counts()
.into();
}
if usage.color_attachment
@ -315,29 +314,34 @@ impl UnsafeImage {
supported_samples &= device
.physical_device()
.limits()
.framebuffer_color_sample_counts();
.framebuffer_color_sample_counts()
.into();
}
FormatTy::Depth => {
supported_samples &= device
.physical_device()
.limits()
.framebuffer_depth_sample_counts();
.framebuffer_depth_sample_counts()
.into();
}
FormatTy::Stencil => {
supported_samples &= device
.physical_device()
.limits()
.framebuffer_stencil_sample_counts();
.framebuffer_stencil_sample_counts()
.into();
}
FormatTy::DepthStencil => {
supported_samples &= device
.physical_device()
.limits()
.framebuffer_depth_sample_counts();
.framebuffer_depth_sample_counts()
.into();
supported_samples &= device
.physical_device()
.limits()
.framebuffer_stencil_sample_counts();
.framebuffer_stencil_sample_counts()
.into();
}
FormatTy::Ycbcr => {
/*
@ -349,7 +353,7 @@ impl UnsafeImage {
}
}
if (num_samples & supported_samples) == 0 {
if (ash::vk::SampleCountFlags::from(num_samples) & supported_samples).is_empty() {
let err = ImageCreationError::UnsupportedSamplesCount {
obtained: num_samples,
};
@ -359,7 +363,7 @@ impl UnsafeImage {
// If the `shaderStorageImageMultisample` feature is not enabled and we have
// `usage_storage` set to true, then the number of samples must be 1.
if usage.storage && num_samples > 1 {
if usage.storage && num_samples as u32 > 1 {
if !device.enabled_features().shader_storage_image_multisample {
return Err(ImageCreationError::ShaderStorageImageMultisampleFeatureNotEnabled);
}
@ -374,12 +378,12 @@ impl UnsafeImage {
if width == 0 || array_layers == 0 {
return Err(ImageCreationError::UnsupportedDimensions { dimensions });
}
let extent = vk::Extent3D {
let extent = ash::vk::Extent3D {
width,
height: 1,
depth: 1,
};
(vk::IMAGE_TYPE_1D, extent, array_layers)
(ash::vk::ImageType::TYPE_1D, extent, array_layers)
}
ImageDimensions::Dim2d {
width,
@ -389,12 +393,12 @@ impl UnsafeImage {
if width == 0 || height == 0 || array_layers == 0 {
return Err(ImageCreationError::UnsupportedDimensions { dimensions });
}
let extent = vk::Extent3D {
let extent = ash::vk::Extent3D {
width,
height,
depth: 1,
};
(vk::IMAGE_TYPE_2D, extent, array_layers)
(ash::vk::ImageType::TYPE_2D, extent, array_layers)
}
ImageDimensions::Dim3d {
width,
@ -404,24 +408,27 @@ impl UnsafeImage {
if width == 0 || height == 0 || depth == 0 {
return Err(ImageCreationError::UnsupportedDimensions { dimensions });
}
let extent = vk::Extent3D {
let extent = ash::vk::Extent3D {
width,
height,
depth,
};
(vk::IMAGE_TYPE_3D, extent, 1)
(ash::vk::ImageType::TYPE_3D, extent, 1)
}
};
// Checking flags requirements.
if flags.cube_compatible {
if !(ty == vk::IMAGE_TYPE_2D && extent.width == extent.height && array_layers >= 6) {
if !(ty == ash::vk::ImageType::TYPE_2D
&& extent.width == extent.height
&& array_layers >= 6)
{
return Err(ImageCreationError::CreationFlagRequirementsNotMet);
}
}
if flags.array_2d_compatible {
if !(ty == vk::IMAGE_TYPE_3D) {
if !(ty == ash::vk::ImageType::TYPE_3D) {
return Err(ImageCreationError::CreationFlagRequirementsNotMet);
}
}
@ -432,13 +439,13 @@ impl UnsafeImage {
capabilities_error = Some(err);
}
match ty {
vk::IMAGE_TYPE_1D => {
ash::vk::ImageType::TYPE_1D => {
if extent.width > device.physical_device().limits().max_image_dimension_1d() {
let err = ImageCreationError::UnsupportedDimensions { dimensions };
capabilities_error = Some(err);
}
}
vk::IMAGE_TYPE_2D => {
ash::vk::ImageType::TYPE_2D => {
let limit = device.physical_device().limits().max_image_dimension_2d();
if extent.width > limit || extent.height > limit {
let err = ImageCreationError::UnsupportedDimensions { dimensions };
@ -453,7 +460,7 @@ impl UnsafeImage {
}
}
}
vk::IMAGE_TYPE_3D => {
ash::vk::ImageType::TYPE_3D => {
let limit = device.physical_device().limits().max_image_dimension_3d();
if extent.width > limit || extent.height > limit || extent.depth > limit {
let err = ImageCreationError::UnsupportedDimensions { dimensions };
@ -469,20 +476,20 @@ impl UnsafeImage {
// implementation for additional image capabilities.
if let Some(capabilities_error) = capabilities_error {
let tiling = if linear_tiling {
vk::IMAGE_TILING_LINEAR
ash::vk::ImageTiling::LINEAR
} else {
vk::IMAGE_TILING_OPTIMAL
ash::vk::ImageTiling::OPTIMAL
};
let mut output = MaybeUninit::uninit();
let physical_device = device.physical_device().internal_object();
let r = vk_i.GetPhysicalDeviceImageFormatProperties(
let r = fns_i.v1_0.get_physical_device_image_format_properties(
physical_device,
format as u32,
format.into(),
ty,
tiling,
usage_bits,
0, /* TODO */
ash::vk::ImageCreateFlags::empty(), /* TODO */
output.as_mut_ptr(),
);
@ -496,12 +503,12 @@ impl UnsafeImage {
let output = output.assume_init();
if extent.width > output.maxExtent.width
|| extent.height > output.maxExtent.height
|| extent.depth > output.maxExtent.depth
|| mipmaps > output.maxMipLevels
|| array_layers > output.maxArrayLayers
|| (num_samples & output.sampleCounts) == 0
if extent.width > output.max_extent.width
|| extent.height > output.max_extent.height
|| extent.depth > output.max_extent.depth
|| mipmaps > output.max_mip_levels
|| array_layers > output.max_array_layers
|| (ash::vk::SampleCountFlags::from(num_samples) & output.sample_counts).is_empty()
{
return Err(capabilities_error);
}
@ -509,34 +516,33 @@ impl UnsafeImage {
// Everything now ok. Creating the image.
let image = {
let infos = vk::ImageCreateInfo {
sType: vk::STRUCTURE_TYPE_IMAGE_CREATE_INFO,
pNext: ptr::null(),
let infos = ash::vk::ImageCreateInfo {
flags: flags.into(),
imageType: ty,
format: format as u32,
image_type: ty,
format: format.into(),
extent,
mipLevels: mipmaps,
arrayLayers: array_layers,
samples: num_samples,
mip_levels: mipmaps,
array_layers: array_layers,
samples: num_samples.into(),
tiling: if linear_tiling {
vk::IMAGE_TILING_LINEAR
ash::vk::ImageTiling::LINEAR
} else {
vk::IMAGE_TILING_OPTIMAL
ash::vk::ImageTiling::OPTIMAL
},
usage: usage_bits,
sharingMode: sh_mode,
queueFamilyIndexCount: sh_indices.len() as u32,
pQueueFamilyIndices: sh_indices.as_ptr(),
initialLayout: if preinitialized_layout {
vk::IMAGE_LAYOUT_PREINITIALIZED
sharing_mode: sh_mode,
queue_family_index_count: sh_indices.len() as u32,
p_queue_family_indices: sh_indices.as_ptr(),
initial_layout: if preinitialized_layout {
ash::vk::ImageLayout::PREINITIALIZED
} else {
vk::IMAGE_LAYOUT_UNDEFINED
ash::vk::ImageLayout::UNDEFINED
},
..Default::default()
};
let mut output = MaybeUninit::uninit();
check_errors(vk.CreateImage(
check_errors(fns.v1_0.create_image(
device.internal_object(),
&infos,
ptr::null(),
@ -545,47 +551,62 @@ impl UnsafeImage {
output.assume_init()
};
let mem_reqs = if device.loaded_extensions().khr_get_memory_requirements2 {
let infos = vk::ImageMemoryRequirementsInfo2KHR {
sType: vk::STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR,
pNext: ptr::null_mut(),
let mem_reqs = if device.api_version() >= Version::V1_1
|| device.loaded_extensions().khr_get_memory_requirements2
{
let infos = ash::vk::ImageMemoryRequirementsInfo2 {
image,
..Default::default()
};
let mut output2 = if device.loaded_extensions().khr_dedicated_allocation {
Some(vk::MemoryDedicatedRequirementsKHR {
sType: vk::STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR,
pNext: ptr::null_mut(),
prefersDedicatedAllocation: mem::zeroed(),
requiresDedicatedAllocation: mem::zeroed(),
})
let mut output2 = if device.api_version() >= Version::V1_1
|| device.loaded_extensions().khr_dedicated_allocation
{
Some(ash::vk::MemoryDedicatedRequirements::default())
} else {
None
};
let mut output = vk::MemoryRequirements2KHR {
sType: vk::STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR,
pNext: output2
let mut output = ash::vk::MemoryRequirements2 {
p_next: output2
.as_mut()
.map(|o| o as *mut vk::MemoryDedicatedRequirementsKHR)
.map(|o| o as *mut _)
.unwrap_or(ptr::null_mut()) as *mut _,
memoryRequirements: mem::zeroed(),
..Default::default()
};
vk.GetImageMemoryRequirements2KHR(device.internal_object(), &infos, &mut output);
debug_assert!(output.memoryRequirements.memoryTypeBits != 0);
if device.api_version() >= Version::V1_1 {
fns.v1_1.get_image_memory_requirements2(
device.internal_object(),
&infos,
&mut output,
);
} else {
fns.khr_get_memory_requirements2
.get_image_memory_requirements2_khr(
device.internal_object(),
&infos,
&mut output,
);
}
let mut out = MemoryRequirements::from(output.memoryRequirements);
debug_assert!(output.memory_requirements.memory_type_bits != 0);
let mut out = MemoryRequirements::from(output.memory_requirements);
if let Some(output2) = output2 {
debug_assert_eq!(output2.requiresDedicatedAllocation, 0);
out.prefer_dedicated = output2.prefersDedicatedAllocation != 0;
debug_assert_eq!(output2.requires_dedicated_allocation, 0);
out.prefer_dedicated = output2.prefers_dedicated_allocation != 0;
}
out
} else {
let mut output: MaybeUninit<vk::MemoryRequirements> = MaybeUninit::uninit();
vk.GetImageMemoryRequirements(device.internal_object(), image, output.as_mut_ptr());
let mut output: MaybeUninit<ash::vk::MemoryRequirements> = MaybeUninit::uninit();
fns.v1_0.get_image_memory_requirements(
device.internal_object(),
image,
output.as_mut_ptr(),
);
let output = output.assume_init();
debug_assert!(output.memoryTypeBits != 0);
debug_assert!(output.memory_type_bits != 0);
MemoryRequirements::from(output)
};
@ -611,12 +632,12 @@ impl UnsafeImage {
/// This function is for example used at the swapchain's initialization.
pub unsafe fn from_raw(
device: Arc<Device>,
handle: u64,
handle: ash::vk::Image,
usage: ImageUsage,
format: Format,
flags: ImageCreateFlags,
dimensions: ImageDimensions,
samples: u32,
samples: SampleCount,
mipmaps: u32,
) -> UnsafeImage {
let format_properties = format.properties(device.physical_device());
@ -639,12 +660,12 @@ impl UnsafeImage {
}
pub unsafe fn bind_memory(&self, memory: &DeviceMemory, offset: usize) -> Result<(), OomError> {
let vk = self.device.pointers();
let fns = self.device.fns();
// We check for correctness in debug mode.
debug_assert!({
let mut mem_reqs = MaybeUninit::uninit();
vk.GetImageMemoryRequirements(
fns.v1_0.get_image_memory_requirements(
self.device.internal_object(),
self.image,
mem_reqs.as_mut_ptr(),
@ -653,14 +674,14 @@ impl UnsafeImage {
let mem_reqs = mem_reqs.assume_init();
mem_reqs.size <= (memory.size() - offset) as u64
&& (offset as u64 % mem_reqs.alignment) == 0
&& mem_reqs.memoryTypeBits & (1 << memory.memory_type().id()) != 0
&& mem_reqs.memory_type_bits & (1 << memory.memory_type().id()) != 0
});
check_errors(vk.BindImageMemory(
check_errors(fns.v1_0.bind_image_memory(
self.device.internal_object(),
self.image,
memory.internal_object(),
offset as vk::DeviceSize,
offset as ash::vk::DeviceSize,
))?;
Ok(())
}
@ -690,14 +711,14 @@ impl UnsafeImage {
}
#[inline]
pub fn samples(&self) -> u32 {
pub fn samples(&self) -> SampleCount {
self.samples
}
/// Returns a key unique to each `UnsafeImage`. Can be used for the `conflicts_key` method.
#[inline]
pub fn key(&self) -> u64 {
self.image
self.image.as_raw()
}
/// Queries the layout of an image in memory. Only valid for images with linear tiling.
@ -792,18 +813,18 @@ impl UnsafeImage {
// Implementation of the `*_layout` functions.
unsafe fn linear_layout_impl(&self, mip_level: u32, aspect: ImageAspect) -> LinearLayout {
let vk = self.device.pointers();
let fns = self.device.fns();
assert!(mip_level < self.mipmaps);
let subresource = vk::ImageSubresource {
aspectMask: vk::ImageAspectFlags::from(aspect),
mipLevel: mip_level,
arrayLayer: 0,
let subresource = ash::vk::ImageSubresource {
aspect_mask: ash::vk::ImageAspectFlags::from(aspect),
mip_level: mip_level,
array_layer: 0,
};
let mut out = MaybeUninit::uninit();
vk.GetImageSubresourceLayout(
fns.v1_0.get_image_subresource_layout(
self.device.internal_object(),
self.image,
&subresource,
@ -814,9 +835,9 @@ impl UnsafeImage {
LinearLayout {
offset: out.offset as usize,
size: out.size as usize,
row_pitch: out.rowPitch as usize,
array_pitch: out.arrayPitch as usize,
depth_pitch: out.depthPitch as usize,
row_pitch: out.row_pitch as usize,
array_pitch: out.array_pitch as usize,
depth_pitch: out.depth_pitch as usize,
}
}
@ -845,12 +866,10 @@ impl UnsafeImage {
}
unsafe impl VulkanObject for UnsafeImage {
type Object = vk::Image;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_IMAGE;
type Object = ash::vk::Image;
#[inline]
fn internal_object(&self) -> vk::Image {
fn internal_object(&self) -> ash::vk::Image {
self.image
}
}
@ -870,8 +889,9 @@ impl Drop for UnsafeImage {
}
unsafe {
let vk = self.device.pointers();
vk.DestroyImage(self.device.internal_object(), self.image, ptr::null());
let fns = self.device.fns();
fns.v1_0
.destroy_image(self.device.internal_object(), self.image, ptr::null());
}
}
}
@ -908,7 +928,7 @@ pub enum ImageCreationError {
valid_range: Range<u32>,
},
/// The requested number of samples is not supported, or is 0.
UnsupportedSamplesCount { obtained: u32 },
UnsupportedSamplesCount { obtained: SampleCount },
/// The dimensions are too large, or one of the dimensions is 0.
UnsupportedDimensions { dimensions: ImageDimensions },
/// The requested format is not supported by the Vulkan implementation.
@ -1014,17 +1034,16 @@ pub struct LinearLayout {
#[cfg(test)]
mod tests {
use std::iter::Empty;
use std::u32;
use super::ImageCreateFlags;
use super::ImageCreationError;
use super::ImageUsage;
use super::UnsafeImage;
use crate::format::Format;
use crate::image::ImageDimensions;
use crate::image::SampleCount;
use crate::sync::Sharing;
use std::iter::Empty;
use std::u32;
#[test]
fn create_sampled() {
@ -1046,7 +1065,7 @@ mod tests {
height: 32,
array_layers: 1,
},
1,
SampleCount::Sample1,
1,
Sharing::Exclusive::<Empty<_>>,
false,
@ -1077,7 +1096,7 @@ mod tests {
height: 32,
array_layers: 1,
},
1,
SampleCount::Sample1,
1,
Sharing::Exclusive::<Empty<_>>,
false,
@ -1087,74 +1106,6 @@ mod tests {
.unwrap();
}
#[test]
fn zero_sample() {
let (device, _) = gfx_dev_and_queue!();
let usage = ImageUsage {
sampled: true,
..ImageUsage::none()
};
let res = unsafe {
UnsafeImage::new(
device,
usage,
Format::R8G8B8A8Unorm,
ImageCreateFlags::none(),
ImageDimensions::Dim2d {
width: 32,
height: 32,
array_layers: 1,
},
0,
1,
Sharing::Exclusive::<Empty<_>>,
false,
false,
)
};
match res {
Err(ImageCreationError::UnsupportedSamplesCount { .. }) => (),
_ => panic!(),
};
}
#[test]
fn non_po2_sample() {
let (device, _) = gfx_dev_and_queue!();
let usage = ImageUsage {
sampled: true,
..ImageUsage::none()
};
let res = unsafe {
UnsafeImage::new(
device,
usage,
Format::R8G8B8A8Unorm,
ImageCreateFlags::none(),
ImageDimensions::Dim2d {
width: 32,
height: 32,
array_layers: 1,
},
5,
1,
Sharing::Exclusive::<Empty<_>>,
false,
false,
)
};
match res {
Err(ImageCreationError::UnsupportedSamplesCount { .. }) => (),
_ => panic!(),
};
}
#[test]
fn zero_mipmap() {
let (device, _) = gfx_dev_and_queue!();
@ -1175,7 +1126,7 @@ mod tests {
height: 32,
array_layers: 1,
},
1,
SampleCount::Sample1,
0,
Sharing::Exclusive::<Empty<_>>,
false,
@ -1210,7 +1161,7 @@ mod tests {
height: 32,
array_layers: 1,
},
1,
SampleCount::Sample1,
u32::MAX,
Sharing::Exclusive::<Empty<_>>,
false,
@ -1250,7 +1201,7 @@ mod tests {
height: 32,
array_layers: 1,
},
2,
SampleCount::Sample2,
1,
Sharing::Exclusive::<Empty<_>>,
false,
@ -1285,7 +1236,7 @@ mod tests {
height: 32,
array_layers: 1,
},
1,
SampleCount::Sample1,
u32::MAX,
Sharing::Exclusive::<Empty<_>>,
false,
@ -1321,7 +1272,7 @@ mod tests {
height: 32,
array_layers: 1,
},
1,
SampleCount::Sample1,
1,
Sharing::Exclusive::<Empty<_>>,
false,
@ -1358,7 +1309,7 @@ mod tests {
height: 64,
array_layers: 1,
},
1,
SampleCount::Sample1,
1,
Sharing::Exclusive::<Empty<_>>,
false,

View File

@ -15,6 +15,7 @@ use crate::image::sys::UnsafeImage;
use crate::image::ImageDescriptorLayouts;
use crate::image::ImageDimensions;
use crate::image::ImageLayout;
use crate::image::SampleCount;
use crate::sync::AccessError;
use crate::SafeDeref;
use std::hash::Hash;
@ -66,7 +67,7 @@ pub unsafe trait ImageAccess {
/// Returns the number of samples of this image.
#[inline]
fn samples(&self) -> u32 {
fn samples(&self) -> SampleCount {
self.inner().image.samples()
}

View File

@ -7,7 +7,6 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use crate::vk;
use std::ops::BitOr;
/// Describes how an image is going to be used. This is **not** just an optimization.
@ -154,50 +153,52 @@ impl ImageUsage {
}
}
impl From<ImageUsage> for vk::ImageUsageFlags {
impl From<ImageUsage> for ash::vk::ImageUsageFlags {
#[inline]
fn from(val: ImageUsage) -> Self {
let mut result = 0;
let mut result = ash::vk::ImageUsageFlags::empty();
if val.transfer_source {
result |= vk::IMAGE_USAGE_TRANSFER_SRC_BIT;
result |= ash::vk::ImageUsageFlags::TRANSFER_SRC;
}
if val.transfer_destination {
result |= vk::IMAGE_USAGE_TRANSFER_DST_BIT;
result |= ash::vk::ImageUsageFlags::TRANSFER_DST;
}
if val.sampled {
result |= vk::IMAGE_USAGE_SAMPLED_BIT;
result |= ash::vk::ImageUsageFlags::SAMPLED;
}
if val.storage {
result |= vk::IMAGE_USAGE_STORAGE_BIT;
result |= ash::vk::ImageUsageFlags::STORAGE;
}
if val.color_attachment {
result |= vk::IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
result |= ash::vk::ImageUsageFlags::COLOR_ATTACHMENT;
}
if val.depth_stencil_attachment {
result |= vk::IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
result |= ash::vk::ImageUsageFlags::DEPTH_STENCIL_ATTACHMENT;
}
if val.transient_attachment {
result |= vk::IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT;
result |= ash::vk::ImageUsageFlags::TRANSIENT_ATTACHMENT;
}
if val.input_attachment {
result |= vk::IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
result |= ash::vk::ImageUsageFlags::INPUT_ATTACHMENT;
}
result
}
}
impl From<vk::ImageUsageFlags> for ImageUsage {
impl From<ash::vk::ImageUsageFlags> for ImageUsage {
#[inline]
fn from(val: vk::ImageUsageFlags) -> ImageUsage {
fn from(val: ash::vk::ImageUsageFlags) -> ImageUsage {
ImageUsage {
transfer_source: (val & vk::IMAGE_USAGE_TRANSFER_SRC_BIT) != 0,
transfer_destination: (val & vk::IMAGE_USAGE_TRANSFER_DST_BIT) != 0,
sampled: (val & vk::IMAGE_USAGE_SAMPLED_BIT) != 0,
storage: (val & vk::IMAGE_USAGE_STORAGE_BIT) != 0,
color_attachment: (val & vk::IMAGE_USAGE_COLOR_ATTACHMENT_BIT) != 0,
depth_stencil_attachment: (val & vk::IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) != 0,
transient_attachment: (val & vk::IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT) != 0,
input_attachment: (val & vk::IMAGE_USAGE_INPUT_ATTACHMENT_BIT) != 0,
transfer_source: !(val & ash::vk::ImageUsageFlags::TRANSFER_SRC).is_empty(),
transfer_destination: !(val & ash::vk::ImageUsageFlags::TRANSFER_DST).is_empty(),
sampled: !(val & ash::vk::ImageUsageFlags::SAMPLED).is_empty(),
storage: !(val & ash::vk::ImageUsageFlags::STORAGE).is_empty(),
color_attachment: !(val & ash::vk::ImageUsageFlags::COLOR_ATTACHMENT).is_empty(),
depth_stencil_attachment: !(val & ash::vk::ImageUsageFlags::DEPTH_STENCIL_ATTACHMENT)
.is_empty(),
transient_attachment: !(val & ash::vk::ImageUsageFlags::TRANSIENT_ATTACHMENT)
.is_empty(),
input_attachment: !(val & ash::vk::ImageUsageFlags::INPUT_ATTACHMENT).is_empty(),
}
}
}

View File

@ -22,7 +22,6 @@ use crate::image::ImageAccess;
use crate::image::ImageDimensions;
use crate::memory::DeviceMemoryAllocError;
use crate::sampler::Sampler;
use crate::vk;
use crate::OomError;
use crate::SafeDeref;
use crate::VulkanObject;
@ -283,7 +282,7 @@ impl From<OomError> for ImageViewCreationError {
/// A low-level wrapper around a `vkImageView`.
pub struct UnsafeImageView {
view: vk::ImageView,
view: ash::vk::ImageView,
device: Arc<Device>,
}
@ -307,7 +306,7 @@ impl UnsafeImageView {
mipmap_levels: Range<u32>,
array_layers: Range<u32>,
) -> Result<UnsafeImageView, OomError> {
let vk = image.device().pointers();
let fns = image.device().fns();
debug_assert!(mipmap_levels.end > mipmap_levels.start);
debug_assert!(mipmap_levels.end <= image.mipmap_levels());
@ -322,25 +321,24 @@ impl UnsafeImageView {
let aspects = image.format().aspects();
let view = {
let infos = vk::ImageViewCreateInfo {
sType: vk::STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
pNext: ptr::null(),
flags: 0, // reserved
let infos = ash::vk::ImageViewCreateInfo {
flags: ash::vk::ImageViewCreateFlags::empty(),
image: image.internal_object(),
viewType: ty.into(),
format: image.format() as u32,
view_type: ty.into(),
format: image.format().into(),
components: component_mapping.into(),
subresourceRange: vk::ImageSubresourceRange {
aspectMask: aspects.into(),
baseMipLevel: mipmap_levels.start,
levelCount: mipmap_levels.end - mipmap_levels.start,
baseArrayLayer: array_layers.start,
layerCount: array_layers.end - array_layers.start,
subresource_range: ash::vk::ImageSubresourceRange {
aspect_mask: aspects.into(),
base_mip_level: mipmap_levels.start,
level_count: mipmap_levels.end - mipmap_levels.start,
base_array_layer: array_layers.start,
layer_count: array_layers.end - array_layers.start,
},
..Default::default()
};
let mut output = MaybeUninit::uninit();
check_errors(vk.CreateImageView(
check_errors(fns.v1_0.create_image_view(
image.device().internal_object(),
&infos,
ptr::null(),
@ -357,12 +355,10 @@ impl UnsafeImageView {
}
unsafe impl VulkanObject for UnsafeImageView {
type Object = vk::ImageView;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_IMAGE_VIEW;
type Object = ash::vk::ImageView;
#[inline]
fn internal_object(&self) -> vk::ImageView {
fn internal_object(&self) -> ash::vk::ImageView {
self.view
}
}
@ -378,8 +374,9 @@ impl Drop for UnsafeImageView {
#[inline]
fn drop(&mut self) {
unsafe {
let vk = self.device.pointers();
vk.DestroyImageView(self.device.internal_object(), self.view, ptr::null());
let fns = self.device.fns();
fns.v1_0
.destroy_image_view(self.device.internal_object(), self.view, ptr::null());
}
}
}
@ -403,27 +400,20 @@ impl Hash for UnsafeImageView {
/// The geometry type of an image view.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[repr(i32)]
pub enum ImageViewType {
Dim1d,
Dim1dArray,
Dim2d,
Dim2dArray,
Dim3d,
Cubemap,
CubemapArray,
Dim1d = ash::vk::ImageViewType::TYPE_1D.as_raw(),
Dim1dArray = ash::vk::ImageViewType::TYPE_1D_ARRAY.as_raw(),
Dim2d = ash::vk::ImageViewType::TYPE_2D.as_raw(),
Dim2dArray = ash::vk::ImageViewType::TYPE_2D_ARRAY.as_raw(),
Dim3d = ash::vk::ImageViewType::TYPE_3D.as_raw(),
Cubemap = ash::vk::ImageViewType::CUBE.as_raw(),
CubemapArray = ash::vk::ImageViewType::CUBE_ARRAY.as_raw(),
}
impl From<ImageViewType> for vk::ImageViewType {
fn from(image_view_type: ImageViewType) -> Self {
match image_view_type {
ImageViewType::Dim1d => vk::IMAGE_VIEW_TYPE_1D,
ImageViewType::Dim1dArray => vk::IMAGE_VIEW_TYPE_1D_ARRAY,
ImageViewType::Dim2d => vk::IMAGE_VIEW_TYPE_2D,
ImageViewType::Dim2dArray => vk::IMAGE_VIEW_TYPE_2D_ARRAY,
ImageViewType::Dim3d => vk::IMAGE_VIEW_TYPE_3D,
ImageViewType::Cubemap => vk::IMAGE_VIEW_TYPE_CUBE,
ImageViewType::CubemapArray => vk::IMAGE_VIEW_TYPE_CUBE_ARRAY,
}
impl From<ImageViewType> for ash::vk::ImageViewType {
fn from(val: ImageViewType) -> Self {
Self::from_raw(val as i32)
}
}
@ -459,38 +449,45 @@ impl ComponentMapping {
}
}
impl From<ComponentMapping> for vk::ComponentMapping {
impl From<ComponentMapping> for ash::vk::ComponentMapping {
#[inline]
fn from(value: ComponentMapping) -> Self {
Self {
r: value.r as u32,
g: value.g as u32,
b: value.b as u32,
a: value.a as u32,
r: value.r.into(),
g: value.g.into(),
b: value.b.into(),
a: value.a.into(),
}
}
}
/// Describes the value that an individual component must return when being accessed.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[repr(u32)]
#[repr(i32)]
pub enum ComponentSwizzle {
/// Returns the value that this component should normally have.
///
/// This is the `Default` value.
Identity = vk::COMPONENT_SWIZZLE_IDENTITY,
Identity = ash::vk::ComponentSwizzle::IDENTITY.as_raw(),
/// Always return zero.
Zero = vk::COMPONENT_SWIZZLE_ZERO,
Zero = ash::vk::ComponentSwizzle::ZERO.as_raw(),
/// Always return one.
One = vk::COMPONENT_SWIZZLE_ONE,
One = ash::vk::ComponentSwizzle::ONE.as_raw(),
/// Returns the value of the first component.
Red = vk::COMPONENT_SWIZZLE_R,
Red = ash::vk::ComponentSwizzle::R.as_raw(),
/// Returns the value of the second component.
Green = vk::COMPONENT_SWIZZLE_G,
Green = ash::vk::ComponentSwizzle::G.as_raw(),
/// Returns the value of the third component.
Blue = vk::COMPONENT_SWIZZLE_B,
Blue = ash::vk::ComponentSwizzle::B.as_raw(),
/// Returns the value of the fourth component.
Alpha = vk::COMPONENT_SWIZZLE_A,
Alpha = ash::vk::ComponentSwizzle::A.as_raw(),
}
impl From<ComponentSwizzle> for ash::vk::ComponentSwizzle {
#[inline]
fn from(val: ComponentSwizzle) -> Self {
Self::from_raw(val as i32)
}
}
impl Default for ComponentSwizzle {

View File

@ -37,6 +37,10 @@
//! variable, it will be immediately destroyed and your callback will not work.
//!
use crate::check_errors;
use crate::instance::Instance;
use crate::Error;
use crate::VulkanObject;
use std::error;
use std::ffi::CStr;
use std::fmt;
@ -46,14 +50,6 @@ use std::panic;
use std::ptr;
use std::sync::Arc;
use crate::instance::Instance;
use crate::check_errors;
use crate::vk;
use crate::vk::{Bool32, DebugUtilsMessengerCallbackDataEXT};
use crate::Error;
use crate::VulkanObject;
/// Registration of a callback called by validation layers.
///
/// The callback can be called as long as this object is alive.
@ -61,7 +57,7 @@ use crate::VulkanObject;
to be called"]
pub struct DebugCallback {
instance: Arc<Instance>,
debug_report_callback: vk::DebugUtilsMessengerEXT,
debug_report_callback: ash::vk::DebugUtilsMessengerEXT,
user_callback: Box<Box<dyn Fn(&Message) + Send>>,
}
@ -86,99 +82,104 @@ impl DebugCallback {
// that can't be cast to a `*const c_void`.
let user_callback = Box::new(Box::new(user_callback) as Box<_>);
extern "system" fn callback(
severity: vk::DebugUtilsMessageSeverityFlagsEXT,
ty: vk::DebugUtilsMessageTypeFlagsEXT,
callback_data: *const DebugUtilsMessengerCallbackDataEXT,
unsafe extern "system" fn callback(
severity: ash::vk::DebugUtilsMessageSeverityFlagsEXT,
ty: ash::vk::DebugUtilsMessageTypeFlagsEXT,
callback_data: *const ash::vk::DebugUtilsMessengerCallbackDataEXT,
user_data: *mut c_void,
) -> Bool32 {
unsafe {
let user_callback = user_data as *mut Box<dyn Fn()> as *const _;
let user_callback: &Box<dyn Fn(&Message)> = &*user_callback;
) -> ash::vk::Bool32 {
let user_callback = user_data as *mut Box<dyn Fn()> as *const _;
let user_callback: &Box<dyn Fn(&Message)> = &*user_callback;
let layer_prefix = (*callback_data).pMessageIdName.as_ref().map(|msg_id_name| {
let layer_prefix = (*callback_data)
.p_message_id_name
.as_ref()
.map(|msg_id_name| {
CStr::from_ptr(msg_id_name)
.to_str()
.expect("debug callback message not utf-8")
});
let description = CStr::from_ptr((*callback_data).pMessage)
.to_str()
.expect("debug callback message not utf-8");
let description = CStr::from_ptr((*callback_data).p_message)
.to_str()
.expect("debug callback message not utf-8");
let message = Message {
severity: MessageSeverity {
information: (severity & vk::DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT)
!= 0,
warning: (severity & vk::DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT) != 0,
error: (severity & vk::DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) != 0,
verbose: (severity & vk::DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT) != 0,
},
ty: MessageType {
general: (ty & vk::DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT) != 0,
validation: (ty & vk::DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT) != 0,
performance: (ty & vk::DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT) != 0,
},
layer_prefix,
description,
};
let message = Message {
severity: MessageSeverity {
information: !(severity & ash::vk::DebugUtilsMessageSeverityFlagsEXT::INFO)
.is_empty(),
warning: !(severity & ash::vk::DebugUtilsMessageSeverityFlagsEXT::WARNING)
.is_empty(),
error: !(severity & ash::vk::DebugUtilsMessageSeverityFlagsEXT::ERROR)
.is_empty(),
verbose: !(severity & ash::vk::DebugUtilsMessageSeverityFlagsEXT::VERBOSE)
.is_empty(),
},
ty: MessageType {
general: !(ty & ash::vk::DebugUtilsMessageTypeFlagsEXT::GENERAL).is_empty(),
validation: !(ty & ash::vk::DebugUtilsMessageTypeFlagsEXT::VALIDATION)
.is_empty(),
performance: !(ty & ash::vk::DebugUtilsMessageTypeFlagsEXT::PERFORMANCE)
.is_empty(),
},
layer_prefix,
description,
};
// Since we box the closure, the type system doesn't detect that the `UnwindSafe`
// bound is enforced. Therefore we enforce it manually.
let _ = panic::catch_unwind(panic::AssertUnwindSafe(move || {
user_callback(&message);
}));
// Since we box the closure, the type system doesn't detect that the `UnwindSafe`
// bound is enforced. Therefore we enforce it manually.
let _ = panic::catch_unwind(panic::AssertUnwindSafe(move || {
user_callback(&message);
}));
vk::FALSE
}
ash::vk::FALSE
}
let severity = {
let mut flags = 0;
let mut flags = ash::vk::DebugUtilsMessageSeverityFlagsEXT::empty();
if severity.information {
flags |= vk::DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT;
flags |= ash::vk::DebugUtilsMessageSeverityFlagsEXT::INFO;
}
if severity.warning {
flags |= vk::DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT;
flags |= ash::vk::DebugUtilsMessageSeverityFlagsEXT::WARNING;
}
if severity.error {
flags |= vk::DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
flags |= ash::vk::DebugUtilsMessageSeverityFlagsEXT::ERROR;
}
if severity.verbose {
flags |= vk::DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT;
flags |= ash::vk::DebugUtilsMessageSeverityFlagsEXT::VERBOSE;
}
flags
};
let ty = {
let mut flags = 0;
let mut flags = ash::vk::DebugUtilsMessageTypeFlagsEXT::empty();
if ty.general {
flags |= vk::DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT;
flags |= ash::vk::DebugUtilsMessageTypeFlagsEXT::GENERAL;
}
if ty.validation {
flags |= vk::DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT;
flags |= ash::vk::DebugUtilsMessageTypeFlagsEXT::VALIDATION;
}
if ty.performance {
flags |= vk::DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
flags |= ash::vk::DebugUtilsMessageTypeFlagsEXT::PERFORMANCE;
}
flags
};
let infos = vk::DebugUtilsMessengerCreateInfoEXT {
sType: vk::STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,
pNext: ptr::null(),
flags: 0,
messageSeverity: severity,
messageType: ty,
pfnUserCallback: callback,
pUserData: &*user_callback as &Box<_> as *const Box<_> as *const c_void as *mut _,
let infos = ash::vk::DebugUtilsMessengerCreateInfoEXT {
flags: ash::vk::DebugUtilsMessengerCreateFlagsEXT::empty(),
message_severity: severity,
message_type: ty,
pfn_user_callback: Some(callback),
p_user_data: &*user_callback as &Box<_> as *const Box<_> as *const c_void as *mut _,
..Default::default()
};
let vk = instance.pointers();
let fns = instance.fns();
let debug_report_callback = unsafe {
let mut output = MaybeUninit::uninit();
check_errors(vk.CreateDebugUtilsMessengerEXT(
check_errors(fns.ext_debug_utils.create_debug_utils_messenger_ext(
instance.internal_object(),
&infos,
ptr::null(),
@ -218,8 +219,8 @@ impl Drop for DebugCallback {
#[inline]
fn drop(&mut self) {
unsafe {
let vk = self.instance.pointers();
vk.DestroyDebugUtilsMessengerEXT(
let fns = self.instance.fns();
fns.ext_debug_utils.destroy_debug_utils_messenger_ext(
self.instance.internal_object(),
self.debug_report_callback,
ptr::null(),

View File

@ -18,7 +18,6 @@ use crate::check_errors;
use crate::extensions::SupportedExtensionsError;
use crate::instance::loader;
use crate::instance::loader::LoadingError;
use crate::vk;
macro_rules! instance_extensions {
($sname:ident, $rawname:ident, $($ext:ident => $s:expr,)*) => (
@ -38,22 +37,22 @@ macro_rules! instance_extensions {
-> Result<Self, SupportedExtensionsError>
where L: loader::Loader
{
let entry_points = ptrs.entry_points();
let fns = ptrs.fns();
let properties: Vec<vk::ExtensionProperties> = unsafe {
let properties: Vec<ash::vk::ExtensionProperties> = unsafe {
let mut num = 0;
check_errors(entry_points.EnumerateInstanceExtensionProperties(
check_errors(fns.v1_0.enumerate_instance_extension_properties(
ptr::null(), &mut num, ptr::null_mut()
))?;
let mut properties = Vec::with_capacity(num as usize);
check_errors(entry_points.EnumerateInstanceExtensionProperties(
check_errors(fns.v1_0.enumerate_instance_extension_properties(
ptr::null(), &mut num, properties.as_mut_ptr()
))?;
properties.set_len(num as usize);
properties
};
Ok($rawname(properties.iter().map(|x| unsafe { CStr::from_ptr(x.extensionName.as_ptr()) }.to_owned()).collect()))
Ok($rawname(properties.iter().map(|x| unsafe { CStr::from_ptr(x.extension_name.as_ptr()) }.to_owned()).collect()))
}
/// Returns a `RawExtensions` object with extensions supported by the core driver.
@ -89,16 +88,16 @@ macro_rules! instance_extensions {
-> Result<Self, SupportedExtensionsError>
where L: loader::Loader
{
let entry_points = ptrs.entry_points();
let fns = ptrs.fns();
let properties: Vec<vk::ExtensionProperties> = unsafe {
let properties: Vec<ash::vk::ExtensionProperties> = unsafe {
let mut num = 0;
check_errors(entry_points.EnumerateInstanceExtensionProperties(
check_errors(fns.v1_0.enumerate_instance_extension_properties(
ptr::null(), &mut num, ptr::null_mut()
))?;
let mut properties = Vec::with_capacity(num as usize);
check_errors(entry_points.EnumerateInstanceExtensionProperties(
check_errors(fns.v1_0.enumerate_instance_extension_properties(
ptr::null(), &mut num, properties.as_mut_ptr()
))?;
properties.set_len(num as usize);
@ -107,7 +106,7 @@ macro_rules! instance_extensions {
let mut extensions = $sname::none();
for property in properties {
let name = unsafe { CStr::from_ptr(property.extensionName.as_ptr()) };
let name = unsafe { CStr::from_ptr(property.extension_name.as_ptr()) };
$(
// TODO: Check specVersion?
if name.to_bytes() == &$s[..] {

View File

@ -8,19 +8,20 @@
// according to those terms.
use crate::check_errors;
use crate::fns::InstanceFunctions;
use crate::instance::loader;
use crate::instance::loader::FunctionPointers;
use crate::instance::loader::Loader;
use crate::instance::loader::LoadingError;
use crate::instance::physical_device::{init_physical_devices, PhysicalDeviceInfos};
use crate::instance::{InstanceExtensions, RawInstanceExtensions};
use crate::vk;
use crate::Error;
use crate::OomError;
use crate::Version;
use crate::VulkanObject;
use smallvec::SmallVec;
use std::borrow::Cow;
use std::convert::TryInto;
use std::error;
use std::ffi::CString;
use std::fmt;
@ -54,7 +55,7 @@ use std::sync::Arc;
/// // compile-time.
/// let app_infos = app_info_from_cargo_toml!();
///
/// let _instance = Instance::new(Some(&app_infos), Version::major_minor(1, 1), &InstanceExtensions::none(), None).unwrap();
/// let _instance = Instance::new(Some(&app_infos), Version::V1_1, &InstanceExtensions::none(), None).unwrap();
/// # }
/// ```
///
@ -115,7 +116,7 @@ use std::sync::Arc;
/// .. InstanceExtensions::none()
/// };
///
/// let instance = match Instance::new(None, Version::major_minor(1, 1), &extensions, None) {
/// let instance = match Instance::new(None, Version::V1_1, &extensions, None) {
/// Ok(i) => i,
/// Err(err) => panic!("Couldn't build instance: {:?}", err)
/// };
@ -162,13 +163,13 @@ use std::sync::Arc;
/// let layer_names = layers.iter()
/// .map(|l| l.name());
///
/// let instance = Instance::new(None, Version::major_minor(1, 1), &InstanceExtensions::none(), layer_names)?;
/// let instance = Instance::new(None, Version::V1_1, &InstanceExtensions::none(), layer_names)?;
/// # Ok(instance)
/// # }
/// ```
// TODO: mention that extensions must be supported by layers as well
pub struct Instance {
instance: vk::Instance,
instance: ash::vk::Instance,
//alloc: Option<Box<Alloc + Send + Sync>>,
// The highest version that is supported for this instance.
@ -179,7 +180,7 @@ pub struct Instance {
max_api_version: Version,
pub(super) physical_devices: Vec<PhysicalDeviceInfos>,
vk: vk::InstancePointers,
fns: InstanceFunctions,
extensions: RawInstanceExtensions,
layers: SmallVec<[CString; 16]>,
function_pointers: OwnedOrRef<FunctionPointers<Box<dyn Loader + Send + Sync>>>,
@ -202,7 +203,7 @@ impl Instance {
/// use vulkano::instance::InstanceExtensions;
/// use vulkano::Version;
///
/// let instance = match Instance::new(None, Version::major_minor(1, 1), &InstanceExtensions::none(), None) {
/// let instance = match Instance::new(None, Version::V1_1, &InstanceExtensions::none(), None) {
/// Ok(i) => i,
/// Err(err) => panic!("Couldn't build instance: {:?}", err)
/// };
@ -302,32 +303,31 @@ impl Instance {
// Building the `vk::ApplicationInfo` if required.
let app_infos = if let Some(app_infos) = app_infos {
Some(vk::ApplicationInfo {
sType: vk::STRUCTURE_TYPE_APPLICATION_INFO,
pNext: ptr::null(),
pApplicationName: app_infos_strings
Some(ash::vk::ApplicationInfo {
p_application_name: app_infos_strings
.as_ref()
.unwrap()
.0
.as_ref()
.map(|s| s.as_ptr())
.unwrap_or(ptr::null()),
applicationVersion: app_infos
application_version: app_infos
.application_version
.map(|v| v.into_vulkan_version())
.map(|v| v.try_into().expect("Version out of range"))
.unwrap_or(0),
pEngineName: app_infos_strings
p_engine_name: app_infos_strings
.as_ref()
.unwrap()
.1
.as_ref()
.map(|s| s.as_ptr())
.unwrap_or(ptr::null()),
engineVersion: app_infos
engine_version: app_infos
.engine_version
.map(|v| v.into_vulkan_version())
.map(|v| v.try_into().expect("Version out of range"))
.unwrap_or(0),
apiVersion: max_api_version.into_vulkan_version(),
api_version: max_api_version.try_into().expect("Version out of range"),
..Default::default()
})
} else {
None
@ -347,29 +347,31 @@ impl Instance {
// Creating the Vulkan instance.
let instance = unsafe {
let mut output = MaybeUninit::uninit();
let infos = vk::InstanceCreateInfo {
sType: vk::STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
pNext: ptr::null(),
flags: 0,
pApplicationInfo: if let Some(app) = app_infos.as_ref() {
let infos = ash::vk::InstanceCreateInfo {
flags: ash::vk::InstanceCreateFlags::empty(),
p_application_info: if let Some(app) = app_infos.as_ref() {
app as *const _
} else {
ptr::null()
},
enabledLayerCount: layers_ptr.len() as u32,
ppEnabledLayerNames: layers_ptr.as_ptr(),
enabledExtensionCount: extensions_list.len() as u32,
ppEnabledExtensionNames: extensions_list.as_ptr(),
enabled_layer_count: layers_ptr.len() as u32,
pp_enabled_layer_names: layers_ptr.as_ptr(),
enabled_extension_count: extensions_list.len() as u32,
pp_enabled_extension_names: extensions_list.as_ptr(),
..Default::default()
};
let entry_points = function_pointers.entry_points();
check_errors(entry_points.CreateInstance(&infos, ptr::null(), output.as_mut_ptr()))?;
let fns = function_pointers.fns();
check_errors(
fns.v1_0
.create_instance(&infos, ptr::null(), output.as_mut_ptr()),
)?;
output.assume_init()
};
// Loading the function pointers of the newly-created instance.
let vk = {
vk::InstancePointers::load(|name| {
let fns = {
InstanceFunctions::load(|name| {
function_pointers.get_instance_proc_addr(instance, name.as_ptr())
})
};
@ -380,7 +382,7 @@ impl Instance {
max_api_version,
//alloc: None,
physical_devices: Vec::new(),
vk,
fns,
extensions,
layers,
function_pointers,
@ -418,8 +420,8 @@ impl Instance {
/// Grants access to the Vulkan functions of the instance.
#[inline]
pub fn pointers(&self) -> &vk::InstancePointers {
&self.vk
pub fn fns(&self) -> &InstanceFunctions {
&self.fns
}
/// Returns the list of extensions that have been loaded.
@ -434,7 +436,7 @@ impl Instance {
/// use vulkano::Version;
///
/// let extensions = InstanceExtensions::supported_by_core().unwrap();
/// let instance = Instance::new(None, Version::major_minor(1, 1), &extensions, None).unwrap();
/// let instance = Instance::new(None, Version::V1_1, &extensions, None).unwrap();
/// assert_eq!(instance.loaded_extensions(), extensions);
/// ```
#[inline]
@ -463,12 +465,10 @@ impl fmt::Debug for Instance {
}
unsafe impl VulkanObject for Instance {
type Object = vk::Instance;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_INSTANCE;
type Object = ash::vk::Instance;
#[inline]
fn internal_object(&self) -> vk::Instance {
fn internal_object(&self) -> ash::vk::Instance {
self.instance
}
}
@ -477,7 +477,7 @@ impl Drop for Instance {
#[inline]
fn drop(&mut self) {
    // Destroy the raw Vulkan instance with no custom allocator callbacks.
    unsafe {
        self.fns.v1_0.destroy_instance(self.instance, ptr::null());
    }
}
}

View File

@ -16,7 +16,6 @@ use std::vec::IntoIter;
use crate::check_errors;
use crate::instance::loader;
use crate::instance::loader::LoadingError;
use crate::vk;
use crate::Error;
use crate::OomError;
use crate::Version;
@ -56,14 +55,18 @@ where
L: loader::Loader,
{
unsafe {
let entry_points = ptrs.entry_points();
let fns = ptrs.fns();
let mut num = 0;
check_errors(entry_points.EnumerateInstanceLayerProperties(&mut num, ptr::null_mut()))?;
check_errors(
fns.v1_0
.enumerate_instance_layer_properties(&mut num, ptr::null_mut()),
)?;
let mut layers: Vec<vk::LayerProperties> = Vec::with_capacity(num as usize);
let mut layers: Vec<ash::vk::LayerProperties> = Vec::with_capacity(num as usize);
check_errors({
entry_points.EnumerateInstanceLayerProperties(&mut num, layers.as_mut_ptr())
fns.v1_0
.enumerate_instance_layer_properties(&mut num, layers.as_mut_ptr())
})?;
layers.set_len(num as usize);
@ -75,7 +78,7 @@ where
/// Properties of a layer.
pub struct LayerProperties {
props: vk::LayerProperties,
props: ash::vk::LayerProperties,
}
impl LayerProperties {
@ -96,7 +99,7 @@ impl LayerProperties {
#[inline]
pub fn name(&self) -> &str {
unsafe {
CStr::from_ptr(self.props.layerName.as_ptr())
CStr::from_ptr(self.props.layer_name.as_ptr())
.to_str()
.unwrap()
}
@ -140,7 +143,7 @@ impl LayerProperties {
/// ```
#[inline]
pub fn vulkan_version(&self) -> Version {
    // `spec_version` is a packed Vulkan version number; `Version::from` decodes it.
    Version::from(self.props.spec_version)
}
/// Returns an implementation-specific version number for this layer.
@ -158,7 +161,7 @@ impl LayerProperties {
/// ```
#[inline]
pub fn implementation_version(&self) -> u32 {
self.props.implementationVersion
self.props.implementation_version
}
}
@ -223,7 +226,7 @@ impl From<Error> for LayersListError {
/// Iterator that produces the list of layers that are available.
// TODO: #[derive(Debug, Clone)]
pub struct LayersIterator {
iter: IntoIter<vk::LayerProperties>,
iter: IntoIter<ash::vk::LayerProperties>,
}
impl Iterator for LayersIterator {

View File

@ -9,11 +9,11 @@
//! The `Limits` struct provides a nicer API around `vkPhysicalDeviceLimits`.
use crate::vk;
use crate::image::SampleCounts;
/// Limits of a physical device.
pub struct Limits<'a> {
limits: &'a vk::PhysicalDeviceLimits,
limits: &'a ash::vk::PhysicalDeviceLimits,
}
macro_rules! limits_impl {
@ -21,7 +21,7 @@ macro_rules! limits_impl {
impl<'a> Limits<'a> {
/// Builds the `Limits` object.
#[inline]
pub(crate) fn from_vk_limits(limits: &'a vk::PhysicalDeviceLimits) -> Limits<'a> {
pub(crate) fn from_vk_limits(limits: &'a ash::vk::PhysicalDeviceLimits) -> Limits<'a> {
Limits {
limits
}
@ -30,7 +30,7 @@ macro_rules! limits_impl {
$(
#[inline]
pub fn $name(&self) -> $t {
self.limits.$target
<$t>::from(self.limits.$target)
}
)*
}
@ -38,110 +38,110 @@ macro_rules! limits_impl {
}
// One accessor per `VkPhysicalDeviceLimits` field. Sample-count limits are
// exposed as `SampleCounts` (converted via `From` from the raw
// `SampleCountFlags`); everything else is returned as the raw scalar/array.
limits_impl! {
    max_image_dimension_1d: u32 => max_image_dimension1_d,
    max_image_dimension_2d: u32 => max_image_dimension2_d,
    max_image_dimension_3d: u32 => max_image_dimension3_d,
    max_image_dimension_cube: u32 => max_image_dimension_cube,
    max_image_array_layers: u32 => max_image_array_layers,
    max_texel_buffer_elements: u32 => max_texel_buffer_elements,
    max_uniform_buffer_range: u32 => max_uniform_buffer_range,
    max_storage_buffer_range: u32 => max_storage_buffer_range,
    max_push_constants_size: u32 => max_push_constants_size,
    max_memory_allocation_count: u32 => max_memory_allocation_count,
    max_sampler_allocation_count: u32 => max_sampler_allocation_count,
    buffer_image_granularity: u64 => buffer_image_granularity,
    sparse_address_space_size: u64 => sparse_address_space_size,
    max_bound_descriptor_sets: u32 => max_bound_descriptor_sets,
    max_per_stage_descriptor_samplers: u32 => max_per_stage_descriptor_samplers,
    max_per_stage_descriptor_uniform_buffers: u32 => max_per_stage_descriptor_uniform_buffers,
    max_per_stage_descriptor_storage_buffers: u32 => max_per_stage_descriptor_storage_buffers,
    max_per_stage_descriptor_sampled_images: u32 => max_per_stage_descriptor_sampled_images,
    max_per_stage_descriptor_storage_images: u32 => max_per_stage_descriptor_storage_images,
    max_per_stage_descriptor_input_attachments: u32 => max_per_stage_descriptor_input_attachments,
    max_per_stage_resources: u32 => max_per_stage_resources,
    max_descriptor_set_samplers: u32 => max_descriptor_set_samplers,
    max_descriptor_set_uniform_buffers: u32 => max_descriptor_set_uniform_buffers,
    max_descriptor_set_uniform_buffers_dynamic: u32 => max_descriptor_set_uniform_buffers_dynamic,
    max_descriptor_set_storage_buffers: u32 => max_descriptor_set_storage_buffers,
    max_descriptor_set_storage_buffers_dynamic: u32 => max_descriptor_set_storage_buffers_dynamic,
    max_descriptor_set_sampled_images: u32 => max_descriptor_set_sampled_images,
    max_descriptor_set_storage_images: u32 => max_descriptor_set_storage_images,
    max_descriptor_set_input_attachments: u32 => max_descriptor_set_input_attachments,
    max_vertex_input_attributes: u32 => max_vertex_input_attributes,
    max_vertex_input_bindings: u32 => max_vertex_input_bindings,
    max_vertex_input_attribute_offset: u32 => max_vertex_input_attribute_offset,
    max_vertex_input_binding_stride: u32 => max_vertex_input_binding_stride,
    max_vertex_output_components: u32 => max_vertex_output_components,
    max_tessellation_generation_level: u32 => max_tessellation_generation_level,
    max_tessellation_patch_size: u32 => max_tessellation_patch_size,
    max_tessellation_control_per_vertex_input_components: u32 => max_tessellation_control_per_vertex_input_components,
    max_tessellation_control_per_vertex_output_components: u32 => max_tessellation_control_per_vertex_output_components,
    max_tessellation_control_per_patch_output_components: u32 => max_tessellation_control_per_patch_output_components,
    max_tessellation_control_total_output_components: u32 => max_tessellation_control_total_output_components,
    max_tessellation_evaluation_input_components: u32 => max_tessellation_evaluation_input_components,
    max_tessellation_evaluation_output_components: u32 => max_tessellation_evaluation_output_components,
    max_geometry_shader_invocations: u32 => max_geometry_shader_invocations,
    max_geometry_input_components: u32 => max_geometry_input_components,
    max_geometry_output_components: u32 => max_geometry_output_components,
    max_geometry_output_vertices: u32 => max_geometry_output_vertices,
    max_geometry_total_output_components: u32 => max_geometry_total_output_components,
    max_fragment_input_components: u32 => max_fragment_input_components,
    max_fragment_output_attachments: u32 => max_fragment_output_attachments,
    max_fragment_dual_src_attachments: u32 => max_fragment_dual_src_attachments,
    max_fragment_combined_output_resources: u32 => max_fragment_combined_output_resources,
    max_compute_shared_memory_size: u32 => max_compute_shared_memory_size,
    max_compute_work_group_count: [u32; 3] => max_compute_work_group_count,
    max_compute_work_group_invocations: u32 => max_compute_work_group_invocations,
    max_compute_work_group_size: [u32; 3] => max_compute_work_group_size,
    sub_pixel_precision_bits: u32 => sub_pixel_precision_bits,
    sub_texel_precision_bits: u32 => sub_texel_precision_bits,
    mipmap_precision_bits: u32 => mipmap_precision_bits,
    max_draw_indexed_index_value: u32 => max_draw_indexed_index_value,
    max_draw_indirect_count: u32 => max_draw_indirect_count,
    max_sampler_lod_bias: f32 => max_sampler_lod_bias,
    max_sampler_anisotropy: f32 => max_sampler_anisotropy,
    max_viewports: u32 => max_viewports,
    max_viewport_dimensions: [u32; 2] => max_viewport_dimensions,
    viewport_bounds_range: [f32; 2] => viewport_bounds_range,
    viewport_sub_pixel_bits: u32 => viewport_sub_pixel_bits,
    min_memory_map_alignment: usize => min_memory_map_alignment,
    min_texel_buffer_offset_alignment: u64 => min_texel_buffer_offset_alignment,
    min_uniform_buffer_offset_alignment: u64 => min_uniform_buffer_offset_alignment,
    min_storage_buffer_offset_alignment: u64 => min_storage_buffer_offset_alignment,
    min_texel_offset: i32 => min_texel_offset,
    max_texel_offset: u32 => max_texel_offset,
    min_texel_gather_offset: i32 => min_texel_gather_offset,
    max_texel_gather_offset: u32 => max_texel_gather_offset,
    min_interpolation_offset: f32 => min_interpolation_offset,
    max_interpolation_offset: f32 => max_interpolation_offset,
    sub_pixel_interpolation_offset_bits: u32 => sub_pixel_interpolation_offset_bits,
    max_framebuffer_width: u32 => max_framebuffer_width,
    max_framebuffer_height: u32 => max_framebuffer_height,
    max_framebuffer_layers: u32 => max_framebuffer_layers,
    framebuffer_color_sample_counts: SampleCounts => framebuffer_color_sample_counts,
    framebuffer_depth_sample_counts: SampleCounts => framebuffer_depth_sample_counts,
    framebuffer_stencil_sample_counts: SampleCounts => framebuffer_stencil_sample_counts,
    framebuffer_no_attachments_sample_counts: SampleCounts => framebuffer_no_attachments_sample_counts,
    max_color_attachments: u32 => max_color_attachments,
    sampled_image_color_sample_counts: SampleCounts => sampled_image_color_sample_counts,
    sampled_image_integer_sample_counts: SampleCounts => sampled_image_integer_sample_counts,
    sampled_image_depth_sample_counts: SampleCounts => sampled_image_depth_sample_counts,
    sampled_image_stencil_sample_counts: SampleCounts => sampled_image_stencil_sample_counts,
    storage_image_sample_counts: SampleCounts => storage_image_sample_counts,
    max_sample_mask_words: u32 => max_sample_mask_words,
    timestamp_compute_and_graphics: u32 => timestamp_compute_and_graphics, // TODO: these are booleans
    timestamp_period: f32 => timestamp_period,
    max_clip_distances: u32 => max_clip_distances,
    max_cull_distances: u32 => max_cull_distances,
    max_combined_clip_and_cull_distances: u32 => max_combined_clip_and_cull_distances,
    discrete_queue_priorities: u32 => discrete_queue_priorities,
    point_size_range: [f32; 2] => point_size_range,
    line_width_range: [f32; 2] => line_width_range,
    point_size_granularity: f32 => point_size_granularity,
    line_width_granularity: f32 => line_width_granularity,
    strict_lines: u32 => strict_lines, // TODO: these are booleans
    standard_sample_locations: u32 => standard_sample_locations, // TODO: these are booleans
    optimal_buffer_copy_offset_alignment: u64 => optimal_buffer_copy_offset_alignment,
    optimal_buffer_copy_row_pitch_alignment: u64 => optimal_buffer_copy_row_pitch_alignment,
    non_coherent_atom_size: u64 => non_coherent_atom_size,
}

View File

@ -22,7 +22,7 @@
//! a Vulkan implementation from the system.
use crate::check_errors;
use crate::vk;
use crate::fns::EntryFunctions;
use crate::OomError;
use crate::SafeDeref;
use crate::Version;
@ -42,7 +42,11 @@ pub unsafe trait Loader {
/// Calls the `vkGetInstanceProcAddr` function. The parameters are the same.
///
/// The returned function must stay valid for as long as `self` is alive.
fn get_instance_proc_addr(&self, instance: vk::Instance, name: *const c_char) -> *const c_void;
fn get_instance_proc_addr(
&self,
instance: ash::vk::Instance,
name: *const c_char,
) -> *const c_void;
}
unsafe impl<T> Loader for T
@ -51,7 +55,11 @@ where
T::Target: Loader,
{
#[inline]
fn get_instance_proc_addr(&self, instance: vk::Instance, name: *const c_char) -> *const c_void {
fn get_instance_proc_addr(
&self,
instance: ash::vk::Instance,
name: *const c_char,
) -> *const c_void {
(**self).get_instance_proc_addr(instance, name)
}
}
@ -60,7 +68,7 @@ where
pub struct DynamicLibraryLoader {
vk_lib: shared_library::dynamic_library::DynamicLibrary,
get_proc_addr:
extern "system" fn(instance: vk::Instance, pName: *const c_char) -> *const c_void,
extern "system" fn(instance: ash::vk::Instance, pName: *const c_char) -> *const c_void,
}
impl DynamicLibraryLoader {
@ -94,7 +102,11 @@ impl DynamicLibraryLoader {
unsafe impl Loader for DynamicLibraryLoader {
#[inline]
fn get_instance_proc_addr(&self, instance: vk::Instance, name: *const c_char) -> *const c_void {
fn get_instance_proc_addr(
&self,
instance: ash::vk::Instance,
name: *const c_char,
) -> *const c_void {
(self.get_proc_addr)(instance, name)
}
}
@ -102,7 +114,7 @@ unsafe impl Loader for DynamicLibraryLoader {
/// Wraps around a loader and contains function pointers.
pub struct FunctionPointers<L> {
loader: L,
entry_points: vk::EntryPoints,
fns: EntryFunctions,
}
impl<L> FunctionPointers<L> {
@ -111,20 +123,17 @@ impl<L> FunctionPointers<L> {
where
L: Loader,
{
let entry_points = vk::EntryPoints::load(|name| unsafe {
mem::transmute(loader.get_instance_proc_addr(0, name.as_ptr()))
let fns = EntryFunctions::load(|name| unsafe {
mem::transmute(loader.get_instance_proc_addr(ash::vk::Instance::null(), name.as_ptr()))
});
FunctionPointers {
loader,
entry_points,
}
FunctionPointers { loader, fns }
}
/// Returns the collection of Vulkan entry points from the Vulkan loader.
#[inline]
pub fn entry_points(&self) -> &vk::EntryPoints {
&self.entry_points
pub fn fns(&self) -> &EntryFunctions {
&self.fns
}
/// Returns the highest Vulkan version that is supported for instances.
@ -138,7 +147,7 @@ impl<L> FunctionPointers<L> {
// to determine the version of Vulkan.
unsafe {
let name = CStr::from_bytes_with_nul_unchecked(b"vkEnumerateInstanceVersion\0");
let func = self.get_instance_proc_addr(0, name.as_ptr());
let func = self.get_instance_proc_addr(ash::vk::Instance::null(), name.as_ptr());
if func.is_null() {
Ok(Version {
@ -147,11 +156,11 @@ impl<L> FunctionPointers<L> {
patch: 0,
})
} else {
type Pfn = extern "system" fn(pApiVersion: *mut u32) -> vk::Result;
type Pfn = extern "system" fn(pApiVersion: *mut u32) -> ash::vk::Result;
let func: Pfn = mem::transmute(func);
let mut api_version = 0;
check_errors(func(&mut api_version))?;
Ok(Version::from_vulkan_version(api_version))
Ok(Version::from(api_version))
}
}
}
@ -160,7 +169,7 @@ impl<L> FunctionPointers<L> {
#[inline]
pub fn get_instance_proc_addr(
&self,
instance: vk::Instance,
instance: ash::vk::Instance,
name: *const c_char,
) -> *const c_void
where
@ -184,16 +193,16 @@ macro_rules! statically_linked_vulkan_loader {
() => {{
extern "C" {
fn vkGetInstanceProcAddr(
instance: vk::Instance,
instance: ash::vk::Instance,
pName: *const c_char,
) -> vk::PFN_vkVoidFunction;
) -> ash::vk::PFN_vkVoidFunction;
}
struct StaticallyLinkedVulkanLoader;
unsafe impl Loader for StaticallyLinkedVulkanLoader {
fn get_instance_proc_addr(
&self,
instance: vk::Instance,
instance: ash::vk::Instance,
name: *const c_char,
) -> extern "system" fn() -> () {
unsafe { vkGetInstanceProcAddr(instance, name) }

View File

@ -18,7 +18,7 @@
//! use vulkano::instance::InstanceExtensions;
//! use vulkano::Version;
//!
//! let instance = match Instance::new(None, Version::major_minor(1, 1), &InstanceExtensions::none(), None) {
//! let instance = match Instance::new(None, Version::V1_1, &InstanceExtensions::none(), None) {
//! Ok(i) => i,
//! Err(err) => panic!("Couldn't build instance: {:?}", err)
//! };
@ -33,7 +33,7 @@
//! # use vulkano::Version;
//! use vulkano::instance::PhysicalDevice;
//!
//! # let instance = Instance::new(None, Version::major_minor(1, 1), &InstanceExtensions::none(), None).unwrap();
//! # let instance = Instance::new(None, Version::V1_1, &InstanceExtensions::none(), None).unwrap();
//! for physical_device in PhysicalDevice::enumerate(&instance) {
//! println!("Available device: {}", physical_device.name());
//! }

View File

@ -12,12 +12,10 @@ use crate::device::{Features, FeaturesFfi};
use crate::instance::limits::Limits;
use crate::instance::{Instance, InstanceCreationError};
use crate::sync::PipelineStage;
use crate::vk;
use crate::Version;
use crate::VulkanObject;
use std::ffi::CStr;
use std::ffi::{c_void, CStr};
use std::hash::Hash;
use std::mem;
use std::mem::MaybeUninit;
use std::ptr;
use std::sync::Arc;
@ -25,19 +23,19 @@ use std::sync::Arc;
pub(super) fn init_physical_devices(
instance: &Instance,
) -> Result<Vec<PhysicalDeviceInfos>, InstanceCreationError> {
let vk = instance.pointers();
let fns = instance.fns();
let extensions = instance.loaded_extensions();
let physical_devices: Vec<vk::PhysicalDevice> = unsafe {
let physical_devices: Vec<ash::vk::PhysicalDevice> = unsafe {
let mut num = 0;
check_errors(vk.EnumeratePhysicalDevices(
check_errors(fns.v1_0.enumerate_physical_devices(
instance.internal_object(),
&mut num,
ptr::null_mut(),
))?;
let mut devices = Vec::with_capacity(num as usize);
check_errors(vk.EnumeratePhysicalDevices(
check_errors(fns.v1_0.enumerate_physical_devices(
instance.internal_object(),
&mut num,
devices.as_mut_ptr(),
@ -48,7 +46,9 @@ pub(super) fn init_physical_devices(
// Getting the properties of all physical devices.
// If possible, we use VK_KHR_get_physical_device_properties2.
let physical_devices = if extensions.khr_get_physical_device_properties2 {
let physical_devices = if instance.api_version() >= Version::V1_1
|| extensions.khr_get_physical_device_properties2
{
init_physical_devices_inner2(instance, physical_devices)
} else {
init_physical_devices_inner(instance, physical_devices)
@ -60,37 +60,45 @@ pub(super) fn init_physical_devices(
/// Initialize all physical devices
fn init_physical_devices_inner(
instance: &Instance,
physical_devices: Vec<vk::PhysicalDevice>,
physical_devices: Vec<ash::vk::PhysicalDevice>,
) -> Vec<PhysicalDeviceInfos> {
let vk = instance.pointers();
let fns = instance.fns();
let mut output = Vec::with_capacity(physical_devices.len());
for device in physical_devices.into_iter() {
let properties: vk::PhysicalDeviceProperties = unsafe {
let properties: ash::vk::PhysicalDeviceProperties = unsafe {
let mut output = MaybeUninit::uninit();
vk.GetPhysicalDeviceProperties(device, output.as_mut_ptr());
fns.v1_0
.get_physical_device_properties(device, output.as_mut_ptr());
output.assume_init()
};
let queue_families = unsafe {
let mut num = 0;
vk.GetPhysicalDeviceQueueFamilyProperties(device, &mut num, ptr::null_mut());
fns.v1_0
.get_physical_device_queue_family_properties(device, &mut num, ptr::null_mut());
let mut families = Vec::with_capacity(num as usize);
vk.GetPhysicalDeviceQueueFamilyProperties(device, &mut num, families.as_mut_ptr());
fns.v1_0.get_physical_device_queue_family_properties(
device,
&mut num,
families.as_mut_ptr(),
);
families.set_len(num as usize);
families
};
let memory: vk::PhysicalDeviceMemoryProperties = unsafe {
let memory: ash::vk::PhysicalDeviceMemoryProperties = unsafe {
let mut output = MaybeUninit::uninit();
vk.GetPhysicalDeviceMemoryProperties(device, output.as_mut_ptr());
fns.v1_0
.get_physical_device_memory_properties(device, output.as_mut_ptr());
output.assume_init()
};
let available_features: Features = unsafe {
let mut output = FeaturesFfi::default();
vk.GetPhysicalDeviceFeatures(device, &mut output.head_as_mut().features);
fns.v1_0
.get_physical_device_features(device, &mut output.head_as_mut().features);
Features::from(&output)
};
@ -110,39 +118,37 @@ fn init_physical_devices_inner(
/// TODO: Query extension-specific physical device properties, once a new instance extension is supported.
fn init_physical_devices_inner2(
instance: &Instance,
physical_devices: Vec<vk::PhysicalDevice>,
physical_devices: Vec<ash::vk::PhysicalDevice>,
) -> Vec<PhysicalDeviceInfos> {
let vk = instance.pointers();
let fns = instance.fns();
let mut output = Vec::with_capacity(physical_devices.len());
for device in physical_devices.into_iter() {
let mut extended_properties = PhysicalDeviceExtendedProperties::empty();
let properties: vk::PhysicalDeviceProperties = unsafe {
let mut subgroup_properties = vk::PhysicalDeviceSubgroupProperties {
sType: vk::STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES,
pNext: ptr::null_mut(),
subgroupSize: 0,
supportedStages: 0,
supportedOperations: 0,
quadOperationsInAllStages: 0,
};
let properties: ash::vk::PhysicalDeviceProperties = unsafe {
let mut subgroup_properties = ash::vk::PhysicalDeviceSubgroupProperties::default();
let mut output = vk::PhysicalDeviceProperties2KHR {
sType: vk::STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR,
pNext: if instance.api_version() >= Version::major_minor(1, 1) {
&mut subgroup_properties
let mut output = ash::vk::PhysicalDeviceProperties2 {
p_next: if instance.api_version() >= Version::V1_1 {
&mut subgroup_properties as *mut _ as *mut c_void
} else {
ptr::null_mut()
},
properties: mem::zeroed(),
..Default::default()
};
vk.GetPhysicalDeviceProperties2KHR(device, &mut output);
if instance.api_version() >= Version::V1_1 {
fns.v1_1
.get_physical_device_properties2(device, &mut output);
} else {
fns.khr_get_physical_device_properties2
.get_physical_device_properties2_khr(device, &mut output);
}
extended_properties = PhysicalDeviceExtendedProperties {
subgroup_size: if instance.api_version() >= Version::major_minor(1, 1) {
Some(subgroup_properties.subgroupSize)
subgroup_size: if instance.api_version() >= Version::V1_1 {
Some(subgroup_properties.subgroup_size)
} else {
None
},
@ -155,43 +161,74 @@ fn init_physical_devices_inner2(
let queue_families = unsafe {
let mut num = 0;
vk.GetPhysicalDeviceQueueFamilyProperties2KHR(device, &mut num, ptr::null_mut());
let mut families = (0..num)
.map(|_| vk::QueueFamilyProperties2KHR {
sType: vk::STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR,
pNext: ptr::null_mut(),
queueFamilyProperties: mem::zeroed(),
})
.collect::<Vec<_>>();
if instance.api_version() >= Version::V1_1 {
fns.v1_1.get_physical_device_queue_family_properties2(
device,
&mut num,
ptr::null_mut(),
);
} else {
fns.khr_get_physical_device_properties2
.get_physical_device_queue_family_properties2_khr(
device,
&mut num,
ptr::null_mut(),
);
}
let mut families = vec![ash::vk::QueueFamilyProperties2::default(); num as usize];
if instance.api_version() >= Version::V1_1 {
fns.v1_1.get_physical_device_queue_family_properties2(
device,
&mut num,
families.as_mut_ptr(),
);
} else {
fns.khr_get_physical_device_properties2
.get_physical_device_queue_family_properties2_khr(
device,
&mut num,
families.as_mut_ptr(),
);
}
vk.GetPhysicalDeviceQueueFamilyProperties2KHR(device, &mut num, families.as_mut_ptr());
families
.into_iter()
.map(|family| family.queueFamilyProperties)
.map(|family| family.queue_family_properties)
.collect()
};
let memory: vk::PhysicalDeviceMemoryProperties = unsafe {
let mut output = vk::PhysicalDeviceMemoryProperties2KHR {
sType: vk::STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR,
pNext: ptr::null_mut(),
memoryProperties: mem::zeroed(),
};
vk.GetPhysicalDeviceMemoryProperties2KHR(device, &mut output);
output.memoryProperties
let memory: ash::vk::PhysicalDeviceMemoryProperties = unsafe {
let mut output = ash::vk::PhysicalDeviceMemoryProperties2KHR::default();
if instance.api_version() >= Version::V1_1 {
fns.v1_1
.get_physical_device_memory_properties2(device, &mut output);
} else {
fns.khr_get_physical_device_properties2
.get_physical_device_memory_properties2_khr(device, &mut output);
}
output.memory_properties
};
let available_features: Features = unsafe {
let max_api_version = instance.max_api_version();
let api_version = std::cmp::min(
max_api_version,
Version::from_vulkan_version(properties.apiVersion),
);
let api_version = std::cmp::min(max_api_version, Version::from(properties.api_version));
let mut output = FeaturesFfi::default();
output.make_chain(api_version);
vk.GetPhysicalDeviceFeatures2KHR(device, output.head_as_mut());
if instance.api_version() >= Version::V1_1 {
fns.v1_1
.get_physical_device_features2(device, output.head_as_mut());
} else {
fns.khr_get_physical_device_properties2
.get_physical_device_features2_khr(device, output.head_as_mut());
}
Features::from(&output)
};
@ -208,11 +245,11 @@ fn init_physical_devices_inner2(
}
pub(super) struct PhysicalDeviceInfos {
device: vk::PhysicalDevice,
properties: vk::PhysicalDeviceProperties,
device: ash::vk::PhysicalDevice,
properties: ash::vk::PhysicalDeviceProperties,
extended_properties: PhysicalDeviceExtendedProperties,
queue_families: Vec<vk::QueueFamilyProperties>,
memory: vk::PhysicalDeviceMemoryProperties,
queue_families: Vec<ash::vk::QueueFamilyProperties>,
memory: ash::vk::PhysicalDeviceMemoryProperties,
available_features: Features,
}
@ -255,7 +292,7 @@ impl PhysicalDeviceExtendedProperties {
/// # use vulkano::Version;
/// use vulkano::instance::PhysicalDevice;
///
/// # let instance = Instance::new(None, Version::major_minor(1, 1), &InstanceExtensions::none(), None).unwrap();
/// # let instance = Instance::new(None, Version::V1_1, &InstanceExtensions::none(), None).unwrap();
/// for physical_device in PhysicalDevice::enumerate(&instance) {
/// print_infos(physical_device);
/// }
@ -281,7 +318,7 @@ impl<'a> PhysicalDevice<'a> {
/// # use vulkano::Version;
/// use vulkano::instance::PhysicalDevice;
///
/// # let instance = Instance::new(None, Version::major_minor(1, 1), &InstanceExtensions::none(), None).unwrap();
/// # let instance = Instance::new(None, Version::V1_1, &InstanceExtensions::none(), None).unwrap();
/// for physical_device in PhysicalDevice::enumerate(&instance) {
/// println!("Available device: {}", physical_device.name());
/// }
@ -306,7 +343,7 @@ impl<'a> PhysicalDevice<'a> {
/// use vulkano::instance::PhysicalDevice;
/// use vulkano::Version;
///
/// let instance = Instance::new(None, Version::major_minor(1, 1), &InstanceExtensions::none(), None).unwrap();
/// let instance = Instance::new(None, Version::V1_1, &InstanceExtensions::none(), None).unwrap();
/// let first_physical_device = PhysicalDevice::from_index(&instance, 0).unwrap();
/// ```
#[inline]
@ -351,7 +388,7 @@ impl<'a> PhysicalDevice<'a> {
#[inline]
pub fn name(&self) -> &str {
unsafe {
let val = &self.infos().properties.deviceName;
let val = &self.infos().properties.device_name;
let val = CStr::from_ptr(val.as_ptr());
val.to_str()
.expect("physical device name contained non-UTF8 characters")
@ -368,7 +405,7 @@ impl<'a> PhysicalDevice<'a> {
/// # use vulkano::Version;
/// use vulkano::instance::PhysicalDevice;
///
/// # let instance = Instance::new(None, Version::major_minor(1, 1), &InstanceExtensions::none(), None).unwrap();
/// # let instance = Instance::new(None, Version::V1_1, &InstanceExtensions::none(), None).unwrap();
/// for physical_device in PhysicalDevice::enumerate(&instance) {
/// println!("Available device: {} (type: {:?})",
/// physical_device.name(), physical_device.ty());
@ -378,13 +415,13 @@ impl<'a> PhysicalDevice<'a> {
pub fn ty(&self) -> PhysicalDeviceType {
match self.instance.physical_devices[self.device]
.properties
.deviceType
.device_type
{
vk::PHYSICAL_DEVICE_TYPE_OTHER => PhysicalDeviceType::Other,
vk::PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU => PhysicalDeviceType::IntegratedGpu,
vk::PHYSICAL_DEVICE_TYPE_DISCRETE_GPU => PhysicalDeviceType::DiscreteGpu,
vk::PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU => PhysicalDeviceType::VirtualGpu,
vk::PHYSICAL_DEVICE_TYPE_CPU => PhysicalDeviceType::Cpu,
ash::vk::PhysicalDeviceType::OTHER => PhysicalDeviceType::Other,
ash::vk::PhysicalDeviceType::INTEGRATED_GPU => PhysicalDeviceType::IntegratedGpu,
ash::vk::PhysicalDeviceType::DISCRETE_GPU => PhysicalDeviceType::DiscreteGpu,
ash::vk::PhysicalDeviceType::VIRTUAL_GPU => PhysicalDeviceType::VirtualGpu,
ash::vk::PhysicalDeviceType::CPU => PhysicalDeviceType::Cpu,
_ => panic!("Unrecognized Vulkan device type"),
}
}
@ -392,8 +429,8 @@ impl<'a> PhysicalDevice<'a> {
/// Returns the version of Vulkan supported by this device.
#[inline]
pub fn api_version(&self) -> Version {
let val = self.infos().properties.apiVersion;
Version::from_vulkan_version(val)
let val = self.infos().properties.api_version;
Version::from(val)
}
/// Returns the Vulkan features that are supported by this physical device.
@ -436,7 +473,7 @@ impl<'a> PhysicalDevice<'a> {
/// Returns the memory type with the given index, or `None` if out of range.
#[inline]
pub fn memory_type_by_id(&self, id: u32) -> Option<MemoryType<'a>> {
if id < self.infos().memory.memoryTypeCount {
if id < self.infos().memory.memory_type_count {
Some(MemoryType {
physical_device: *self,
id,
@ -458,7 +495,7 @@ impl<'a> PhysicalDevice<'a> {
/// Returns the memory heap with the given index, or `None` if out of range.
#[inline]
pub fn memory_heap_by_id(&self, id: u32) -> Option<MemoryHeap<'a>> {
if id < self.infos().memory.memoryHeapCount {
if id < self.infos().memory.memory_heap_count {
Some(MemoryHeap {
physical_device: *self,
id,
@ -483,19 +520,19 @@ impl<'a> PhysicalDevice<'a> {
/// example.
#[inline]
pub fn driver_version(&self) -> u32 {
self.infos().properties.driverVersion
self.infos().properties.driver_version
}
/// Returns the PCI ID of the device.
#[inline]
pub fn pci_device_id(&self) -> u32 {
self.infos().properties.deviceID
self.infos().properties.device_id
}
/// Returns the PCI ID of the vendor.
#[inline]
pub fn pci_vendor_id(&self) -> u32 {
self.infos().properties.vendorID
self.infos().properties.vendor_id
}
/// Returns a unique identifier for the device.
@ -504,8 +541,8 @@ impl<'a> PhysicalDevice<'a> {
/// time the program is run.
#[inline]
pub fn uuid(&self) -> &[u8; 16] {
// must be equal to vk::UUID_SIZE
&self.infos().properties.pipelineCacheUUID
// must be equal to ash::vk::UUID_SIZE
&self.infos().properties.pipeline_cache_uuid
}
#[inline]
@ -521,12 +558,10 @@ impl<'a> PhysicalDevice<'a> {
}
unsafe impl<'a> VulkanObject for PhysicalDevice<'a> {
type Object = vk::PhysicalDevice;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_PHYSICAL_DEVICE;
type Object = ash::vk::PhysicalDevice;
#[inline]
fn internal_object(&self) -> vk::PhysicalDevice {
fn internal_object(&self) -> ash::vk::PhysicalDevice {
self.infos().device
}
}
@ -609,7 +644,7 @@ impl<'a> QueueFamily<'a> {
/// Guaranteed to be at least 1 (or else that family wouldn't exist).
#[inline]
pub fn queues_count(&self) -> usize {
self.physical_device.infos().queue_families[self.id as usize].queueCount as usize
self.physical_device.infos().queue_families[self.id as usize].queue_count as usize
}
/// If timestamps are supported, returns the number of bits supported by timestamp operations.
@ -618,7 +653,7 @@ impl<'a> QueueFamily<'a> {
#[inline]
pub fn timestamp_valid_bits(&self) -> Option<u32> {
let value =
self.physical_device.infos().queue_families[self.id as usize].timestampValidBits;
self.physical_device.infos().queue_families[self.id as usize].timestamp_valid_bits;
if value == 0 {
None
} else {
@ -631,20 +666,20 @@ impl<'a> QueueFamily<'a> {
#[inline]
pub fn min_image_transfer_granularity(&self) -> [u32; 3] {
let ref granularity = self.physical_device.infos().queue_families[self.id as usize]
.minImageTransferGranularity;
.min_image_transfer_granularity;
[granularity.width, granularity.height, granularity.depth]
}
/// Returns `true` if queues of this family can execute graphics operations.
#[inline]
pub fn supports_graphics(&self) -> bool {
(self.flags() & vk::QUEUE_GRAPHICS_BIT) != 0
!(self.flags() & ash::vk::QueueFlags::GRAPHICS).is_empty()
}
/// Returns `true` if queues of this family can execute compute operations.
#[inline]
pub fn supports_compute(&self) -> bool {
(self.flags() & vk::QUEUE_COMPUTE_BIT) != 0
!(self.flags() & ash::vk::QueueFlags::COMPUTE).is_empty()
}
/// Returns `true` if queues of this family can execute transfer operations.
@ -654,25 +689,25 @@ impl<'a> QueueFamily<'a> {
/// > to indicate a special relationship with the DMA module and more efficient transfers.
#[inline]
pub fn explicitly_supports_transfers(&self) -> bool {
(self.flags() & vk::QUEUE_TRANSFER_BIT) != 0
!(self.flags() & ash::vk::QueueFlags::TRANSFER).is_empty()
}
/// Returns `true` if queues of this family can execute sparse resources binding operations.
#[inline]
pub fn supports_sparse_binding(&self) -> bool {
(self.flags() & vk::QUEUE_SPARSE_BINDING_BIT) != 0
!(self.flags() & ash::vk::QueueFlags::SPARSE_BINDING).is_empty()
}
/// Returns `true` if the queues of this family support a particular pipeline stage.
#[inline]
pub fn supports_stage(&self, stage: PipelineStage) -> bool {
(self.flags() & stage.required_queue_flags()) != 0
!(self.flags() & stage.required_queue_flags()).is_empty()
}
/// Internal utility function that returns the flags of this queue family.
#[inline]
fn flags(&self) -> u32 {
self.physical_device.infos().queue_families[self.id as usize].queueFlags
fn flags(&self) -> ash::vk::QueueFlags {
self.physical_device.infos().queue_families[self.id as usize].queue_flags
}
}
@ -743,7 +778,7 @@ impl<'a> MemoryType<'a> {
/// Returns the heap that corresponds to this memory type.
#[inline]
pub fn heap(&self) -> MemoryHeap<'a> {
let heap_id = self.physical_device.infos().memory.memoryTypes[self.id as usize].heapIndex;
let heap_id = self.physical_device.infos().memory.memory_types[self.id as usize].heap_index;
MemoryHeap {
physical_device: self.physical_device,
id: heap_id,
@ -754,13 +789,13 @@ impl<'a> MemoryType<'a> {
/// efficient for GPU accesses.
#[inline]
pub fn is_device_local(&self) -> bool {
(self.flags() & vk::MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0
!(self.flags() & ash::vk::MemoryPropertyFlags::DEVICE_LOCAL).is_empty()
}
/// Returns true if the memory type can be accessed by the host.
#[inline]
pub fn is_host_visible(&self) -> bool {
(self.flags() & vk::MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0
!(self.flags() & ash::vk::MemoryPropertyFlags::HOST_VISIBLE).is_empty()
}
/// Returns true if modifications made by the host or the GPU on this memory type are
@ -769,7 +804,7 @@ impl<'a> MemoryType<'a> {
/// You don't need to worry about this, as this library handles that for you.
#[inline]
pub fn is_host_coherent(&self) -> bool {
(self.flags() & vk::MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0
!(self.flags() & ash::vk::MemoryPropertyFlags::HOST_COHERENT).is_empty()
}
/// Returns true if memory of this memory type is cached by the host. Host memory accesses to
@ -777,7 +812,7 @@ impl<'a> MemoryType<'a> {
/// is coherent.
#[inline]
pub fn is_host_cached(&self) -> bool {
(self.flags() & vk::MEMORY_PROPERTY_HOST_CACHED_BIT) != 0
!(self.flags() & ash::vk::MemoryPropertyFlags::HOST_CACHED).is_empty()
}
/// Returns true if allocations made to this memory type is lazy.
@ -789,13 +824,13 @@ impl<'a> MemoryType<'a> {
/// type is never host-visible.
#[inline]
pub fn is_lazily_allocated(&self) -> bool {
(self.flags() & vk::MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0
!(self.flags() & ash::vk::MemoryPropertyFlags::LAZILY_ALLOCATED).is_empty()
}
/// Internal utility function that returns the flags of this queue family.
#[inline]
fn flags(&self) -> u32 {
self.physical_device.infos().memory.memoryTypes[self.id as usize].propertyFlags
fn flags(&self) -> ash::vk::MemoryPropertyFlags {
self.physical_device.infos().memory.memory_types[self.id as usize].property_flags
}
}
@ -811,7 +846,7 @@ impl<'a> Iterator for MemoryTypesIter<'a> {
#[inline]
fn next(&mut self) -> Option<MemoryType<'a>> {
if self.current_id >= self.physical_device.infos().memory.memoryTypeCount {
if self.current_id >= self.physical_device.infos().memory.memory_type_count {
return None;
}
@ -826,7 +861,7 @@ impl<'a> Iterator for MemoryTypesIter<'a> {
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let len = self.physical_device.infos().memory.memoryTypeCount;
let len = self.physical_device.infos().memory.memory_type_count;
let remain = (len - self.current_id) as usize;
(remain, Some(remain))
}
@ -857,22 +892,22 @@ impl<'a> MemoryHeap<'a> {
/// Returns the size in bytes on this heap.
#[inline]
pub fn size(&self) -> usize {
self.physical_device.infos().memory.memoryHeaps[self.id as usize].size as usize
self.physical_device.infos().memory.memory_heaps[self.id as usize].size as usize
}
/// Returns true if the heap is local to the GPU.
#[inline]
pub fn is_device_local(&self) -> bool {
let flags = self.physical_device.infos().memory.memoryHeaps[self.id as usize].flags;
(flags & vk::MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0
let flags = self.physical_device.infos().memory.memory_heaps[self.id as usize].flags;
!(flags & ash::vk::MemoryHeapFlags::DEVICE_LOCAL).is_empty()
}
/// Returns true if the heap is multi-instance enabled, that is allocation from such
/// heap will replicate to each physical-device's instance of heap.
#[inline]
pub fn is_multi_instance(&self) -> bool {
let flags = self.physical_device.infos().memory.memoryHeaps[self.id as usize].flags;
(flags & vk::MEMORY_HEAP_MULTI_INSTANCE_BIT) != 0
let flags = self.physical_device.infos().memory.memory_heaps[self.id as usize].flags;
!(flags & ash::vk::MemoryHeapFlags::MULTI_INSTANCE).is_empty()
}
}
@ -888,7 +923,7 @@ impl<'a> Iterator for MemoryHeapsIter<'a> {
#[inline]
fn next(&mut self) -> Option<MemoryHeap<'a>> {
if self.current_id >= self.physical_device.infos().memory.memoryHeapCount {
if self.current_id >= self.physical_device.infos().memory.memory_heap_count {
return None;
}
@ -903,7 +938,7 @@ impl<'a> Iterator for MemoryHeapsIter<'a> {
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let len = self.physical_device.infos().memory.memoryHeapCount;
let len = self.physical_device.infos().memory.memory_heap_count;
let remain = (len - self.current_id) as usize;
(remain, Some(remain))
}

View File

@ -62,8 +62,7 @@
#![allow(dead_code)] // TODO: remove
#![allow(unused_variables)] // TODO: remove
extern crate vk_sys as vk;
pub use ash::vk::Handle;
pub use half;
use std::error;
use std::fmt;
@ -84,6 +83,7 @@ pub mod format;
mod version;
#[macro_use]
pub mod render_pass;
mod fns;
pub mod image;
pub mod instance;
pub mod memory;
@ -99,30 +99,10 @@ unsafe impl<'a, T: ?Sized> SafeDeref for &'a T {}
unsafe impl<T: ?Sized> SafeDeref for Arc<T> {}
unsafe impl<T: ?Sized> SafeDeref for Box<T> {}
pub trait VulkanHandle {
fn value(&self) -> u64;
}
impl VulkanHandle for usize {
#[inline]
fn value(&self) -> u64 {
*self as u64
}
}
impl VulkanHandle for u64 {
#[inline]
fn value(&self) -> u64 {
*self
}
}
/// Gives access to the internal identifier of an object.
pub unsafe trait VulkanObject {
/// The type of the object.
type Object: VulkanHandle;
/// The `ObjectType` of the internal Vulkan handle.
const TYPE: vk::ObjectType;
type Object: ash::vk::Handle;
/// Returns a reference to the object.
fn internal_object(&self) -> Self::Object;
@ -132,7 +112,7 @@ pub unsafe trait VulkanObject {
// TODO: remove ; crappy design
pub unsafe trait SynchronizedVulkanObject {
/// The type of the object.
type Object: VulkanHandle;
type Object: ash::vk::Handle;
/// Returns a reference to the object.
fn internal_object_guard(&self) -> MutexGuard<Self::Object>;
@ -176,15 +156,15 @@ impl From<Error> for OomError {
/// All possible success codes returned by any Vulkan function.
#[derive(Debug, Copy, Clone)]
#[repr(u32)]
#[repr(i32)]
enum Success {
Success = vk::SUCCESS,
NotReady = vk::NOT_READY,
Timeout = vk::TIMEOUT,
EventSet = vk::EVENT_SET,
EventReset = vk::EVENT_RESET,
Incomplete = vk::INCOMPLETE,
Suboptimal = vk::SUBOPTIMAL_KHR,
Success = ash::vk::Result::SUCCESS.as_raw(),
NotReady = ash::vk::Result::NOT_READY.as_raw(),
Timeout = ash::vk::Result::TIMEOUT.as_raw(),
EventSet = ash::vk::Result::EVENT_SET.as_raw(),
EventReset = ash::vk::Result::EVENT_RESET.as_raw(),
Incomplete = ash::vk::Result::INCOMPLETE.as_raw(),
Suboptimal = ash::vk::Result::SUBOPTIMAL_KHR.as_raw(),
}
/// All possible errors returned by any Vulkan function.
@ -192,58 +172,60 @@ enum Success {
/// This type is not public. Instead all public error types should implement `From<Error>` and
/// panic for error code that aren't supposed to happen.
#[derive(Debug, Copy, Clone)]
#[repr(u32)]
#[repr(i32)]
// TODO: being pub is necessary because of the weird visibility rules in rustc
pub(crate) enum Error {
OutOfHostMemory = vk::ERROR_OUT_OF_HOST_MEMORY,
OutOfDeviceMemory = vk::ERROR_OUT_OF_DEVICE_MEMORY,
InitializationFailed = vk::ERROR_INITIALIZATION_FAILED,
DeviceLost = vk::ERROR_DEVICE_LOST,
MemoryMapFailed = vk::ERROR_MEMORY_MAP_FAILED,
LayerNotPresent = vk::ERROR_LAYER_NOT_PRESENT,
ExtensionNotPresent = vk::ERROR_EXTENSION_NOT_PRESENT,
FeatureNotPresent = vk::ERROR_FEATURE_NOT_PRESENT,
IncompatibleDriver = vk::ERROR_INCOMPATIBLE_DRIVER,
TooManyObjects = vk::ERROR_TOO_MANY_OBJECTS,
FormatNotSupported = vk::ERROR_FORMAT_NOT_SUPPORTED,
SurfaceLost = vk::ERROR_SURFACE_LOST_KHR,
NativeWindowInUse = vk::ERROR_NATIVE_WINDOW_IN_USE_KHR,
OutOfDate = vk::ERROR_OUT_OF_DATE_KHR,
IncompatibleDisplay = vk::ERROR_INCOMPATIBLE_DISPLAY_KHR,
ValidationFailed = vk::ERROR_VALIDATION_FAILED_EXT,
OutOfPoolMemory = vk::ERROR_OUT_OF_POOL_MEMORY_KHR,
FullscreenExclusiveLost = vk::ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT,
OutOfHostMemory = ash::vk::Result::ERROR_OUT_OF_HOST_MEMORY.as_raw(),
OutOfDeviceMemory = ash::vk::Result::ERROR_OUT_OF_DEVICE_MEMORY.as_raw(),
InitializationFailed = ash::vk::Result::ERROR_INITIALIZATION_FAILED.as_raw(),
DeviceLost = ash::vk::Result::ERROR_DEVICE_LOST.as_raw(),
MemoryMapFailed = ash::vk::Result::ERROR_MEMORY_MAP_FAILED.as_raw(),
LayerNotPresent = ash::vk::Result::ERROR_LAYER_NOT_PRESENT.as_raw(),
ExtensionNotPresent = ash::vk::Result::ERROR_EXTENSION_NOT_PRESENT.as_raw(),
FeatureNotPresent = ash::vk::Result::ERROR_FEATURE_NOT_PRESENT.as_raw(),
IncompatibleDriver = ash::vk::Result::ERROR_INCOMPATIBLE_DRIVER.as_raw(),
TooManyObjects = ash::vk::Result::ERROR_TOO_MANY_OBJECTS.as_raw(),
FormatNotSupported = ash::vk::Result::ERROR_FORMAT_NOT_SUPPORTED.as_raw(),
SurfaceLost = ash::vk::Result::ERROR_SURFACE_LOST_KHR.as_raw(),
NativeWindowInUse = ash::vk::Result::ERROR_NATIVE_WINDOW_IN_USE_KHR.as_raw(),
OutOfDate = ash::vk::Result::ERROR_OUT_OF_DATE_KHR.as_raw(),
IncompatibleDisplay = ash::vk::Result::ERROR_INCOMPATIBLE_DISPLAY_KHR.as_raw(),
ValidationFailed = ash::vk::Result::ERROR_VALIDATION_FAILED_EXT.as_raw(),
OutOfPoolMemory = ash::vk::Result::ERROR_OUT_OF_POOL_MEMORY_KHR.as_raw(),
FullscreenExclusiveLost = ash::vk::Result::ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT.as_raw(),
}
/// Checks whether the result returned correctly.
fn check_errors(result: vk::Result) -> Result<Success, Error> {
fn check_errors(result: ash::vk::Result) -> Result<Success, Error> {
match result {
vk::SUCCESS => Ok(Success::Success),
vk::NOT_READY => Ok(Success::NotReady),
vk::TIMEOUT => Ok(Success::Timeout),
vk::EVENT_SET => Ok(Success::EventSet),
vk::EVENT_RESET => Ok(Success::EventReset),
vk::INCOMPLETE => Ok(Success::Incomplete),
vk::ERROR_OUT_OF_HOST_MEMORY => Err(Error::OutOfHostMemory),
vk::ERROR_OUT_OF_DEVICE_MEMORY => Err(Error::OutOfDeviceMemory),
vk::ERROR_INITIALIZATION_FAILED => Err(Error::InitializationFailed),
vk::ERROR_DEVICE_LOST => Err(Error::DeviceLost),
vk::ERROR_MEMORY_MAP_FAILED => Err(Error::MemoryMapFailed),
vk::ERROR_LAYER_NOT_PRESENT => Err(Error::LayerNotPresent),
vk::ERROR_EXTENSION_NOT_PRESENT => Err(Error::ExtensionNotPresent),
vk::ERROR_FEATURE_NOT_PRESENT => Err(Error::FeatureNotPresent),
vk::ERROR_INCOMPATIBLE_DRIVER => Err(Error::IncompatibleDriver),
vk::ERROR_TOO_MANY_OBJECTS => Err(Error::TooManyObjects),
vk::ERROR_FORMAT_NOT_SUPPORTED => Err(Error::FormatNotSupported),
vk::ERROR_SURFACE_LOST_KHR => Err(Error::SurfaceLost),
vk::ERROR_NATIVE_WINDOW_IN_USE_KHR => Err(Error::NativeWindowInUse),
vk::SUBOPTIMAL_KHR => Ok(Success::Suboptimal),
vk::ERROR_OUT_OF_DATE_KHR => Err(Error::OutOfDate),
vk::ERROR_INCOMPATIBLE_DISPLAY_KHR => Err(Error::IncompatibleDisplay),
vk::ERROR_VALIDATION_FAILED_EXT => Err(Error::ValidationFailed),
vk::ERROR_OUT_OF_POOL_MEMORY_KHR => Err(Error::OutOfPoolMemory),
vk::ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT => Err(Error::FullscreenExclusiveLost),
vk::ERROR_INVALID_SHADER_NV => panic!(
ash::vk::Result::SUCCESS => Ok(Success::Success),
ash::vk::Result::NOT_READY => Ok(Success::NotReady),
ash::vk::Result::TIMEOUT => Ok(Success::Timeout),
ash::vk::Result::EVENT_SET => Ok(Success::EventSet),
ash::vk::Result::EVENT_RESET => Ok(Success::EventReset),
ash::vk::Result::INCOMPLETE => Ok(Success::Incomplete),
ash::vk::Result::ERROR_OUT_OF_HOST_MEMORY => Err(Error::OutOfHostMemory),
ash::vk::Result::ERROR_OUT_OF_DEVICE_MEMORY => Err(Error::OutOfDeviceMemory),
ash::vk::Result::ERROR_INITIALIZATION_FAILED => Err(Error::InitializationFailed),
ash::vk::Result::ERROR_DEVICE_LOST => Err(Error::DeviceLost),
ash::vk::Result::ERROR_MEMORY_MAP_FAILED => Err(Error::MemoryMapFailed),
ash::vk::Result::ERROR_LAYER_NOT_PRESENT => Err(Error::LayerNotPresent),
ash::vk::Result::ERROR_EXTENSION_NOT_PRESENT => Err(Error::ExtensionNotPresent),
ash::vk::Result::ERROR_FEATURE_NOT_PRESENT => Err(Error::FeatureNotPresent),
ash::vk::Result::ERROR_INCOMPATIBLE_DRIVER => Err(Error::IncompatibleDriver),
ash::vk::Result::ERROR_TOO_MANY_OBJECTS => Err(Error::TooManyObjects),
ash::vk::Result::ERROR_FORMAT_NOT_SUPPORTED => Err(Error::FormatNotSupported),
ash::vk::Result::ERROR_SURFACE_LOST_KHR => Err(Error::SurfaceLost),
ash::vk::Result::ERROR_NATIVE_WINDOW_IN_USE_KHR => Err(Error::NativeWindowInUse),
ash::vk::Result::SUBOPTIMAL_KHR => Ok(Success::Suboptimal),
ash::vk::Result::ERROR_OUT_OF_DATE_KHR => Err(Error::OutOfDate),
ash::vk::Result::ERROR_INCOMPATIBLE_DISPLAY_KHR => Err(Error::IncompatibleDisplay),
ash::vk::Result::ERROR_VALIDATION_FAILED_EXT => Err(Error::ValidationFailed),
ash::vk::Result::ERROR_OUT_OF_POOL_MEMORY_KHR => Err(Error::OutOfPoolMemory),
ash::vk::Result::ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT => {
Err(Error::FullscreenExclusiveLost)
}
ash::vk::Result::ERROR_INVALID_SHADER_NV => panic!(
"Vulkan function returned \
VK_ERROR_INVALID_SHADER_NV"
),

View File

@ -7,23 +7,6 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use std::error;
use std::fmt;
use std::marker::PhantomData;
use std::mem::MaybeUninit;
use std::ops::Deref;
use std::ops::DerefMut;
use std::ops::Range;
use std::os::raw::c_void;
use std::ptr;
use std::sync::Arc;
use std::sync::Mutex;
#[cfg(target_os = "linux")]
use std::fs::File;
#[cfg(target_os = "linux")]
use std::os::unix::io::{FromRawFd, IntoRawFd};
use crate::check_errors;
use crate::device::Device;
use crate::device::DeviceOwned;
@ -31,10 +14,24 @@ use crate::instance::MemoryType;
use crate::memory::Content;
use crate::memory::DedicatedAlloc;
use crate::memory::ExternalMemoryHandleType;
use crate::vk;
use crate::Error;
use crate::OomError;
use crate::VulkanObject;
use std::error;
use std::fmt;
#[cfg(target_os = "linux")]
use std::fs::File;
use std::marker::PhantomData;
use std::mem::MaybeUninit;
use std::ops::Deref;
use std::ops::DerefMut;
use std::ops::Range;
use std::os::raw::c_void;
#[cfg(target_os = "linux")]
use std::os::unix::io::{FromRawFd, IntoRawFd};
use std::ptr;
use std::sync::Arc;
use std::sync::Mutex;
#[repr(C)]
pub struct BaseOutStructure {
@ -56,9 +53,9 @@ pub(crate) unsafe fn ptr_chain_iter<T>(ptr: &mut T) -> impl Iterator<Item = *mut
}
pub unsafe trait ExtendsMemoryAllocateInfo {}
unsafe impl ExtendsMemoryAllocateInfo for vk::MemoryDedicatedAllocateInfoKHR {}
unsafe impl ExtendsMemoryAllocateInfo for vk::ExportMemoryAllocateInfo {}
unsafe impl ExtendsMemoryAllocateInfo for vk::ImportMemoryFdInfoKHR {}
unsafe impl ExtendsMemoryAllocateInfo for ash::vk::MemoryDedicatedAllocateInfoKHR {}
unsafe impl ExtendsMemoryAllocateInfo for ash::vk::ExportMemoryAllocateInfo {}
unsafe impl ExtendsMemoryAllocateInfo for ash::vk::ImportMemoryFdInfoKHR {}
/// Represents memory that has been allocated.
///
@ -76,7 +73,7 @@ unsafe impl ExtendsMemoryAllocateInfo for vk::ImportMemoryFdInfoKHR {}
/// let memory = DeviceMemory::alloc(device.clone(), mem_ty, 1024).unwrap();
/// ```
pub struct DeviceMemory {
memory: vk::DeviceMemory,
memory: ash::vk::DeviceMemory,
device: Arc<Device>,
size: usize,
memory_type_index: u32,
@ -99,10 +96,10 @@ pub struct DeviceMemory {
/// ```
pub struct DeviceMemoryBuilder<'a> {
device: Arc<Device>,
allocate: vk::MemoryAllocateInfo,
dedicated_info: Option<vk::MemoryDedicatedAllocateInfoKHR>,
export_info: Option<vk::ExportMemoryAllocateInfo>,
import_info: Option<vk::ImportMemoryFdInfoKHR>,
allocate: ash::vk::MemoryAllocateInfo,
dedicated_info: Option<ash::vk::MemoryDedicatedAllocateInfoKHR>,
export_info: Option<ash::vk::ExportMemoryAllocateInfo>,
import_info: Option<ash::vk::ImportMemoryFdInfoKHR>,
marker: PhantomData<&'a ()>,
}
@ -110,11 +107,10 @@ impl<'a> DeviceMemoryBuilder<'a> {
/// Returns a new `DeviceMemoryBuilder` given the required device, memory type and size fields.
/// Validation of parameters is done when the builder is built.
pub fn new(device: Arc<Device>, memory_index: u32, size: usize) -> DeviceMemoryBuilder<'a> {
let allocate = vk::MemoryAllocateInfo {
sType: vk::STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
pNext: ptr::null(),
allocationSize: size as u64,
memoryTypeIndex: memory_index,
let allocate = ash::vk::MemoryAllocateInfo {
allocation_size: size as u64,
memory_type_index: memory_index,
..Default::default()
};
DeviceMemoryBuilder {
@ -138,17 +134,15 @@ impl<'a> DeviceMemoryBuilder<'a> {
assert!(self.dedicated_info.is_none());
let mut dedicated_info = match dedicated {
DedicatedAlloc::Buffer(buffer) => vk::MemoryDedicatedAllocateInfoKHR {
sType: vk::STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR,
pNext: ptr::null(),
image: 0,
DedicatedAlloc::Buffer(buffer) => ash::vk::MemoryDedicatedAllocateInfoKHR {
image: ash::vk::Image::null(),
buffer: buffer.internal_object(),
..Default::default()
},
DedicatedAlloc::Image(image) => vk::MemoryDedicatedAllocateInfoKHR {
sType: vk::STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR,
pNext: ptr::null(),
DedicatedAlloc::Image(image) => ash::vk::MemoryDedicatedAllocateInfoKHR {
image: image.internal_object(),
buffer: 0,
buffer: ash::vk::Buffer::null(),
..Default::default()
},
DedicatedAlloc::None => return self,
};
@ -169,10 +163,9 @@ impl<'a> DeviceMemoryBuilder<'a> {
) -> DeviceMemoryBuilder<'a> {
assert!(self.export_info.is_none());
let mut export_info = vk::ExportMemoryAllocateInfo {
sType: vk::STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO,
pNext: ptr::null(),
handleTypes: handle_types.into(),
let mut export_info = ash::vk::ExportMemoryAllocateInfo {
handle_types: handle_types.into(),
..Default::default()
};
self = self.push_next(&mut export_info);
@ -193,11 +186,10 @@ impl<'a> DeviceMemoryBuilder<'a> {
) -> DeviceMemoryBuilder<'a> {
assert!(self.import_info.is_none());
let mut import_info = vk::ImportMemoryFdInfoKHR {
sType: vk::STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
pNext: ptr::null(),
handleType: handle_types.into(),
let mut import_info = ash::vk::ImportMemoryFdInfoKHR {
handle_type: handle_types.into(),
fd: fd.into_raw_fd(),
..Default::default()
};
self = self.push_next(&mut import_info);
@ -221,7 +213,7 @@ impl<'a> DeviceMemoryBuilder<'a> {
// Convert next to our ptr structure
let next_ptr = next as *mut T as *mut BaseOutStructure;
// Previous head (can be null)
let mut prev_head = self.allocate.pNext as *mut BaseOutStructure;
let mut prev_head = self.allocate.p_next as *mut BaseOutStructure;
// Retrieve end of next chain
let last_next = ptr_chain_iter(next).last().unwrap();
// Set end of next chain's next to be previous head only if previous head's next'
@ -239,7 +231,7 @@ impl<'a> DeviceMemoryBuilder<'a> {
/// is returned if the requested allocation is too large or if the total number of allocations
/// would exceed per-device limits.
pub fn build(self) -> Result<Arc<DeviceMemory>, DeviceMemoryAllocError> {
if self.allocate.allocationSize == 0 {
if self.allocate.allocation_size == 0 {
return Err(DeviceMemoryAllocError::InvalidSize)?;
}
@ -250,7 +242,7 @@ impl<'a> DeviceMemoryBuilder<'a> {
let memory_type = self
.device
.physical_device()
.memory_type_by_id(self.allocate.memoryTypeIndex)
.memory_type_by_id(self.allocate.memory_type_index)
.ok_or(DeviceMemoryAllocError::SpecViolation(1714))?;
if self.device.physical_device().internal_object()
@ -271,11 +263,11 @@ impl<'a> DeviceMemoryBuilder<'a> {
// returned by vkGetPhysicalDeviceMemoryProperties for the VkPhysicalDevice that device was created
// from".
let reported_heap_size = memory_type.heap().size() as u64;
if reported_heap_size != 0 && self.allocate.allocationSize > reported_heap_size {
if reported_heap_size != 0 && self.allocate.allocation_size > reported_heap_size {
return Err(DeviceMemoryAllocError::SpecViolation(1713));
}
let mut export_handle_bits = 0;
let mut export_handle_bits = ash::vk::ExternalMemoryHandleTypeFlags::empty();
if self.dedicated_info.is_some() {
if !self.device.loaded_extensions().khr_dedicated_allocation {
return Err(DeviceMemoryAllocError::MissingExtension(
@ -287,16 +279,18 @@ impl<'a> DeviceMemoryBuilder<'a> {
if self.export_info.is_some() || self.import_info.is_some() {
// TODO: check exportFromImportedHandleTypes
export_handle_bits = match self.export_info {
Some(export_info) => export_info.handleTypes,
None => 0,
Some(export_info) => export_info.handle_types,
None => ash::vk::ExternalMemoryHandleTypeFlags::empty(),
};
let import_handle_bits = match self.import_info {
Some(import_info) => import_info.handleType,
None => 0,
Some(import_info) => import_info.handle_type,
None => ash::vk::ExternalMemoryHandleTypeFlags::empty(),
};
if export_handle_bits & vk::EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT != 0 {
if !(export_handle_bits & ash::vk::ExternalMemoryHandleTypeFlags::DMA_BUF_EXT)
.is_empty()
{
if !self.device.loaded_extensions().ext_external_memory_dmabuf {
return Err(DeviceMemoryAllocError::MissingExtension(
"ext_external_memory_dmabuf",
@ -304,7 +298,8 @@ impl<'a> DeviceMemoryBuilder<'a> {
};
}
if export_handle_bits & vk::EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT != 0 {
if !(export_handle_bits & ash::vk::ExternalMemoryHandleTypeFlags::OPAQUE_FD).is_empty()
{
if !self.device.loaded_extensions().khr_external_memory_fd {
return Err(DeviceMemoryAllocError::MissingExtension(
"khr_external_memory_fd",
@ -312,7 +307,9 @@ impl<'a> DeviceMemoryBuilder<'a> {
}
}
if import_handle_bits & vk::EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT != 0 {
if !(import_handle_bits & ash::vk::ExternalMemoryHandleTypeFlags::DMA_BUF_EXT)
.is_empty()
{
if !self.device.loaded_extensions().ext_external_memory_dmabuf {
return Err(DeviceMemoryAllocError::MissingExtension(
"ext_external_memory_dmabuf",
@ -320,7 +317,8 @@ impl<'a> DeviceMemoryBuilder<'a> {
}
}
if import_handle_bits & vk::EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT != 0 {
if !(import_handle_bits & ash::vk::ExternalMemoryHandleTypeFlags::OPAQUE_FD).is_empty()
{
if !self.device.loaded_extensions().khr_external_memory_fd {
return Err(DeviceMemoryAllocError::MissingExtension(
"khr_external_memory_fd",
@ -340,10 +338,10 @@ impl<'a> DeviceMemoryBuilder<'a> {
if *allocation_count >= physical_device.limits().max_memory_allocation_count() {
return Err(DeviceMemoryAllocError::TooManyObjects);
}
let vk = self.device.pointers();
let fns = self.device.fns();
let mut output = MaybeUninit::uninit();
check_errors(vk.AllocateMemory(
check_errors(fns.v1_0.allocate_memory(
self.device.internal_object(),
&self.allocate,
ptr::null(),
@ -356,8 +354,8 @@ impl<'a> DeviceMemoryBuilder<'a> {
Ok(Arc::new(DeviceMemory {
memory: memory,
device: self.device,
size: self.allocate.allocationSize as usize,
memory_type_index: self.allocate.memoryTypeIndex,
size: self.allocate.allocation_size as usize,
memory_type_index: self.allocate.memory_type_index,
handle_types: ExternalMemoryHandleType::from(export_handle_bits),
mapped: Mutex::new(false),
}))
@ -433,7 +431,7 @@ impl DeviceMemory {
size: usize,
resource: DedicatedAlloc,
) -> Result<MappedDeviceMemory, DeviceMemoryAllocError> {
let vk = device.pointers();
let fns = device.fns();
assert!(memory_type.is_host_visible());
let mem = DeviceMemory::dedicated_alloc(device.clone(), memory_type, size, resource)?;
@ -508,7 +506,7 @@ impl DeviceMemory {
size: usize,
resource: DedicatedAlloc,
) -> Result<MappedDeviceMemory, DeviceMemoryAllocError> {
let vk = device.pointers();
let fns = device.fns();
assert!(memory_type.is_host_visible());
let mem = DeviceMemory::dedicated_alloc_with_exportable_fd(
@ -525,16 +523,16 @@ impl DeviceMemory {
device: Arc<Device>,
mem: DeviceMemory,
) -> Result<MappedDeviceMemory, DeviceMemoryAllocError> {
let vk = device.pointers();
let fns = device.fns();
let coherent = mem.memory_type().is_host_coherent();
let ptr = unsafe {
let mut output = MaybeUninit::uninit();
check_errors(vk.MapMemory(
check_errors(fns.v1_0.map_memory(
device.internal_object(),
mem.memory,
0,
mem.size as vk::DeviceSize,
0, /* reserved flags */
mem.size as ash::vk::DeviceSize,
ash::vk::MemoryMapFlags::empty(),
output.as_mut_ptr(),
))?;
output.assume_init()
@ -574,33 +572,32 @@ impl DeviceMemory {
&self,
handle_type: ExternalMemoryHandleType,
) -> Result<File, DeviceMemoryAllocError> {
let vk = self.device.pointers();
let fns = self.device.fns();
// VUID-VkMemoryGetFdInfoKHR-handleType-00672: "handleType must be defined as a POSIX file
// descriptor handle".
let bits = vk::ExternalMemoryHandleTypeFlags::from(handle_type);
if bits != vk::EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT
&& bits != vk::EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT
let bits = ash::vk::ExternalMemoryHandleTypeFlags::from(handle_type);
if bits != ash::vk::ExternalMemoryHandleTypeFlags::DMA_BUF_EXT
&& bits != ash::vk::ExternalMemoryHandleTypeFlags::OPAQUE_FD
{
return Err(DeviceMemoryAllocError::SpecViolation(672))?;
}
// VUID-VkMemoryGetFdInfoKHR-handleType-00671: "handleType must have been included in
// VkExportMemoryAllocateInfo::handleTypes when memory was created".
if bits & vk::ExternalMemoryHandleTypeFlags::from(self.handle_types) == 0 {
if (bits & ash::vk::ExternalMemoryHandleTypeFlags::from(self.handle_types)).is_empty() {
return Err(DeviceMemoryAllocError::SpecViolation(671))?;
}
let fd = unsafe {
let info = vk::MemoryGetFdInfoKHR {
sType: vk::STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
pNext: ptr::null(),
let info = ash::vk::MemoryGetFdInfoKHR {
memory: self.memory,
handleType: handle_type.into(),
handle_type: handle_type.into(),
..Default::default()
};
let mut output = MaybeUninit::uninit();
check_errors(vk.GetMemoryFdKHR(
check_errors(fns.khr_external_memory_fd.get_memory_fd_khr(
self.device.internal_object(),
&info,
output.as_mut_ptr(),
@ -631,12 +628,10 @@ impl fmt::Debug for DeviceMemory {
}
unsafe impl VulkanObject for DeviceMemory {
type Object = vk::DeviceMemory;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_DEVICE_MEMORY;
type Object = ash::vk::DeviceMemory;
#[inline]
fn internal_object(&self) -> vk::DeviceMemory {
fn internal_object(&self) -> ash::vk::DeviceMemory {
self.memory
}
}
@ -645,8 +640,9 @@ impl Drop for DeviceMemory {
#[inline]
fn drop(&mut self) {
unsafe {
let vk = self.device.pointers();
vk.FreeMemory(self.device.internal_object(), self.memory, ptr::null());
let fns = self.device.fns();
fns.v1_0
.free_memory(self.device.internal_object(), self.memory, ptr::null());
let mut allocation_count = self
.device
.allocation_count()
@ -704,8 +700,9 @@ impl MappedDeviceMemory {
pub fn unmap(self) -> DeviceMemory {
unsafe {
let device = self.memory.device();
let vk = device.pointers();
vk.UnmapMemory(device.internal_object(), self.memory.memory);
let fns = device.fns();
fns.v1_0
.unmap_memory(device.internal_object(), self.memory.memory);
}
self.memory
@ -730,7 +727,7 @@ impl MappedDeviceMemory {
where
T: Content,
{
let vk = self.memory.device().pointers();
let fns = self.memory.device().fns();
let pointer = T::ref_from_ptr(
(self.pointer as usize + range.start) as *mut _,
range.end - range.start,
@ -738,16 +735,19 @@ impl MappedDeviceMemory {
.unwrap(); // TODO: error
if !self.coherent {
let range = vk::MappedMemoryRange {
sType: vk::STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
pNext: ptr::null(),
let range = ash::vk::MappedMemoryRange {
memory: self.memory.internal_object(),
offset: range.start as u64,
size: (range.end - range.start) as u64,
..Default::default()
};
// TODO: check result?
vk.InvalidateMappedMemoryRanges(self.memory.device().internal_object(), 1, &range);
fns.v1_0.invalidate_mapped_memory_ranges(
self.memory.device().internal_object(),
1,
&range,
);
}
CpuAccess {
@ -821,19 +821,19 @@ impl DeviceMemoryMapping {
}
// VUID-vkMapMemory-offset-00679: "offset must be less than the size of memory"
if size != vk::WHOLE_SIZE && offset >= memory.size() as u64 {
if size != ash::vk::WHOLE_SIZE && offset >= memory.size() as u64 {
return Err(DeviceMemoryAllocError::SpecViolation(679));
}
// VUID-vkMapMemory-size-00680: "If size is not equal to VK_WHOLE_SIZE, size must be
// greater than 0".
if size != vk::WHOLE_SIZE && size == 0 {
if size != ash::vk::WHOLE_SIZE && size == 0 {
return Err(DeviceMemoryAllocError::SpecViolation(680));
}
// VUID-vkMapMemory-size-00681: "If size is not equal to VK_WHOLE_SIZE, size must be less
// than or equal to the size of the memory minus offset".
if size != vk::WHOLE_SIZE && size > memory.size() as u64 - offset {
if size != ash::vk::WHOLE_SIZE && size > memory.size() as u64 - offset {
return Err(DeviceMemoryAllocError::SpecViolation(681));
}
@ -865,15 +865,15 @@ impl DeviceMemoryMapping {
// VUID-vkMapMemory-device-parameter, VUID-vkMapMemory-memory-parameter and
// VUID-vkMapMemory-ppData-parameter satisfied via Vulkano internally.
let vk = device.pointers();
let fns = device.fns();
let ptr = unsafe {
let mut output = MaybeUninit::uninit();
check_errors(vk.MapMemory(
check_errors(fns.v1_0.map_memory(
device.internal_object(),
memory.memory,
0,
memory.size as vk::DeviceSize,
0, /* reserved flags */
memory.size as ash::vk::DeviceSize,
ash::vk::MemoryMapFlags::empty(),
output.as_mut_ptr(),
))?;
output.assume_init()
@ -906,8 +906,9 @@ impl Drop for DeviceMemoryMapping {
let mut mapped = self.memory.mapped.lock().expect("Poisoned mutex");
unsafe {
let vk = self.device.pointers();
vk.UnmapMemory(self.device.internal_object(), self.memory.memory);
let fns = self.device.fns();
fns.v1_0
.unmap_memory(self.device.internal_object(), self.memory.memory);
}
*mapped = false;
@ -969,19 +970,22 @@ impl<'a, T: ?Sized + 'a> Drop for CpuAccess<'a, T> {
fn drop(&mut self) {
// If the memory doesn't have the `coherent` flag, we need to flush the data.
if !self.coherent {
let vk = self.mem.as_ref().device().pointers();
let fns = self.mem.as_ref().device().fns();
let range = vk::MappedMemoryRange {
sType: vk::STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
pNext: ptr::null(),
let range = ash::vk::MappedMemoryRange {
memory: self.mem.as_ref().internal_object(),
offset: self.range.start as u64,
size: (self.range.end - self.range.start) as u64,
..Default::default()
};
// TODO: check result?
unsafe {
vk.FlushMappedMemoryRanges(self.mem.as_ref().device().internal_object(), 1, &range);
fns.v1_0.flush_mapped_memory_ranges(
self.mem.as_ref().device().internal_object(),
1,
&range,
);
}
}
}

View File

@ -7,7 +7,6 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use crate::vk;
use std::ops::BitOr;
/// Describes the handle type used for Vulkan external memory apis. This is **not** just a
@ -78,65 +77,70 @@ impl ExternalMemoryHandleType {
}
}
impl From<ExternalMemoryHandleType> for vk::ExternalMemoryHandleTypeFlags {
impl From<ExternalMemoryHandleType> for ash::vk::ExternalMemoryHandleTypeFlags {
#[inline]
fn from(val: ExternalMemoryHandleType) -> Self {
let mut result = 0;
let mut result = ash::vk::ExternalMemoryHandleTypeFlags::empty();
if val.opaque_fd {
result |= vk::EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
result |= ash::vk::ExternalMemoryHandleTypeFlags::OPAQUE_FD;
}
if val.opaque_win32 {
result |= vk::EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT;
result |= ash::vk::ExternalMemoryHandleTypeFlags::OPAQUE_WIN32;
}
if val.opaque_win32_kmt {
result |= vk::EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT;
result |= ash::vk::ExternalMemoryHandleTypeFlags::OPAQUE_WIN32_KMT;
}
if val.d3d11_texture {
result |= vk::EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT;
result |= ash::vk::ExternalMemoryHandleTypeFlags::D3D11_TEXTURE;
}
if val.d3d11_texture_kmt {
result |= vk::EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT;
result |= ash::vk::ExternalMemoryHandleTypeFlags::D3D11_TEXTURE_KMT;
}
if val.d3d12_heap {
result |= vk::EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT;
result |= ash::vk::ExternalMemoryHandleTypeFlags::D3D12_HEAP;
}
if val.d3d12_resource {
result |= vk::EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT;
result |= ash::vk::ExternalMemoryHandleTypeFlags::D3D12_RESOURCE;
}
if val.dma_buf {
result |= vk::EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
result |= ash::vk::ExternalMemoryHandleTypeFlags::DMA_BUF_EXT;
}
if val.android_hardware_buffer {
result |= vk::EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
result |= ash::vk::ExternalMemoryHandleTypeFlags::ANDROID_HARDWARE_BUFFER_ANDROID;
}
if val.host_allocation {
result |= vk::EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT;
result |= ash::vk::ExternalMemoryHandleTypeFlags::HOST_ALLOCATION_EXT;
}
if val.host_mapped_foreign_memory {
result |= vk::EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT
result |= ash::vk::ExternalMemoryHandleTypeFlags::HOST_MAPPED_FOREIGN_MEMORY_EXT
}
result
}
}
impl From<vk::ExternalMemoryHandleTypeFlags> for ExternalMemoryHandleType {
fn from(val: vk::ExternalMemoryHandleTypeFlags) -> Self {
impl From<ash::vk::ExternalMemoryHandleTypeFlags> for ExternalMemoryHandleType {
fn from(val: ash::vk::ExternalMemoryHandleTypeFlags) -> Self {
ExternalMemoryHandleType {
opaque_fd: (val & vk::EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT) != 0,
opaque_win32: (val & vk::EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT) != 0,
opaque_win32_kmt: (val & vk::EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT) != 0,
d3d11_texture: (val & vk::EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT) != 0,
d3d11_texture_kmt: (val & vk::EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT) != 0,
d3d12_heap: (val & vk::EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT) != 0,
d3d12_resource: (val & vk::EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT) != 0,
dma_buf: (val & vk::EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT) != 0,
android_hardware_buffer: (val
& vk::EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)
!= 0,
host_allocation: (val & vk::EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT) != 0,
host_mapped_foreign_memory: (val
& vk::EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT)
!= 0,
opaque_fd: !(val & ash::vk::ExternalMemoryHandleTypeFlags::OPAQUE_FD).is_empty(),
opaque_win32: !(val & ash::vk::ExternalMemoryHandleTypeFlags::OPAQUE_WIN32).is_empty(),
opaque_win32_kmt: !(val & ash::vk::ExternalMemoryHandleTypeFlags::OPAQUE_WIN32_KMT)
.is_empty(),
d3d11_texture: !(val & ash::vk::ExternalMemoryHandleTypeFlags::D3D11_TEXTURE)
.is_empty(),
d3d11_texture_kmt: !(val & ash::vk::ExternalMemoryHandleTypeFlags::D3D11_TEXTURE_KMT)
.is_empty(),
d3d12_heap: !(val & ash::vk::ExternalMemoryHandleTypeFlags::D3D12_HEAP).is_empty(),
d3d12_resource: !(val & ash::vk::ExternalMemoryHandleTypeFlags::D3D12_RESOURCE)
.is_empty(),
dma_buf: !(val & ash::vk::ExternalMemoryHandleTypeFlags::DMA_BUF_EXT).is_empty(),
android_hardware_buffer: !(val
& ash::vk::ExternalMemoryHandleTypeFlags::ANDROID_HARDWARE_BUFFER_ANDROID)
.is_empty(),
host_allocation: !(val & ash::vk::ExternalMemoryHandleTypeFlags::HOST_ALLOCATION_EXT)
.is_empty(),
host_mapped_foreign_memory: !(val
& ash::vk::ExternalMemoryHandleTypeFlags::HOST_MAPPED_FOREIGN_MEMORY_EXT)
.is_empty(),
}
}
}

View File

@ -91,7 +91,6 @@ use std::slice;
use crate::buffer::sys::UnsafeBuffer;
use crate::image::sys::UnsafeImage;
use crate::vk;
pub use self::device_memory::CpuAccess;
pub use self::device_memory::DeviceMemory;
@ -130,13 +129,13 @@ pub struct MemoryRequirements {
pub prefer_dedicated: bool,
}
impl From<vk::MemoryRequirements> for MemoryRequirements {
impl From<ash::vk::MemoryRequirements> for MemoryRequirements {
#[inline]
fn from(val: vk::MemoryRequirements) -> Self {
fn from(val: ash::vk::MemoryRequirements) -> Self {
MemoryRequirements {
size: val.size as usize,
alignment: val.alignment as usize,
memory_type_bits: val.memoryTypeBits,
memory_type_bits: val.memory_type_bits,
prefer_dedicated: false,
}
}

View File

@ -22,7 +22,6 @@
//! will take precedence if it is activated, otherwise the blending operation is applied.
//!
use crate::vk;
/// Describes how the color output of the fragment shader is written to the attachment. See the
/// documentation of the `blend` module for more info.
@ -151,30 +150,30 @@ impl AttachmentBlend {
}
}
impl From<AttachmentBlend> for vk::PipelineColorBlendAttachmentState {
impl From<AttachmentBlend> for ash::vk::PipelineColorBlendAttachmentState {
#[inline]
fn from(val: AttachmentBlend) -> Self {
vk::PipelineColorBlendAttachmentState {
blendEnable: if val.enabled { vk::TRUE } else { vk::FALSE },
srcColorBlendFactor: val.color_source as u32,
dstColorBlendFactor: val.color_destination as u32,
colorBlendOp: val.color_op as u32,
srcAlphaBlendFactor: val.alpha_source as u32,
dstAlphaBlendFactor: val.alpha_destination as u32,
alphaBlendOp: val.alpha_op as u32,
colorWriteMask: {
let mut mask = 0;
ash::vk::PipelineColorBlendAttachmentState {
blend_enable: if val.enabled { ash::vk::TRUE } else { ash::vk::FALSE },
src_color_blend_factor: val.color_source.into(),
dst_color_blend_factor: val.color_destination.into(),
color_blend_op: val.color_op.into(),
src_alpha_blend_factor: val.alpha_source.into(),
dst_alpha_blend_factor: val.alpha_destination.into(),
alpha_blend_op: val.alpha_op.into(),
color_write_mask: {
let mut mask = ash::vk::ColorComponentFlags::empty();
if val.mask_red {
mask |= vk::COLOR_COMPONENT_R_BIT;
mask |= ash::vk::ColorComponentFlags::R;
}
if val.mask_green {
mask |= vk::COLOR_COMPONENT_G_BIT;
mask |= ash::vk::ColorComponentFlags::G;
}
if val.mask_blue {
mask |= vk::COLOR_COMPONENT_B_BIT;
mask |= ash::vk::ColorComponentFlags::B;
}
if val.mask_alpha {
mask |= vk::COLOR_COMPONENT_A_BIT;
mask |= ash::vk::ColorComponentFlags::A;
}
mask
},
@ -190,40 +189,47 @@ impl From<AttachmentBlend> for vk::PipelineColorBlendAttachmentState {
///
/// Also note that some implementations don't support logic operations.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[repr(u32)]
#[repr(i32)]
pub enum LogicOp {
/// Returns `0`.
Clear = vk::LOGIC_OP_CLEAR,
Clear = ash::vk::LogicOp::CLEAR.as_raw(),
/// Returns `source & destination`.
And = vk::LOGIC_OP_AND,
And = ash::vk::LogicOp::AND.as_raw(),
/// Returns `source & !destination`.
AndReverse = vk::LOGIC_OP_AND_REVERSE,
AndReverse = ash::vk::LogicOp::AND_REVERSE.as_raw(),
/// Returns `source`.
Copy = vk::LOGIC_OP_COPY,
Copy = ash::vk::LogicOp::COPY.as_raw(),
/// Returns `!source & destination`.
AndInverted = vk::LOGIC_OP_AND_INVERTED,
AndInverted = ash::vk::LogicOp::AND_INVERTED.as_raw(),
/// Returns `destination`.
Noop = vk::LOGIC_OP_NO_OP,
Noop = ash::vk::LogicOp::NO_OP.as_raw(),
/// Returns `source ^ destination`.
Xor = vk::LOGIC_OP_XOR,
Xor = ash::vk::LogicOp::XOR.as_raw(),
/// Returns `source | destination`.
Or = vk::LOGIC_OP_OR,
Or = ash::vk::LogicOp::OR.as_raw(),
/// Returns `!(source | destination)`.
Nor = vk::LOGIC_OP_NOR,
Nor = ash::vk::LogicOp::NOR.as_raw(),
/// Returns `!(source ^ destination)`.
Equivalent = vk::LOGIC_OP_EQUIVALENT,
Equivalent = ash::vk::LogicOp::EQUIVALENT.as_raw(),
/// Returns `!destination`.
Invert = vk::LOGIC_OP_INVERT,
Invert = ash::vk::LogicOp::INVERT.as_raw(),
/// Returns `source | !destination.
OrReverse = vk::LOGIC_OP_OR_REVERSE,
OrReverse = ash::vk::LogicOp::OR_REVERSE.as_raw(),
/// Returns `!source`.
CopyInverted = vk::LOGIC_OP_COPY_INVERTED,
CopyInverted = ash::vk::LogicOp::COPY_INVERTED.as_raw(),
/// Returns `!source | destination`.
OrInverted = vk::LOGIC_OP_OR_INVERTED,
OrInverted = ash::vk::LogicOp::OR_INVERTED.as_raw(),
/// Returns `!(source & destination)`.
Nand = vk::LOGIC_OP_NAND,
Nand = ash::vk::LogicOp::NAND.as_raw(),
/// Returns `!0` (all bits set to 1).
Set = vk::LOGIC_OP_SET,
Set = ash::vk::LogicOp::SET.as_raw(),
}
impl From<LogicOp> for ash::vk::LogicOp {
#[inline]
fn from(val: LogicOp) -> Self {
Self::from_raw(val as i32)
}
}
impl Default for LogicOp {
@ -234,35 +240,49 @@ impl Default for LogicOp {
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[repr(u32)]
#[repr(i32)]
pub enum BlendOp {
Add = vk::BLEND_OP_ADD,
Subtract = vk::BLEND_OP_SUBTRACT,
ReverseSubtract = vk::BLEND_OP_REVERSE_SUBTRACT,
Min = vk::BLEND_OP_MIN,
Max = vk::BLEND_OP_MAX,
Add = ash::vk::BlendOp::ADD.as_raw(),
Subtract = ash::vk::BlendOp::SUBTRACT.as_raw(),
ReverseSubtract = ash::vk::BlendOp::REVERSE_SUBTRACT.as_raw(),
Min = ash::vk::BlendOp::MIN.as_raw(),
Max = ash::vk::BlendOp::MAX.as_raw(),
}
impl From<BlendOp> for ash::vk::BlendOp {
#[inline]
fn from(val: BlendOp) -> Self {
Self::from_raw(val as i32)
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[repr(u32)]
#[repr(i32)]
pub enum BlendFactor {
Zero = vk::BLEND_FACTOR_ZERO,
One = vk::BLEND_FACTOR_ONE,
SrcColor = vk::BLEND_FACTOR_SRC_COLOR,
OneMinusSrcColor = vk::BLEND_FACTOR_ONE_MINUS_SRC_COLOR,
DstColor = vk::BLEND_FACTOR_DST_COLOR,
OneMinusDstColor = vk::BLEND_FACTOR_ONE_MINUS_DST_COLOR,
SrcAlpha = vk::BLEND_FACTOR_SRC_ALPHA,
OneMinusSrcAlpha = vk::BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
DstAlpha = vk::BLEND_FACTOR_DST_ALPHA,
OneMinusDstAlpha = vk::BLEND_FACTOR_ONE_MINUS_DST_ALPHA,
ConstantColor = vk::BLEND_FACTOR_CONSTANT_COLOR,
OneMinusConstantColor = vk::BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR,
ConstantAlpha = vk::BLEND_FACTOR_CONSTANT_ALPHA,
OneMinusConstantAlpha = vk::BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA,
SrcAlphaSaturate = vk::BLEND_FACTOR_SRC_ALPHA_SATURATE,
Src1Color = vk::BLEND_FACTOR_SRC1_COLOR,
OneMinusSrc1Color = vk::BLEND_FACTOR_ONE_MINUS_SRC1_COLOR,
Src1Alpha = vk::BLEND_FACTOR_SRC1_ALPHA,
OneMinusSrc1Alpha = vk::BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA,
Zero = ash::vk::BlendFactor::ZERO.as_raw(),
One = ash::vk::BlendFactor::ONE.as_raw(),
SrcColor = ash::vk::BlendFactor::SRC_COLOR.as_raw(),
OneMinusSrcColor = ash::vk::BlendFactor::ONE_MINUS_SRC_COLOR.as_raw(),
DstColor = ash::vk::BlendFactor::DST_COLOR.as_raw(),
OneMinusDstColor = ash::vk::BlendFactor::ONE_MINUS_DST_COLOR.as_raw(),
SrcAlpha = ash::vk::BlendFactor::SRC_ALPHA.as_raw(),
OneMinusSrcAlpha = ash::vk::BlendFactor::ONE_MINUS_SRC_ALPHA.as_raw(),
DstAlpha = ash::vk::BlendFactor::DST_ALPHA.as_raw(),
OneMinusDstAlpha = ash::vk::BlendFactor::ONE_MINUS_DST_ALPHA.as_raw(),
ConstantColor = ash::vk::BlendFactor::CONSTANT_COLOR.as_raw(),
OneMinusConstantColor = ash::vk::BlendFactor::ONE_MINUS_CONSTANT_COLOR.as_raw(),
ConstantAlpha = ash::vk::BlendFactor::CONSTANT_ALPHA.as_raw(),
OneMinusConstantAlpha = ash::vk::BlendFactor::ONE_MINUS_CONSTANT_ALPHA.as_raw(),
SrcAlphaSaturate = ash::vk::BlendFactor::SRC_ALPHA_SATURATE.as_raw(),
Src1Color = ash::vk::BlendFactor::SRC1_COLOR.as_raw(),
OneMinusSrc1Color = ash::vk::BlendFactor::ONE_MINUS_SRC1_COLOR.as_raw(),
Src1Alpha = ash::vk::BlendFactor::SRC1_ALPHA.as_raw(),
OneMinusSrc1Alpha = ash::vk::BlendFactor::ONE_MINUS_SRC1_ALPHA.as_raw(),
}
impl From<BlendFactor> for ash::vk::BlendFactor {
#[inline]
fn from(val: BlendFactor) -> Self {
Self::from_raw(val as i32)
}
}

View File

@ -23,7 +23,6 @@
use crate::check_errors;
use crate::device::Device;
use crate::vk;
use crate::OomError;
use crate::VulkanObject;
use std::mem::MaybeUninit;
@ -35,7 +34,7 @@ use std::sync::Arc;
/// See [the documentation of the module](index.html) for more info.
pub struct PipelineCache {
device: Arc<Device>,
cache: vk::PipelineCache,
cache: ash::vk::PipelineCache,
}
impl PipelineCache {
@ -106,21 +105,20 @@ impl PipelineCache {
device: Arc<Device>,
initial_data: Option<&[u8]>,
) -> Result<Arc<PipelineCache>, OomError> {
let vk = device.pointers();
let fns = device.fns();
let cache = {
let infos = vk::PipelineCacheCreateInfo {
sType: vk::STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO,
pNext: ptr::null(),
flags: 0, // reserved
initialDataSize: initial_data.map(|d| d.len()).unwrap_or(0),
pInitialData: initial_data
let infos = ash::vk::PipelineCacheCreateInfo {
flags: ash::vk::PipelineCacheCreateFlags::empty(),
initial_data_size: initial_data.map(|d| d.len()).unwrap_or(0),
p_initial_data: initial_data
.map(|d| d.as_ptr() as *const _)
.unwrap_or(ptr::null()),
..Default::default()
};
let mut output = MaybeUninit::uninit();
check_errors(vk.CreatePipelineCache(
check_errors(fns.v1_0.create_pipeline_cache(
device.internal_object(),
&infos,
ptr::null(),
@ -150,7 +148,7 @@ impl PipelineCache {
I: IntoIterator<Item = &'a &'a Arc<PipelineCache>>,
{
unsafe {
let vk = self.device.pointers();
let fns = self.device.fns();
let pipelines = pipelines
.into_iter()
@ -160,7 +158,7 @@ impl PipelineCache {
})
.collect::<Vec<_>>();
check_errors(vk.MergePipelineCaches(
check_errors(fns.v1_0.merge_pipeline_caches(
self.device.internal_object(),
self.cache,
pipelines.len() as u32,
@ -201,10 +199,10 @@ impl PipelineCache {
/// ```
pub fn get_data(&self) -> Result<Vec<u8>, OomError> {
unsafe {
let vk = self.device.pointers();
let fns = self.device.fns();
let mut num = 0;
check_errors(vk.GetPipelineCacheData(
check_errors(fns.v1_0.get_pipeline_cache_data(
self.device.internal_object(),
self.cache,
&mut num,
@ -212,7 +210,7 @@ impl PipelineCache {
))?;
let mut data: Vec<u8> = Vec::with_capacity(num as usize);
check_errors(vk.GetPipelineCacheData(
check_errors(fns.v1_0.get_pipeline_cache_data(
self.device.internal_object(),
self.cache,
&mut num,
@ -226,12 +224,10 @@ impl PipelineCache {
}
unsafe impl VulkanObject for PipelineCache {
type Object = vk::PipelineCache;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_PIPELINE_CACHE;
type Object = ash::vk::PipelineCache;
#[inline]
fn internal_object(&self) -> vk::PipelineCache {
fn internal_object(&self) -> ash::vk::PipelineCache {
self.cache
}
}
@ -240,8 +236,9 @@ impl Drop for PipelineCache {
#[inline]
fn drop(&mut self) {
unsafe {
let vk = self.device.pointers();
vk.DestroyPipelineCache(self.device.internal_object(), self.cache, ptr::null());
let fns = self.device.fns();
fns.v1_0
.destroy_pipeline_cache(self.device.internal_object(), self.cache, ptr::null());
}
}
}

View File

@ -16,7 +16,6 @@ use crate::pipeline::layout::PipelineLayoutCreationError;
use crate::pipeline::layout::PipelineLayoutNotSupersetError;
use crate::pipeline::shader::EntryPointAbstract;
use crate::pipeline::shader::SpecializationConstants;
use crate::vk;
use crate::Error;
use crate::OomError;
use crate::SafeDeref;
@ -46,7 +45,7 @@ pub struct ComputePipeline {
}
struct Inner {
pipeline: vk::Pipeline,
pipeline: ash::vk::Pipeline,
device: Arc<Device>,
}
@ -123,48 +122,46 @@ impl ComputePipeline {
Cs: EntryPointAbstract,
Css: SpecializationConstants,
{
let vk = device.pointers();
let fns = device.fns();
let pipeline = {
let spec_descriptors = Css::descriptors();
let specialization = vk::SpecializationInfo {
mapEntryCount: spec_descriptors.len() as u32,
pMapEntries: spec_descriptors.as_ptr() as *const _,
dataSize: mem::size_of_val(spec_constants),
pData: spec_constants as *const Css as *const _,
let specialization = ash::vk::SpecializationInfo {
map_entry_count: spec_descriptors.len() as u32,
p_map_entries: spec_descriptors.as_ptr() as *const _,
data_size: mem::size_of_val(spec_constants),
p_data: spec_constants as *const Css as *const _,
};
let stage = vk::PipelineShaderStageCreateInfo {
sType: vk::STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
pNext: ptr::null(),
flags: 0,
stage: vk::SHADER_STAGE_COMPUTE_BIT,
let stage = ash::vk::PipelineShaderStageCreateInfo {
flags: ash::vk::PipelineShaderStageCreateFlags::empty(),
stage: ash::vk::ShaderStageFlags::COMPUTE,
module: shader.module().internal_object(),
pName: shader.name().as_ptr(),
pSpecializationInfo: if specialization.dataSize == 0 {
p_name: shader.name().as_ptr(),
p_specialization_info: if specialization.data_size == 0 {
ptr::null()
} else {
&specialization
},
..Default::default()
};
let infos = vk::ComputePipelineCreateInfo {
sType: vk::STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
pNext: ptr::null(),
flags: 0,
let infos = ash::vk::ComputePipelineCreateInfo {
flags: ash::vk::PipelineCreateFlags::empty(),
stage,
layout: pipeline_layout.internal_object(),
basePipelineHandle: 0,
basePipelineIndex: 0,
base_pipeline_handle: ash::vk::Pipeline::null(),
base_pipeline_index: 0,
..Default::default()
};
let cache_handle = match cache {
Some(ref cache) => cache.internal_object(),
None => vk::NULL_HANDLE,
None => ash::vk::PipelineCache::null(),
};
let mut output = MaybeUninit::uninit();
check_errors(vk.CreateComputePipelines(
check_errors(fns.v1_0.create_compute_pipelines(
device.internal_object(),
cache_handle,
1,
@ -240,15 +237,13 @@ where
/// Opaque object that represents the inside of the compute pipeline. Can be made into a trait
/// object.
#[derive(Debug, Copy, Clone)]
pub struct ComputePipelineSys<'a>(vk::Pipeline, PhantomData<&'a ()>);
pub struct ComputePipelineSys<'a>(ash::vk::Pipeline, PhantomData<&'a ()>);
unsafe impl<'a> VulkanObject for ComputePipelineSys<'a> {
type Object = vk::Pipeline;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_PIPELINE;
type Object = ash::vk::Pipeline;
#[inline]
fn internal_object(&self) -> vk::Pipeline {
fn internal_object(&self) -> ash::vk::Pipeline {
self.0
}
}
@ -261,12 +256,10 @@ unsafe impl DeviceOwned for ComputePipeline {
}
unsafe impl VulkanObject for ComputePipeline {
type Object = vk::Pipeline;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_PIPELINE;
type Object = ash::vk::Pipeline;
#[inline]
fn internal_object(&self) -> vk::Pipeline {
fn internal_object(&self) -> ash::vk::Pipeline {
self.inner.pipeline
}
}
@ -275,8 +268,9 @@ impl Drop for Inner {
#[inline]
fn drop(&mut self) {
unsafe {
let vk = self.device.pointers();
vk.DestroyPipeline(self.device.internal_object(), self.pipeline, ptr::null());
let fns = self.device.fns();
fns.v1_0
.destroy_pipeline(self.device.internal_object(), self.pipeline, ptr::null());
}
}
}

View File

@ -20,7 +20,6 @@
//! value in the stencil buffer at each fragment's location. Depending on the outcome of the
//! depth and stencil tests, the value of the stencil buffer at that location can be updated.
use crate::vk;
use std::ops::Range;
use std::u32;
@ -164,25 +163,39 @@ impl Default for Stencil {
/// Operation to perform after the depth and stencil tests.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[repr(u32)]
#[repr(i32)]
pub enum StencilOp {
Keep = vk::STENCIL_OP_KEEP,
Zero = vk::STENCIL_OP_ZERO,
Replace = vk::STENCIL_OP_REPLACE,
IncrementAndClamp = vk::STENCIL_OP_INCREMENT_AND_CLAMP,
DecrementAndClamp = vk::STENCIL_OP_DECREMENT_AND_CLAMP,
Invert = vk::STENCIL_OP_INVERT,
IncrementAndWrap = vk::STENCIL_OP_INCREMENT_AND_WRAP,
DecrementAndWrap = vk::STENCIL_OP_DECREMENT_AND_WRAP,
Keep = ash::vk::StencilOp::KEEP.as_raw(),
Zero = ash::vk::StencilOp::ZERO.as_raw(),
Replace = ash::vk::StencilOp::REPLACE.as_raw(),
IncrementAndClamp = ash::vk::StencilOp::INCREMENT_AND_CLAMP.as_raw(),
DecrementAndClamp = ash::vk::StencilOp::DECREMENT_AND_CLAMP.as_raw(),
Invert = ash::vk::StencilOp::INVERT.as_raw(),
IncrementAndWrap = ash::vk::StencilOp::INCREMENT_AND_WRAP.as_raw(),
DecrementAndWrap = ash::vk::StencilOp::DECREMENT_AND_WRAP.as_raw(),
}
impl From<StencilOp> for ash::vk::StencilOp {
#[inline]
fn from(val: StencilOp) -> Self {
Self::from_raw(val as i32)
}
}
/// Enum to specify which stencil state to use
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[repr(u32)]
pub enum StencilFaceFlags {
StencilFaceFrontBit = vk::STENCIL_FACE_FRONT_BIT,
StencilFaceBackBit = vk::STENCIL_FACE_BACK_BIT,
StencilFrontAndBack = vk::STENCIL_FRONT_AND_BACK,
StencilFaceFrontBit = ash::vk::StencilFaceFlags::FRONT.as_raw(),
StencilFaceBackBit = ash::vk::StencilFaceFlags::BACK.as_raw(),
StencilFrontAndBack = ash::vk::StencilFaceFlags::FRONT_AND_BACK.as_raw(),
}
impl From<StencilFaceFlags> for ash::vk::StencilFaceFlags {
#[inline]
fn from(val: StencilFaceFlags) -> Self {
Self::from_raw(val as u32)
}
}
/// Container for dynamic StencilFaceFlags and value
@ -222,22 +235,29 @@ impl DepthBounds {
///
/// Used for both depth testing and stencil testing.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[repr(u32)]
#[repr(i32)]
pub enum Compare {
/// The test never passes.
Never = vk::COMPARE_OP_NEVER,
Never = ash::vk::CompareOp::NEVER.as_raw(),
/// The test passes if `value < reference_value`.
Less = vk::COMPARE_OP_LESS,
Less = ash::vk::CompareOp::LESS.as_raw(),
/// The test passes if `value == reference_value`.
Equal = vk::COMPARE_OP_EQUAL,
Equal = ash::vk::CompareOp::EQUAL.as_raw(),
/// The test passes if `value <= reference_value`.
LessOrEqual = vk::COMPARE_OP_LESS_OR_EQUAL,
LessOrEqual = ash::vk::CompareOp::LESS_OR_EQUAL.as_raw(),
/// The test passes if `value > reference_value`.
Greater = vk::COMPARE_OP_GREATER,
Greater = ash::vk::CompareOp::GREATER.as_raw(),
/// The test passes if `value != reference_value`.
NotEqual = vk::COMPARE_OP_NOT_EQUAL,
NotEqual = ash::vk::CompareOp::NOT_EQUAL.as_raw(),
/// The test passes if `value >= reference_value`.
GreaterOrEqual = vk::COMPARE_OP_GREATER_OR_EQUAL,
GreaterOrEqual = ash::vk::CompareOp::GREATER_OR_EQUAL.as_raw(),
/// The test always passes.
Always = vk::COMPARE_OP_ALWAYS,
Always = ash::vk::CompareOp::ALWAYS.as_raw(),
}
impl From<Compare> for ash::vk::CompareOp {
#[inline]
fn from(val: Compare) -> Self {
Self::from_raw(val as i32)
}
}

View File

@ -13,6 +13,7 @@
use crate::check_errors;
use crate::device::Device;
use crate::image::SampleCount;
use crate::pipeline::blend::AttachmentBlend;
use crate::pipeline::blend::AttachmentsBlend;
use crate::pipeline::blend::Blend;
@ -43,7 +44,6 @@ use crate::pipeline::viewport::Scissor;
use crate::pipeline::viewport::Viewport;
use crate::pipeline::viewport::ViewportsState;
use crate::render_pass::Subpass;
use crate::vk;
use crate::VulkanObject;
use smallvec::SmallVec;
use std::mem;
@ -57,7 +57,7 @@ use std::u32;
pub struct GraphicsPipelineBuilder<'vs, 'tcs, 'tes, 'gs, 'fs, Vdef, Vss, Tcss, Tess, Gss, Fss> {
vertex_input: Vdef,
vertex_shader: Option<(GraphicsEntryPoint<'vs>, Vss)>,
input_assembly: vk::PipelineInputAssemblyStateCreateInfo,
input_assembly: ash::vk::PipelineInputAssemblyStateCreateInfo,
// Note: the `input_assembly_topology` member is temporary in order to not lose information
// about the number of patches per primitive.
input_assembly_topology: PrimitiveTopology,
@ -65,7 +65,7 @@ pub struct GraphicsPipelineBuilder<'vs, 'tcs, 'tes, 'gs, 'fs, Vdef, Vss, Tcss, T
geometry_shader: Option<(GraphicsEntryPoint<'gs>, Gss)>,
viewport: Option<ViewportsState>,
raster: Rasterization,
multisample: vk::PipelineMultisampleStateCreateInfo,
multisample: ash::vk::PipelineMultisampleStateCreateInfo,
fragment_shader: Option<(GraphicsEntryPoint<'fs>, Fss)>,
depth_stencil: DepthStencil,
blend: Blend,
@ -97,30 +97,24 @@ impl
{
/// Builds a new empty builder.
pub(super) fn new() -> Self {
unsafe {
GraphicsPipelineBuilder {
vertex_input: BufferlessDefinition,
vertex_shader: None,
input_assembly: vk::PipelineInputAssemblyStateCreateInfo {
sType: vk::STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
topology: PrimitiveTopology::TriangleList.into(),
..mem::zeroed()
},
input_assembly_topology: PrimitiveTopology::TriangleList,
tessellation: None,
geometry_shader: None,
viewport: None,
raster: Default::default(),
multisample: vk::PipelineMultisampleStateCreateInfo {
sType: vk::STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
..mem::zeroed()
},
fragment_shader: None,
depth_stencil: DepthStencil::disabled(),
blend: Blend::pass_through(),
subpass: None,
cache: None,
}
GraphicsPipelineBuilder {
vertex_input: BufferlessDefinition,
vertex_shader: None,
input_assembly: ash::vk::PipelineInputAssemblyStateCreateInfo {
topology: PrimitiveTopology::TriangleList.into(),
..Default::default()
},
input_assembly_topology: PrimitiveTopology::TriangleList,
tessellation: None,
geometry_shader: None,
viewport: None,
raster: Default::default(),
multisample: ash::vk::PipelineMultisampleStateCreateInfo::default(),
fragment_shader: None,
depth_stencil: DepthStencil::disabled(),
blend: Blend::pass_through(),
subpass: None,
cache: None,
}
}
}
@ -197,7 +191,7 @@ where
) -> Result<GraphicsPipeline<Vdef>, GraphicsPipelineCreationError> {
// TODO: return errors instead of panicking if missing param
let vk = device.pointers();
let fns = device.fns();
// Checking that the pipeline layout matches the shader stages.
// TODO: more details in the errors
@ -235,7 +229,7 @@ where
}
// Will contain the list of dynamic states. Filled throughout this function.
let mut dynamic_states: SmallVec<[vk::DynamicState; 8]> = SmallVec::new();
let mut dynamic_states: SmallVec<[ash::vk::DynamicState; 8]> = SmallVec::new();
// Creating the specialization constants of the various stages.
let vertex_shader_specialization = {
@ -246,11 +240,11 @@ where
}
let constants = &shader.1;
vk::SpecializationInfo {
mapEntryCount: spec_descriptors.len() as u32,
pMapEntries: spec_descriptors.as_ptr() as *const _,
dataSize: mem::size_of_val(constants),
pData: constants as *const Vss as *const _,
ash::vk::SpecializationInfo {
map_entry_count: spec_descriptors.len() as u32,
p_map_entries: spec_descriptors.as_ptr() as *const _,
data_size: mem::size_of_val(constants),
p_data: constants as *const Vss as *const _,
}
};
@ -263,11 +257,11 @@ where
}
let constants = &shader.1;
vk::SpecializationInfo {
mapEntryCount: spec_descriptors.len() as u32,
pMapEntries: spec_descriptors.as_ptr() as *const _,
dataSize: mem::size_of_val(constants),
pData: constants as *const Tcss as *const _,
ash::vk::SpecializationInfo {
map_entry_count: spec_descriptors.len() as u32,
p_map_entries: spec_descriptors.as_ptr() as *const _,
data_size: mem::size_of_val(constants),
p_data: constants as *const Tcss as *const _,
}
};
let tes_spec = {
@ -278,11 +272,11 @@ where
}
let constants = &shader.1;
vk::SpecializationInfo {
mapEntryCount: spec_descriptors.len() as u32,
pMapEntries: spec_descriptors.as_ptr() as *const _,
dataSize: mem::size_of_val(constants),
pData: constants as *const Tess as *const _,
ash::vk::SpecializationInfo {
map_entry_count: spec_descriptors.len() as u32,
p_map_entries: spec_descriptors.as_ptr() as *const _,
data_size: mem::size_of_val(constants),
p_data: constants as *const Tess as *const _,
}
};
Some((tcs_spec, tes_spec))
@ -297,11 +291,11 @@ where
}
let constants = &shader.1;
Some(vk::SpecializationInfo {
mapEntryCount: spec_descriptors.len() as u32,
pMapEntries: spec_descriptors.as_ptr() as *const _,
dataSize: mem::size_of_val(constants),
pData: constants as *const Gss as *const _,
Some(ash::vk::SpecializationInfo {
map_entry_count: spec_descriptors.len() as u32,
p_map_entries: spec_descriptors.as_ptr() as *const _,
data_size: mem::size_of_val(constants),
p_data: constants as *const Gss as *const _,
})
} else {
None
@ -314,11 +308,11 @@ where
}
let constants = &shader.1;
Some(vk::SpecializationInfo {
mapEntryCount: spec_descriptors.len() as u32,
pMapEntries: spec_descriptors.as_ptr() as *const _,
dataSize: mem::size_of_val(constants),
pData: constants as *const Fss as *const _,
Some(ash::vk::SpecializationInfo {
map_entry_count: spec_descriptors.len() as u32,
p_map_entries: spec_descriptors.as_ptr() as *const _,
data_size: mem::size_of_val(constants),
p_data: constants as *const Fss as *const _,
})
} else {
None
@ -333,11 +327,9 @@ where
_ => return Err(GraphicsPipelineCreationError::WrongShaderType),
};
stages.push(vk::PipelineShaderStageCreateInfo {
sType: vk::STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
pNext: ptr::null(),
flags: 0, // reserved
stage: vk::SHADER_STAGE_VERTEX_BIT,
stages.push(ash::vk::PipelineShaderStageCreateInfo {
flags: ash::vk::PipelineShaderStageCreateFlags::empty(),
stage: ash::vk::ShaderStageFlags::VERTEX,
module: self
.vertex_shader
.as_ref()
@ -345,8 +337,9 @@ where
.0
.module()
.internal_object(),
pName: self.vertex_shader.as_ref().unwrap().0.name().as_ptr(),
pSpecializationInfo: &vertex_shader_specialization as *const _,
p_name: self.vertex_shader.as_ref().unwrap().0.name().as_ptr(),
p_specialization_info: &vertex_shader_specialization as *const _,
..Default::default()
});
if let Some(ref tess) = self.tessellation {
@ -366,34 +359,32 @@ where
_ => return Err(GraphicsPipelineCreationError::WrongShaderType),
};
stages.push(vk::PipelineShaderStageCreateInfo {
sType: vk::STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
pNext: ptr::null(),
flags: 0, // reserved
stage: vk::SHADER_STAGE_TESSELLATION_CONTROL_BIT,
stages.push(ash::vk::PipelineShaderStageCreateInfo {
flags: ash::vk::PipelineShaderStageCreateFlags::empty(),
stage: ash::vk::ShaderStageFlags::TESSELLATION_CONTROL,
module: tess
.tessellation_control_shader
.0
.module()
.internal_object(),
pName: tess.tessellation_control_shader.0.name().as_ptr(),
pSpecializationInfo: &tess_shader_specialization.as_ref().unwrap().0
p_name: tess.tessellation_control_shader.0.name().as_ptr(),
p_specialization_info: &tess_shader_specialization.as_ref().unwrap().0
as *const _,
..Default::default()
});
stages.push(vk::PipelineShaderStageCreateInfo {
sType: vk::STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
pNext: ptr::null(),
flags: 0, // reserved
stage: vk::SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
stages.push(ash::vk::PipelineShaderStageCreateInfo {
flags: ash::vk::PipelineShaderStageCreateFlags::empty(),
stage: ash::vk::ShaderStageFlags::TESSELLATION_EVALUATION,
module: tess
.tessellation_evaluation_shader
.0
.module()
.internal_object(),
pName: tess.tessellation_evaluation_shader.0.name().as_ptr(),
pSpecializationInfo: &tess_shader_specialization.as_ref().unwrap().1
p_name: tess.tessellation_evaluation_shader.0.name().as_ptr(),
p_specialization_info: &tess_shader_specialization.as_ref().unwrap().1
as *const _,
..Default::default()
});
}
@ -407,15 +398,14 @@ where
_ => return Err(GraphicsPipelineCreationError::WrongShaderType),
};
stages.push(vk::PipelineShaderStageCreateInfo {
sType: vk::STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
pNext: ptr::null(),
flags: 0, // reserved
stage: vk::SHADER_STAGE_GEOMETRY_BIT,
stages.push(ash::vk::PipelineShaderStageCreateInfo {
flags: ash::vk::PipelineShaderStageCreateFlags::empty(),
stage: ash::vk::ShaderStageFlags::GEOMETRY,
module: geometry_shader.0.module().internal_object(),
pName: geometry_shader.0.name().as_ptr(),
pSpecializationInfo: geometry_shader_specialization.as_ref().unwrap()
p_name: geometry_shader.0.name().as_ptr(),
p_specialization_info: geometry_shader_specialization.as_ref().unwrap()
as *const _,
..Default::default()
});
}
@ -425,15 +415,14 @@ where
_ => return Err(GraphicsPipelineCreationError::WrongShaderType),
};
stages.push(vk::PipelineShaderStageCreateInfo {
sType: vk::STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
pNext: ptr::null(),
flags: 0, // reserved
stage: vk::SHADER_STAGE_FRAGMENT_BIT,
stages.push(ash::vk::PipelineShaderStageCreateInfo {
flags: ash::vk::PipelineShaderStageCreateFlags::empty(),
stage: ash::vk::ShaderStageFlags::FRAGMENT,
module: fragment_shader.0.module().internal_object(),
pName: fragment_shader.0.name().as_ptr(),
pSpecializationInfo: fragment_shader_specialization.as_ref().unwrap()
p_name: fragment_shader.0.name().as_ptr(),
p_specialization_info: fragment_shader_specialization.as_ref().unwrap()
as *const _,
..Default::default()
});
}
@ -467,10 +456,10 @@ where
);
}
binding_descriptions.push(vk::VertexInputBindingDescription {
binding_descriptions.push(ash::vk::VertexInputBindingDescription {
binding: num as u32,
stride: stride as u32,
inputRate: rate as u32,
input_rate: rate.into(),
});
}
@ -501,10 +490,10 @@ where
.find(|b| b.binding == binding)
.is_some());
attribute_descriptions.push(vk::VertexInputAttributeDescription {
attribute_descriptions.push(ash::vk::VertexInputAttributeDescription {
location: loc as u32,
binding: binding as u32,
format: info.format as u32,
format: info.format.into(),
offset: info.offset as u32,
});
}
@ -546,17 +535,16 @@ where
);
}
let vertex_input_state = vk::PipelineVertexInputStateCreateInfo {
sType: vk::STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
pNext: ptr::null(),
flags: 0, // reserved
vertexBindingDescriptionCount: binding_descriptions.len() as u32,
pVertexBindingDescriptions: binding_descriptions.as_ptr(),
vertexAttributeDescriptionCount: attribute_descriptions.len() as u32,
pVertexAttributeDescriptions: attribute_descriptions.as_ptr(),
let vertex_input_state = ash::vk::PipelineVertexInputStateCreateInfo {
flags: ash::vk::PipelineVertexInputStateCreateFlags::empty(),
vertex_binding_description_count: binding_descriptions.len() as u32,
p_vertex_binding_descriptions: binding_descriptions.as_ptr(),
vertex_attribute_description_count: attribute_descriptions.len() as u32,
p_vertex_attribute_descriptions: attribute_descriptions.as_ptr(),
..Default::default()
};
if self.input_assembly.primitiveRestartEnable != vk::FALSE
if self.input_assembly.primitive_restart_enable != ash::vk::FALSE
&& !self.input_assembly_topology.supports_primitive_restart()
{
return Err(
@ -594,11 +582,10 @@ where
return Err(GraphicsPipelineCreationError::MaxTessellationPatchSizeExceeded);
}
Some(vk::PipelineTessellationStateCreateInfo {
sType: vk::STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,
pNext: ptr::null(),
flags: 0, // reserved,
patchControlPoints: vertices_per_patch,
Some(ash::vk::PipelineTessellationStateCreateInfo {
flags: ash::vk::PipelineTessellationStateCreateFlags::empty(),
patch_control_points: vertices_per_patch,
..Default::default()
})
}
_ => {
@ -614,10 +601,10 @@ where
ViewportsState::Fixed { ref data } => (
data.iter()
.map(|e| e.0.clone().into())
.collect::<SmallVec<[vk::Viewport; 4]>>(),
.collect::<SmallVec<[ash::vk::Viewport; 4]>>(),
data.iter()
.map(|e| e.1.clone().into())
.collect::<SmallVec<[vk::Rect2D; 4]>>(),
.collect::<SmallVec<[ash::vk::Rect2D; 4]>>(),
data.len() as u32,
),
ViewportsState::DynamicViewports { ref scissors } => {
@ -625,8 +612,8 @@ where
let scissors = scissors
.iter()
.map(|e| e.clone().into())
.collect::<SmallVec<[vk::Rect2D; 4]>>();
dynamic_states.push(vk::DYNAMIC_STATE_VIEWPORT);
.collect::<SmallVec<[ash::vk::Rect2D; 4]>>();
dynamic_states.push(ash::vk::DynamicState::VIEWPORT);
(SmallVec::new(), scissors, num)
}
ViewportsState::DynamicScissors { ref viewports } => {
@ -634,13 +621,13 @@ where
let viewports = viewports
.iter()
.map(|e| e.clone().into())
.collect::<SmallVec<[vk::Viewport; 4]>>();
dynamic_states.push(vk::DYNAMIC_STATE_SCISSOR);
.collect::<SmallVec<[ash::vk::Viewport; 4]>>();
dynamic_states.push(ash::vk::DynamicState::SCISSOR);
(viewports, SmallVec::new(), num)
}
ViewportsState::Dynamic { num } => {
dynamic_states.push(vk::DYNAMIC_STATE_VIEWPORT);
dynamic_states.push(vk::DYNAMIC_STATE_SCISSOR);
dynamic_states.push(ash::vk::DynamicState::VIEWPORT);
dynamic_states.push(ash::vk::DynamicState::SCISSOR);
(SmallVec::new(), SmallVec::new(), num)
}
};
@ -672,22 +659,21 @@ where
}
}
let viewport_info = vk::PipelineViewportStateCreateInfo {
sType: vk::STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
pNext: ptr::null(),
flags: 0, // reserved
viewportCount: vp_num,
pViewports: if vp_vp.is_empty() {
let viewport_info = ash::vk::PipelineViewportStateCreateInfo {
flags: ash::vk::PipelineViewportStateCreateFlags::empty(),
viewport_count: vp_num,
p_viewports: if vp_vp.is_empty() {
ptr::null()
} else {
vp_vp.as_ptr()
}, // validation layer crashes if you just pass the pointer
scissorCount: vp_num,
pScissors: if vp_sc.is_empty() {
scissor_count: vp_num,
p_scissors: if vp_sc.is_empty() {
ptr::null()
} else {
vp_sc.as_ptr()
}, // validation layer crashes if you just pass the pointer
..Default::default()
};
if let Some(line_width) = self.raster.line_width {
@ -695,22 +681,22 @@ where
return Err(GraphicsPipelineCreationError::WideLinesFeatureNotEnabled);
}
} else {
dynamic_states.push(vk::DYNAMIC_STATE_LINE_WIDTH);
dynamic_states.push(ash::vk::DynamicState::LINE_WIDTH);
}
let (db_enable, db_const, db_clamp, db_slope) = match self.raster.depth_bias {
DepthBiasControl::Dynamic => {
dynamic_states.push(vk::DYNAMIC_STATE_DEPTH_BIAS);
(vk::TRUE, 0.0, 0.0, 0.0)
dynamic_states.push(ash::vk::DynamicState::DEPTH_BIAS);
(ash::vk::TRUE, 0.0, 0.0, 0.0)
}
DepthBiasControl::Disabled => (vk::FALSE, 0.0, 0.0, 0.0),
DepthBiasControl::Disabled => (ash::vk::FALSE, 0.0, 0.0, 0.0),
DepthBiasControl::Static(bias) => {
if bias.clamp != 0.0 && !device.enabled_features().depth_bias_clamp {
return Err(GraphicsPipelineCreationError::DepthBiasClampFeatureNotEnabled);
}
(
vk::TRUE,
ash::vk::TRUE,
bias.constant_factor,
bias.clamp,
bias.slope_factor,
@ -728,42 +714,46 @@ where
return Err(GraphicsPipelineCreationError::FillModeNonSolidFeatureNotEnabled);
}
let rasterization = vk::PipelineRasterizationStateCreateInfo {
sType: vk::STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
pNext: ptr::null(),
flags: 0, // reserved
depthClampEnable: if self.raster.depth_clamp {
vk::TRUE
let rasterization = ash::vk::PipelineRasterizationStateCreateInfo {
flags: ash::vk::PipelineRasterizationStateCreateFlags::empty(),
depth_clamp_enable: if self.raster.depth_clamp {
ash::vk::TRUE
} else {
vk::FALSE
ash::vk::FALSE
},
rasterizerDiscardEnable: if self.raster.rasterizer_discard {
vk::TRUE
rasterizer_discard_enable: if self.raster.rasterizer_discard {
ash::vk::TRUE
} else {
vk::FALSE
ash::vk::FALSE
},
polygonMode: self.raster.polygon_mode as u32,
cullMode: self.raster.cull_mode as u32,
frontFace: self.raster.front_face as u32,
depthBiasEnable: db_enable,
depthBiasConstantFactor: db_const,
depthBiasClamp: db_clamp,
depthBiasSlopeFactor: db_slope,
lineWidth: self.raster.line_width.unwrap_or(1.0),
polygon_mode: self.raster.polygon_mode.into(),
cull_mode: self.raster.cull_mode.into(),
front_face: self.raster.front_face.into(),
depth_bias_enable: db_enable,
depth_bias_constant_factor: db_const,
depth_bias_clamp: db_clamp,
depth_bias_slope_factor: db_slope,
line_width: self.raster.line_width.unwrap_or(1.0),
..Default::default()
};
self.multisample.rasterizationSamples =
self.subpass.as_ref().unwrap().num_samples().unwrap_or(1);
if self.multisample.sampleShadingEnable != vk::FALSE {
self.multisample.rasterization_samples = self
.subpass
.as_ref()
.unwrap()
.num_samples()
.unwrap_or(SampleCount::Sample1)
.into();
if self.multisample.sample_shading_enable != ash::vk::FALSE {
debug_assert!(
self.multisample.minSampleShading >= 0.0
&& self.multisample.minSampleShading <= 1.0
self.multisample.min_sample_shading >= 0.0
&& self.multisample.min_sample_shading <= 1.0
);
if !device.enabled_features().sample_rate_shading {
return Err(GraphicsPipelineCreationError::SampleRateShadingFeatureNotEnabled);
}
}
if self.multisample.alphaToOneEnable != vk::FALSE {
if self.multisample.alpha_to_one_enable != ash::vk::FALSE {
if !device.enabled_features().alpha_to_one {
return Err(GraphicsPipelineCreationError::AlphaToOneFeatureNotEnabled);
}
@ -771,22 +761,22 @@ where
let depth_stencil = {
let db = match self.depth_stencil.depth_bounds_test {
DepthBounds::Disabled => (vk::FALSE, 0.0, 0.0),
DepthBounds::Disabled => (ash::vk::FALSE, 0.0, 0.0),
DepthBounds::Fixed(ref range) => {
if !device.enabled_features().depth_bounds {
return Err(GraphicsPipelineCreationError::DepthBoundsFeatureNotEnabled);
}
(vk::TRUE, range.start, range.end)
(ash::vk::TRUE, range.start, range.end)
}
DepthBounds::Dynamic => {
if !device.enabled_features().depth_bounds {
return Err(GraphicsPipelineCreationError::DepthBoundsFeatureNotEnabled);
}
dynamic_states.push(vk::DYNAMIC_STATE_DEPTH_BOUNDS);
dynamic_states.push(ash::vk::DynamicState::DEPTH_BOUNDS);
(vk::TRUE, 0.0, 1.0)
(ash::vk::TRUE, 0.0, 1.0)
}
};
@ -796,7 +786,7 @@ where
) {
(Some(_), Some(_)) => (),
(None, None) => {
dynamic_states.push(vk::DYNAMIC_STATE_STENCIL_COMPARE_MASK);
dynamic_states.push(ash::vk::DynamicState::STENCIL_COMPARE_MASK);
}
_ => return Err(GraphicsPipelineCreationError::WrongStencilState),
};
@ -807,7 +797,7 @@ where
) {
(Some(_), Some(_)) => (),
(None, None) => {
dynamic_states.push(vk::DYNAMIC_STATE_STENCIL_WRITE_MASK);
dynamic_states.push(ash::vk::DynamicState::STENCIL_WRITE_MASK);
}
_ => return Err(GraphicsPipelineCreationError::WrongStencilState),
};
@ -818,7 +808,7 @@ where
) {
(Some(_), Some(_)) => (),
(None, None) => {
dynamic_states.push(vk::DYNAMIC_STATE_STENCIL_REFERENCE);
dynamic_states.push(ash::vk::DynamicState::STENCIL_REFERENCE);
}
_ => return Err(GraphicsPipelineCreationError::WrongStencilState),
};
@ -844,71 +834,70 @@ where
// FIXME: stencil writability
vk::PipelineDepthStencilStateCreateInfo {
sType: vk::STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
pNext: ptr::null(),
flags: 0, // reserved
depthTestEnable: if !self.depth_stencil.depth_write
ash::vk::PipelineDepthStencilStateCreateInfo {
flags: ash::vk::PipelineDepthStencilStateCreateFlags::empty(),
depth_test_enable: if !self.depth_stencil.depth_write
&& self.depth_stencil.depth_compare == Compare::Always
{
vk::FALSE
ash::vk::FALSE
} else {
vk::TRUE
ash::vk::TRUE
},
depthWriteEnable: if self.depth_stencil.depth_write {
vk::TRUE
depth_write_enable: if self.depth_stencil.depth_write {
ash::vk::TRUE
} else {
vk::FALSE
ash::vk::FALSE
},
depthCompareOp: self.depth_stencil.depth_compare as u32,
depthBoundsTestEnable: db.0,
stencilTestEnable: if self.depth_stencil.stencil_front.always_keep()
depth_compare_op: self.depth_stencil.depth_compare.into(),
depth_bounds_test_enable: db.0,
stencil_test_enable: if self.depth_stencil.stencil_front.always_keep()
&& self.depth_stencil.stencil_back.always_keep()
{
vk::FALSE
ash::vk::FALSE
} else {
vk::TRUE
ash::vk::TRUE
},
front: vk::StencilOpState {
failOp: self.depth_stencil.stencil_front.fail_op as u32,
passOp: self.depth_stencil.stencil_front.pass_op as u32,
depthFailOp: self.depth_stencil.stencil_front.depth_fail_op as u32,
compareOp: self.depth_stencil.stencil_front.compare as u32,
compareMask: self
front: ash::vk::StencilOpState {
fail_op: self.depth_stencil.stencil_front.fail_op.into(),
pass_op: self.depth_stencil.stencil_front.pass_op.into(),
depth_fail_op: self.depth_stencil.stencil_front.depth_fail_op.into(),
compare_op: self.depth_stencil.stencil_front.compare.into(),
compare_mask: self
.depth_stencil
.stencil_front
.compare_mask
.unwrap_or(u32::MAX),
writeMask: self
write_mask: self
.depth_stencil
.stencil_front
.write_mask
.unwrap_or(u32::MAX),
reference: self.depth_stencil.stencil_front.reference.unwrap_or(0),
},
back: vk::StencilOpState {
failOp: self.depth_stencil.stencil_back.fail_op as u32,
passOp: self.depth_stencil.stencil_back.pass_op as u32,
depthFailOp: self.depth_stencil.stencil_back.depth_fail_op as u32,
compareOp: self.depth_stencil.stencil_back.compare as u32,
compareMask: self
back: ash::vk::StencilOpState {
fail_op: self.depth_stencil.stencil_back.fail_op.into(),
pass_op: self.depth_stencil.stencil_back.pass_op.into(),
depth_fail_op: self.depth_stencil.stencil_back.depth_fail_op.into(),
compare_op: self.depth_stencil.stencil_back.compare.into(),
compare_mask: self
.depth_stencil
.stencil_back
.compare_mask
.unwrap_or(u32::MAX),
writeMask: self
write_mask: self
.depth_stencil
.stencil_back
.write_mask
.unwrap_or(u32::MAX),
reference: self.depth_stencil.stencil_back.reference.unwrap_or(0),
},
minDepthBounds: db.1,
maxDepthBounds: db.2,
min_depth_bounds: db.1,
max_depth_bounds: db.2,
..Default::default()
}
};
let blend_atch: SmallVec<[vk::PipelineColorBlendAttachmentState; 8]> = {
let blend_atch: SmallVec<[ash::vk::PipelineColorBlendAttachmentState; 8]> = {
let num_atch = self.subpass.as_ref().unwrap().num_color_attachments();
match self.blend.attachments {
@ -933,65 +922,61 @@ where
}
};
let blend = vk::PipelineColorBlendStateCreateInfo {
sType: vk::STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
pNext: ptr::null(),
flags: 0, // reserved
logicOpEnable: if self.blend.logic_op.is_some() {
let blend = ash::vk::PipelineColorBlendStateCreateInfo {
flags: ash::vk::PipelineColorBlendStateCreateFlags::empty(),
logic_op_enable: if self.blend.logic_op.is_some() {
if !device.enabled_features().logic_op {
return Err(GraphicsPipelineCreationError::LogicOpFeatureNotEnabled);
}
vk::TRUE
ash::vk::TRUE
} else {
vk::FALSE
ash::vk::FALSE
},
logicOp: self.blend.logic_op.unwrap_or(Default::default()) as u32,
attachmentCount: blend_atch.len() as u32,
pAttachments: blend_atch.as_ptr(),
blendConstants: if let Some(c) = self.blend.blend_constants {
logic_op: self.blend.logic_op.unwrap_or(Default::default()).into(),
attachment_count: blend_atch.len() as u32,
p_attachments: blend_atch.as_ptr(),
blend_constants: if let Some(c) = self.blend.blend_constants {
c
} else {
dynamic_states.push(vk::DYNAMIC_STATE_BLEND_CONSTANTS);
dynamic_states.push(ash::vk::DynamicState::BLEND_CONSTANTS);
[0.0, 0.0, 0.0, 0.0]
},
..Default::default()
};
let dynamic_states = if !dynamic_states.is_empty() {
Some(vk::PipelineDynamicStateCreateInfo {
sType: vk::STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
pNext: ptr::null(),
flags: 0, // reserved
dynamicStateCount: dynamic_states.len() as u32,
pDynamicStates: dynamic_states.as_ptr(),
Some(ash::vk::PipelineDynamicStateCreateInfo {
flags: ash::vk::PipelineDynamicStateCreateFlags::empty(),
dynamic_state_count: dynamic_states.len() as u32,
p_dynamic_states: dynamic_states.as_ptr(),
..Default::default()
})
} else {
None
};
let pipeline = unsafe {
let infos = vk::GraphicsPipelineCreateInfo {
sType: vk::STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
pNext: ptr::null(),
flags: 0, // TODO: some flags are available but none are critical
stageCount: stages.len() as u32,
pStages: stages.as_ptr(),
pVertexInputState: &vertex_input_state,
pInputAssemblyState: &self.input_assembly,
pTessellationState: tessellation
let infos = ash::vk::GraphicsPipelineCreateInfo {
flags: ash::vk::PipelineCreateFlags::empty(), // TODO: some flags are available but none are critical
stage_count: stages.len() as u32,
p_stages: stages.as_ptr(),
p_vertex_input_state: &vertex_input_state,
p_input_assembly_state: &self.input_assembly,
p_tessellation_state: tessellation
.as_ref()
.map(|t| t as *const _)
.unwrap_or(ptr::null()),
pViewportState: &viewport_info,
pRasterizationState: &rasterization,
pMultisampleState: &self.multisample,
pDepthStencilState: &depth_stencil,
pColorBlendState: &blend,
pDynamicState: dynamic_states
p_viewport_state: &viewport_info,
p_rasterization_state: &rasterization,
p_multisample_state: &self.multisample,
p_depth_stencil_state: &depth_stencil,
p_color_blend_state: &blend,
p_dynamic_state: dynamic_states
.as_ref()
.map(|s| s as *const _)
.unwrap_or(ptr::null()),
layout: pipeline_layout.internal_object(),
renderPass: self
render_pass: self
.subpass
.as_ref()
.unwrap()
@ -999,17 +984,18 @@ where
.inner()
.internal_object(),
subpass: self.subpass.as_ref().unwrap().index(),
basePipelineHandle: 0, // TODO:
basePipelineIndex: -1, // TODO:
base_pipeline_handle: ash::vk::Pipeline::null(), // TODO:
base_pipeline_index: -1, // TODO:
..Default::default()
};
let cache_handle = match self.cache {
Some(cache) => cache.internal_object(),
None => vk::NULL_HANDLE,
None => ash::vk::PipelineCache::null(),
};
let mut output = MaybeUninit::uninit();
check_errors(vk.CreateGraphicsPipelines(
check_errors(fns.v1_0.create_graphics_pipelines(
device.internal_object(),
cache_handle,
1,
@ -1023,7 +1009,7 @@ where
// Some drivers return `VK_SUCCESS` but provide a null handle if they
// fail to create the pipeline (due to invalid shaders, etc)
// This check ensures that we don't create an invalid `GraphicsPipeline` instance
if pipeline == vk::NULL_HANDLE {
if pipeline == ash::vk::Pipeline::null() {
panic!("vkCreateGraphicsPipelines provided a NULL handle");
}
@ -1139,7 +1125,11 @@ impl<'vs, 'tcs, 'tes, 'gs, 'fs, Vdef, Vss, Tcss, Tess, Gss, Fss>
/// Sets whether primitive restart if enabled.
#[inline]
pub fn primitive_restart(mut self, enabled: bool) -> Self {
self.input_assembly.primitiveRestartEnable = if enabled { vk::TRUE } else { vk::FALSE };
self.input_assembly.primitive_restart_enable = if enabled {
ash::vk::TRUE
} else {
ash::vk::FALSE
};
self
}
@ -1517,7 +1507,7 @@ impl<'vs, 'tcs, 'tes, 'gs, 'fs, Vdef, Vss, Tcss, Tess, Gss, Fss>
/// Sample shading is disabled by default.
#[inline]
pub fn sample_shading_disabled(mut self) -> Self {
self.multisample.sampleShadingEnable = vk::FALSE;
self.multisample.sample_shading_enable = ash::vk::FALSE;
self
}
@ -1540,20 +1530,20 @@ impl<'vs, 'tcs, 'tes, 'gs, 'fs, Vdef, Vss, Tcss, Tess, Gss, Fss>
#[inline]
pub fn sample_shading_enabled(mut self, min_fract: f32) -> Self {
assert!(min_fract >= 0.0 && min_fract <= 1.0);
self.multisample.sampleShadingEnable = vk::TRUE;
self.multisample.minSampleShading = min_fract;
self.multisample.sample_shading_enable = ash::vk::TRUE;
self.multisample.min_sample_shading = min_fract;
self
}
// TODO: doc
pub fn alpha_to_coverage_disabled(mut self) -> Self {
self.multisample.alphaToCoverageEnable = vk::FALSE;
self.multisample.alpha_to_coverage_enable = ash::vk::FALSE;
self
}
// TODO: doc
pub fn alpha_to_coverage_enabled(mut self) -> Self {
self.multisample.alphaToCoverageEnable = vk::TRUE;
self.multisample.alpha_to_coverage_enable = ash::vk::TRUE;
self
}
@ -1562,7 +1552,7 @@ impl<'vs, 'tcs, 'tes, 'gs, 'fs, Vdef, Vss, Tcss, Tess, Gss, Fss>
/// Alpha-to-one is disabled by default.
#[inline]
pub fn alpha_to_one_disabled(mut self) -> Self {
self.multisample.alphaToOneEnable = vk::FALSE;
self.multisample.alpha_to_one_enable = ash::vk::FALSE;
self
}
@ -1574,7 +1564,7 @@ impl<'vs, 'tcs, 'tes, 'gs, 'fs, Vdef, Vss, Tcss, Tess, Gss, Fss>
/// Alpha-to-one is disabled by default.
#[inline]
pub fn alpha_to_one_enabled(mut self) -> Self {
self.multisample.alphaToOneEnable = vk::TRUE;
self.multisample.alpha_to_one_enable = ash::vk::TRUE;
self
}
@ -1758,17 +1748,7 @@ where
geometry_shader: self.geometry_shader.clone(),
viewport: self.viewport.clone(),
raster: self.raster.clone(),
multisample: vk::PipelineMultisampleStateCreateInfo {
sType: self.multisample.sType,
pNext: self.multisample.pNext,
flags: self.multisample.flags,
rasterizationSamples: self.multisample.rasterizationSamples,
sampleShadingEnable: self.multisample.sampleShadingEnable,
minSampleShading: self.multisample.minSampleShading,
pSampleMask: self.multisample.pSampleMask,
alphaToCoverageEnable: self.multisample.alphaToCoverageEnable,
alphaToOneEnable: self.multisample.alphaToOneEnable,
},
multisample: self.multisample,
fragment_shader: self.fragment_shader.clone(),
depth_stencil: self.depth_stencil.clone(),
blend: self.blend.clone(),

View File

@ -20,7 +20,6 @@ use crate::pipeline::vertex::VertexDefinition;
use crate::pipeline::vertex::VertexSource;
use crate::render_pass::RenderPass;
use crate::render_pass::Subpass;
use crate::vk;
use crate::SafeDeref;
use crate::VulkanObject;
use std::fmt;
@ -63,7 +62,7 @@ pub struct GraphicsPipeline<VertexDefinition> {
#[derive(PartialEq, Eq, Hash)]
struct Inner {
pipeline: vk::Pipeline,
pipeline: ash::vk::Pipeline,
device: Arc<Device>,
}
@ -176,12 +175,10 @@ impl<Mv> fmt::Debug for GraphicsPipeline<Mv> {
}
unsafe impl<Mv> VulkanObject for GraphicsPipeline<Mv> {
type Object = vk::Pipeline;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_PIPELINE;
type Object = ash::vk::Pipeline;
#[inline]
fn internal_object(&self) -> vk::Pipeline {
fn internal_object(&self) -> ash::vk::Pipeline {
self.inner.pipeline
}
}
@ -190,8 +187,9 @@ impl Drop for Inner {
#[inline]
fn drop(&mut self) {
unsafe {
let vk = self.device.pointers();
vk.DestroyPipeline(self.device.internal_object(), self.pipeline, ptr::null());
let fns = self.device.fns();
fns.v1_0
.destroy_pipeline(self.device.internal_object(), self.pipeline, ptr::null());
}
}
}
@ -402,15 +400,13 @@ impl Hash for dyn GraphicsPipelineAbstract + Send + Sync {
/// Opaque object that represents the inside of the graphics pipeline.
#[derive(Debug, Copy, Clone)]
pub struct GraphicsPipelineSys<'a>(vk::Pipeline, PhantomData<&'a ()>);
pub struct GraphicsPipelineSys<'a>(ash::vk::Pipeline, PhantomData<&'a ()>);
unsafe impl<'a> VulkanObject for GraphicsPipelineSys<'a> {
type Object = vk::Pipeline;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_PIPELINE;
type Object = ash::vk::Pipeline;
#[inline]
fn internal_object(&self) -> vk::Pipeline {
fn internal_object(&self) -> ash::vk::Pipeline {
self.0
}
}

View File

@ -12,7 +12,6 @@
//! The input assembly is the stage where lists of vertices are turned into primitives.
//!
use crate::vk;
/// How the input assembly stage should behave.
#[derive(Copy, Clone, Debug)]
@ -59,29 +58,29 @@ pub enum PrimitiveTopology {
PatchList { vertices_per_patch: u32 },
}
impl From<PrimitiveTopology> for vk::PrimitiveTopology {
impl From<PrimitiveTopology> for ash::vk::PrimitiveTopology {
#[inline]
fn from(val: PrimitiveTopology) -> vk::PrimitiveTopology {
fn from(val: PrimitiveTopology) -> ash::vk::PrimitiveTopology {
match val {
PrimitiveTopology::PointList => vk::PRIMITIVE_TOPOLOGY_POINT_LIST,
PrimitiveTopology::LineList => vk::PRIMITIVE_TOPOLOGY_LINE_LIST,
PrimitiveTopology::LineStrip => vk::PRIMITIVE_TOPOLOGY_LINE_STRIP,
PrimitiveTopology::TriangleList => vk::PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
PrimitiveTopology::TriangleStrip => vk::PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
PrimitiveTopology::TriangleFan => vk::PRIMITIVE_TOPOLOGY_TRIANGLE_FAN,
PrimitiveTopology::PointList => ash::vk::PrimitiveTopology::POINT_LIST,
PrimitiveTopology::LineList => ash::vk::PrimitiveTopology::LINE_LIST,
PrimitiveTopology::LineStrip => ash::vk::PrimitiveTopology::LINE_STRIP,
PrimitiveTopology::TriangleList => ash::vk::PrimitiveTopology::TRIANGLE_LIST,
PrimitiveTopology::TriangleStrip => ash::vk::PrimitiveTopology::TRIANGLE_STRIP,
PrimitiveTopology::TriangleFan => ash::vk::PrimitiveTopology::TRIANGLE_FAN,
PrimitiveTopology::LineListWithAdjacency => {
vk::PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY
ash::vk::PrimitiveTopology::LINE_LIST_WITH_ADJACENCY
}
PrimitiveTopology::LineStripWithAdjacency => {
vk::PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY
ash::vk::PrimitiveTopology::LINE_STRIP_WITH_ADJACENCY
}
PrimitiveTopology::TriangleListWithAdjacency => {
vk::PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY
ash::vk::PrimitiveTopology::TRIANGLE_LIST_WITH_ADJACENCY
}
PrimitiveTopology::TriangleStripWithAdjacency => {
vk::PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY
ash::vk::PrimitiveTopology::TRIANGLE_STRIP_WITH_ADJACENCY
}
PrimitiveTopology::PatchList { .. } => vk::PRIMITIVE_TOPOLOGY_PATCH_LIST,
PrimitiveTopology::PatchList { .. } => ash::vk::PrimitiveTopology::PATCH_LIST,
}
}
}
@ -124,8 +123,15 @@ unsafe impl Index for u32 {
/// An enumeration of all valid index types.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[allow(missing_docs)]
#[repr(u32)]
#[repr(i32)]
pub enum IndexType {
U16 = vk::INDEX_TYPE_UINT16,
U32 = vk::INDEX_TYPE_UINT32,
U16 = ash::vk::IndexType::UINT16.as_raw(),
U32 = ash::vk::IndexType::UINT32.as_raw(),
}
impl From<IndexType> for ash::vk::IndexType {
#[inline]
fn from(val: IndexType) -> Self {
Self::from_raw(val as i32)
}
}

View File

@ -15,7 +15,6 @@ use crate::device::DeviceOwned;
use crate::pipeline::layout::PipelineLayoutDesc;
use crate::pipeline::layout::PipelineLayoutDescPcRange;
use crate::pipeline::layout::PipelineLayoutLimitsError;
use crate::vk;
use crate::Error;
use crate::OomError;
use crate::VulkanObject;
@ -30,7 +29,7 @@ use std::sync::Arc;
/// descriptor sets and push constants available to your shaders.
pub struct PipelineLayout {
device: Arc<Device>,
layout: vk::PipelineLayout,
layout: ash::vk::PipelineLayout,
descriptor_set_layouts: SmallVec<[Arc<UnsafeDescriptorSetLayout>; 16]>,
desc: PipelineLayoutDesc,
}
@ -42,7 +41,7 @@ impl PipelineLayout {
device: Arc<Device>,
desc: PipelineLayoutDesc,
) -> Result<PipelineLayout, PipelineLayoutCreationError> {
let vk = device.pointers();
let fns = device.fns();
desc.check_against_limits(&device)?;
@ -80,8 +79,8 @@ impl PipelineLayout {
return Err(PipelineLayoutCreationError::InvalidPushConstant);
}
out.push(vk::PushConstantRange {
stageFlags: stages.into(),
out.push(ash::vk::PushConstantRange {
stage_flags: stages.into(),
offset: offset as u32,
size: size as u32,
});
@ -94,14 +93,14 @@ impl PipelineLayout {
// We check that with a debug_assert because it's supposed to be enforced by the
// `PipelineLayoutDesc`.
debug_assert!({
let mut stages = 0;
let mut stages = ash::vk::ShaderStageFlags::empty();
let mut outcome = true;
for pc in push_constants.iter() {
if (stages & pc.stageFlags) != 0 {
if !(stages & pc.stage_flags).is_empty() {
outcome = false;
break;
}
stages &= pc.stageFlags;
stages &= pc.stage_flags;
}
outcome
});
@ -111,18 +110,17 @@ impl PipelineLayout {
// Build the final object.
let layout = unsafe {
let infos = vk::PipelineLayoutCreateInfo {
sType: vk::STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
pNext: ptr::null(),
flags: 0, // reserved
setLayoutCount: layouts_ids.len() as u32,
pSetLayouts: layouts_ids.as_ptr(),
pushConstantRangeCount: push_constants.len() as u32,
pPushConstantRanges: push_constants.as_ptr(),
let infos = ash::vk::PipelineLayoutCreateInfo {
flags: ash::vk::PipelineLayoutCreateFlags::empty(),
set_layout_count: layouts_ids.len() as u32,
p_set_layouts: layouts_ids.as_ptr(),
push_constant_range_count: push_constants.len() as u32,
p_push_constant_ranges: push_constants.as_ptr(),
..Default::default()
};
let mut output = MaybeUninit::uninit();
check_errors(vk.CreatePipelineLayout(
check_errors(fns.v1_0.create_pipeline_layout(
device.internal_object(),
&infos,
ptr::null(),
@ -164,8 +162,7 @@ unsafe impl DeviceOwned for PipelineLayout {
}
unsafe impl VulkanObject for PipelineLayout {
type Object = vk::PipelineLayout;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_PIPELINE_LAYOUT;
type Object = ash::vk::PipelineLayout;
fn internal_object(&self) -> Self::Object {
self.layout
@ -186,8 +183,12 @@ impl Drop for PipelineLayout {
#[inline]
fn drop(&mut self) {
unsafe {
let vk = self.device.pointers();
vk.DestroyPipelineLayout(self.device.internal_object(), self.layout, ptr::null());
let fns = self.device.fns();
fns.v1_0.destroy_pipeline_layout(
self.device.internal_object(),
self.layout,
ptr::null(),
);
}
}
}

View File

@ -13,7 +13,6 @@
//! of pixels or samples.
//!
use crate::vk;
/// State of the rasterizer.
#[derive(Clone, Debug)]
@ -95,13 +94,20 @@ pub struct DepthBias {
#[repr(u32)]
pub enum CullMode {
/// No culling.
None = vk::CULL_MODE_NONE,
None = ash::vk::CullModeFlags::NONE.as_raw(),
/// The faces facing the front of the screen (ie. facing the user) will be removed.
Front = vk::CULL_MODE_FRONT_BIT,
Front = ash::vk::CullModeFlags::FRONT.as_raw(),
/// The faces facing the back of the screen will be removed.
Back = vk::CULL_MODE_BACK_BIT,
Back = ash::vk::CullModeFlags::BACK.as_raw(),
/// All faces will be removed.
FrontAndBack = vk::CULL_MODE_FRONT_AND_BACK,
FrontAndBack = ash::vk::CullModeFlags::FRONT_AND_BACK.as_raw(),
}
impl From<CullMode> for ash::vk::CullModeFlags {
#[inline]
fn from(val: CullMode) -> Self {
Self::from_raw(val as u32)
}
}
impl Default for CullMode {
@ -113,15 +119,22 @@ impl Default for CullMode {
/// Specifies which triangle orientation corresponds to the front or the triangle.
#[derive(Copy, Clone, Debug)]
#[repr(u32)]
#[repr(i32)]
pub enum FrontFace {
/// Triangles whose vertices are oriented counter-clockwise on the screen will be considered
/// as facing their front. Otherwise they will be considered as facing their back.
CounterClockwise = vk::FRONT_FACE_COUNTER_CLOCKWISE,
CounterClockwise = ash::vk::FrontFace::COUNTER_CLOCKWISE.as_raw(),
/// Triangles whose vertices are oriented clockwise on the screen will be considered
/// as facing their front. Otherwise they will be considered as facing their back.
Clockwise = vk::FRONT_FACE_CLOCKWISE,
Clockwise = ash::vk::FrontFace::CLOCKWISE.as_raw(),
}
impl From<FrontFace> for ash::vk::FrontFace {
#[inline]
fn from(val: FrontFace) -> Self {
Self::from_raw(val as i32)
}
}
impl Default for FrontFace {
@ -132,11 +145,18 @@ impl Default for FrontFace {
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[repr(u32)]
#[repr(i32)]
pub enum PolygonMode {
Fill = vk::POLYGON_MODE_FILL,
Line = vk::POLYGON_MODE_LINE,
Point = vk::POLYGON_MODE_POINT,
Fill = ash::vk::PolygonMode::FILL.as_raw(),
Line = ash::vk::PolygonMode::LINE.as_raw(),
Point = ash::vk::PolygonMode::POINT.as_raw(),
}
impl From<PolygonMode> for ash::vk::PolygonMode {
#[inline]
fn from(val: PolygonMode) -> Self {
Self::from_raw(val as i32)
}
}
impl Default for PolygonMode {

View File

@ -22,7 +22,6 @@ use crate::device::Device;
use crate::format::Format;
use crate::pipeline::input_assembly::PrimitiveTopology;
use crate::pipeline::layout::PipelineLayoutDesc;
use crate::vk;
use crate::OomError;
use crate::VulkanObject;
use std::borrow::Cow;
@ -42,7 +41,7 @@ use std::sync::Arc;
#[derive(Debug)]
pub struct ShaderModule {
// The module.
module: vk::ShaderModule,
module: ash::vk::ShaderModule,
// Pointer to the device.
device: Arc<Device>,
}
@ -90,17 +89,16 @@ impl ShaderModule {
spirv_len: usize,
) -> Result<Arc<ShaderModule>, OomError> {
let module = {
let infos = vk::ShaderModuleCreateInfo {
sType: vk::STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
pNext: ptr::null(),
flags: 0, // reserved
codeSize: spirv_len,
pCode: spirv,
let infos = ash::vk::ShaderModuleCreateInfo {
flags: ash::vk::ShaderModuleCreateFlags::empty(),
code_size: spirv_len,
p_code: spirv,
..Default::default()
};
let vk = device.pointers();
let fns = device.fns();
let mut output = MaybeUninit::uninit();
check_errors(vk.CreateShaderModule(
check_errors(fns.v1_0.create_shader_module(
device.internal_object(),
&infos,
ptr::null(),
@ -175,12 +173,10 @@ impl ShaderModule {
}
unsafe impl VulkanObject for ShaderModule {
type Object = vk::ShaderModule;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_SHADER_MODULE;
type Object = ash::vk::ShaderModule;
#[inline]
fn internal_object(&self) -> vk::ShaderModule {
fn internal_object(&self) -> ash::vk::ShaderModule {
self.module
}
}
@ -189,8 +185,9 @@ impl Drop for ShaderModule {
#[inline]
fn drop(&mut self) {
unsafe {
let vk = self.device.pointers();
vk.DestroyShaderModule(self.device.internal_object(), self.module, ptr::null());
let fns = self.device.fns();
fns.v1_0
.destroy_shader_module(self.device.internal_object(), self.module, ptr::null());
}
}
}

View File

@ -11,7 +11,6 @@ use crate::buffer::BufferAccess;
use crate::format::Format;
use crate::pipeline::shader::ShaderInterface;
use crate::pipeline::vertex::VertexMemberTy;
use crate::vk;
use crate::SafeDeref;
use std::error;
use std::fmt;
@ -53,12 +52,19 @@ where
/// How the vertex source should be unrolled.
#[derive(Copy, Clone, Debug)]
#[repr(u32)]
#[repr(i32)]
pub enum InputRate {
/// Each element of the source corresponds to a vertex.
Vertex = vk::VERTEX_INPUT_RATE_VERTEX,
Vertex = ash::vk::VertexInputRate::VERTEX.as_raw(),
/// Each element of the source corresponds to an instance.
Instance = vk::VERTEX_INPUT_RATE_INSTANCE,
Instance = ash::vk::VertexInputRate::INSTANCE.as_raw(),
}
impl From<InputRate> for ash::vk::VertexInputRate {
#[inline]
fn from(val: InputRate) -> Self {
Self::from_raw(val as i32)
}
}
/// Information about a single attribute within a vertex.

View File

@ -48,7 +48,6 @@
//! In all cases the number of viewports and scissor boxes must be the same.
//!
use crate::vk;
use std::ops::Range;
/// List of viewports and scissors that are used when creating a graphics pipeline object.
@ -141,16 +140,16 @@ pub struct Viewport {
pub depth_range: Range<f32>,
}
impl From<Viewport> for vk::Viewport {
impl From<Viewport> for ash::vk::Viewport {
#[inline]
fn from(val: Viewport) -> Self {
vk::Viewport {
ash::vk::Viewport {
x: val.origin[0],
y: val.origin[1],
width: val.dimensions[0],
height: val.dimensions[1],
minDepth: val.depth_range.start,
maxDepth: val.depth_range.end,
min_depth: val.depth_range.start,
max_depth: val.depth_range.end,
}
}
}
@ -186,15 +185,15 @@ impl Default for Scissor {
}
}
impl From<Scissor> for vk::Rect2D {
impl From<Scissor> for ash::vk::Rect2D {
#[inline]
fn from(val: Scissor) -> Self {
vk::Rect2D {
offset: vk::Offset2D {
ash::vk::Rect2D {
offset: ash::vk::Offset2D {
x: val.origin[0],
y: val.origin[1],
},
extent: vk::Extent2D {
extent: ash::vk::Extent2D {
width: val.dimensions[0],
height: val.dimensions[1],
},

View File

@ -16,7 +16,6 @@
use crate::check_errors;
use crate::device::Device;
use crate::device::DeviceOwned;
use crate::vk;
use crate::Error;
use crate::OomError;
use crate::Success;
@ -32,7 +31,7 @@ use std::sync::Arc;
/// A collection of one or more queries of a particular type.
#[derive(Debug)]
pub struct QueryPool {
pool: vk::QueryPool,
pool: ash::vk::QueryPool,
device: Arc<Device>,
num_slots: u32,
ty: QueryType,
@ -53,22 +52,23 @@ impl QueryPool {
flags.into()
}
QueryType::Occlusion | QueryType::Timestamp => 0,
QueryType::Occlusion | QueryType::Timestamp => {
ash::vk::QueryPipelineStatisticFlags::empty()
}
};
let pool = unsafe {
let infos = vk::QueryPoolCreateInfo {
sType: vk::STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,
pNext: ptr::null(),
flags: 0, // reserved
queryType: ty.into(),
queryCount: num_slots,
pipelineStatistics: statistics,
let infos = ash::vk::QueryPoolCreateInfo {
flags: ash::vk::QueryPoolCreateFlags::empty(),
query_type: ty.into(),
query_count: num_slots,
pipeline_statistics: statistics,
..Default::default()
};
let mut output = MaybeUninit::uninit();
let vk = device.pointers();
check_errors(vk.CreateQueryPool(
let fns = device.fns();
check_errors(fns.v1_0.create_query_pool(
device.internal_object(),
&infos,
ptr::null(),
@ -125,12 +125,10 @@ impl QueryPool {
}
unsafe impl VulkanObject for QueryPool {
type Object = vk::QueryPool;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_QUERY_POOL;
type Object = ash::vk::QueryPool;
#[inline]
fn internal_object(&self) -> vk::QueryPool {
fn internal_object(&self) -> ash::vk::QueryPool {
self.pool
}
}
@ -146,8 +144,9 @@ impl Drop for QueryPool {
#[inline]
fn drop(&mut self) {
unsafe {
let vk = self.device.pointers();
vk.DestroyQueryPool(self.device.internal_object(), self.pool, ptr::null());
let fns = self.device.fns();
fns.v1_0
.destroy_query_pool(self.device.internal_object(), self.pool, ptr::null());
}
}
}
@ -277,16 +276,16 @@ impl<'a> QueriesRange<'a> {
)?;
let result = unsafe {
let vk = self.pool.device.pointers();
check_errors(vk.GetQueryPoolResults(
let fns = self.pool.device.fns();
check_errors(fns.v1_0.get_query_pool_results(
self.pool.device.internal_object(),
self.pool.internal_object(),
self.range.start,
self.range.end - self.range.start,
std::mem::size_of_val(destination),
destination.as_mut_ptr() as *mut c_void,
stride as vk::DeviceSize,
vk::QueryResultFlags::from(flags) | T::FLAG,
stride as ash::vk::DeviceSize,
ash::vk::QueryResultFlags::from(flags) | T::FLAG,
))?
};
@ -408,15 +407,15 @@ impl error::Error for GetResultsError {
/// This is implemented for `u32` and `u64`. Unless you really know what you're doing, you should
/// not implement this trait for any other type.
pub unsafe trait QueryResultElement {
const FLAG: vk::QueryResultFlags;
const FLAG: ash::vk::QueryResultFlags;
}
unsafe impl QueryResultElement for u32 {
const FLAG: vk::QueryResultFlags = 0;
const FLAG: ash::vk::QueryResultFlags = ash::vk::QueryResultFlags::empty();
}
unsafe impl QueryResultElement for u64 {
const FLAG: vk::QueryResultFlags = vk::QUERY_RESULT_64_BIT;
const FLAG: ash::vk::QueryResultFlags = ash::vk::QueryResultFlags::TYPE_64;
}
/// The type of query that a query pool should perform.
@ -448,13 +447,13 @@ impl QueryType {
}
}
impl From<QueryType> for vk::QueryType {
impl From<QueryType> for ash::vk::QueryType {
#[inline]
fn from(value: QueryType) -> Self {
match value {
QueryType::Occlusion => vk::QUERY_TYPE_OCCLUSION,
QueryType::PipelineStatistics(_) => vk::QUERY_TYPE_PIPELINE_STATISTICS,
QueryType::Timestamp => vk::QUERY_TYPE_TIMESTAMP,
QueryType::Occlusion => ash::vk::QueryType::OCCLUSION,
QueryType::PipelineStatistics(_) => ash::vk::QueryType::PIPELINE_STATISTICS,
QueryType::Timestamp => ash::vk::QueryType::TIMESTAMP,
}
}
}
@ -468,12 +467,12 @@ pub struct QueryControlFlags {
pub precise: bool,
}
impl From<QueryControlFlags> for vk::QueryControlFlags {
impl From<QueryControlFlags> for ash::vk::QueryControlFlags {
#[inline]
fn from(value: QueryControlFlags) -> Self {
let mut result = 0;
let mut result = ash::vk::QueryControlFlags::empty();
if value.precise {
result |= vk::QUERY_CONTROL_PRECISE_BIT;
result |= ash::vk::QueryControlFlags::PRECISE;
}
result
}
@ -592,41 +591,42 @@ impl QueryPipelineStatisticFlags {
}
}
impl From<QueryPipelineStatisticFlags> for vk::QueryPipelineStatisticFlags {
fn from(value: QueryPipelineStatisticFlags) -> vk::QueryPipelineStatisticFlags {
let mut result = 0;
impl From<QueryPipelineStatisticFlags> for ash::vk::QueryPipelineStatisticFlags {
fn from(value: QueryPipelineStatisticFlags) -> ash::vk::QueryPipelineStatisticFlags {
let mut result = ash::vk::QueryPipelineStatisticFlags::empty();
if value.input_assembly_vertices {
result |= vk::QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT;
result |= ash::vk::QueryPipelineStatisticFlags::INPUT_ASSEMBLY_VERTICES;
}
if value.input_assembly_primitives {
result |= vk::QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT;
result |= ash::vk::QueryPipelineStatisticFlags::INPUT_ASSEMBLY_PRIMITIVES;
}
if value.vertex_shader_invocations {
result |= vk::QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT;
result |= ash::vk::QueryPipelineStatisticFlags::VERTEX_SHADER_INVOCATIONS;
}
if value.geometry_shader_invocations {
result |= vk::QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT;
result |= ash::vk::QueryPipelineStatisticFlags::GEOMETRY_SHADER_INVOCATIONS;
}
if value.geometry_shader_primitives {
result |= vk::QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT;
result |= ash::vk::QueryPipelineStatisticFlags::GEOMETRY_SHADER_PRIMITIVES;
}
if value.clipping_invocations {
result |= vk::QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT;
result |= ash::vk::QueryPipelineStatisticFlags::CLIPPING_INVOCATIONS;
}
if value.clipping_primitives {
result |= vk::QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT;
result |= ash::vk::QueryPipelineStatisticFlags::CLIPPING_PRIMITIVES;
}
if value.fragment_shader_invocations {
result |= vk::QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT;
result |= ash::vk::QueryPipelineStatisticFlags::FRAGMENT_SHADER_INVOCATIONS;
}
if value.tessellation_control_shader_patches {
result |= vk::QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT;
result |= ash::vk::QueryPipelineStatisticFlags::TESSELLATION_CONTROL_SHADER_PATCHES;
}
if value.tessellation_evaluation_shader_invocations {
result |= vk::QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT;
result |=
ash::vk::QueryPipelineStatisticFlags::TESSELLATION_EVALUATION_SHADER_INVOCATIONS;
}
if value.compute_shader_invocations {
result |= vk::QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT;
result |= ash::vk::QueryPipelineStatisticFlags::COMPUTE_SHADER_INVOCATIONS;
}
result
}
@ -650,18 +650,18 @@ pub struct QueryResultFlags {
pub partial: bool,
}
impl From<QueryResultFlags> for vk::QueryResultFlags {
impl From<QueryResultFlags> for ash::vk::QueryResultFlags {
#[inline]
fn from(value: QueryResultFlags) -> Self {
let mut result = 0;
let mut result = ash::vk::QueryResultFlags::empty();
if value.wait {
result |= vk::QUERY_RESULT_WAIT_BIT;
result |= ash::vk::QueryResultFlags::WAIT;
}
if value.with_availability {
result |= vk::QUERY_RESULT_WITH_AVAILABILITY_BIT;
result |= ash::vk::QueryResultFlags::WITH_AVAILABILITY;
}
if value.partial {
result |= vk::QUERY_RESULT_PARTIAL_BIT;
result |= ash::vk::QueryResultFlags::PARTIAL;
}
result
}

View File

@ -10,9 +10,9 @@
//! This module contains the `ensure_image_view_compatible` function, which verifies whether
//! an image view can be used as a render pass attachment.
use crate::format::Format;
use crate::image::view::ImageViewAbstract;
use crate::render_pass::RenderPassDesc;
use crate::{format::Format, image::SampleCount};
use std::error;
use std::fmt;
@ -119,9 +119,9 @@ pub enum IncompatibleRenderPassAttachmentError {
/// the image.
SamplesMismatch {
/// Number of samples expected by the render pass.
expected: u32,
expected: SampleCount,
/// Number of samples of the image.
obtained: u32,
obtained: SampleCount,
},
/// The image view has a component swizzle that is different from identity.

View File

@ -10,10 +10,10 @@
use crate::format::ClearValue;
use crate::format::Format;
use crate::image::ImageLayout;
use crate::image::SampleCount;
use crate::pipeline::shader::ShaderInterface;
use crate::sync::AccessFlags;
use crate::sync::PipelineStages;
use crate::vk;
/// The description of a render pass.
#[derive(Clone, Debug)]
@ -151,7 +151,7 @@ pub struct AttachmentDesc {
/// Format of the image that is going to be bound.
pub format: Format,
/// Number of samples of the image that is going to be bound.
pub samples: u32,
pub samples: SampleCount,
/// What the implementation should do with that attachment at the start of the render pass.
pub load: LoadOp,
@ -271,13 +271,13 @@ pub struct SubpassDependencyDesc {
/// Describes what the implementation should do with an attachment after all the subpasses have
/// completed.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[repr(u32)]
#[repr(i32)]
pub enum StoreOp {
/// The attachment will be stored. This is what you usually want.
///
/// While this is the most intuitive option, it is also slower than `DontCare` because it can
/// take time to write the data back to memory.
Store = vk::ATTACHMENT_STORE_OP_STORE,
Store = ash::vk::AttachmentStoreOp::STORE.as_raw(),
/// What happens is implementation-specific.
///
@ -287,19 +287,26 @@ pub enum StoreOp {
/// This doesn't mean that the data won't be copied, as an implementation is also free to not
/// use a cache and write the output directly in memory. In other words, the content of the
/// image will be undefined.
DontCare = vk::ATTACHMENT_STORE_OP_DONT_CARE,
DontCare = ash::vk::AttachmentStoreOp::DONT_CARE.as_raw(),
}
impl From<StoreOp> for ash::vk::AttachmentStoreOp {
#[inline]
fn from(val: StoreOp) -> Self {
Self::from_raw(val as i32)
}
}
/// Describes what the implementation should do with an attachment at the start of the subpass.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[repr(u32)]
#[repr(i32)]
pub enum LoadOp {
/// The content of the attachment will be loaded from memory. This is what you want if you want
/// to draw over something existing.
///
/// While this is the most intuitive option, it is also the slowest because it uses a lot of
/// memory bandwidth.
Load = vk::ATTACHMENT_LOAD_OP_LOAD,
Load = ash::vk::AttachmentLoadOp::LOAD.as_raw(),
/// The content of the attachment will be filled by the implementation with a uniform value
/// that you must provide when you start drawing.
@ -308,7 +315,7 @@ pub enum LoadOp {
/// the color, depth and/or stencil buffers.
///
/// See the `draw_inline` and `draw_secondary` methods of `PrimaryComputeBufferBuilder`.
Clear = vk::ATTACHMENT_LOAD_OP_CLEAR,
Clear = ash::vk::AttachmentLoadOp::CLEAR.as_raw(),
/// The attachment will have undefined content.
///
@ -316,5 +323,12 @@ pub enum LoadOp {
/// commands.
/// If you are going to fill the attachment with a uniform value, it is better to use `Clear`
/// instead.
DontCare = vk::ATTACHMENT_LOAD_OP_DONT_CARE,
DontCare = ash::vk::AttachmentLoadOp::DONT_CARE.as_raw(),
}
impl From<LoadOp> for ash::vk::AttachmentLoadOp {
#[inline]
fn from(val: LoadOp) -> Self {
Self::from_raw(val as i32)
}
}

View File

@ -15,7 +15,6 @@ use crate::render_pass::ensure_image_view_compatible;
use crate::render_pass::AttachmentsList;
use crate::render_pass::IncompatibleRenderPassAttachmentError;
use crate::render_pass::RenderPass;
use crate::vk;
use crate::Error;
use crate::OomError;
use crate::SafeDeref;
@ -77,7 +76,7 @@ use std::sync::Arc;
pub struct Framebuffer<A> {
device: Arc<Device>,
render_pass: Arc<RenderPass>,
framebuffer: vk::Framebuffer,
framebuffer: ash::vk::Framebuffer,
dimensions: [u32; 3],
resources: A,
}
@ -121,7 +120,7 @@ impl Framebuffer<()> {
/// Prototype of a framebuffer.
pub struct FramebufferBuilder<A> {
render_pass: Arc<RenderPass>,
raw_ids: SmallVec<[vk::ImageView; 8]>,
raw_ids: SmallVec<[ash::vk::ImageView; 8]>,
dimensions: FramebufferBuilderDimensions,
attachments: A,
}
@ -293,22 +292,21 @@ where
}
let framebuffer = unsafe {
let vk = device.pointers();
let fns = device.fns();
let infos = vk::FramebufferCreateInfo {
sType: vk::STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
pNext: ptr::null(),
flags: 0, // reserved
renderPass: self.render_pass.inner().internal_object(),
attachmentCount: self.raw_ids.len() as u32,
pAttachments: self.raw_ids.as_ptr(),
let infos = ash::vk::FramebufferCreateInfo {
flags: ash::vk::FramebufferCreateFlags::empty(),
render_pass: self.render_pass.inner().internal_object(),
attachment_count: self.raw_ids.len() as u32,
p_attachments: self.raw_ids.as_ptr(),
width: dimensions[0],
height: dimensions[1],
layers: dimensions[2],
..Default::default()
};
let mut output = MaybeUninit::uninit();
check_errors(vk.CreateFramebuffer(
check_errors(fns.v1_0.create_framebuffer(
device.internal_object(),
&infos,
ptr::null(),
@ -465,23 +463,25 @@ impl<A> Drop for Framebuffer<A> {
#[inline]
fn drop(&mut self) {
unsafe {
let vk = self.device.pointers();
vk.DestroyFramebuffer(self.device.internal_object(), self.framebuffer, ptr::null());
let fns = self.device.fns();
fns.v1_0.destroy_framebuffer(
self.device.internal_object(),
self.framebuffer,
ptr::null(),
);
}
}
}
/// Opaque object that represents the internals of a framebuffer.
#[derive(Debug, Copy, Clone)]
pub struct FramebufferSys<'a>(vk::Framebuffer, PhantomData<&'a ()>);
pub struct FramebufferSys<'a>(ash::vk::Framebuffer, PhantomData<&'a ()>);
unsafe impl<'a> VulkanObject for FramebufferSys<'a> {
type Object = vk::Framebuffer;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_FRAMEBUFFER;
type Object = ash::vk::Framebuffer;
#[inline]
fn internal_object(&self) -> vk::Framebuffer {
fn internal_object(&self) -> ash::vk::Framebuffer {
self.0
}
}

View File

@ -72,6 +72,7 @@ macro_rules! ordered_passes_renderpass {
use $crate::image::ImageLayout;
use $crate::sync::AccessFlags;
use $crate::sync::PipelineStages;
use std::convert::TryInto;
let mut attachment_num = 0;
$(
@ -165,7 +166,7 @@ macro_rules! ordered_passes_renderpass {
AttachmentDesc {
format: $format,
samples: $samples,
samples: $samples.try_into().unwrap(),
load: $crate::render_pass::LoadOp::$load,
store: $crate::render_pass::StoreOp::$store,
stencil_load: $crate::render_pass::LoadOp::$load,

View File

@ -12,12 +12,12 @@ use crate::device::Device;
use crate::device::DeviceOwned;
use crate::format::FormatTy;
use crate::image::ImageLayout;
use crate::image::SampleCount;
use crate::pipeline::shader::ShaderInterface;
use crate::render_pass::AttachmentDesc;
use crate::render_pass::LoadOp;
use crate::render_pass::RenderPassDesc;
use crate::render_pass::SubpassDesc;
use crate::vk;
use crate::Error;
use crate::OomError;
use crate::VulkanObject;
@ -84,7 +84,7 @@ use std::sync::Mutex;
/// See the documentation of the macro for more details. TODO: put link here
pub struct RenderPass {
// The internal Vulkan object.
render_pass: vk::RenderPass,
render_pass: ash::vk::RenderPass,
// Device this render pass was created from.
device: Arc<Device>,
@ -109,7 +109,7 @@ impl RenderPass {
device: Arc<Device>,
description: RenderPassDesc,
) -> Result<RenderPass, RenderPassCreationError> {
let vk = device.pointers();
let fns = device.fns();
// If the first use of an attachment in this render pass is as an input attachment, and
// the attachment is not also used as a color or depth/stencil attachment in the same
@ -150,18 +150,16 @@ impl RenderPass {
.attachments()
.iter()
.map(|attachment| {
debug_assert!(attachment.samples.is_power_of_two());
vk::AttachmentDescription {
flags: 0, // FIXME: may alias flag
format: attachment.format as u32,
samples: attachment.samples,
loadOp: attachment.load as u32,
storeOp: attachment.store as u32,
stencilLoadOp: attachment.stencil_load as u32,
stencilStoreOp: attachment.stencil_store as u32,
initialLayout: attachment.initial_layout as u32,
finalLayout: attachment.final_layout as u32,
ash::vk::AttachmentDescription {
flags: ash::vk::AttachmentDescriptionFlags::empty(), // FIXME: may alias flag
format: attachment.format.into(),
samples: attachment.samples.into(),
load_op: attachment.load.into(),
store_op: attachment.store.into(),
stencil_load_op: attachment.stencil_load.into(),
stencil_store_op: attachment.stencil_store.into(),
initial_layout: attachment.initial_layout.into(),
final_layout: attachment.final_layout.into(),
}
})
.collect::<SmallVec<[_; 16]>>();
@ -184,13 +182,13 @@ impl RenderPass {
debug_assert!(pass
.resolve_attachments
.iter()
.all(|a| attachments[a.0].samples == 1));
.all(|a| attachments[a.0].samples == ash::vk::SampleCountFlags::TYPE_1));
debug_assert!(
pass.resolve_attachments.is_empty()
|| pass
.color_attachments
.iter()
.all(|a| attachments[a.0].samples > 1)
.all(|a| attachments[a.0].samples.as_raw() > 1)
);
debug_assert!(
pass.resolve_attachments.is_empty()
@ -228,32 +226,32 @@ impl RenderPass {
let resolve = pass.resolve_attachments.iter().map(|&(offset, img_la)| {
debug_assert!(offset < attachments.len());
vk::AttachmentReference {
ash::vk::AttachmentReference {
attachment: offset as u32,
layout: img_la as u32,
layout: img_la.into(),
}
});
let color = pass.color_attachments.iter().map(|&(offset, img_la)| {
debug_assert!(offset < attachments.len());
vk::AttachmentReference {
ash::vk::AttachmentReference {
attachment: offset as u32,
layout: img_la as u32,
layout: img_la.into(),
}
});
let input = pass.input_attachments.iter().map(|&(offset, img_la)| {
debug_assert!(offset < attachments.len());
vk::AttachmentReference {
ash::vk::AttachmentReference {
attachment: offset as u32,
layout: img_la as u32,
layout: img_la.into(),
}
});
let depthstencil = if let Some((offset, img_la)) = pass.depth_stencil {
Some(vk::AttachmentReference {
Some(ash::vk::AttachmentReference {
attachment: offset as u32,
layout: img_la as u32,
layout: img_la.into(),
})
} else {
None
@ -312,29 +310,29 @@ impl RenderPass {
.offset(preserve_ref_index as isize);
preserve_ref_index += pass.preserve_attachments.len();
out.push(vk::SubpassDescription {
flags: 0, // reserved
pipelineBindPoint: vk::PIPELINE_BIND_POINT_GRAPHICS,
inputAttachmentCount: pass.input_attachments.len() as u32,
pInputAttachments: if pass.input_attachments.is_empty() {
out.push(ash::vk::SubpassDescription {
flags: ash::vk::SubpassDescriptionFlags::empty(),
pipeline_bind_point: ash::vk::PipelineBindPoint::GRAPHICS,
input_attachment_count: pass.input_attachments.len() as u32,
p_input_attachments: if pass.input_attachments.is_empty() {
ptr::null()
} else {
input_attachments
},
colorAttachmentCount: pass.color_attachments.len() as u32,
pColorAttachments: if pass.color_attachments.is_empty() {
color_attachment_count: pass.color_attachments.len() as u32,
p_color_attachments: if pass.color_attachments.is_empty() {
ptr::null()
} else {
color_attachments
},
pResolveAttachments: if pass.resolve_attachments.is_empty() {
p_resolve_attachments: if pass.resolve_attachments.is_empty() {
ptr::null()
} else {
resolve_attachments
},
pDepthStencilAttachment: depth_stencil,
preserveAttachmentCount: pass.preserve_attachments.len() as u32,
pPreserveAttachments: if pass.preserve_attachments.is_empty() {
p_depth_stencil_attachment: depth_stencil,
preserve_attachment_count: pass.preserve_attachments.len() as u32,
p_preserve_attachments: if pass.preserve_attachments.is_empty() {
ptr::null()
} else {
preserve_attachments
@ -355,57 +353,56 @@ impl RenderPass {
.iter()
.map(|dependency| {
debug_assert!(
dependency.source_subpass as u32 == vk::SUBPASS_EXTERNAL
dependency.source_subpass as u32 == ash::vk::SUBPASS_EXTERNAL
|| dependency.source_subpass < passes.len()
);
debug_assert!(
dependency.destination_subpass as u32 == vk::SUBPASS_EXTERNAL
dependency.destination_subpass as u32 == ash::vk::SUBPASS_EXTERNAL
|| dependency.destination_subpass < passes.len()
);
vk::SubpassDependency {
srcSubpass: dependency.source_subpass as u32,
dstSubpass: dependency.destination_subpass as u32,
srcStageMask: dependency.source_stages.into(),
dstStageMask: dependency.destination_stages.into(),
srcAccessMask: dependency.source_access.into(),
dstAccessMask: dependency.destination_access.into(),
dependencyFlags: if dependency.by_region {
vk::DEPENDENCY_BY_REGION_BIT
ash::vk::SubpassDependency {
src_subpass: dependency.source_subpass as u32,
dst_subpass: dependency.destination_subpass as u32,
src_stage_mask: dependency.source_stages.into(),
dst_stage_mask: dependency.destination_stages.into(),
src_access_mask: dependency.source_access.into(),
dst_access_mask: dependency.destination_access.into(),
dependency_flags: if dependency.by_region {
ash::vk::DependencyFlags::BY_REGION
} else {
0
ash::vk::DependencyFlags::empty()
},
}
})
.collect::<SmallVec<[_; 16]>>();
let render_pass = unsafe {
let infos = vk::RenderPassCreateInfo {
sType: vk::STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
pNext: ptr::null(),
flags: 0, // reserved
attachmentCount: attachments.len() as u32,
pAttachments: if attachments.is_empty() {
let infos = ash::vk::RenderPassCreateInfo {
flags: ash::vk::RenderPassCreateFlags::empty(),
attachment_count: attachments.len() as u32,
p_attachments: if attachments.is_empty() {
ptr::null()
} else {
attachments.as_ptr()
},
subpassCount: passes.len() as u32,
pSubpasses: if passes.is_empty() {
subpass_count: passes.len() as u32,
p_subpasses: if passes.is_empty() {
ptr::null()
} else {
passes.as_ptr()
},
dependencyCount: dependencies.len() as u32,
pDependencies: if dependencies.is_empty() {
dependency_count: dependencies.len() as u32,
p_dependencies: if dependencies.is_empty() {
ptr::null()
} else {
dependencies.as_ptr()
},
..Default::default()
};
let mut output = MaybeUninit::uninit();
check_errors(vk.CreateRenderPass(
check_errors(fns.v1_0.create_render_pass(
device.internal_object(),
&infos,
ptr::null(),
@ -448,9 +445,9 @@ impl RenderPass {
}
unsafe {
let vk = self.device.pointers();
let fns = self.device.fns();
let mut out = MaybeUninit::uninit();
vk.GetRenderAreaGranularity(
fns.v1_0.get_render_area_granularity(
self.device.internal_object(),
self.render_pass,
out.as_mut_ptr(),
@ -493,23 +490,25 @@ impl Drop for RenderPass {
#[inline]
fn drop(&mut self) {
unsafe {
let vk = self.device.pointers();
vk.DestroyRenderPass(self.device.internal_object(), self.render_pass, ptr::null());
let fns = self.device.fns();
fns.v1_0.destroy_render_pass(
self.device.internal_object(),
self.render_pass,
ptr::null(),
);
}
}
}
/// Opaque object that represents the render pass' internals.
#[derive(Debug, Copy, Clone)]
pub struct RenderPassSys<'a>(vk::RenderPass, PhantomData<&'a ()>);
pub struct RenderPassSys<'a>(ash::vk::RenderPass, PhantomData<&'a ()>);
unsafe impl<'a> VulkanObject for RenderPassSys<'a> {
type Object = vk::RenderPass;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_RENDER_PASS;
type Object = ash::vk::RenderPass;
#[inline]
fn internal_object(&self) -> vk::RenderPass {
fn internal_object(&self) -> ash::vk::RenderPass {
self.0
}
}
@ -709,7 +708,7 @@ impl Subpass {
/// Returns the number of samples in the color and/or depth/stencil attachments. Returns `None`
/// if there is no such attachment in this subpass.
#[inline]
pub fn num_samples(&self) -> Option<u32> {
pub fn num_samples(&self) -> Option<SampleCount> {
let subpass_desc = self.subpass_desc();
// TODO: chain input attachments as well?

View File

@ -62,25 +62,22 @@
//!
// FIXME: restrictions aren't checked yet
use crate::check_errors;
use crate::device::Device;
use crate::device::DeviceOwned;
pub use crate::pipeline::depth_stencil::Compare;
use crate::Error;
use crate::OomError;
use crate::VulkanObject;
use std::error;
use std::fmt;
use std::mem::MaybeUninit;
use std::ptr;
use std::sync::Arc;
use crate::check_errors;
use crate::device::Device;
use crate::device::DeviceOwned;
use crate::vk;
use crate::Error;
use crate::OomError;
use crate::VulkanObject;
pub use crate::pipeline::depth_stencil::Compare;
/// Describes how to retrieve data from an image within a shader.
pub struct Sampler {
sampler: vk::Sampler,
sampler: ash::vk::Sampler,
device: Arc<Device>,
compare_mode: bool,
unnormalized: bool,
@ -318,39 +315,42 @@ impl Sampler {
(b, None) => b,
};
let vk = device.pointers();
let fns = device.fns();
let sampler = unsafe {
let infos = vk::SamplerCreateInfo {
sType: vk::STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
pNext: ptr::null(),
flags: 0, // reserved
magFilter: mag_filter as u32,
minFilter: min_filter as u32,
mipmapMode: mipmap_mode as u32,
addressModeU: address_u.into(),
addressModeV: address_v.into(),
addressModeW: address_w.into(),
mipLodBias: mip_lod_bias,
anisotropyEnable: if max_anisotropy > 1.0 {
vk::TRUE
let infos = ash::vk::SamplerCreateInfo {
flags: ash::vk::SamplerCreateFlags::empty(),
mag_filter: mag_filter.into(),
min_filter: min_filter.into(),
mipmap_mode: mipmap_mode.into(),
address_mode_u: address_u.into(),
address_mode_v: address_v.into(),
address_mode_w: address_w.into(),
mip_lod_bias: mip_lod_bias,
anisotropy_enable: if max_anisotropy > 1.0 {
ash::vk::TRUE
} else {
vk::FALSE
ash::vk::FALSE
},
maxAnisotropy: max_anisotropy,
compareEnable: if compare.is_some() {
vk::TRUE
max_anisotropy: max_anisotropy,
compare_enable: if compare.is_some() {
ash::vk::TRUE
} else {
vk::FALSE
ash::vk::FALSE
},
compareOp: compare.map(|c| c as u32).unwrap_or(0),
minLod: min_lod,
maxLod: max_lod,
borderColor: border_color.map(|b| b as u32).unwrap_or(0),
unnormalizedCoordinates: vk::FALSE,
compare_op: compare
.map(|c| c.into())
.unwrap_or(ash::vk::CompareOp::NEVER),
min_lod: min_lod,
max_lod: max_lod,
border_color: border_color
.map(|b| b.into())
.unwrap_or(ash::vk::BorderColor::FLOAT_TRANSPARENT_BLACK),
unnormalized_coordinates: ash::vk::FALSE,
..Default::default()
};
let mut output = MaybeUninit::uninit();
check_errors(vk.CreateSampler(
check_errors(fns.v1_0.create_sampler(
device.internal_object(),
&infos,
ptr::null(),
@ -406,7 +406,7 @@ impl Sampler {
address_u: UnnormalizedSamplerAddressMode,
address_v: UnnormalizedSamplerAddressMode,
) -> Result<Arc<Sampler>, SamplerCreationError> {
let vk = device.pointers();
let fns = device.fns();
let border_color = address_u.border_color();
let border_color = match (border_color, address_v.border_color()) {
@ -419,29 +419,30 @@ impl Sampler {
};
let sampler = unsafe {
let infos = vk::SamplerCreateInfo {
sType: vk::STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
pNext: ptr::null(),
flags: 0, // reserved
magFilter: filter as u32,
minFilter: filter as u32,
mipmapMode: vk::SAMPLER_MIPMAP_MODE_NEAREST,
addressModeU: address_u.into(),
addressModeV: address_v.into(),
addressModeW: vk::SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, // unused by the impl
mipLodBias: 0.0,
anisotropyEnable: vk::FALSE,
maxAnisotropy: 1.0,
compareEnable: vk::FALSE,
compareOp: vk::COMPARE_OP_NEVER,
minLod: 0.0,
maxLod: 0.0,
borderColor: border_color.map(|b| b as u32).unwrap_or(0),
unnormalizedCoordinates: vk::TRUE,
let infos = ash::vk::SamplerCreateInfo {
flags: ash::vk::SamplerCreateFlags::empty(),
mag_filter: filter.into(),
min_filter: filter.into(),
mipmap_mode: ash::vk::SamplerMipmapMode::NEAREST,
address_mode_u: address_u.into(),
address_mode_v: address_v.into(),
address_mode_w: ash::vk::SamplerAddressMode::CLAMP_TO_EDGE, // unused by the impl
mip_lod_bias: 0.0,
anisotropy_enable: ash::vk::FALSE,
max_anisotropy: 1.0,
compare_enable: ash::vk::FALSE,
compare_op: ash::vk::CompareOp::NEVER,
min_lod: 0.0,
max_lod: 0.0,
border_color: border_color
.map(|b| b.into())
.unwrap_or(ash::vk::BorderColor::FLOAT_TRANSPARENT_BLACK),
unnormalized_coordinates: ash::vk::TRUE,
..Default::default()
};
let mut output = MaybeUninit::uninit();
check_errors(vk.CreateSampler(
check_errors(fns.v1_0.create_sampler(
device.internal_object(),
&infos,
ptr::null(),
@ -519,12 +520,10 @@ unsafe impl DeviceOwned for Sampler {
}
unsafe impl VulkanObject for Sampler {
type Object = vk::Sampler;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_SAMPLER;
type Object = ash::vk::Sampler;
#[inline]
fn internal_object(&self) -> vk::Sampler {
fn internal_object(&self) -> ash::vk::Sampler {
self.sampler
}
}
@ -540,35 +539,50 @@ impl Drop for Sampler {
#[inline]
fn drop(&mut self) {
unsafe {
let vk = self.device.pointers();
vk.DestroySampler(self.device.internal_object(), self.sampler, ptr::null());
let fns = self.device.fns();
fns.v1_0
.destroy_sampler(self.device.internal_object(), self.sampler, ptr::null());
}
}
}
/// Describes how the color of each pixel should be determined.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[repr(u32)]
#[repr(i32)]
pub enum Filter {
/// The four pixels whose center surround the requested coordinates are taken, then their
/// values are interpolated.
Linear = vk::FILTER_LINEAR,
Linear = ash::vk::Filter::LINEAR.as_raw(),
/// The pixel whose center is nearest to the requested coordinates is taken from the source
/// and its value is returned as-is.
Nearest = vk::FILTER_NEAREST,
Nearest = ash::vk::Filter::NEAREST.as_raw(),
}
impl From<Filter> for ash::vk::Filter {
#[inline]
fn from(val: Filter) -> Self {
Self::from_raw(val as i32)
}
}
/// Describes which mipmap from the source to use.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[repr(u32)]
#[repr(i32)]
pub enum MipmapMode {
/// Use the mipmap whose dimensions are the nearest to the dimensions of the destination.
Nearest = vk::SAMPLER_MIPMAP_MODE_NEAREST,
Nearest = ash::vk::SamplerMipmapMode::NEAREST.as_raw(),
/// Take the mipmap whose dimensions are no greater than that of the destination together
/// with the next higher level mipmap, calculate the value for both, and interpolate them.
Linear = vk::SAMPLER_MIPMAP_MODE_LINEAR,
Linear = ash::vk::SamplerMipmapMode::LINEAR.as_raw(),
}
impl From<MipmapMode> for ash::vk::SamplerMipmapMode {
#[inline]
fn from(val: MipmapMode) -> Self {
Self::from_raw(val as i32)
}
}
/// How the sampler should behave when it needs to access a pixel that is out of range of the
@ -609,15 +623,17 @@ impl SamplerAddressMode {
}
}
impl From<SamplerAddressMode> for vk::SamplerAddressMode {
impl From<SamplerAddressMode> for ash::vk::SamplerAddressMode {
#[inline]
fn from(val: SamplerAddressMode) -> Self {
match val {
SamplerAddressMode::Repeat => vk::SAMPLER_ADDRESS_MODE_REPEAT,
SamplerAddressMode::MirroredRepeat => vk::SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
SamplerAddressMode::ClampToEdge => vk::SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
SamplerAddressMode::ClampToBorder(_) => vk::SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
SamplerAddressMode::MirrorClampToEdge => vk::SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,
SamplerAddressMode::Repeat => ash::vk::SamplerAddressMode::REPEAT,
SamplerAddressMode::MirroredRepeat => ash::vk::SamplerAddressMode::MIRRORED_REPEAT,
SamplerAddressMode::ClampToEdge => ash::vk::SamplerAddressMode::CLAMP_TO_EDGE,
SamplerAddressMode::ClampToBorder(_) => ash::vk::SamplerAddressMode::CLAMP_TO_BORDER,
SamplerAddressMode::MirrorClampToEdge => {
ash::vk::SamplerAddressMode::MIRROR_CLAMP_TO_EDGE
}
}
}
}
@ -650,13 +666,15 @@ impl UnnormalizedSamplerAddressMode {
}
}
impl From<UnnormalizedSamplerAddressMode> for vk::SamplerAddressMode {
impl From<UnnormalizedSamplerAddressMode> for ash::vk::SamplerAddressMode {
#[inline]
fn from(val: UnnormalizedSamplerAddressMode) -> Self {
match val {
UnnormalizedSamplerAddressMode::ClampToEdge => vk::SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
UnnormalizedSamplerAddressMode::ClampToEdge => {
ash::vk::SamplerAddressMode::CLAMP_TO_EDGE
}
UnnormalizedSamplerAddressMode::ClampToBorder(_) => {
vk::SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER
ash::vk::SamplerAddressMode::CLAMP_TO_BORDER
}
}
}
@ -669,26 +687,33 @@ impl From<UnnormalizedSamplerAddressMode> for vk::SamplerAddressMode {
/// Using a border color restricts the sampler to either floating-point images or integer images.
/// See the documentation of the `sampler` module for more info.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[repr(u32)]
#[repr(i32)]
pub enum BorderColor {
/// The value `(0.0, 0.0, 0.0, 0.0)`. Can only be used with floating-point images.
FloatTransparentBlack = vk::BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
FloatTransparentBlack = ash::vk::BorderColor::FLOAT_TRANSPARENT_BLACK.as_raw(),
/// The value `(0, 0, 0, 0)`. Can only be used with integer images.
IntTransparentBlack = vk::BORDER_COLOR_INT_TRANSPARENT_BLACK,
IntTransparentBlack = ash::vk::BorderColor::INT_TRANSPARENT_BLACK.as_raw(),
/// The value `(0.0, 0.0, 0.0, 1.0)`. Can only be used with floating-point identity-swizzled
/// images.
FloatOpaqueBlack = vk::BORDER_COLOR_FLOAT_OPAQUE_BLACK,
FloatOpaqueBlack = ash::vk::BorderColor::FLOAT_OPAQUE_BLACK.as_raw(),
/// The value `(0, 0, 0, 1)`. Can only be used with integer identity-swizzled images.
IntOpaqueBlack = vk::BORDER_COLOR_INT_OPAQUE_BLACK,
IntOpaqueBlack = ash::vk::BorderColor::INT_OPAQUE_BLACK.as_raw(),
/// The value `(1.0, 1.0, 1.0, 1.0)`. Can only be used with floating-point images.
FloatOpaqueWhite = vk::BORDER_COLOR_FLOAT_OPAQUE_WHITE,
FloatOpaqueWhite = ash::vk::BorderColor::FLOAT_OPAQUE_WHITE.as_raw(),
/// The value `(1, 1, 1, 1)`. Can only be used with integer images.
IntOpaqueWhite = vk::BORDER_COLOR_INT_OPAQUE_WHITE,
IntOpaqueWhite = ash::vk::BorderColor::INT_OPAQUE_WHITE.as_raw(),
}
impl From<BorderColor> for ash::vk::BorderColor {
#[inline]
fn from(val: BorderColor) -> Self {
Self::from_raw(val as i32)
}
}
/// Error that can happen when creating an instance.

View File

@ -9,7 +9,6 @@
use crate::format::Format;
use crate::image::ImageUsage;
use crate::vk;
use std::iter::FromIterator;
/// The capabilities of a surface when used by a physical device.
@ -60,15 +59,15 @@ pub struct Capabilities {
/// The way presenting a swapchain is accomplished.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[repr(u32)]
#[repr(i32)]
pub enum PresentMode {
/// Immediately shows the image to the user. May result in visible tearing.
Immediate = vk::PRESENT_MODE_IMMEDIATE_KHR,
Immediate = ash::vk::PresentModeKHR::IMMEDIATE.as_raw(),
/// The action of presenting an image puts it in wait. When the next vertical blanking period
/// happens, the waiting image is effectively shown to the user. If an image is presented while
/// another one is waiting, it is replaced.
Mailbox = vk::PRESENT_MODE_MAILBOX_KHR,
Mailbox = ash::vk::PresentModeKHR::MAILBOX.as_raw(),
/// The action of presenting an image adds it to a queue of images. At each vertical blanking
/// period, the queue is popped and an image is presented.
@ -76,17 +75,24 @@ pub enum PresentMode {
/// Guaranteed to be always supported.
///
/// This is the equivalent of OpenGL's `SwapInterval` with a value of 1.
Fifo = vk::PRESENT_MODE_FIFO_KHR,
Fifo = ash::vk::PresentModeKHR::FIFO.as_raw(),
/// Same as `Fifo`, except that if the queue was empty during the previous vertical blanking
/// period then it is equivalent to `Immediate`.
///
/// This is the equivalent of OpenGL's `SwapInterval` with a value of -1.
Relaxed = vk::PRESENT_MODE_FIFO_RELAXED_KHR,
Relaxed = ash::vk::PresentModeKHR::FIFO_RELAXED.as_raw(),
// TODO: These can't be enabled yet because they have to be used with shared present surfaces
// which vulkano doesnt support yet.
//SharedDemand = vk::PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR,
//SharedContinuous = vk::PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR,
//SharedDemand = ash::vk::PresentModeKHR::SHARED_DEMAND_REFRESH,
//SharedContinuous = ash::vk::PresentModeKHR::SHARED_CONTINUOUS_REFRESH,
}
impl From<PresentMode> for ash::vk::PresentModeKHR {
#[inline]
fn from(val: PresentMode) -> Self {
Self::from_raw(val as i32)
}
}
/// List of `PresentMode`s that are supported.
@ -100,20 +106,22 @@ pub struct SupportedPresentModes {
pub shared_continuous: bool,
}
impl FromIterator<vk::PresentModeKHR> for SupportedPresentModes {
impl FromIterator<ash::vk::PresentModeKHR> for SupportedPresentModes {
fn from_iter<T>(iter: T) -> Self
where
T: IntoIterator<Item = vk::PresentModeKHR>,
T: IntoIterator<Item = ash::vk::PresentModeKHR>,
{
let mut result = SupportedPresentModes::none();
for e in iter {
match e {
vk::PRESENT_MODE_IMMEDIATE_KHR => result.immediate = true,
vk::PRESENT_MODE_MAILBOX_KHR => result.mailbox = true,
vk::PRESENT_MODE_FIFO_KHR => result.fifo = true,
vk::PRESENT_MODE_FIFO_RELAXED_KHR => result.relaxed = true,
vk::PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR => result.shared_demand = true,
vk::PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR => result.shared_continuous = true,
ash::vk::PresentModeKHR::IMMEDIATE => result.immediate = true,
ash::vk::PresentModeKHR::MAILBOX => result.mailbox = true,
ash::vk::PresentModeKHR::FIFO => result.fifo = true,
ash::vk::PresentModeKHR::FIFO_RELAXED => result.relaxed = true,
ash::vk::PresentModeKHR::SHARED_DEMAND_REFRESH => result.shared_demand = true,
ash::vk::PresentModeKHR::SHARED_CONTINUOUS_REFRESH => {
result.shared_continuous = true
}
_ => {}
}
}
@ -187,23 +195,33 @@ impl Iterator for SupportedPresentModesIter {
#[repr(u32)]
pub enum SurfaceTransform {
/// Don't transform the image.
Identity = vk::SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
Identity = ash::vk::SurfaceTransformFlagsKHR::IDENTITY.as_raw(),
/// Rotate 90 degrees.
Rotate90 = vk::SURFACE_TRANSFORM_ROTATE_90_BIT_KHR,
Rotate90 = ash::vk::SurfaceTransformFlagsKHR::ROTATE_90.as_raw(),
/// Rotate 180 degrees.
Rotate180 = vk::SURFACE_TRANSFORM_ROTATE_180_BIT_KHR,
Rotate180 = ash::vk::SurfaceTransformFlagsKHR::ROTATE_180.as_raw(),
/// Rotate 270 degrees.
Rotate270 = vk::SURFACE_TRANSFORM_ROTATE_270_BIT_KHR,
Rotate270 = ash::vk::SurfaceTransformFlagsKHR::ROTATE_270.as_raw(),
/// Mirror the image horizontally.
HorizontalMirror = vk::SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR,
HorizontalMirror = ash::vk::SurfaceTransformFlagsKHR::HORIZONTAL_MIRROR.as_raw(),
/// Mirror the image horizontally and rotate 90 degrees.
HorizontalMirrorRotate90 = vk::SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR,
HorizontalMirrorRotate90 =
ash::vk::SurfaceTransformFlagsKHR::HORIZONTAL_MIRROR_ROTATE_90.as_raw(),
/// Mirror the image horizontally and rotate 180 degrees.
HorizontalMirrorRotate180 = vk::SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR,
HorizontalMirrorRotate180 =
ash::vk::SurfaceTransformFlagsKHR::HORIZONTAL_MIRROR_ROTATE_180.as_raw(),
/// Mirror the image horizontally and rotate 270 degrees.
HorizontalMirrorRotate270 = vk::SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR,
HorizontalMirrorRotate270 =
ash::vk::SurfaceTransformFlagsKHR::HORIZONTAL_MIRROR_ROTATE_270.as_raw(),
/// Let the operating system or driver implementation choose.
Inherit = vk::SURFACE_TRANSFORM_INHERIT_BIT_KHR,
Inherit = ash::vk::SurfaceTransformFlagsKHR::INHERIT.as_raw(),
}
impl From<SurfaceTransform> for ash::vk::SurfaceTransformFlagsKHR {
#[inline]
fn from(val: SurfaceTransform) -> Self {
Self::from_raw(val as u32)
}
}
/// How the alpha values of the pixels of the window are treated.
@ -212,18 +230,25 @@ pub enum SurfaceTransform {
pub enum CompositeAlpha {
/// The alpha channel of the image is ignored. All the pixels are considered as if they have a
/// value of 1.0.
Opaque = vk::COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
Opaque = ash::vk::CompositeAlphaFlagsKHR::OPAQUE.as_raw(),
/// The alpha channel of the image is respected. The color channels are expected to have
/// already been multiplied by the alpha value.
PreMultiplied = vk::COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
PreMultiplied = ash::vk::CompositeAlphaFlagsKHR::PRE_MULTIPLIED.as_raw(),
/// The alpha channel of the image is respected. The color channels will be multiplied by the
/// alpha value by the compositor before being added to what is behind.
PostMultiplied = vk::COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
PostMultiplied = ash::vk::CompositeAlphaFlagsKHR::POST_MULTIPLIED.as_raw(),
/// Let the operating system or driver implementation choose.
Inherit = vk::COMPOSITE_ALPHA_INHERIT_BIT_KHR,
Inherit = ash::vk::CompositeAlphaFlagsKHR::INHERIT.as_raw(),
}
impl From<CompositeAlpha> for ash::vk::CompositeAlphaFlagsKHR {
#[inline]
fn from(val: CompositeAlpha) -> Self {
Self::from_raw(val as u32)
}
}
/// List of supported composite alpha modes.
@ -238,20 +263,20 @@ pub struct SupportedCompositeAlpha {
pub inherit: bool,
}
impl From<vk::CompositeAlphaFlagsKHR> for SupportedCompositeAlpha {
impl From<ash::vk::CompositeAlphaFlagsKHR> for SupportedCompositeAlpha {
#[inline]
fn from(val: vk::CompositeAlphaFlagsKHR) -> SupportedCompositeAlpha {
fn from(val: ash::vk::CompositeAlphaFlagsKHR) -> SupportedCompositeAlpha {
let mut result = SupportedCompositeAlpha::none();
if (val & vk::COMPOSITE_ALPHA_OPAQUE_BIT_KHR) != 0 {
if !(val & ash::vk::CompositeAlphaFlagsKHR::OPAQUE).is_empty() {
result.opaque = true;
}
if (val & vk::COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR) != 0 {
if !(val & ash::vk::CompositeAlphaFlagsKHR::PRE_MULTIPLIED).is_empty() {
result.pre_multiplied = true;
}
if (val & vk::COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR) != 0 {
if !(val & ash::vk::CompositeAlphaFlagsKHR::POST_MULTIPLIED).is_empty() {
result.post_multiplied = true;
}
if (val & vk::COMPOSITE_ALPHA_INHERIT_BIT_KHR) != 0 {
if !(val & ash::vk::CompositeAlphaFlagsKHR::INHERIT).is_empty() {
result.inherit = true;
}
result
@ -331,11 +356,11 @@ pub struct SupportedSurfaceTransforms {
pub inherit: bool,
}
impl From<vk::SurfaceTransformFlagsKHR> for SupportedSurfaceTransforms {
fn from(val: vk::SurfaceTransformFlagsKHR) -> Self {
impl From<ash::vk::SurfaceTransformFlagsKHR> for SupportedSurfaceTransforms {
fn from(val: ash::vk::SurfaceTransformFlagsKHR) -> Self {
macro_rules! v {
($val:expr, $out:ident, $e:expr, $f:ident) => {
if ($val & $e) != 0 {
if !($val & $e).is_empty() {
$out.$f = true;
}
};
@ -345,52 +370,57 @@ impl From<vk::SurfaceTransformFlagsKHR> for SupportedSurfaceTransforms {
v!(
val,
result,
vk::SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
ash::vk::SurfaceTransformFlagsKHR::IDENTITY,
identity
);
v!(
val,
result,
vk::SURFACE_TRANSFORM_ROTATE_90_BIT_KHR,
ash::vk::SurfaceTransformFlagsKHR::ROTATE_90,
rotate90
);
v!(
val,
result,
vk::SURFACE_TRANSFORM_ROTATE_180_BIT_KHR,
ash::vk::SurfaceTransformFlagsKHR::ROTATE_180,
rotate180
);
v!(
val,
result,
vk::SURFACE_TRANSFORM_ROTATE_270_BIT_KHR,
ash::vk::SurfaceTransformFlagsKHR::ROTATE_270,
rotate270
);
v!(
val,
result,
vk::SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR,
ash::vk::SurfaceTransformFlagsKHR::HORIZONTAL_MIRROR,
horizontal_mirror
);
v!(
val,
result,
vk::SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR,
ash::vk::SurfaceTransformFlagsKHR::HORIZONTAL_MIRROR_ROTATE_90,
horizontal_mirror_rotate90
);
v!(
val,
result,
vk::SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR,
ash::vk::SurfaceTransformFlagsKHR::HORIZONTAL_MIRROR_ROTATE_180,
horizontal_mirror_rotate180
);
v!(
val,
result,
vk::SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR,
ash::vk::SurfaceTransformFlagsKHR::HORIZONTAL_MIRROR_ROTATE_270,
horizontal_mirror_rotate270
);
v!(val, result, vk::SURFACE_TRANSFORM_INHERIT_BIT_KHR, inherit);
v!(
val,
result,
ash::vk::SurfaceTransformFlagsKHR::INHERIT,
inherit
);
result
}
}
@ -579,42 +609,49 @@ impl Default for SurfaceTransform {
/// and perform a manual conversion to that color space from inside your shader.
///
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[repr(u32)]
#[repr(i32)]
pub enum ColorSpace {
SrgbNonLinear = vk::COLOR_SPACE_SRGB_NONLINEAR_KHR,
DisplayP3NonLinear = vk::COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT,
ExtendedSrgbLinear = vk::COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT,
DciP3Linear = vk::COLOR_SPACE_DCI_P3_LINEAR_EXT,
DciP3NonLinear = vk::COLOR_SPACE_DCI_P3_NONLINEAR_EXT,
Bt709Linear = vk::COLOR_SPACE_BT709_LINEAR_EXT,
Bt709NonLinear = vk::COLOR_SPACE_BT709_NONLINEAR_EXT,
Bt2020Linear = vk::COLOR_SPACE_BT2020_LINEAR_EXT,
Hdr10St2084 = vk::COLOR_SPACE_HDR10_ST2084_EXT,
DolbyVision = vk::COLOR_SPACE_DOLBYVISION_EXT,
Hdr10Hlg = vk::COLOR_SPACE_HDR10_HLG_EXT,
AdobeRgbLinear = vk::COLOR_SPACE_ADOBERGB_LINEAR_EXT,
AdobeRgbNonLinear = vk::COLOR_SPACE_ADOBERGB_NONLINEAR_EXT,
PassThrough = vk::COLOR_SPACE_PASS_THROUGH_EXT,
SrgbNonLinear = ash::vk::ColorSpaceKHR::SRGB_NONLINEAR.as_raw(),
DisplayP3NonLinear = ash::vk::ColorSpaceKHR::DISPLAY_P3_NONLINEAR_EXT.as_raw(),
ExtendedSrgbLinear = ash::vk::ColorSpaceKHR::EXTENDED_SRGB_LINEAR_EXT.as_raw(),
DciP3Linear = ash::vk::ColorSpaceKHR::DCI_P3_LINEAR_EXT.as_raw(),
DciP3NonLinear = ash::vk::ColorSpaceKHR::DCI_P3_NONLINEAR_EXT.as_raw(),
Bt709Linear = ash::vk::ColorSpaceKHR::BT709_LINEAR_EXT.as_raw(),
Bt709NonLinear = ash::vk::ColorSpaceKHR::BT709_NONLINEAR_EXT.as_raw(),
Bt2020Linear = ash::vk::ColorSpaceKHR::BT2020_LINEAR_EXT.as_raw(),
Hdr10St2084 = ash::vk::ColorSpaceKHR::HDR10_ST2084_EXT.as_raw(),
DolbyVision = ash::vk::ColorSpaceKHR::DOLBYVISION_EXT.as_raw(),
Hdr10Hlg = ash::vk::ColorSpaceKHR::HDR10_HLG_EXT.as_raw(),
AdobeRgbLinear = ash::vk::ColorSpaceKHR::ADOBERGB_LINEAR_EXT.as_raw(),
AdobeRgbNonLinear = ash::vk::ColorSpaceKHR::ADOBERGB_NONLINEAR_EXT.as_raw(),
PassThrough = ash::vk::ColorSpaceKHR::PASS_THROUGH_EXT.as_raw(),
}
impl From<vk::ColorSpaceKHR> for ColorSpace {
impl From<ColorSpace> for ash::vk::ColorSpaceKHR {
#[inline]
fn from(val: vk::ColorSpaceKHR) -> Self {
fn from(val: ColorSpace) -> Self {
Self::from_raw(val as i32)
}
}
impl From<ash::vk::ColorSpaceKHR> for ColorSpace {
#[inline]
fn from(val: ash::vk::ColorSpaceKHR) -> Self {
match val {
vk::COLOR_SPACE_SRGB_NONLINEAR_KHR => ColorSpace::SrgbNonLinear,
vk::COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT => ColorSpace::DisplayP3NonLinear,
vk::COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT => ColorSpace::ExtendedSrgbLinear,
vk::COLOR_SPACE_DCI_P3_LINEAR_EXT => ColorSpace::DciP3Linear,
vk::COLOR_SPACE_DCI_P3_NONLINEAR_EXT => ColorSpace::DciP3NonLinear,
vk::COLOR_SPACE_BT709_LINEAR_EXT => ColorSpace::Bt709Linear,
vk::COLOR_SPACE_BT709_NONLINEAR_EXT => ColorSpace::Bt709NonLinear,
vk::COLOR_SPACE_BT2020_LINEAR_EXT => ColorSpace::Bt2020Linear,
vk::COLOR_SPACE_HDR10_ST2084_EXT => ColorSpace::Hdr10St2084,
vk::COLOR_SPACE_DOLBYVISION_EXT => ColorSpace::DolbyVision,
vk::COLOR_SPACE_HDR10_HLG_EXT => ColorSpace::Hdr10Hlg,
vk::COLOR_SPACE_ADOBERGB_LINEAR_EXT => ColorSpace::AdobeRgbLinear,
vk::COLOR_SPACE_ADOBERGB_NONLINEAR_EXT => ColorSpace::AdobeRgbNonLinear,
vk::COLOR_SPACE_PASS_THROUGH_EXT => ColorSpace::PassThrough,
ash::vk::ColorSpaceKHR::SRGB_NONLINEAR => ColorSpace::SrgbNonLinear,
ash::vk::ColorSpaceKHR::DISPLAY_P3_NONLINEAR_EXT => ColorSpace::DisplayP3NonLinear,
ash::vk::ColorSpaceKHR::EXTENDED_SRGB_LINEAR_EXT => ColorSpace::ExtendedSrgbLinear,
ash::vk::ColorSpaceKHR::DCI_P3_LINEAR_EXT => ColorSpace::DciP3Linear,
ash::vk::ColorSpaceKHR::DCI_P3_NONLINEAR_EXT => ColorSpace::DciP3NonLinear,
ash::vk::ColorSpaceKHR::BT709_LINEAR_EXT => ColorSpace::Bt709Linear,
ash::vk::ColorSpaceKHR::BT709_NONLINEAR_EXT => ColorSpace::Bt709NonLinear,
ash::vk::ColorSpaceKHR::BT2020_LINEAR_EXT => ColorSpace::Bt2020Linear,
ash::vk::ColorSpaceKHR::HDR10_ST2084_EXT => ColorSpace::Hdr10St2084,
ash::vk::ColorSpaceKHR::DOLBYVISION_EXT => ColorSpace::DolbyVision,
ash::vk::ColorSpaceKHR::HDR10_HLG_EXT => ColorSpace::Hdr10Hlg,
ash::vk::ColorSpaceKHR::ADOBERGB_LINEAR_EXT => ColorSpace::AdobeRgbLinear,
ash::vk::ColorSpaceKHR::ADOBERGB_NONLINEAR_EXT => ColorSpace::AdobeRgbNonLinear,
ash::vk::ColorSpaceKHR::PASS_THROUGH_EXT => ColorSpace::PassThrough,
_ => panic!("Wrong value for color space enum"),
}
}

View File

@ -32,7 +32,6 @@ use crate::check_errors;
use crate::instance::Instance;
use crate::instance::PhysicalDevice;
use crate::swapchain::SupportedSurfaceTransforms;
use crate::vk;
use crate::OomError;
use crate::VulkanObject;
use std::ffi::CStr;
@ -49,35 +48,41 @@ pub struct DisplayPlane {
instance: Arc<Instance>,
physical_device: usize,
index: u32,
properties: vk::DisplayPlanePropertiesKHR,
supported_displays: Vec<vk::DisplayKHR>,
properties: ash::vk::DisplayPlanePropertiesKHR,
supported_displays: Vec<ash::vk::DisplayKHR>,
}
impl DisplayPlane {
/// See the docs of enumerate().
pub fn enumerate_raw(device: PhysicalDevice) -> Result<IntoIter<DisplayPlane>, OomError> {
let vk = device.instance().pointers();
let fns = device.instance().fns();
assert!(device.instance().loaded_extensions().khr_display); // TODO: return error instead
let num = unsafe {
let mut num: u32 = 0;
check_errors(vk.GetPhysicalDeviceDisplayPlanePropertiesKHR(
device.internal_object(),
&mut num,
ptr::null_mut(),
))?;
check_errors(
fns.khr_display
.get_physical_device_display_plane_properties_khr(
device.internal_object(),
&mut num,
ptr::null_mut(),
),
)?;
num
};
let planes: Vec<vk::DisplayPlanePropertiesKHR> = unsafe {
let planes: Vec<ash::vk::DisplayPlanePropertiesKHR> = unsafe {
let mut planes = Vec::with_capacity(num as usize);
let mut num = num;
check_errors(vk.GetPhysicalDeviceDisplayPlanePropertiesKHR(
device.internal_object(),
&mut num,
planes.as_mut_ptr(),
))?;
check_errors(
fns.khr_display
.get_physical_device_display_plane_properties_khr(
device.internal_object(),
&mut num,
planes.as_mut_ptr(),
),
)?;
planes.set_len(num as usize);
planes
};
@ -88,7 +93,7 @@ impl DisplayPlane {
.map(|(index, prop)| {
let num = unsafe {
let mut num: u32 = 0;
check_errors(vk.GetDisplayPlaneSupportedDisplaysKHR(
check_errors(fns.khr_display.get_display_plane_supported_displays_khr(
device.internal_object(),
index as u32,
&mut num,
@ -98,10 +103,10 @@ impl DisplayPlane {
num
};
let supported_displays: Vec<vk::DisplayKHR> = unsafe {
let supported_displays: Vec<ash::vk::DisplayKHR> = unsafe {
let mut displays = Vec::with_capacity(num as usize);
let mut num = num;
check_errors(vk.GetDisplayPlaneSupportedDisplaysKHR(
check_errors(fns.khr_display.get_display_plane_supported_displays_khr(
device.internal_object(),
index as u32,
&mut num,
@ -169,18 +174,18 @@ impl DisplayPlane {
pub struct Display {
instance: Arc<Instance>,
physical_device: usize,
properties: Arc<vk::DisplayPropertiesKHR>, // TODO: Arc because struct isn't clone
properties: Arc<ash::vk::DisplayPropertiesKHR>, // TODO: Arc because struct isn't clone
}
impl Display {
/// See the docs of enumerate().
pub fn enumerate_raw(device: PhysicalDevice) -> Result<IntoIter<Display>, OomError> {
let vk = device.instance().pointers();
let fns = device.instance().fns();
assert!(device.instance().loaded_extensions().khr_display); // TODO: return error instead
let num = unsafe {
let mut num = 0;
check_errors(vk.GetPhysicalDeviceDisplayPropertiesKHR(
check_errors(fns.khr_display.get_physical_device_display_properties_khr(
device.internal_object(),
&mut num,
ptr::null_mut(),
@ -188,10 +193,10 @@ impl Display {
num
};
let displays: Vec<vk::DisplayPropertiesKHR> = unsafe {
let displays: Vec<ash::vk::DisplayPropertiesKHR> = unsafe {
let mut displays = Vec::with_capacity(num as usize);
let mut num = num;
check_errors(vk.GetPhysicalDeviceDisplayPropertiesKHR(
check_errors(fns.khr_display.get_physical_device_display_properties_khr(
device.internal_object(),
&mut num,
displays.as_mut_ptr(),
@ -227,7 +232,7 @@ impl Display {
#[inline]
pub fn name(&self) -> &str {
unsafe {
CStr::from_ptr(self.properties.displayName)
CStr::from_ptr(self.properties.display_name)
.to_str()
.expect("non UTF-8 characters in display name")
}
@ -242,7 +247,7 @@ impl Display {
/// Returns the physical dimensions of the display in millimeters.
#[inline]
pub fn physical_dimensions(&self) -> [u32; 2] {
let ref r = self.properties.physicalDimensions;
let ref r = self.properties.physical_dimensions;
[r.width, r.height]
}
@ -252,35 +257,35 @@ impl Display {
/// > only the "best" resolution.
#[inline]
pub fn physical_resolution(&self) -> [u32; 2] {
let ref r = self.properties.physicalResolution;
let ref r = self.properties.physical_resolution;
[r.width, r.height]
}
/// Returns the transforms supported by this display.
#[inline]
pub fn supported_transforms(&self) -> SupportedSurfaceTransforms {
self.properties.supportedTransforms.into()
self.properties.supported_transforms.into()
}
/// Returns true if TODO.
#[inline]
pub fn plane_reorder_possible(&self) -> bool {
self.properties.planeReorderPossible != 0
self.properties.plane_reorder_possible != 0
}
/// Returns true if TODO.
#[inline]
pub fn persistent_content(&self) -> bool {
self.properties.persistentContent != 0
self.properties.persistent_content != 0
}
/// See the docs of display_modes().
pub fn display_modes_raw(&self) -> Result<IntoIter<DisplayMode>, OomError> {
let vk = self.instance.pointers();
let fns = self.instance.fns();
let num = unsafe {
let mut num = 0;
check_errors(vk.GetDisplayModePropertiesKHR(
check_errors(fns.khr_display.get_display_mode_properties_khr(
self.physical_device().internal_object(),
self.properties.display,
&mut num,
@ -289,10 +294,10 @@ impl Display {
num
};
let modes: Vec<vk::DisplayModePropertiesKHR> = unsafe {
let modes: Vec<ash::vk::DisplayModePropertiesKHR> = unsafe {
let mut modes = Vec::with_capacity(num as usize);
let mut num = num;
check_errors(vk.GetDisplayModePropertiesKHR(
check_errors(fns.khr_display.get_display_mode_properties_khr(
self.physical_device().internal_object(),
self.properties.display,
&mut num,
@ -306,7 +311,7 @@ impl Display {
.into_iter()
.map(|mode| DisplayMode {
display: self.clone(),
display_mode: mode.displayMode,
display_mode: mode.display_mode,
parameters: mode.parameters,
})
.collect::<Vec<_>>()
@ -327,12 +332,10 @@ impl Display {
}
unsafe impl VulkanObject for Display {
type Object = vk::DisplayKHR;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_DISPLAY_KHR;
type Object = ash::vk::DisplayKHR;
#[inline]
fn internal_object(&self) -> vk::DisplayKHR {
fn internal_object(&self) -> ash::vk::DisplayKHR {
self.properties.display
}
}
@ -340,30 +343,29 @@ unsafe impl VulkanObject for Display {
/// Represents a mode on a specific display.
pub struct DisplayMode {
display: Display,
display_mode: vk::DisplayModeKHR,
parameters: vk::DisplayModeParametersKHR,
display_mode: ash::vk::DisplayModeKHR,
parameters: ash::vk::DisplayModeParametersKHR,
}
impl DisplayMode {
/*pub fn new(display: &Display) -> Result<Arc<DisplayMode>, OomError> {
let vk = instance.pointers();
let fns = instance.fns();
assert!(device.instance().loaded_extensions().khr_display); // TODO: return error instead
let parameters = vk::DisplayModeParametersKHR {
visibleRegion: vk::Extent2D { width: , height: },
let parameters = ash::vk::DisplayModeParametersKHR {
visibleRegion: ash::vk::Extent2D { width: , height: },
refreshRate: ,
};
let display_mode = {
let infos = vk::DisplayModeCreateInfoKHR {
sType: vk::STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR,
pNext: ptr::null(),
flags: 0, // reserved
let infos = ash::vk::DisplayModeCreateInfoKHR {
flags: ash::vk::DisplayModeCreateFlags::empty(),
parameters: parameters,
..Default::default()
};
let mut output = mem::uninitialized();
check_errors(vk.CreateDisplayModeKHR(display.device.internal_object(),
check_errors(fns.v1_0.CreateDisplayModeKHR(display.device.internal_object(),
display.display, &infos, ptr::null(),
&mut output))?;
output
@ -385,24 +387,22 @@ impl DisplayMode {
/// Returns the dimensions of the region that is visible on the monitor.
#[inline]
pub fn visible_region(&self) -> [u32; 2] {
let ref d = self.parameters.visibleRegion;
let ref d = self.parameters.visible_region;
[d.width, d.height]
}
/// Returns the refresh rate of this mode.
#[inline]
pub fn refresh_rate(&self) -> u32 {
self.parameters.refreshRate
self.parameters.refresh_rate
}
}
unsafe impl VulkanObject for DisplayMode {
type Object = vk::DisplayModeKHR;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_DISPLAY_MODE_KHR;
type Object = ash::vk::DisplayModeKHR;
#[inline]
fn internal_object(&self) -> vk::DisplayModeKHR {
fn internal_object(&self) -> ash::vk::DisplayModeKHR {
self.display_mode
}
}

View File

@ -67,7 +67,7 @@
//! .. InstanceExtensions::none()
//! };
//!
//! match Instance::new(None, Version::major_minor(1, 1), &extensions, None) {
//! match Instance::new(None, Version::V1_1, &extensions, None) {
//! Ok(i) => i,
//! Err(err) => panic!("Couldn't build instance: {:?}", err)
//! }

View File

@ -8,7 +8,6 @@
// according to those terms.
use crate::swapchain::Swapchain;
use crate::vk;
/// Represents a region on an image.
///
@ -52,15 +51,15 @@ impl RectangleLayer {
}
}
impl From<&RectangleLayer> for vk::RectLayerKHR {
impl From<&RectangleLayer> for ash::vk::RectLayerKHR {
#[inline]
fn from(val: &RectangleLayer) -> Self {
vk::RectLayerKHR {
offset: vk::Offset2D {
ash::vk::RectLayerKHR {
offset: ash::vk::Offset2D {
x: val.offset[0],
y: val.offset[1],
},
extent: vk::Extent2D {
extent: ash::vk::Extent2D {
width: val.extent[0],
height: val.extent[1],
},

View File

@ -18,7 +18,6 @@ use crate::swapchain::display::DisplayMode;
use crate::swapchain::display::DisplayPlane;
use crate::swapchain::Capabilities;
use crate::swapchain::SurfaceSwapchainLock;
use crate::vk;
use crate::Error;
use crate::OomError;
use crate::VulkanObject;
@ -37,7 +36,7 @@ use std::sync::Arc;
pub struct Surface<W> {
window: W,
instance: Arc<Instance>,
surface: vk::SurfaceKHR,
surface: ash::vk::SurfaceKHR,
// If true, a swapchain has been associated to this surface, and that any new swapchain
// creation should be forbidden.
@ -51,7 +50,7 @@ impl<W> Surface<W> {
///
pub unsafe fn from_raw_surface(
instance: Arc<Instance>,
surface: vk::SurfaceKHR,
surface: ash::vk::SurfaceKHR,
win: W,
) -> Surface<W> {
Surface {
@ -92,28 +91,27 @@ impl<W> Surface<W> {
assert!(plane.supports(display_mode.display()));
let instance = display_mode.display().physical_device().instance();
let vk = instance.pointers();
let fns = instance.fns();
let surface = unsafe {
let infos = vk::DisplaySurfaceCreateInfoKHR {
sType: vk::STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR,
pNext: ptr::null(),
flags: 0, // reserved
displayMode: display_mode.internal_object(),
planeIndex: plane.index(),
planeStackIndex: 0, // FIXME: plane.properties.currentStackIndex,
transform: vk::SURFACE_TRANSFORM_IDENTITY_BIT_KHR, // TODO: let user choose
globalAlpha: 0.0, // TODO: let user choose
alphaMode: vk::DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR, // TODO: let user choose
imageExtent: vk::Extent2D {
let infos = ash::vk::DisplaySurfaceCreateInfoKHR {
flags: ash::vk::DisplaySurfaceCreateFlagsKHR::empty(),
display_mode: display_mode.internal_object(),
plane_index: plane.index(),
plane_stack_index: 0, // FIXME: plane.properties.currentStackIndex,
transform: ash::vk::SurfaceTransformFlagsKHR::IDENTITY, // TODO: let user choose
global_alpha: 0.0, // TODO: let user choose
alpha_mode: ash::vk::DisplayPlaneAlphaFlagsKHR::OPAQUE, // TODO: let user choose
image_extent: ash::vk::Extent2D {
// TODO: let user choose
width: display_mode.visible_region()[0],
height: display_mode.visible_region()[1],
},
..Default::default()
};
let mut output = MaybeUninit::uninit();
check_errors(vk.CreateDisplayPlaneSurfaceKHR(
check_errors(fns.khr_display.create_display_plane_surface_khr(
instance.internal_object(),
&infos,
ptr::null(),
@ -145,7 +143,7 @@ impl<W> Surface<W> {
hwnd: *const U,
win: W,
) -> Result<Arc<Surface<W>>, SurfaceCreationError> {
let vk = instance.pointers();
let fns = instance.fns();
if !instance.loaded_extensions().khr_win32_surface {
return Err(SurfaceCreationError::MissingExtension {
@ -154,16 +152,15 @@ impl<W> Surface<W> {
}
let surface = {
let infos = vk::Win32SurfaceCreateInfoKHR {
sType: vk::STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR,
pNext: ptr::null(),
flags: 0, // reserved
let infos = ash::vk::Win32SurfaceCreateInfoKHR {
flags: ash::vk::Win32SurfaceCreateFlagsKHR::empty(),
hinstance: hinstance as *mut _,
hwnd: hwnd as *mut _,
..Default::default()
};
let mut output = MaybeUninit::uninit();
check_errors(vk.CreateWin32SurfaceKHR(
check_errors(fns.khr_win32_surface.create_win32_surface_khr(
instance.internal_object(),
&infos,
ptr::null(),
@ -194,7 +191,7 @@ impl<W> Surface<W> {
window: u32,
win: W,
) -> Result<Arc<Surface<W>>, SurfaceCreationError> {
let vk = instance.pointers();
let fns = instance.fns();
if !instance.loaded_extensions().khr_xcb_surface {
return Err(SurfaceCreationError::MissingExtension {
@ -203,16 +200,15 @@ impl<W> Surface<W> {
}
let surface = {
let infos = vk::XcbSurfaceCreateInfoKHR {
sType: vk::STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR,
pNext: ptr::null(),
flags: 0, // reserved
let infos = ash::vk::XcbSurfaceCreateInfoKHR {
flags: ash::vk::XcbSurfaceCreateFlagsKHR::empty(),
connection: connection as *mut _,
window,
..Default::default()
};
let mut output = MaybeUninit::uninit();
check_errors(vk.CreateXcbSurfaceKHR(
check_errors(fns.khr_xcb_surface.create_xcb_surface_khr(
instance.internal_object(),
&infos,
ptr::null(),
@ -243,7 +239,7 @@ impl<W> Surface<W> {
window: c_ulong,
win: W,
) -> Result<Arc<Surface<W>>, SurfaceCreationError> {
let vk = instance.pointers();
let fns = instance.fns();
if !instance.loaded_extensions().khr_xlib_surface {
return Err(SurfaceCreationError::MissingExtension {
@ -252,16 +248,15 @@ impl<W> Surface<W> {
}
let surface = {
let infos = vk::XlibSurfaceCreateInfoKHR {
sType: vk::STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR,
pNext: ptr::null(),
flags: 0, // reserved
let infos = ash::vk::XlibSurfaceCreateInfoKHR {
flags: ash::vk::XlibSurfaceCreateFlagsKHR::empty(),
dpy: display as *mut _,
window,
..Default::default()
};
let mut output = MaybeUninit::uninit();
check_errors(vk.CreateXlibSurfaceKHR(
check_errors(fns.khr_xlib_surface.create_xlib_surface_khr(
instance.internal_object(),
&infos,
ptr::null(),
@ -292,7 +287,7 @@ impl<W> Surface<W> {
surface: *const S,
win: W,
) -> Result<Arc<Surface<W>>, SurfaceCreationError> {
let vk = instance.pointers();
let fns = instance.fns();
if !instance.loaded_extensions().khr_wayland_surface {
return Err(SurfaceCreationError::MissingExtension {
@ -301,16 +296,15 @@ impl<W> Surface<W> {
}
let surface = {
let infos = vk::WaylandSurfaceCreateInfoKHR {
sType: vk::STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR,
pNext: ptr::null(),
flags: 0, // reserved
let infos = ash::vk::WaylandSurfaceCreateInfoKHR {
flags: ash::vk::WaylandSurfaceCreateFlagsKHR::empty(),
display: display as *mut _,
surface: surface as *mut _,
..Default::default()
};
let mut output = MaybeUninit::uninit();
check_errors(vk.CreateWaylandSurfaceKHR(
check_errors(fns.khr_wayland_surface.create_wayland_surface_khr(
instance.internal_object(),
&infos,
ptr::null(),
@ -338,7 +332,7 @@ impl<W> Surface<W> {
window: *const T,
win: W,
) -> Result<Arc<Surface<W>>, SurfaceCreationError> {
let vk = instance.pointers();
let fns = instance.fns();
if !instance.loaded_extensions().khr_android_surface {
return Err(SurfaceCreationError::MissingExtension {
@ -347,15 +341,14 @@ impl<W> Surface<W> {
}
let surface = {
let infos = vk::AndroidSurfaceCreateInfoKHR {
sType: vk::STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR,
pNext: ptr::null(),
flags: 0, // reserved
let infos = ash::vk::AndroidSurfaceCreateInfoKHR {
flags: ash::vk::AndroidSurfaceCreateFlagsKHR::empty(),
window: window as *mut _,
..Default::default()
};
let mut output = MaybeUninit::uninit();
check_errors(vk.CreateAndroidSurfaceKHR(
check_errors(fns.khr_android_surface.create_android_surface_khr(
instance.internal_object(),
&infos,
ptr::null(),
@ -384,7 +377,7 @@ impl<W> Surface<W> {
view: *const T,
win: W,
) -> Result<Arc<Surface<W>>, SurfaceCreationError> {
let vk = instance.pointers();
let fns = instance.fns();
if !instance.loaded_extensions().mvk_ios_surface {
return Err(SurfaceCreationError::MissingExtension {
@ -393,15 +386,14 @@ impl<W> Surface<W> {
}
let surface = {
let infos = vk::IOSSurfaceCreateInfoMVK {
sType: vk::STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK,
pNext: ptr::null(),
flags: 0, // reserved
pView: view as *const _,
let infos = ash::vk::IOSSurfaceCreateInfoMVK {
flags: ash::vk::IOSSurfaceCreateFlagsMVK::empty(),
p_view: view as *const _,
..Default::default()
};
let mut output = MaybeUninit::uninit();
check_errors(vk.CreateIOSSurfaceMVK(
check_errors(fns.mvk_ios_surface.create_ios_surface_mvk(
instance.internal_object(),
&infos,
ptr::null(),
@ -430,7 +422,7 @@ impl<W> Surface<W> {
view: *const T,
win: W,
) -> Result<Arc<Surface<W>>, SurfaceCreationError> {
let vk = instance.pointers();
let fns = instance.fns();
if !instance.loaded_extensions().mvk_macos_surface {
return Err(SurfaceCreationError::MissingExtension {
@ -439,15 +431,14 @@ impl<W> Surface<W> {
}
let surface = {
let infos = vk::MacOSSurfaceCreateInfoMVK {
sType: vk::STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK,
pNext: ptr::null(),
flags: 0, // reserved
pView: view as *const _,
let infos = ash::vk::MacOSSurfaceCreateInfoMVK {
flags: ash::vk::MacOSSurfaceCreateFlagsMVK::empty(),
p_view: view as *const _,
..Default::default()
};
let mut output = MaybeUninit::uninit();
check_errors(vk.CreateMacOSSurfaceMVK(
check_errors(fns.mvk_macos_surface.create_mac_os_surface_mvk(
instance.internal_object(),
&infos,
ptr::null(),
@ -475,7 +466,7 @@ impl<W> Surface<W> {
window: *const T,
win: W,
) -> Result<Arc<Surface<W>>, SurfaceCreationError> {
let vk = instance.pointers();
let fns = instance.fns();
if !instance.loaded_extensions().nn_vi_surface {
return Err(SurfaceCreationError::MissingExtension {
@ -484,15 +475,14 @@ impl<W> Surface<W> {
}
let surface = {
let infos = vk::ViSurfaceCreateInfoNN {
sType: vk::STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN,
pNext: ptr::null(),
flags: 0, // reserved
let infos = ash::vk::ViSurfaceCreateInfoNN {
flags: ash::vk::ViSurfaceCreateFlagsNN::empty(),
window: window as *mut _,
..Default::default()
};
let mut output = MaybeUninit::uninit();
check_errors(vk.CreateViSurfaceNN(
check_errors(fns.nn_vi_surface.create_vi_surface_nn(
instance.internal_object(),
&infos,
ptr::null(),
@ -513,10 +503,10 @@ impl<W> Surface<W> {
// FIXME: vulkano doesn't check this for the moment!
pub fn is_supported(&self, queue: QueueFamily) -> Result<bool, CapabilitiesError> {
unsafe {
let vk = self.instance.pointers();
let fns = self.instance.fns();
let mut output = MaybeUninit::uninit();
check_errors(vk.GetPhysicalDeviceSurfaceSupportKHR(
check_errors(fns.khr_surface.get_physical_device_surface_support_khr(
queue.physical_device().internal_object(),
queue.id(),
self.surface,
@ -544,21 +534,24 @@ impl<W> Surface<W> {
"Instance mismatch in Surface::capabilities"
);
let vk = self.instance.pointers();
let fns = self.instance.fns();
let caps = {
let mut out: MaybeUninit<vk::SurfaceCapabilitiesKHR> = MaybeUninit::uninit();
check_errors(vk.GetPhysicalDeviceSurfaceCapabilitiesKHR(
device.internal_object(),
self.surface,
out.as_mut_ptr(),
))?;
let mut out: MaybeUninit<ash::vk::SurfaceCapabilitiesKHR> = MaybeUninit::uninit();
check_errors(
fns.khr_surface
.get_physical_device_surface_capabilities_khr(
device.internal_object(),
self.surface,
out.as_mut_ptr(),
),
)?;
out.assume_init()
};
let formats = {
let mut num = 0;
check_errors(vk.GetPhysicalDeviceSurfaceFormatsKHR(
check_errors(fns.khr_surface.get_physical_device_surface_formats_khr(
device.internal_object(),
self.surface,
&mut num,
@ -566,7 +559,7 @@ impl<W> Surface<W> {
))?;
let mut formats = Vec::with_capacity(num as usize);
check_errors(vk.GetPhysicalDeviceSurfaceFormatsKHR(
check_errors(fns.khr_surface.get_physical_device_surface_formats_khr(
device.internal_object(),
self.surface,
&mut num,
@ -578,55 +571,61 @@ impl<W> Surface<W> {
let modes = {
let mut num = 0;
check_errors(vk.GetPhysicalDeviceSurfacePresentModesKHR(
device.internal_object(),
self.surface,
&mut num,
ptr::null_mut(),
))?;
check_errors(
fns.khr_surface
.get_physical_device_surface_present_modes_khr(
device.internal_object(),
self.surface,
&mut num,
ptr::null_mut(),
),
)?;
let mut modes = Vec::with_capacity(num as usize);
check_errors(vk.GetPhysicalDeviceSurfacePresentModesKHR(
device.internal_object(),
self.surface,
&mut num,
modes.as_mut_ptr(),
))?;
check_errors(
fns.khr_surface
.get_physical_device_surface_present_modes_khr(
device.internal_object(),
self.surface,
&mut num,
modes.as_mut_ptr(),
),
)?;
modes.set_len(num as usize);
debug_assert!(modes
.iter()
.find(|&&m| m == vk::PRESENT_MODE_FIFO_KHR)
.find(|&&m| m == ash::vk::PresentModeKHR::FIFO)
.is_some());
debug_assert!(modes.iter().count() > 0);
modes.into_iter().collect()
};
Ok(Capabilities {
min_image_count: caps.minImageCount,
max_image_count: if caps.maxImageCount == 0 {
min_image_count: caps.min_image_count,
max_image_count: if caps.max_image_count == 0 {
None
} else {
Some(caps.maxImageCount)
Some(caps.max_image_count)
},
current_extent: if caps.currentExtent.width == 0xffffffff
&& caps.currentExtent.height == 0xffffffff
current_extent: if caps.current_extent.width == 0xffffffff
&& caps.current_extent.height == 0xffffffff
{
None
} else {
Some([caps.currentExtent.width, caps.currentExtent.height])
Some([caps.current_extent.width, caps.current_extent.height])
},
min_image_extent: [caps.minImageExtent.width, caps.minImageExtent.height],
max_image_extent: [caps.maxImageExtent.width, caps.maxImageExtent.height],
max_image_array_layers: caps.maxImageArrayLayers,
supported_transforms: caps.supportedTransforms.into(),
min_image_extent: [caps.min_image_extent.width, caps.min_image_extent.height],
max_image_extent: [caps.max_image_extent.width, caps.max_image_extent.height],
max_image_array_layers: caps.max_image_array_layers,
supported_transforms: caps.supported_transforms.into(),
current_transform: SupportedSurfaceTransforms::from(caps.currentTransform)
current_transform: SupportedSurfaceTransforms::from(caps.current_transform)
.iter()
.next()
.unwrap(), // TODO:
supported_composite_alpha: caps.supportedCompositeAlpha.into(),
supported_composite_alpha: caps.supported_composite_alpha.into(),
supported_usage_flags: {
let usage = ImageUsage::from(caps.supportedUsageFlags);
let usage = ImageUsage::from(caps.supported_usage_flags);
debug_assert!(usage.color_attachment); // specs say that this must be true
usage
},
@ -636,7 +635,7 @@ impl<W> Surface<W> {
// TODO: Change the way capabilities not supported in vk-sys are handled
Format::try_from(f.format)
.ok()
.map(|format| (format, f.colorSpace.into()))
.map(|format| (format, f.color_space.into()))
})
.collect(),
present_modes: modes,
@ -664,12 +663,10 @@ unsafe impl<W> SurfaceSwapchainLock for Surface<W> {
}
unsafe impl<W> VulkanObject for Surface<W> {
type Object = vk::SurfaceKHR;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_SURFACE_KHR;
type Object = ash::vk::SurfaceKHR;
#[inline]
fn internal_object(&self) -> vk::SurfaceKHR {
fn internal_object(&self) -> ash::vk::SurfaceKHR {
self.surface
}
}
@ -685,8 +682,12 @@ impl<W> Drop for Surface<W> {
#[inline]
fn drop(&mut self) {
unsafe {
let vk = self.instance.pointers();
vk.DestroySurfaceKHR(self.instance.internal_object(), self.surface, ptr::null());
let fns = self.instance.fns();
fns.khr_surface.destroy_surface_khr(
self.instance.internal_object(),
self.surface,
ptr::null(),
);
}
}
}

View File

@ -27,6 +27,7 @@ use crate::image::ImageLayout;
use crate::image::ImageTiling;
use crate::image::ImageType;
use crate::image::ImageUsage;
use crate::image::SampleCount;
use crate::swapchain::CapabilitiesError;
use crate::swapchain::ColorSpace;
use crate::swapchain::CompositeAlpha;
@ -44,7 +45,6 @@ use crate::sync::GpuFuture;
use crate::sync::PipelineStages;
use crate::sync::Semaphore;
use crate::sync::SharingMode;
use crate::vk;
use crate::Error;
use crate::OomError;
use crate::Success;
@ -62,32 +62,29 @@ use std::time::Duration;
/// The way fullscreen exclusivity is handled.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[repr(i32)]
pub enum FullscreenExclusive {
/// Indicates that the driver should determine the appropriate full-screen method
/// by whatever means it deems appropriate.
Default,
Default = ash::vk::FullScreenExclusiveEXT::DEFAULT.as_raw(),
/// Indicates that the driver may use full-screen exclusive mechanisms when available.
/// Such mechanisms may result in better performance and/or the availability of
/// different presentation capabilities, but may require a more disruptive transition
// during swapchain initialization, first presentation and/or destruction.
Allowed,
Allowed = ash::vk::FullScreenExclusiveEXT::ALLOWED.as_raw(),
/// Indicates that the driver should avoid using full-screen mechanisms which rely
/// on disruptive transitions.
Disallowed,
Disallowed = ash::vk::FullScreenExclusiveEXT::DISALLOWED.as_raw(),
/// Indicates the application will manage full-screen exclusive mode by using
/// `Swapchain::acquire_fullscreen_exclusive()` and
/// `Swapchain::release_fullscreen_exclusive()` functions.
AppControlled,
AppControlled = ash::vk::FullScreenExclusiveEXT::APPLICATION_CONTROLLED.as_raw(),
}
impl FullscreenExclusive {
fn vk_sys_enum(&self) -> u32 {
match self {
&Self::Default => vk::FULL_SCREEN_EXCLUSIVE_DEFAUlT_EXT,
&Self::Allowed => vk::FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT,
&Self::Disallowed => vk::FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT,
&Self::AppControlled => vk::FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT,
}
impl From<FullscreenExclusive> for ash::vk::FullScreenExclusiveEXT {
#[inline]
fn from(val: FullscreenExclusive) -> Self {
Self::from_raw(val as i32)
}
}
@ -223,7 +220,7 @@ pub struct Swapchain<W> {
// The surface, which we need to keep alive.
surface: Arc<Surface<W>>,
// The swapchain object.
swapchain: vk::SwapchainKHR,
swapchain: ash::vk::SwapchainKHR,
// The images of this swapchain.
images: Vec<ImageEntry>,
@ -398,10 +395,13 @@ impl<W> Swapchain<W> {
unsafe {
check_errors(
self.device.pointers().AcquireFullScreenExclusiveModeEXT(
self.device.internal_object(),
self.swapchain,
),
self.device
.fns()
.ext_full_screen_exclusive
.acquire_full_screen_exclusive_mode_ext(
self.device.internal_object(),
self.swapchain,
),
)?;
}
@ -421,10 +421,13 @@ impl<W> Swapchain<W> {
unsafe {
check_errors(
self.device.pointers().ReleaseFullScreenExclusiveModeEXT(
self.device.internal_object(),
self.swapchain,
),
self.device
.fns()
.ext_full_screen_exclusive
.release_full_screen_exclusive_mode_ext(
self.device.internal_object(),
self.swapchain,
),
)?;
}
@ -465,12 +468,10 @@ impl<W> Swapchain<W> {
}
unsafe impl<W> VulkanObject for Swapchain<W> {
type Object = vk::SwapchainKHR;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_SWAPCHAIN_KHR;
type Object = ash::vk::SwapchainKHR;
#[inline]
fn internal_object(&self) -> vk::SwapchainKHR {
fn internal_object(&self) -> ash::vk::SwapchainKHR {
self.swapchain
}
}
@ -492,8 +493,12 @@ impl<W> Drop for Swapchain<W> {
#[inline]
fn drop(&mut self) {
unsafe {
let vk = self.device.pointers();
vk.DestroySwapchainKHR(self.device.internal_object(), self.swapchain, ptr::null());
let fns = self.device.fns();
fns.khr_swapchain.destroy_swapchain_khr(
self.device.internal_object(),
self.swapchain,
ptr::null(),
);
self.surface.flag().store(false, Ordering::Release);
}
}
@ -623,9 +628,9 @@ impl<W> SwapchainBuilder<W> {
if layers < 1 || layers > capabilities.max_image_array_layers {
return Err(SwapchainCreationError::UnsupportedArrayLayers);
}
if (vk::ImageUsageFlags::from(usage)
& vk::ImageUsageFlags::from(capabilities.supported_usage_flags))
!= vk::ImageUsageFlags::from(usage)
if (ash::vk::ImageUsageFlags::from(usage)
& ash::vk::ImageUsageFlags::from(capabilities.supported_usage_flags))
!= ash::vk::ImageUsageFlags::from(usage)
{
return Err(SwapchainCreationError::UnsupportedUsageFlags);
}
@ -678,6 +683,9 @@ impl<W> SwapchainBuilder<W> {
let mut surface_full_screen_exclusive_info = None;
// TODO: VK_EXT_FULL_SCREEN_EXCLUSIVE requires these extensions, so they should always
// be enabled if it is. A separate check here is unnecessary; this should be checked at
// device creation.
if device.loaded_extensions().ext_full_screen_exclusive
&& surface
.instance()
@ -688,10 +696,9 @@ impl<W> SwapchainBuilder<W> {
.loaded_extensions()
.khr_get_surface_capabilities2
{
surface_full_screen_exclusive_info = Some(vk::SurfaceFullScreenExclusiveInfoEXT {
sType: vk::STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT,
pNext: ptr::null_mut(),
fullScreenExclusive: fullscreen_exclusive.vk_sys_enum(),
surface_full_screen_exclusive_info = Some(ash::vk::SurfaceFullScreenExclusiveInfoEXT {
full_screen_exclusive: fullscreen_exclusive.into(),
..Default::default()
});
}
@ -721,46 +728,52 @@ impl<W> SwapchainBuilder<W> {
}
}
let vk = device.pointers();
let fns = device.fns();
let swapchain = unsafe {
let (sh_mode, sh_count, sh_indices) = match sharing_mode {
SharingMode::Exclusive => (vk::SHARING_MODE_EXCLUSIVE, 0, ptr::null()),
SharingMode::Concurrent(ref ids) => {
(vk::SHARING_MODE_CONCURRENT, ids.len() as u32, ids.as_ptr())
}
SharingMode::Exclusive => (ash::vk::SharingMode::EXCLUSIVE, 0, ptr::null()),
SharingMode::Concurrent(ref ids) => (
ash::vk::SharingMode::CONCURRENT,
ids.len() as u32,
ids.as_ptr(),
),
};
let infos = vk::SwapchainCreateInfoKHR {
sType: vk::STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
pNext: p_next,
flags: 0, // reserved
let infos = ash::vk::SwapchainCreateInfoKHR {
p_next,
flags: ash::vk::SwapchainCreateFlagsKHR::empty(),
surface: surface.internal_object(),
minImageCount: num_images,
imageFormat: format as u32,
imageColorSpace: color_space as u32,
imageExtent: vk::Extent2D {
min_image_count: num_images,
image_format: format.into(),
image_color_space: color_space.into(),
image_extent: ash::vk::Extent2D {
width: dimensions[0],
height: dimensions[1],
},
imageArrayLayers: layers,
imageUsage: usage.into(),
imageSharingMode: sh_mode,
queueFamilyIndexCount: sh_count,
pQueueFamilyIndices: sh_indices,
preTransform: transform as u32,
compositeAlpha: composite_alpha as u32,
presentMode: present_mode as u32,
clipped: if clipped { vk::TRUE } else { vk::FALSE },
oldSwapchain: if let Some(ref old_swapchain) = old_swapchain {
image_array_layers: layers,
image_usage: usage.into(),
image_sharing_mode: sh_mode,
queue_family_index_count: sh_count,
p_queue_family_indices: sh_indices,
pre_transform: transform.into(),
composite_alpha: composite_alpha.into(),
present_mode: present_mode.into(),
clipped: if clipped {
ash::vk::TRUE
} else {
ash::vk::FALSE
},
old_swapchain: if let Some(ref old_swapchain) = old_swapchain {
old_swapchain.swapchain
} else {
0
ash::vk::SwapchainKHR::null()
},
..Default::default()
};
let mut output = MaybeUninit::uninit();
check_errors(vk.CreateSwapchainKHR(
check_errors(fns.khr_swapchain.create_swapchain_khr(
device.internal_object(),
&infos,
ptr::null(),
@ -771,7 +784,7 @@ impl<W> SwapchainBuilder<W> {
let image_handles = unsafe {
let mut num = 0;
check_errors(vk.GetSwapchainImagesKHR(
check_errors(fns.khr_swapchain.get_swapchain_images_khr(
device.internal_object(),
swapchain,
&mut num,
@ -779,7 +792,7 @@ impl<W> SwapchainBuilder<W> {
))?;
let mut images = Vec::with_capacity(num as usize);
check_errors(vk.GetSwapchainImagesKHR(
check_errors(fns.khr_swapchain.get_swapchain_images_khr(
device.internal_object(),
swapchain,
&mut num,
@ -798,8 +811,16 @@ impl<W> SwapchainBuilder<W> {
array_layers: layers,
};
let img =
UnsafeImage::from_raw(device.clone(), image, usage, format, flags, dims, 1, 1);
let img = UnsafeImage::from_raw(
device.clone(),
image,
usage,
format,
flags,
dims,
SampleCount::Sample1,
1,
);
ImageEntry {
image: img,
@ -1651,7 +1672,7 @@ pub unsafe fn acquire_next_image_raw<W>(
semaphore: Option<&Semaphore>,
fence: Option<&Fence>,
) -> Result<AcquiredImage, AcquireError> {
let vk = swapchain.device.pointers();
let fns = swapchain.device.fns();
let timeout_ns = if let Some(timeout) = timeout {
timeout
@ -1663,14 +1684,20 @@ pub unsafe fn acquire_next_image_raw<W>(
};
let mut out = MaybeUninit::uninit();
let r = check_errors(vk.AcquireNextImageKHR(
swapchain.device.internal_object(),
swapchain.swapchain,
timeout_ns,
semaphore.map(|s| s.internal_object()).unwrap_or(0),
fence.map(|f| f.internal_object()).unwrap_or(0),
out.as_mut_ptr(),
))?;
let r = check_errors(
fns.khr_swapchain.acquire_next_image_khr(
swapchain.device.internal_object(),
swapchain.swapchain,
timeout_ns,
semaphore
.map(|s| s.internal_object())
.unwrap_or(ash::vk::Semaphore::null()),
fence
.map(|f| f.internal_object())
.unwrap_or(ash::vk::Fence::null()),
out.as_mut_ptr(),
),
)?;
let out = out.assume_init();
let (id, suboptimal) = match r {

View File

@ -7,17 +7,15 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use std::mem::MaybeUninit;
use std::ptr;
use std::sync::Arc;
use crate::check_errors;
use crate::device::Device;
use crate::device::DeviceOwned;
use crate::vk;
use crate::OomError;
use crate::Success;
use crate::VulkanObject;
use std::mem::MaybeUninit;
use std::ptr;
use std::sync::Arc;
/// Used to block the GPU execution until an event on the CPU occurs.
///
@ -28,7 +26,7 @@ use crate::VulkanObject;
#[derive(Debug)]
pub struct Event {
// The event.
event: vk::Event,
event: ash::vk::Event,
// The device.
device: Arc<Device>,
must_put_in_pool: bool,
@ -47,8 +45,8 @@ impl Event {
Some(raw_event) => {
unsafe {
// Make sure the event isn't signaled
let vk = device.pointers();
check_errors(vk.ResetEvent(device.internal_object(), raw_event))?;
let fns = device.fns();
check_errors(fns.v1_0.reset_event(device.internal_object(), raw_event))?;
}
Ok(Event {
event: raw_event,
@ -71,18 +69,16 @@ impl Event {
fn alloc_impl(device: Arc<Device>, must_put_in_pool: bool) -> Result<Event, OomError> {
let event = unsafe {
// since the creation is constant, we use a `static` instead of a struct on the stack
static mut INFOS: vk::EventCreateInfo = vk::EventCreateInfo {
sType: vk::STRUCTURE_TYPE_EVENT_CREATE_INFO,
pNext: 0 as *const _, //ptr::null(),
flags: 0, // reserved
let infos = ash::vk::EventCreateInfo {
flags: ash::vk::EventCreateFlags::empty(),
..Default::default()
};
let mut output = MaybeUninit::uninit();
let vk = device.pointers();
check_errors(vk.CreateEvent(
let fns = device.fns();
check_errors(fns.v1_0.create_event(
device.internal_object(),
&INFOS,
&infos,
ptr::null(),
output.as_mut_ptr(),
))?;
@ -100,9 +96,11 @@ impl Event {
#[inline]
pub fn signaled(&self) -> Result<bool, OomError> {
unsafe {
let vk = self.device.pointers();
let result =
check_errors(vk.GetEventStatus(self.device.internal_object(), self.event))?;
let fns = self.device.fns();
let result = check_errors(
fns.v1_0
.get_event_status(self.device.internal_object(), self.event),
)?;
match result {
Success::EventSet => Ok(true),
Success::EventReset => Ok(false),
@ -115,8 +113,11 @@ impl Event {
#[inline]
pub fn set_raw(&mut self) -> Result<(), OomError> {
unsafe {
let vk = self.device.pointers();
check_errors(vk.SetEvent(self.device.internal_object(), self.event))?;
let fns = self.device.fns();
check_errors(
fns.v1_0
.set_event(self.device.internal_object(), self.event),
)?;
Ok(())
}
}
@ -138,8 +139,11 @@ impl Event {
#[inline]
pub fn reset_raw(&mut self) -> Result<(), OomError> {
unsafe {
let vk = self.device.pointers();
check_errors(vk.ResetEvent(self.device.internal_object(), self.event))?;
let fns = self.device.fns();
check_errors(
fns.v1_0
.reset_event(self.device.internal_object(), self.event),
)?;
Ok(())
}
}
@ -164,12 +168,10 @@ unsafe impl DeviceOwned for Event {
}
unsafe impl VulkanObject for Event {
type Object = vk::Event;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_EVENT;
type Object = ash::vk::Event;
#[inline]
fn internal_object(&self) -> vk::Event {
fn internal_object(&self) -> ash::vk::Event {
self.event
}
}
@ -182,8 +184,9 @@ impl Drop for Event {
let raw_event = self.event;
self.device.event_pool().lock().unwrap().push(raw_event);
} else {
let vk = self.device.pointers();
vk.DestroyEvent(self.device.internal_object(), self.event, ptr::null());
let fns = self.device.fns();
fns.v1_0
.destroy_event(self.device.internal_object(), self.event, ptr::null());
}
}
}

View File

@ -7,6 +7,14 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use crate::check_errors;
use crate::device::Device;
use crate::device::DeviceOwned;
use crate::Error;
use crate::OomError;
use crate::SafeDeref;
use crate::Success;
use crate::VulkanObject;
use smallvec::SmallVec;
use std::error;
use std::fmt;
@ -17,16 +25,6 @@ use std::sync::atomic::Ordering;
use std::sync::Arc;
use std::time::Duration;
use crate::check_errors;
use crate::device::Device;
use crate::device::DeviceOwned;
use crate::vk;
use crate::Error;
use crate::OomError;
use crate::SafeDeref;
use crate::Success;
use crate::VulkanObject;
/// A fence is used to know when a command buffer submission has finished its execution.
///
/// When a command buffer accesses a resource, you have to ensure that the CPU doesn't access
@ -37,7 +35,7 @@ pub struct Fence<D = Arc<Device>>
where
D: SafeDeref<Target = Device>,
{
fence: vk::Fence,
fence: ash::vk::Fence,
device: D,
@ -67,8 +65,11 @@ where
Some(raw_fence) => {
unsafe {
// Make sure the fence isn't signaled
let vk = device.pointers();
check_errors(vk.ResetFences(device.internal_object(), 1, &raw_fence))?;
let fns = device.fns();
check_errors(
fns.v1_0
.reset_fences(device.internal_object(), 1, &raw_fence),
)?;
}
Ok(Fence {
fence: raw_fence,
@ -98,19 +99,18 @@ where
fn alloc_impl(device: D, signaled: bool, must_put_in_pool: bool) -> Result<Fence<D>, OomError> {
let fence = unsafe {
let infos = vk::FenceCreateInfo {
sType: vk::STRUCTURE_TYPE_FENCE_CREATE_INFO,
pNext: ptr::null(),
let infos = ash::vk::FenceCreateInfo {
flags: if signaled {
vk::FENCE_CREATE_SIGNALED_BIT
ash::vk::FenceCreateFlags::SIGNALED
} else {
0
ash::vk::FenceCreateFlags::empty()
},
..Default::default()
};
let vk = device.pointers();
let fns = device.fns();
let mut output = MaybeUninit::uninit();
check_errors(vk.CreateFence(
check_errors(fns.v1_0.create_fence(
device.internal_object(),
&infos,
ptr::null(),
@ -135,9 +135,11 @@ where
return Ok(true);
}
let vk = self.device.pointers();
let result =
check_errors(vk.GetFenceStatus(self.device.internal_object(), self.fence))?;
let fns = self.device.fns();
let result = check_errors(
fns.v1_0
.get_fence_status(self.device.internal_object(), self.fence),
)?;
match result {
Success::Success => {
self.signaled.store(true, Ordering::Relaxed);
@ -169,12 +171,12 @@ where
u64::max_value()
};
let vk = self.device.pointers();
let r = check_errors(vk.WaitForFences(
let fns = self.device.fns();
let r = check_errors(fns.v1_0.wait_for_fences(
self.device.internal_object(),
1,
&self.fence,
vk::TRUE,
ash::vk::TRUE,
timeout_ns,
))?;
@ -201,7 +203,7 @@ where
{
let mut device: Option<&Device> = None;
let fences: SmallVec<[vk::Fence; 8]> = iter
let fences: SmallVec<[ash::vk::Fence; 8]> = iter
.into_iter()
.filter_map(|fence| {
match &mut device {
@ -233,12 +235,12 @@ where
let r = if let Some(device) = device {
unsafe {
let vk = device.pointers();
check_errors(vk.WaitForFences(
let fns = device.fns();
check_errors(fns.v1_0.wait_for_fences(
device.internal_object(),
fences.len() as u32,
fences.as_ptr(),
vk::TRUE,
ash::vk::TRUE,
timeout_ns,
))?
}
@ -259,8 +261,11 @@ where
#[inline]
pub fn reset(&mut self) -> Result<(), OomError> {
unsafe {
let vk = self.device.pointers();
check_errors(vk.ResetFences(self.device.internal_object(), 1, &self.fence))?;
let fns = self.device.fns();
check_errors(
fns.v1_0
.reset_fences(self.device.internal_object(), 1, &self.fence),
)?;
self.signaled.store(false, Ordering::Relaxed);
Ok(())
}
@ -279,7 +284,7 @@ where
{
let mut device: Option<&Device> = None;
let fences: SmallVec<[vk::Fence; 8]> = iter
let fences: SmallVec<[ash::vk::Fence; 8]> = iter
.into_iter()
.map(|fence| {
match &mut device {
@ -299,8 +304,8 @@ where
if let Some(device) = device {
unsafe {
let vk = device.pointers();
check_errors(vk.ResetFences(
let fns = device.fns();
check_errors(fns.v1_0.reset_fences(
device.internal_object(),
fences.len() as u32,
fences.as_ptr(),
@ -322,12 +327,10 @@ unsafe impl<D> VulkanObject for Fence<D>
where
D: SafeDeref<Target = Device>,
{
type Object = vk::Fence;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_FENCE;
type Object = ash::vk::Fence;
#[inline]
fn internal_object(&self) -> vk::Fence {
fn internal_object(&self) -> ash::vk::Fence {
self.fence
}
}
@ -343,8 +346,9 @@ where
let raw_fence = self.fence;
self.device.fence_pool().lock().unwrap().push(raw_fence);
} else {
let vk = self.device.pointers();
vk.DestroyFence(self.device.internal_object(), self.fence, ptr::null());
let fns = self.device.fns();
fns.v1_0
.destroy_fence(self.device.internal_object(), self.fence, ptr::null());
}
}
}

View File

@ -7,7 +7,6 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use crate::vk;
use std::ops;
macro_rules! pipeline_stages {
@ -30,10 +29,10 @@ macro_rules! pipeline_stages {
}
}
impl From<PipelineStages> for vk::PipelineStageFlags {
impl From<PipelineStages> for ash::vk::PipelineStageFlags {
#[inline]
fn from(val: PipelineStages) -> Self {
let mut result = 0;
let mut result = ash::vk::PipelineStageFlags::empty();
$(
if val.$elem { result |= $val }
)+
@ -67,13 +66,13 @@ macro_rules! pipeline_stages {
#[repr(u32)]
pub enum PipelineStage {
$(
$var = $val,
$var = $val.as_raw(),
)+
}
impl PipelineStage {
#[inline]
pub fn required_queue_flags(&self) -> vk::QueueFlags {
pub fn required_queue_flags(&self) -> ash::vk::QueueFlags {
match self {
$(
Self::$var => $queue,
@ -84,24 +83,31 @@ macro_rules! pipeline_stages {
);
}
impl From<PipelineStage> for ash::vk::PipelineStageFlags {
#[inline]
fn from(val: PipelineStage) -> Self {
Self::from_raw(val as u32)
}
}
pipeline_stages! {
top_of_pipe, TopOfPipe => vk::PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0;
draw_indirect, DrawIndirect => vk::PIPELINE_STAGE_DRAW_INDIRECT_BIT, vk::QUEUE_GRAPHICS_BIT | vk::QUEUE_COMPUTE_BIT;
vertex_input, VertexInput => vk::PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::QUEUE_GRAPHICS_BIT;
vertex_shader, VertexShader => vk::PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::QUEUE_GRAPHICS_BIT;
tessellation_control_shader, TessellationControlShader => vk::PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT, vk::QUEUE_GRAPHICS_BIT;
tessellation_evaluation_shader, TessellationEvaluationShader => vk::PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT, vk::QUEUE_GRAPHICS_BIT;
geometry_shader, GeometryShader => vk::PIPELINE_STAGE_GEOMETRY_SHADER_BIT, vk::QUEUE_GRAPHICS_BIT;
fragment_shader, FragmentShader => vk::PIPELINE_STAGE_FRAGMENT_SHADER_BIT, vk::QUEUE_GRAPHICS_BIT;
early_fragment_tests, EarlyFragmentTests => vk::PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT, vk::QUEUE_GRAPHICS_BIT;
late_fragment_tests, LateFragmentTests => vk::PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT, vk::QUEUE_GRAPHICS_BIT;
color_attachment_output, ColorAttachmentOutput => vk::PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, vk::QUEUE_GRAPHICS_BIT;
compute_shader, ComputeShader => vk::PIPELINE_STAGE_COMPUTE_SHADER_BIT, vk::QUEUE_COMPUTE_BIT;
transfer, Transfer => vk::PIPELINE_STAGE_TRANSFER_BIT, vk::QUEUE_GRAPHICS_BIT | vk::QUEUE_COMPUTE_BIT | vk::QUEUE_TRANSFER_BIT;
bottom_of_pipe, BottomOfPipe => vk::PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0;
host, Host => vk::PIPELINE_STAGE_HOST_BIT, 0;
all_graphics, AllGraphics => vk::PIPELINE_STAGE_ALL_GRAPHICS_BIT, vk::QUEUE_GRAPHICS_BIT;
all_commands, AllCommands => vk::PIPELINE_STAGE_ALL_COMMANDS_BIT, 0;
top_of_pipe, TopOfPipe => ash::vk::PipelineStageFlags::TOP_OF_PIPE, ash::vk::QueueFlags::empty();
draw_indirect, DrawIndirect => ash::vk::PipelineStageFlags::DRAW_INDIRECT, ash::vk::QueueFlags::GRAPHICS | ash::vk::QueueFlags::COMPUTE;
vertex_input, VertexInput => ash::vk::PipelineStageFlags::VERTEX_INPUT, ash::vk::QueueFlags::GRAPHICS;
vertex_shader, VertexShader => ash::vk::PipelineStageFlags::VERTEX_SHADER, ash::vk::QueueFlags::GRAPHICS;
tessellation_control_shader, TessellationControlShader => ash::vk::PipelineStageFlags::TESSELLATION_CONTROL_SHADER, ash::vk::QueueFlags::GRAPHICS;
tessellation_evaluation_shader, TessellationEvaluationShader => ash::vk::PipelineStageFlags::TESSELLATION_EVALUATION_SHADER, ash::vk::QueueFlags::GRAPHICS;
geometry_shader, GeometryShader => ash::vk::PipelineStageFlags::GEOMETRY_SHADER, ash::vk::QueueFlags::GRAPHICS;
fragment_shader, FragmentShader => ash::vk::PipelineStageFlags::FRAGMENT_SHADER, ash::vk::QueueFlags::GRAPHICS;
early_fragment_tests, EarlyFragmentTests => ash::vk::PipelineStageFlags::EARLY_FRAGMENT_TESTS, ash::vk::QueueFlags::GRAPHICS;
late_fragment_tests, LateFragmentTests => ash::vk::PipelineStageFlags::LATE_FRAGMENT_TESTS, ash::vk::QueueFlags::GRAPHICS;
color_attachment_output, ColorAttachmentOutput => ash::vk::PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT, ash::vk::QueueFlags::GRAPHICS;
compute_shader, ComputeShader => ash::vk::PipelineStageFlags::COMPUTE_SHADER, ash::vk::QueueFlags::COMPUTE;
transfer, Transfer => ash::vk::PipelineStageFlags::TRANSFER, ash::vk::QueueFlags::GRAPHICS | ash::vk::QueueFlags::COMPUTE | ash::vk::QueueFlags::TRANSFER;
bottom_of_pipe, BottomOfPipe => ash::vk::PipelineStageFlags::BOTTOM_OF_PIPE, ash::vk::QueueFlags::empty();
host, Host => ash::vk::PipelineStageFlags::HOST, ash::vk::QueueFlags::empty();
all_graphics, AllGraphics => ash::vk::PipelineStageFlags::ALL_GRAPHICS, ash::vk::QueueFlags::GRAPHICS;
all_commands, AllCommands => ash::vk::PipelineStageFlags::ALL_COMMANDS, ash::vk::QueueFlags::empty();
}
macro_rules! access_flags {
@ -134,10 +140,10 @@ macro_rules! access_flags {
}
}
impl From<AccessFlags> for vk::AccessFlags {
impl From<AccessFlags> for ash::vk::AccessFlags {
#[inline]
fn from(val: AccessFlags) -> Self {
let mut result = 0;
let mut result = ash::vk::AccessFlags::empty();
$(
if val.$elem { result |= $val }
)+
@ -170,23 +176,23 @@ macro_rules! access_flags {
}
access_flags! {
indirect_command_read => vk::ACCESS_INDIRECT_COMMAND_READ_BIT,
index_read => vk::ACCESS_INDEX_READ_BIT,
vertex_attribute_read => vk::ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
uniform_read => vk::ACCESS_UNIFORM_READ_BIT,
input_attachment_read => vk::ACCESS_INPUT_ATTACHMENT_READ_BIT,
shader_read => vk::ACCESS_SHADER_READ_BIT,
shader_write => vk::ACCESS_SHADER_WRITE_BIT,
color_attachment_read => vk::ACCESS_COLOR_ATTACHMENT_READ_BIT,
color_attachment_write => vk::ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
depth_stencil_attachment_read => vk::ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
depth_stencil_attachment_write => vk::ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
transfer_read => vk::ACCESS_TRANSFER_READ_BIT,
transfer_write => vk::ACCESS_TRANSFER_WRITE_BIT,
host_read => vk::ACCESS_HOST_READ_BIT,
host_write => vk::ACCESS_HOST_WRITE_BIT,
memory_read => vk::ACCESS_MEMORY_READ_BIT,
memory_write => vk::ACCESS_MEMORY_WRITE_BIT,
indirect_command_read => ash::vk::AccessFlags::INDIRECT_COMMAND_READ,
index_read => ash::vk::AccessFlags::INDEX_READ,
vertex_attribute_read => ash::vk::AccessFlags::VERTEX_ATTRIBUTE_READ,
uniform_read => ash::vk::AccessFlags::UNIFORM_READ,
input_attachment_read => ash::vk::AccessFlags::INPUT_ATTACHMENT_READ,
shader_read => ash::vk::AccessFlags::SHADER_READ,
shader_write => ash::vk::AccessFlags::SHADER_WRITE,
color_attachment_read => ash::vk::AccessFlags::COLOR_ATTACHMENT_READ,
color_attachment_write => ash::vk::AccessFlags::COLOR_ATTACHMENT_WRITE,
depth_stencil_attachment_read => ash::vk::AccessFlags::DEPTH_STENCIL_ATTACHMENT_READ,
depth_stencil_attachment_write => ash::vk::AccessFlags::DEPTH_STENCIL_ATTACHMENT_WRITE,
transfer_read => ash::vk::AccessFlags::TRANSFER_READ,
transfer_write => ash::vk::AccessFlags::TRANSFER_WRITE,
host_read => ash::vk::AccessFlags::HOST_READ,
host_write => ash::vk::AccessFlags::HOST_WRITE,
memory_read => ash::vk::AccessFlags::MEMORY_READ,
memory_write => ash::vk::AccessFlags::MEMORY_WRITE,
}
impl AccessFlags {

View File

@ -7,17 +7,15 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
use std::mem::MaybeUninit;
use std::ptr;
use std::sync::Arc;
use crate::check_errors;
use crate::device::Device;
use crate::device::DeviceOwned;
use crate::vk;
use crate::OomError;
use crate::SafeDeref;
use crate::VulkanObject;
use std::mem::MaybeUninit;
use std::ptr;
use std::sync::Arc;
/// Used to provide synchronization between command buffers during their execution.
///
@ -28,7 +26,7 @@ pub struct Semaphore<D = Arc<Device>>
where
D: SafeDeref<Target = Device>,
{
semaphore: vk::Semaphore,
semaphore: ash::vk::Semaphore,
device: D,
must_put_in_pool: bool,
}
@ -67,17 +65,16 @@ where
fn alloc_impl(device: D, must_put_in_pool: bool) -> Result<Semaphore<D>, OomError> {
let semaphore = unsafe {
// since the creation is constant, we use a `static` instead of a struct on the stack
static mut INFOS: vk::SemaphoreCreateInfo = vk::SemaphoreCreateInfo {
sType: vk::STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
pNext: 0 as *const _, // ptr::null()
flags: 0, // reserved
let infos = ash::vk::SemaphoreCreateInfo {
flags: ash::vk::SemaphoreCreateFlags::empty(),
..Default::default()
};
let vk = device.pointers();
let fns = device.fns();
let mut output = MaybeUninit::uninit();
check_errors(vk.CreateSemaphore(
check_errors(fns.v1_0.create_semaphore(
device.internal_object(),
&INFOS,
&infos,
ptr::null(),
output.as_mut_ptr(),
))?;
@ -103,12 +100,10 @@ unsafe impl<D> VulkanObject for Semaphore<D>
where
D: SafeDeref<Target = Device>,
{
type Object = vk::Semaphore;
const TYPE: vk::ObjectType = vk::OBJECT_TYPE_SEMAPHORE;
type Object = ash::vk::Semaphore;
#[inline]
fn internal_object(&self) -> vk::Semaphore {
fn internal_object(&self) -> ash::vk::Semaphore {
self.semaphore
}
}
@ -124,8 +119,12 @@ where
let raw_sem = self.semaphore;
self.device.semaphore_pool().lock().unwrap().push(raw_sem);
} else {
let vk = self.device.pointers();
vk.DestroySemaphore(self.device.internal_object(), self.semaphore, ptr::null());
let fns = self.device.fns();
fns.v1_0.destroy_semaphore(
self.device.internal_object(),
self.semaphore,
ptr::null(),
);
}
}
}

View File

@ -17,7 +17,7 @@ macro_rules! instance {
match instance::Instance::new(
None,
Version::major_minor(1, 1),
Version::V1_1,
&instance::InstanceExtensions::none(),
None,
) {

Some files were not shown because too many files have changed in this diff Show More