Mirror of https://github.com/gfx-rs/wgpu.git, synced 2024-11-22 06:44:14 +00:00
hal/vk: buffer and texture creation
This commit is contained in:
parent
9ae5e36ad8
commit
b4380e4576
50  Cargo.lock  generated
@@ -20,6 +20,12 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aae1277d39aeec15cb388266ecc24b11c80469deae6067e17a1a7aa9e5c1f234"

[[package]]
name = "ahash"
version = "0.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "739f4a8db6605981345c5654f3a85b056ce52f37a39d34da03f25bf2151ea16e"

[[package]]
name = "aho-corasick"
version = "0.7.15"
@@ -694,11 +700,53 @@ dependencies = [
|
||||
"weezl",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gpu-alloc"
|
||||
version = "0.4.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cbc1b6ca374e81862526786d9cb42357ce03706ed1b8761730caafd02ab91f3a"
|
||||
dependencies = [
|
||||
"bitflags",
|
||||
"gpu-alloc-types",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gpu-alloc-types"
|
||||
version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "54804d0d6bc9d7f26db4eaec1ad10def69b599315f487d32c334a80d1efe67a5"
|
||||
dependencies = [
|
||||
"bitflags",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gpu-descriptor"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e8a70f1e87a3840ed6a3e99e02c2b861e4dbdf26f0d07e38f42ea5aff46cfce2"
|
||||
dependencies = [
|
||||
"bitflags",
|
||||
"gpu-descriptor-types",
|
||||
"hashbrown",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gpu-descriptor-types"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "363e3677e55ad168fef68cf9de3a4a310b53124c5e784c53a1d70e92d23f2126"
|
||||
dependencies = [
|
||||
"bitflags",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hashbrown"
|
||||
version = "0.9.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d7afe4a420e3fe79967a00898cc1f4db7c8a49a9333a29f8a4bd76a253d5cd04"
|
||||
dependencies = [
|
||||
"ahash",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hermit-abi"
|
||||
@@ -1951,6 +1999,8 @@ dependencies = [
 "block",
 "foreign-types",
 "fxhash",
 "gpu-alloc",
 "gpu-descriptor",
 "inplace_it",
 "libloading 0.7.0",
 "log",
@@ -53,6 +53,9 @@ version = "0.1"
hal = { path = "../wgpu-hal", package = "wgpu-hal", features = ["metal"] }
#Note: could also enable "vulkan" for Vulkan Portability

[target.'cfg(all(not(target_arch = "wasm32"), all(unix, not(target_os = "ios"), not(target_os = "macos"))))'.dependencies]
hal = { path = "../wgpu-hal", package = "wgpu-hal", features = ["vulkan"] }

[dev-dependencies]
loom = "0.3"
@@ -13,11 +13,12 @@ license = "MIT OR Apache-2.0"

[features]
default = []
metal = ["naga/msl-out", "block", "foreign-types", "mtl", "objc", "parking_lot"]
vulkan = ["naga/spv-out", "ash", "libloading", "inplace_it", "renderdoc-sys"]
metal = ["naga/msl-out", "block", "foreign-types", "mtl", "objc"]
vulkan = ["naga/spv-out", "ash", "gpu-alloc", "gpu-descriptor", "libloading", "inplace_it", "renderdoc-sys"]

[dependencies]
bitflags = "1.0"
parking_lot = "0.11"
raw-window-handle = "0.3"
thiserror = "1"
wgt = { package = "wgpu-types", path = "../wgpu-types" }
@@ -27,7 +28,6 @@ arrayvec = "0.5"
fxhash = "0.2.1"
libloading = { version = "0.7", optional = true }
log = "0.4"
parking_lot = { version = "0.11", optional = true }
# backend: Metal
block = { version = "0.1", optional = true }
foreign-types = { version = "0.3", optional = true }
@@ -35,6 +35,8 @@ mtl = { package = "metal", version = "0.22", optional = true }
objc = { version = "0.2.5", optional = true }
# backend: Vulkan
ash = { version = "0.32", optional = true }
gpu-alloc = { version = "0.4", optional = true }
gpu-descriptor = { version = "0.1", optional = true }
inplace_it = { version ="0.3.3", optional = true }
renderdoc-sys = { version = "0.7.1", optional = true }
@@ -872,7 +872,7 @@ pub struct RenderPipelineDescriptor<'a, A: Api> {

/// Specifies how the alpha channel of the textures should be handled during
/// compositing.
#[derive(Debug, Clone)]
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum CompositeAlphaMode {
    /// The alpha channel, if it exists, of the textures is ignored in the
    /// compositing process. Instead, the textures is treated as if it has a
@@ -5,10 +5,12 @@ use ash::{
    version::{DeviceV1_0, InstanceV1_0},
    vk,
};
use parking_lot::Mutex;

use std::{ffi::CStr, mem, ptr, sync::Arc};

const fn indexing_features() -> wgt::Features {
//TODO: const fn?
fn indexing_features() -> wgt::Features {
    wgt::Features::UNIFORM_BUFFER_ARRAY_DYNAMIC_INDEXING
        | wgt::Features::SAMPLED_TEXTURE_ARRAY_DYNAMIC_INDEXING
        | wgt::Features::STORAGE_BUFFER_ARRAY_DYNAMIC_INDEXING
@@ -29,18 +31,21 @@ unsafe impl Sync for PhysicalDeviceFeatures {}

impl PhysicalDeviceFeatures {
    /// Add the members of `self` into `info.enabled_features` and its `p_next` chain.
    fn add_to_device_create_builder<'a>(&'a mut self, info: &mut vk::DeviceCreateInfoBuilder<'a>) {
        *info = info.enabled_features(&self.core);

    fn add_to_device_create_builder<'a>(
        &'a mut self,
        mut info: vk::DeviceCreateInfoBuilder<'a>,
    ) -> vk::DeviceCreateInfoBuilder<'a> {
        info = info.enabled_features(&self.core);
        if let Some(ref mut feature) = self.vulkan_1_2 {
            *info = info.push_next(feature);
            info = info.push_next(feature);
        }
        if let Some(ref mut feature) = self.descriptor_indexing {
            *info = info.push_next(feature);
            info = info.push_next(feature);
        }
        if let Some(ref mut feature) = self.imageless_framebuffer {
            *info = info.push_next(feature);
            info = info.push_next(feature);
        }
        info
    }

    /// Create a `PhysicalDeviceFeatures` that will be used to create a logical device.
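Side note on the refactor above: instead of writing through a `&mut vk::DeviceCreateInfoBuilder`, the function now consumes the builder and returns it, so each `push_next` call simply rebinds a local. A minimal, self-contained sketch of that take-and-return pattern (the types here are illustrative stand-ins, not the ash or wgpu-hal API):

struct Builder<'a> {
    parts: Vec<&'a str>,
}

impl<'a> Builder<'a> {
    // Consume the builder and hand back the extended one, mirroring
    // how `enabled_features` and `push_next` are chained above.
    fn push(mut self, part: &'a str) -> Builder<'a> {
        self.parts.push(part);
        self
    }
}

struct Features {
    core: String,
    vulkan_1_2: Option<String>,
}

impl Features {
    // Same shape as `add_to_device_create_builder`: take the builder by
    // value, thread it through the optional extensions, and return it.
    fn add_to<'a>(&'a self, mut info: Builder<'a>) -> Builder<'a> {
        info = info.push(&self.core);
        if let Some(ref ext) = self.vulkan_1_2 {
            info = info.push(ext);
        }
        info
    }
}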
@@ -499,7 +504,7 @@ impl super::Instance {
            backend: wgt::Backend::Vulkan,
        };

        let (mut available_features, downlevel_flags) = phd_features.to_wgpu(&phd_capabilities);
        let (available_features, downlevel_flags) = phd_features.to_wgpu(&phd_capabilities);
        {
            use crate::aux::db;
            // see https://github.com/gfx-rs/gfx/issues/1930
@@ -544,18 +549,29 @@ impl super::Instance {
|
||||
|| phd_capabilities.supports_extension(vk::KhrImagelessFramebufferFn::name()),
|
||||
image_view_usage: phd_capabilities.properties.api_version >= vk::API_VERSION_1_1
|
||||
|| phd_capabilities.supports_extension(vk::KhrMaintenance2Fn::name()),
|
||||
texture_d24: self
|
||||
.shared
|
||||
.raw
|
||||
.get_physical_device_format_properties(phd, vk::Format::X8_D24_UNORM_PACK32)
|
||||
.optimal_tiling_features
|
||||
.contains(vk::FormatFeatureFlags::DEPTH_STENCIL_ATTACHMENT),
|
||||
texture_d24_s8: self
|
||||
.shared
|
||||
.raw
|
||||
.get_physical_device_format_properties(phd, vk::Format::D24_UNORM_S8_UINT)
|
||||
.optimal_tiling_features
|
||||
.contains(vk::FormatFeatureFlags::DEPTH_STENCIL_ATTACHMENT),
|
||||
texture_d24: unsafe {
|
||||
self.shared
|
||||
.raw
|
||||
.get_physical_device_format_properties(phd, vk::Format::X8_D24_UNORM_PACK32)
|
||||
.optimal_tiling_features
|
||||
.contains(vk::FormatFeatureFlags::DEPTH_STENCIL_ATTACHMENT)
|
||||
},
|
||||
texture_d24_s8: unsafe {
|
||||
self.shared
|
||||
.raw
|
||||
.get_physical_device_format_properties(phd, vk::Format::D24_UNORM_S8_UINT)
|
||||
.optimal_tiling_features
|
||||
.contains(vk::FormatFeatureFlags::DEPTH_STENCIL_ATTACHMENT)
|
||||
},
|
||||
};
|
||||
|
||||
let capabilities = crate::Capabilities {
|
||||
limits: phd_capabilities.to_wgpu_limits(),
|
||||
alignments: phd_capabilities.to_hal_alignments(),
|
||||
downlevel: wgt::DownlevelCapabilities {
|
||||
flags: downlevel_flags,
|
||||
shader_model: wgt::ShaderModel::Sm5, //TODO?
|
||||
},
|
||||
};
|
||||
|
||||
let adapter = super::Adapter {
|
||||
@@ -574,15 +590,6 @@ impl super::Instance {
            private_caps,
        };

        let capabilities = crate::Capabilities {
            limits: phd_capabilities.to_wgpu_limits(),
            alignments: phd_capabilities.to_hal_alignments(),
            downlevel: wgt::DownlevelCapabilities {
                flags: downlevel_flags,
                shader_model: wgt::ShaderModel::Sm5, //TODO?
            },
        };

        Some(crate::ExposedAdapter {
            adapter,
            info,
@@ -614,22 +621,19 @@ impl crate::Adapter<super::Api> for super::Adapter {
            supported_extensions
        };

        let valid_ash_memory_types = {
            let mem_properties = self
                .instance
                .raw
                .get_physical_device_memory_properties(self.raw);
            mem_properties.memory_types[..mem_properties.memory_type_count as usize]
                .iter()
                .enumerate()
                .fold(0, |u, (i, mem)| {
                    if self.known_memory_flags.contains(mem.property_flags) {
                        u | (1 << i)
                    } else {
                        u
                    }
                })
        };
        let mem_properties = self
            .instance
            .raw
            .get_physical_device_memory_properties(self.raw);
        let memory_types =
            &mem_properties.memory_types[..mem_properties.memory_type_count as usize];
        let valid_ash_memory_types = memory_types.iter().enumerate().fold(0, |u, (i, mem)| {
            if self.known_memory_flags.contains(mem.property_flags) {
                u | (1 << i)
            } else {
                u
            }
        });

        // Create device
        let raw_device = {
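The fold above condenses the physical device's memory types into a bitmask: bit `i` is set exactly when memory type `i` carries only property flags the backend understands, and the mask is later intersected with the `memory_type_bits` reported for each buffer or image. A standalone sketch of the same idea with plain integers (the values are made up for illustration):

fn mask_of_known_types(known_flags: u32, type_flags: &[u32]) -> u32 {
    type_flags
        .iter()
        .enumerate()
        .fold(0u32, |mask, (i, &flags)| {
            // keep the type only if every flag it advertises is understood
            if flags & !known_flags == 0 {
                mask | (1 << i)
            } else {
                mask
            }
        })
}

fn main() {
    // types 0 and 2 use only known flags, type 1 does not -> mask 0b101
    assert_eq!(mask_of_known_types(0b0011, &[0b0001, 0b0100, 0b0011]), 0b101);
}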
@@ -646,7 +650,7 @@ impl crate::Adapter<super::Api> for super::Adapter {
                })
                .collect::<Vec<_>>();

            let enabled_phd_features =
            let mut enabled_phd_features =
                PhysicalDeviceFeatures::from_extensions_and_requested_features(
                    self.phd_capabilities.properties.api_version,
                    &enabled_extensions,
@@ -654,10 +658,10 @@ impl crate::Adapter<super::Api> for super::Adapter {
                    self.downlevel_flags,
                    &self.private_caps,
                );
            let mut info = vk::DeviceCreateInfo::builder()
            let pre_info = vk::DeviceCreateInfo::builder()
                .queue_create_infos(&family_infos)
                .enabled_extension_names(&str_pointers);
            enabled_phd_features.add_to_device_create_builder(&mut info);
            let info = enabled_phd_features.add_to_device_create_builder(pre_info);

            self.instance.raw.create_device(self.raw, &info, None)?
        };
@@ -709,6 +713,34 @@ impl crate::Adapter<super::Api> for super::Adapter {

        log::info!("Private capabilities: {:?}", self.private_caps);

        let mem_allocator = {
            let limits = self.phd_capabilities.properties.limits;
            let config = gpu_alloc::Config::i_am_prototyping(); //TODO
            let properties = gpu_alloc::DeviceProperties {
                max_memory_allocation_count: limits.max_memory_allocation_count,
                max_memory_allocation_size: u64::max_value(), // TODO
                non_coherent_atom_size: limits.non_coherent_atom_size,
                memory_types: memory_types
                    .iter()
                    .map(|memory_type| gpu_alloc::MemoryType {
                        props: gpu_alloc::MemoryPropertyFlags::from_bits_truncate(
                            memory_type.property_flags.as_raw() as u8,
                        ),
                        heap: memory_type.heap_index,
                    })
                    .collect(),
                memory_heaps: mem_properties.memory_heaps
                    [..mem_properties.memory_heap_count as usize]
                    .iter()
                    .map(|&memory_heap| gpu_alloc::MemoryHeap {
                        size: memory_heap.size,
                    })
                    .collect(),
                buffer_device_address: false,
            };
            gpu_alloc::GpuAllocator::new(config, properties)
        };

        let device = super::Device {
            shared: Arc::new(super::DeviceShared {
                raw: raw_device,
@ -721,6 +753,7 @@ impl crate::Adapter<super::Api> for super::Adapter {
|
||||
private_caps: self.private_caps.clone(),
|
||||
timestamp_period: self.phd_capabilities.properties.limits.timestamp_period,
|
||||
}),
|
||||
mem_allocator: Mutex::new(mem_allocator),
|
||||
valid_ash_memory_types,
|
||||
naga_options,
|
||||
};
|
||||
|
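The new `mem_allocator: Mutex<...>` field exists because the HAL device methods take `&self`, while `GpuAllocator::alloc`/`dealloc` need exclusive access; each buffer or texture creation therefore locks the allocator for a short critical section. A rough sketch of that shape (stand-in types, std `Mutex` instead of the `parking_lot` one used in the diff):

use std::sync::Mutex;

struct Allocator; // stand-in for gpu_alloc::GpuAllocator<vk::DeviceMemory>

impl Allocator {
    fn alloc(&mut self) { /* suballocate a block */ }
}

struct Device {
    mem_allocator: Mutex<Allocator>,
}

impl Device {
    // &self is enough: the interior mutability lives in the Mutex.
    fn create_buffer(&self) {
        self.mem_allocator.lock().unwrap().alloc();
    }
}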
173  wgpu-hal/src/vulkan/command.rs  Normal file
@@ -0,0 +1,173 @@
|
||||
use super::Resource;
|
||||
use std::ops::Range; //TEMP
|
||||
|
||||
impl crate::CommandBuffer<super::Api> for super::Encoder {
|
||||
unsafe fn finish(&mut self) {}
|
||||
|
||||
unsafe fn transition_buffers<'a, T>(&mut self, barriers: T)
|
||||
where
|
||||
T: Iterator<Item = crate::BufferBarrier<'a, super::Api>>,
|
||||
{
|
||||
}
|
||||
|
||||
unsafe fn transition_textures<'a, T>(&mut self, barriers: T)
|
||||
where
|
||||
T: Iterator<Item = crate::TextureBarrier<'a, super::Api>>,
|
||||
{
|
||||
}
|
||||
|
||||
unsafe fn fill_buffer(&mut self, buffer: &super::Buffer, range: crate::MemoryRange, value: u8) {
|
||||
}
|
||||
|
||||
unsafe fn copy_buffer_to_buffer<T>(
|
||||
&mut self,
|
||||
src: &super::Buffer,
|
||||
dst: &super::Buffer,
|
||||
regions: T,
|
||||
) {
|
||||
}
|
||||
|
||||
unsafe fn copy_texture_to_texture<T>(
|
||||
&mut self,
|
||||
src: &super::Texture,
|
||||
src_usage: crate::TextureUse,
|
||||
dst: &super::Texture,
|
||||
regions: T,
|
||||
) {
|
||||
}
|
||||
|
||||
unsafe fn copy_buffer_to_texture<T>(
|
||||
&mut self,
|
||||
src: &super::Buffer,
|
||||
dst: &super::Texture,
|
||||
regions: T,
|
||||
) {
|
||||
}
|
||||
|
||||
unsafe fn copy_texture_to_buffer<T>(
|
||||
&mut self,
|
||||
src: &super::Texture,
|
||||
src_usage: crate::TextureUse,
|
||||
dst: &super::Buffer,
|
||||
regions: T,
|
||||
) {
|
||||
}
|
||||
|
||||
unsafe fn begin_query(&mut self, set: &Resource, index: u32) {}
|
||||
unsafe fn end_query(&mut self, set: &Resource, index: u32) {}
|
||||
unsafe fn write_timestamp(&mut self, set: &Resource, index: u32) {}
|
||||
unsafe fn reset_queries(&mut self, set: &Resource, range: Range<u32>) {}
|
||||
unsafe fn copy_query_results(
|
||||
&mut self,
|
||||
set: &Resource,
|
||||
range: Range<u32>,
|
||||
buffer: &super::Buffer,
|
||||
offset: wgt::BufferAddress,
|
||||
) {
|
||||
}
|
||||
|
||||
// render
|
||||
|
||||
unsafe fn begin_render_pass(&mut self, desc: &crate::RenderPassDescriptor<super::Api>) {}
|
||||
unsafe fn end_render_pass(&mut self) {}
|
||||
|
||||
unsafe fn set_bind_group(
|
||||
&mut self,
|
||||
layout: &Resource,
|
||||
index: u32,
|
||||
group: &Resource,
|
||||
dynamic_offsets: &[wgt::DynamicOffset],
|
||||
) {
|
||||
}
|
||||
unsafe fn set_push_constants(
|
||||
&mut self,
|
||||
layout: &Resource,
|
||||
stages: wgt::ShaderStage,
|
||||
offset: u32,
|
||||
data: &[u32],
|
||||
) {
|
||||
}
|
||||
|
||||
unsafe fn insert_debug_marker(&mut self, label: &str) {}
|
||||
unsafe fn begin_debug_marker(&mut self, group_label: &str) {}
|
||||
unsafe fn end_debug_marker(&mut self) {}
|
||||
|
||||
unsafe fn set_render_pipeline(&mut self, pipeline: &Resource) {}
|
||||
|
||||
unsafe fn set_index_buffer<'a>(
|
||||
&mut self,
|
||||
binding: crate::BufferBinding<'a, super::Api>,
|
||||
format: wgt::IndexFormat,
|
||||
) {
|
||||
}
|
||||
unsafe fn set_vertex_buffer<'a>(
|
||||
&mut self,
|
||||
index: u32,
|
||||
binding: crate::BufferBinding<'a, super::Api>,
|
||||
) {
|
||||
}
|
||||
unsafe fn set_viewport(&mut self, rect: &crate::Rect<f32>, depth_range: Range<f32>) {}
|
||||
unsafe fn set_scissor_rect(&mut self, rect: &crate::Rect<u32>) {}
|
||||
unsafe fn set_stencil_reference(&mut self, value: u32) {}
|
||||
unsafe fn set_blend_constants(&mut self, color: &wgt::Color) {}
|
||||
|
||||
unsafe fn draw(
|
||||
&mut self,
|
||||
start_vertex: u32,
|
||||
vertex_count: u32,
|
||||
start_instance: u32,
|
||||
instance_count: u32,
|
||||
) {
|
||||
}
|
||||
unsafe fn draw_indexed(
|
||||
&mut self,
|
||||
start_index: u32,
|
||||
index_count: u32,
|
||||
base_vertex: i32,
|
||||
start_instance: u32,
|
||||
instance_count: u32,
|
||||
) {
|
||||
}
|
||||
unsafe fn draw_indirect(
|
||||
&mut self,
|
||||
buffer: &super::Buffer,
|
||||
offset: wgt::BufferAddress,
|
||||
draw_count: u32,
|
||||
) {
|
||||
}
|
||||
unsafe fn draw_indexed_indirect(
|
||||
&mut self,
|
||||
buffer: &super::Buffer,
|
||||
offset: wgt::BufferAddress,
|
||||
draw_count: u32,
|
||||
) {
|
||||
}
|
||||
unsafe fn draw_indirect_count(
|
||||
&mut self,
|
||||
buffer: &super::Buffer,
|
||||
offset: wgt::BufferAddress,
|
||||
count_buffer: &super::Buffer,
|
||||
count_offset: wgt::BufferAddress,
|
||||
max_count: u32,
|
||||
) {
|
||||
}
|
||||
unsafe fn draw_indexed_indirect_count(
|
||||
&mut self,
|
||||
buffer: &super::Buffer,
|
||||
offset: wgt::BufferAddress,
|
||||
count_buffer: &super::Buffer,
|
||||
count_offset: wgt::BufferAddress,
|
||||
max_count: u32,
|
||||
) {
|
||||
}
|
||||
|
||||
// compute
|
||||
|
||||
unsafe fn begin_compute_pass(&mut self, desc: &crate::ComputePassDescriptor) {}
|
||||
unsafe fn end_compute_pass(&mut self) {}
|
||||
|
||||
unsafe fn set_compute_pipeline(&mut self, pipeline: &Resource) {}
|
||||
|
||||
unsafe fn dispatch(&mut self, count: [u32; 3]) {}
|
||||
unsafe fn dispatch_indirect(&mut self, buffer: &super::Buffer, offset: wgt::BufferAddress) {}
|
||||
}
|
@@ -157,6 +157,14 @@ pub fn map_vk_image_usage(usage: vk::ImageUsageFlags) -> crate::TextureUse {
    bits
}

pub fn map_texture_dimension(dim: wgt::TextureDimension) -> vk::ImageType {
    match dim {
        wgt::TextureDimension::D1 => vk::ImageType::TYPE_1D,
        wgt::TextureDimension::D2 => vk::ImageType::TYPE_2D,
        wgt::TextureDimension::D3 => vk::ImageType::TYPE_3D,
    }
}

pub fn map_index_format(index_format: wgt::IndexFormat) -> vk::IndexType {
    match index_format {
        wgt::IndexFormat::Uint16 => vk::IndexType::UINT16,
@@ -189,15 +197,22 @@ pub fn map_origin(origin: wgt::Origin3d, texture_dim: wgt::TextureDimension) ->
    }
}

pub fn map_extent(extent: wgt::Extent3d, texture_dim: wgt::TextureDimension) -> vk::Extent3D {
    vk::Extent3D {
        width: extent.width,
        height: extent.height,
        depth: match texture_dim {
            wgt::TextureDimension::D3 => extent.depth_or_array_layers,
            _ => 1,
pub fn map_extent(
    extent: wgt::Extent3d,
    texture_dim: wgt::TextureDimension,
) -> (u32, vk::Extent3D) {
    let (depth, array_layers) = match texture_dim {
        wgt::TextureDimension::D3 => (extent.depth_or_array_layers, 1),
        _ => (1, extent.depth_or_array_layers),
    };
    (
        array_layers,
        vk::Extent3D {
            width: extent.width,
            height: extent.height,
            depth,
        },
    }
    )
}

pub fn map_attachment_ops(
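The reworked `map_extent` also decides how `depth_or_array_layers` is split: a 3D texture turns it into the Vulkan extent's depth with a single array layer, while 1D/2D textures keep depth 1 and use it as the array layer count (which `create_texture` then passes to `array_layers`). A tiny standalone illustration with local stand-in types, not the wgt/ash ones:

#[allow(dead_code)]
enum Dim {
    D1,
    D2,
    D3,
}

// Returns (array_layers, depth), mirroring the (u32, vk::Extent3D) split above.
fn split(depth_or_array_layers: u32, dim: Dim) -> (u32, u32) {
    match dim {
        Dim::D3 => (1, depth_or_array_layers),
        _ => (depth_or_array_layers, 1),
    }
}

fn main() {
    assert_eq!(split(6, Dim::D2), (6, 1)); // 2D array / cube-compatible
    assert_eq!(split(64, Dim::D3), (1, 64)); // volume texture
}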
@@ -262,3 +277,29 @@ pub fn map_vk_composite_alpha(flags: vk::CompositeAlphaFlagsKHR) -> Vec<crate::C
    }
    modes
}

pub fn map_buffer_usage(usage: crate::BufferUse) -> vk::BufferUsageFlags {
    let mut flags = vk::BufferUsageFlags::empty();
    if usage.contains(crate::BufferUse::COPY_SRC) {
        flags |= vk::BufferUsageFlags::TRANSFER_SRC;
    }
    if usage.contains(crate::BufferUse::COPY_DST) {
        flags |= vk::BufferUsageFlags::TRANSFER_DST;
    }
    if usage.contains(crate::BufferUse::UNIFORM) {
        flags |= vk::BufferUsageFlags::UNIFORM_BUFFER;
    }
    if usage.intersects(crate::BufferUse::STORAGE_LOAD | crate::BufferUse::STORAGE_STORE) {
        flags |= vk::BufferUsageFlags::STORAGE_BUFFER;
    }
    if usage.contains(crate::BufferUse::INDEX) {
        flags |= vk::BufferUsageFlags::INDEX_BUFFER;
    }
    if usage.contains(crate::BufferUse::VERTEX) {
        flags |= vk::BufferUsageFlags::VERTEX_BUFFER;
    }
    if usage.contains(crate::BufferUse::INDIRECT) {
        flags |= vk::BufferUsageFlags::INDIRECT_BUFFER;
    }
    flags
}
@@ -2,7 +2,128 @@ use super::conv;
|
||||
|
||||
use ash::{extensions::khr, version::DeviceV1_0, vk};
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::{ptr::NonNull, sync::Arc};
|
||||
|
||||
impl gpu_alloc::MemoryDevice<vk::DeviceMemory> for super::DeviceShared {
|
||||
unsafe fn allocate_memory(
|
||||
&self,
|
||||
size: u64,
|
||||
memory_type: u32,
|
||||
flags: gpu_alloc::AllocationFlags,
|
||||
) -> Result<vk::DeviceMemory, gpu_alloc::OutOfMemory> {
|
||||
let mut info = vk::MemoryAllocateInfo::builder()
|
||||
.allocation_size(size)
|
||||
.memory_type_index(memory_type);
|
||||
|
||||
let mut info_flags;
|
||||
|
||||
if flags.contains(gpu_alloc::AllocationFlags::DEVICE_ADDRESS) {
|
||||
info_flags = vk::MemoryAllocateFlagsInfo::builder()
|
||||
.flags(vk::MemoryAllocateFlags::DEVICE_ADDRESS);
|
||||
info = info.push_next(&mut info_flags);
|
||||
}
|
||||
|
||||
match self.raw.allocate_memory(&info, None) {
|
||||
Ok(memory) => Ok(memory),
|
||||
Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
|
||||
Err(gpu_alloc::OutOfMemory::OutOfDeviceMemory)
|
||||
}
|
||||
Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
|
||||
Err(gpu_alloc::OutOfMemory::OutOfHostMemory)
|
||||
}
|
||||
Err(vk::Result::ERROR_TOO_MANY_OBJECTS) => panic!("Too many objects"),
|
||||
Err(err) => panic!("Unexpected Vulkan error: `{}`", err),
|
||||
}
|
||||
}
|
||||
|
||||
unsafe fn deallocate_memory(&self, memory: vk::DeviceMemory) {
|
||||
self.raw.free_memory(memory, None);
|
||||
}
|
||||
|
||||
unsafe fn map_memory(
|
||||
&self,
|
||||
memory: &mut vk::DeviceMemory,
|
||||
offset: u64,
|
||||
size: u64,
|
||||
) -> Result<NonNull<u8>, gpu_alloc::DeviceMapError> {
|
||||
match self
|
||||
.raw
|
||||
.map_memory(*memory, offset, size, vk::MemoryMapFlags::empty())
|
||||
{
|
||||
Ok(ptr) => {
|
||||
Ok(NonNull::new(ptr as *mut u8)
|
||||
.expect("Pointer to memory mapping must not be null"))
|
||||
}
|
||||
Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
|
||||
Err(gpu_alloc::DeviceMapError::OutOfDeviceMemory)
|
||||
}
|
||||
Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
|
||||
Err(gpu_alloc::DeviceMapError::OutOfHostMemory)
|
||||
}
|
||||
Err(vk::Result::ERROR_MEMORY_MAP_FAILED) => Err(gpu_alloc::DeviceMapError::MapFailed),
|
||||
Err(err) => panic!("Unexpected Vulkan error: `{}`", err),
|
||||
}
|
||||
}
|
||||
|
||||
unsafe fn unmap_memory(&self, memory: &mut vk::DeviceMemory) {
|
||||
self.raw.unmap_memory(*memory);
|
||||
}
|
||||
|
||||
unsafe fn invalidate_memory_ranges(
|
||||
&self,
|
||||
ranges: &[gpu_alloc::MappedMemoryRange<'_, vk::DeviceMemory>],
|
||||
) -> Result<(), gpu_alloc::OutOfMemory> {
|
||||
let vk_ranges = ranges.iter().map(|range| {
|
||||
vk::MappedMemoryRange::builder()
|
||||
.memory(*range.memory)
|
||||
.offset(range.offset)
|
||||
.size(range.size)
|
||||
.build()
|
||||
});
|
||||
let result = inplace_it::inplace_or_alloc_from_iter(vk_ranges, |array| {
|
||||
self.raw.invalidate_mapped_memory_ranges(array)
|
||||
});
|
||||
result.map_err(|err| match err {
|
||||
vk::Result::ERROR_OUT_OF_DEVICE_MEMORY => gpu_alloc::OutOfMemory::OutOfDeviceMemory,
|
||||
vk::Result::ERROR_OUT_OF_HOST_MEMORY => gpu_alloc::OutOfMemory::OutOfHostMemory,
|
||||
err => panic!("Unexpected Vulkan error: `{}`", err),
|
||||
})
|
||||
}
|
||||
|
||||
unsafe fn flush_memory_ranges(
|
||||
&self,
|
||||
ranges: &[gpu_alloc::MappedMemoryRange<'_, vk::DeviceMemory>],
|
||||
) -> Result<(), gpu_alloc::OutOfMemory> {
|
||||
let vk_ranges = ranges.iter().map(|range| {
|
||||
vk::MappedMemoryRange::builder()
|
||||
.memory(*range.memory)
|
||||
.offset(range.offset)
|
||||
.size(range.size)
|
||||
.build()
|
||||
});
|
||||
let result = inplace_it::inplace_or_alloc_from_iter(vk_ranges, |array| {
|
||||
self.raw.flush_mapped_memory_ranges(array)
|
||||
});
|
||||
result.map_err(|err| match err {
|
||||
vk::Result::ERROR_OUT_OF_DEVICE_MEMORY => gpu_alloc::OutOfMemory::OutOfDeviceMemory,
|
||||
vk::Result::ERROR_OUT_OF_HOST_MEMORY => gpu_alloc::OutOfMemory::OutOfHostMemory,
|
||||
err => panic!("Unexpected Vulkan error: `{}`", err),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl From<gpu_alloc::AllocationError> for crate::DeviceError {
|
||||
fn from(error: gpu_alloc::AllocationError) -> Self {
|
||||
use gpu_alloc::AllocationError as Ae;
|
||||
match error {
|
||||
Ae::OutOfDeviceMemory | Ae::OutOfHostMemory => Self::OutOfMemory,
|
||||
_ => {
|
||||
log::error!("memory allocation: {:?}", error);
|
||||
Self::Lost
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl super::Device {
|
||||
pub(super) unsafe fn create_swapchain(
|
||||
@@ -84,3 +205,220 @@ impl super::Device {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
use super::{DeviceResult, Encoder, Resource}; //temporary
|
||||
|
||||
impl crate::Device<super::Api> for super::Device {
|
||||
unsafe fn create_buffer(
|
||||
&self,
|
||||
desc: &crate::BufferDescriptor,
|
||||
) -> Result<super::Buffer, crate::DeviceError> {
|
||||
let vk_info = vk::BufferCreateInfo::builder()
|
||||
.size(desc.size)
|
||||
.usage(conv::map_buffer_usage(desc.usage))
|
||||
.sharing_mode(vk::SharingMode::EXCLUSIVE);
|
||||
|
||||
let raw = self.shared.raw.create_buffer(&vk_info, None)?;
|
||||
let req = self.shared.raw.get_buffer_memory_requirements(raw);
|
||||
|
||||
let mut alloc_usage = if desc
|
||||
.usage
|
||||
.intersects(crate::BufferUse::MAP_READ | crate::BufferUse::MAP_WRITE)
|
||||
{
|
||||
let mut flags = gpu_alloc::UsageFlags::HOST_ACCESS;
|
||||
flags.set(
|
||||
gpu_alloc::UsageFlags::DOWNLOAD,
|
||||
desc.usage.contains(crate::BufferUse::MAP_READ),
|
||||
);
|
||||
flags.set(
|
||||
gpu_alloc::UsageFlags::UPLOAD,
|
||||
desc.usage.contains(crate::BufferUse::MAP_WRITE),
|
||||
);
|
||||
flags
|
||||
} else {
|
||||
gpu_alloc::UsageFlags::FAST_DEVICE_ACCESS
|
||||
};
|
||||
alloc_usage.set(
|
||||
gpu_alloc::UsageFlags::TRANSIENT,
|
||||
desc.memory_flags.contains(crate::MemoryFlag::TRANSIENT),
|
||||
);
|
||||
|
||||
let block = self.mem_allocator.lock().alloc(
|
||||
&*self.shared,
|
||||
gpu_alloc::Request {
|
||||
size: req.size,
|
||||
align_mask: req.alignment - 1,
|
||||
usage: alloc_usage,
|
||||
memory_types: req.memory_type_bits & self.valid_ash_memory_types,
|
||||
},
|
||||
)?;
|
||||
|
||||
self.shared
|
||||
.raw
|
||||
.bind_buffer_memory(raw, *block.memory(), block.offset())?;
|
||||
|
||||
Ok(super::Buffer { raw, block })
|
||||
}
|
||||
unsafe fn destroy_buffer(&self, buffer: super::Buffer) {
|
||||
self.shared.raw.destroy_buffer(buffer.raw, None);
|
||||
self.mem_allocator
|
||||
.lock()
|
||||
.dealloc(&*self.shared, buffer.block);
|
||||
}
|
||||
|
||||
unsafe fn map_buffer(
|
||||
&self,
|
||||
buffer: &super::Buffer,
|
||||
range: crate::MemoryRange,
|
||||
) -> DeviceResult<crate::BufferMapping> {
|
||||
Err(crate::DeviceError::Lost)
|
||||
}
|
||||
unsafe fn unmap_buffer(&self, buffer: &super::Buffer) -> DeviceResult<()> {
|
||||
Ok(())
|
||||
}
|
||||
unsafe fn flush_mapped_ranges<I>(&self, buffer: &super::Buffer, ranges: I) {}
|
||||
unsafe fn invalidate_mapped_ranges<I>(&self, buffer: &super::Buffer, ranges: I) {}
|
||||
|
||||
unsafe fn create_texture(
|
||||
&self,
|
||||
desc: &crate::TextureDescriptor,
|
||||
) -> Result<super::Texture, crate::DeviceError> {
|
||||
let (array_layer_count, vk_extent) = conv::map_extent(desc.size, desc.dimension);
|
||||
let mut flags = vk::ImageCreateFlags::empty();
|
||||
if desc.dimension == wgt::TextureDimension::D2 && desc.size.depth_or_array_layers % 6 == 0 {
|
||||
flags |= vk::ImageCreateFlags::CUBE_COMPATIBLE;
|
||||
}
|
||||
|
||||
let vk_info = vk::ImageCreateInfo::builder()
|
||||
.flags(flags)
|
||||
.image_type(conv::map_texture_dimension(desc.dimension))
|
||||
.format(self.shared.private_caps.map_texture_format(desc.format))
|
||||
.extent(vk_extent)
|
||||
.mip_levels(desc.mip_level_count)
|
||||
.array_layers(array_layer_count)
|
||||
.samples(vk::SampleCountFlags::from_raw(desc.sample_count))
|
||||
.tiling(vk::ImageTiling::OPTIMAL)
|
||||
.usage(conv::map_texture_usage(desc.usage))
|
||||
.sharing_mode(vk::SharingMode::EXCLUSIVE)
|
||||
.initial_layout(vk::ImageLayout::UNDEFINED);
|
||||
|
||||
let raw = self.shared.raw.create_image(&vk_info, None)?;
|
||||
let req = self.shared.raw.get_image_memory_requirements(raw);
|
||||
|
||||
let block = self.mem_allocator.lock().alloc(
|
||||
&*self.shared,
|
||||
gpu_alloc::Request {
|
||||
size: req.size,
|
||||
align_mask: req.alignment - 1,
|
||||
usage: gpu_alloc::UsageFlags::FAST_DEVICE_ACCESS,
|
||||
memory_types: req.memory_type_bits & self.valid_ash_memory_types,
|
||||
},
|
||||
)?;
|
||||
|
||||
self.shared
|
||||
.raw
|
||||
.bind_image_memory(raw, *block.memory(), block.offset())?;
|
||||
|
||||
Ok(super::Texture {
|
||||
raw,
|
||||
block: Some(block),
|
||||
})
|
||||
}
|
||||
unsafe fn destroy_texture(&self, texture: super::Texture) {
|
||||
self.shared.raw.destroy_image(texture.raw, None);
|
||||
self.mem_allocator
|
||||
.lock()
|
||||
.dealloc(&*self.shared, texture.block.unwrap());
|
||||
}
|
||||
|
||||
unsafe fn create_texture_view(
|
||||
&self,
|
||||
texture: &super::Texture,
|
||||
desc: &crate::TextureViewDescriptor,
|
||||
) -> DeviceResult<Resource> {
|
||||
Ok(Resource)
|
||||
}
|
||||
unsafe fn destroy_texture_view(&self, view: Resource) {}
|
||||
unsafe fn create_sampler(&self, desc: &crate::SamplerDescriptor) -> DeviceResult<Resource> {
|
||||
Ok(Resource)
|
||||
}
|
||||
unsafe fn destroy_sampler(&self, sampler: Resource) {}
|
||||
|
||||
unsafe fn create_command_buffer(
|
||||
&self,
|
||||
desc: &crate::CommandBufferDescriptor,
|
||||
) -> DeviceResult<Encoder> {
|
||||
Ok(Encoder)
|
||||
}
|
||||
unsafe fn destroy_command_buffer(&self, cmd_buf: Encoder) {}
|
||||
|
||||
unsafe fn create_bind_group_layout(
|
||||
&self,
|
||||
desc: &crate::BindGroupLayoutDescriptor,
|
||||
) -> DeviceResult<Resource> {
|
||||
Ok(Resource)
|
||||
}
|
||||
unsafe fn destroy_bind_group_layout(&self, bg_layout: Resource) {}
|
||||
unsafe fn create_pipeline_layout(
|
||||
&self,
|
||||
desc: &crate::PipelineLayoutDescriptor<super::Api>,
|
||||
) -> DeviceResult<Resource> {
|
||||
Ok(Resource)
|
||||
}
|
||||
unsafe fn destroy_pipeline_layout(&self, pipeline_layout: Resource) {}
|
||||
unsafe fn create_bind_group(
|
||||
&self,
|
||||
desc: &crate::BindGroupDescriptor<super::Api>,
|
||||
) -> DeviceResult<Resource> {
|
||||
Ok(Resource)
|
||||
}
|
||||
unsafe fn destroy_bind_group(&self, group: Resource) {}
|
||||
|
||||
unsafe fn create_shader_module(
|
||||
&self,
|
||||
desc: &crate::ShaderModuleDescriptor,
|
||||
shader: crate::NagaShader,
|
||||
) -> Result<Resource, crate::ShaderError> {
|
||||
Ok(Resource)
|
||||
}
|
||||
unsafe fn destroy_shader_module(&self, module: Resource) {}
|
||||
unsafe fn create_render_pipeline(
|
||||
&self,
|
||||
desc: &crate::RenderPipelineDescriptor<super::Api>,
|
||||
) -> Result<Resource, crate::PipelineError> {
|
||||
Ok(Resource)
|
||||
}
|
||||
unsafe fn destroy_render_pipeline(&self, pipeline: Resource) {}
|
||||
unsafe fn create_compute_pipeline(
|
||||
&self,
|
||||
desc: &crate::ComputePipelineDescriptor<super::Api>,
|
||||
) -> Result<Resource, crate::PipelineError> {
|
||||
Ok(Resource)
|
||||
}
|
||||
unsafe fn destroy_compute_pipeline(&self, pipeline: Resource) {}
|
||||
|
||||
unsafe fn create_query_set(&self, desc: &wgt::QuerySetDescriptor) -> DeviceResult<Resource> {
|
||||
Ok(Resource)
|
||||
}
|
||||
unsafe fn destroy_query_set(&self, set: Resource) {}
|
||||
unsafe fn create_fence(&self) -> DeviceResult<Resource> {
|
||||
Ok(Resource)
|
||||
}
|
||||
unsafe fn destroy_fence(&self, fence: Resource) {}
|
||||
unsafe fn get_fence_value(&self, fence: &Resource) -> DeviceResult<crate::FenceValue> {
|
||||
Ok(0)
|
||||
}
|
||||
unsafe fn wait(
|
||||
&self,
|
||||
fence: &Resource,
|
||||
value: crate::FenceValue,
|
||||
timeout_ms: u32,
|
||||
) -> DeviceResult<bool> {
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
unsafe fn start_capture(&self) -> bool {
|
||||
false
|
||||
}
|
||||
unsafe fn stop_capture(&self) {}
|
||||
}
|
||||
|
@@ -194,7 +194,7 @@ impl super::Instance {
|
||||
|
||||
impl crate::Instance<super::Api> for super::Instance {
|
||||
unsafe fn init(desc: &crate::InstanceDescriptor) -> Result<Self, crate::InstanceError> {
|
||||
let entry = match unsafe { ash::Entry::new() } {
|
||||
let entry = match ash::Entry::new() {
|
||||
Ok(entry) => entry,
|
||||
Err(err) => {
|
||||
log::info!("Missing Vulkan entry points: {:?}", err);
|
||||
@@ -282,10 +282,7 @@ impl crate::Instance<super::Api> for super::Instance {
|
||||
extensions.retain(|&ext| {
|
||||
if instance_extensions
|
||||
.iter()
|
||||
.find(|inst_ext| unsafe {
|
||||
CStr::from_ptr(inst_ext.extension_name.as_ptr()) == ext
|
||||
})
|
||||
.is_some()
|
||||
.any(|inst_ext| CStr::from_ptr(inst_ext.extension_name.as_ptr()) == ext)
|
||||
{
|
||||
true
|
||||
} else {
|
||||
@@ -314,10 +311,7 @@ impl crate::Instance<super::Api> for super::Instance {
|
||||
layers.retain(|&layer| {
|
||||
if instance_layers
|
||||
.iter()
|
||||
.find(|inst_layer| unsafe {
|
||||
CStr::from_ptr(inst_layer.layer_name.as_ptr()) == layer
|
||||
})
|
||||
.is_some()
|
||||
.any(|inst_layer| CStr::from_ptr(inst_layer.layer_name.as_ptr()) == layer)
|
||||
{
|
||||
true
|
||||
} else {
|
||||
@@ -430,7 +424,7 @@ impl crate::Instance<super::Api> for super::Instance {
|
||||
}
|
||||
|
||||
unsafe fn enumerate_adapters(&self) -> Vec<crate::ExposedAdapter<super::Api>> {
|
||||
let raw_devices = match unsafe { self.shared.raw.enumerate_physical_devices() } {
|
||||
let raw_devices = match self.shared.raw.enumerate_physical_devices() {
|
||||
Ok(devices) => devices,
|
||||
Err(err) => {
|
||||
log::error!("enumerate_adapters: {}", err);
|
||||
@@ -517,9 +511,7 @@ impl crate::Surface<super::Api> for super::Surface {
            index,
            texture: super::Texture {
                raw: sc.images[index as usize],
                ty: vk::ImageType::TYPE_2D,
                flags: vk::ImageCreateFlags::empty(),
                extent: sc.extent,
                block: None,
            },
        };
        Ok(Some(crate::AcquiredSurfaceTexture {
@@ -1,13 +1,15 @@
#![allow(unused_variables)]

mod adapter;
mod command;
mod conv;
mod device;
mod instance;

use ash::{extensions::khr, vk};
use std::{borrow::Borrow, ffi::CStr, sync::Arc};

use std::{borrow::Borrow, ffi::CStr, ops::Range, sync::Arc};
use ash::{extensions::khr, vk};
use parking_lot::Mutex;

const MILLIS_TO_NANOS: u64 = 1_000_000;
@@ -28,7 +30,7 @@ impl crate::Api for Api {

    type CommandBuffer = Encoder;

    type Buffer = Resource;
    type Buffer = Buffer;
    type Texture = Texture;
    type SurfaceTexture = SurfaceTexture;
    type TextureView = Resource;
@@ -156,6 +158,7 @@ struct DeviceShared {

pub struct Device {
    shared: Arc<DeviceShared>,
    mem_allocator: Mutex<gpu_alloc::GpuAllocator<vk::DeviceMemory>>,
    valid_ash_memory_types: u32,
    naga_options: naga::back::spv::Options,
}
@@ -166,12 +169,16 @@ pub struct Queue {
    //device: Arc<DeviceShared>,
}

#[derive(Debug)]
pub struct Buffer {
    raw: vk::Buffer,
    block: gpu_alloc::MemoryBlock<vk::DeviceMemory>,
}

#[derive(Debug)]
pub struct Texture {
    raw: vk::Image,
    ty: vk::ImageType,
    flags: vk::ImageCreateFlags,
    extent: vk::Extent3D,
    block: Option<gpu_alloc::MemoryBlock<vk::DeviceMemory>>,
}

impl crate::Queue<Api> for Queue {
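Note on `Texture::block` being an `Option`: only textures created through `Device::create_texture` own a memory block that must be returned to the allocator, whereas swapchain images (see the `block: None` in the surface code above) are owned by the swapchain. A simplified sketch of that ownership split, with stand-in types rather than the actual destroy path:

struct MemoryBlock; // stand-in for gpu_alloc::MemoryBlock<vk::DeviceMemory>

struct Texture {
    block: Option<MemoryBlock>,
}

fn destroy_texture(texture: Texture) {
    // the vk::Image itself is destroyed in both cases (omitted here)
    if let Some(_block) = texture.block {
        // only device-created textures hand their block back to the allocator
    }
}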
@@ -191,275 +198,6 @@ impl crate::Queue<Api> for Queue {
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Context;
|
||||
impl crate::Device<Api> for Context {
|
||||
unsafe fn create_buffer(&self, desc: &crate::BufferDescriptor) -> DeviceResult<Resource> {
|
||||
Ok(Resource)
|
||||
}
|
||||
unsafe fn destroy_buffer(&self, buffer: Resource) {}
|
||||
unsafe fn map_buffer(
|
||||
&self,
|
||||
buffer: &Resource,
|
||||
range: crate::MemoryRange,
|
||||
) -> DeviceResult<crate::BufferMapping> {
|
||||
Err(crate::DeviceError::Lost)
|
||||
}
|
||||
unsafe fn unmap_buffer(&self, buffer: &Resource) -> DeviceResult<()> {
|
||||
Ok(())
|
||||
}
|
||||
unsafe fn flush_mapped_ranges<I>(&self, buffer: &Resource, ranges: I) {}
|
||||
unsafe fn invalidate_mapped_ranges<I>(&self, buffer: &Resource, ranges: I) {}
|
||||
|
||||
unsafe fn create_texture(&self, desc: &crate::TextureDescriptor) -> DeviceResult<Texture> {
|
||||
unimplemented!()
|
||||
}
|
||||
unsafe fn destroy_texture(&self, texture: Texture) {}
|
||||
unsafe fn create_texture_view(
|
||||
&self,
|
||||
texture: &Texture,
|
||||
desc: &crate::TextureViewDescriptor,
|
||||
) -> DeviceResult<Resource> {
|
||||
Ok(Resource)
|
||||
}
|
||||
unsafe fn destroy_texture_view(&self, view: Resource) {}
|
||||
unsafe fn create_sampler(&self, desc: &crate::SamplerDescriptor) -> DeviceResult<Resource> {
|
||||
Ok(Resource)
|
||||
}
|
||||
unsafe fn destroy_sampler(&self, sampler: Resource) {}
|
||||
|
||||
unsafe fn create_command_buffer(
|
||||
&self,
|
||||
desc: &crate::CommandBufferDescriptor,
|
||||
) -> DeviceResult<Encoder> {
|
||||
Ok(Encoder)
|
||||
}
|
||||
unsafe fn destroy_command_buffer(&self, cmd_buf: Encoder) {}
|
||||
|
||||
unsafe fn create_bind_group_layout(
|
||||
&self,
|
||||
desc: &crate::BindGroupLayoutDescriptor,
|
||||
) -> DeviceResult<Resource> {
|
||||
Ok(Resource)
|
||||
}
|
||||
unsafe fn destroy_bind_group_layout(&self, bg_layout: Resource) {}
|
||||
unsafe fn create_pipeline_layout(
|
||||
&self,
|
||||
desc: &crate::PipelineLayoutDescriptor<Api>,
|
||||
) -> DeviceResult<Resource> {
|
||||
Ok(Resource)
|
||||
}
|
||||
unsafe fn destroy_pipeline_layout(&self, pipeline_layout: Resource) {}
|
||||
unsafe fn create_bind_group(
|
||||
&self,
|
||||
desc: &crate::BindGroupDescriptor<Api>,
|
||||
) -> DeviceResult<Resource> {
|
||||
Ok(Resource)
|
||||
}
|
||||
unsafe fn destroy_bind_group(&self, group: Resource) {}
|
||||
|
||||
unsafe fn create_shader_module(
|
||||
&self,
|
||||
desc: &crate::ShaderModuleDescriptor,
|
||||
shader: crate::NagaShader,
|
||||
) -> Result<Resource, crate::ShaderError> {
|
||||
Ok(Resource)
|
||||
}
|
||||
unsafe fn destroy_shader_module(&self, module: Resource) {}
|
||||
unsafe fn create_render_pipeline(
|
||||
&self,
|
||||
desc: &crate::RenderPipelineDescriptor<Api>,
|
||||
) -> Result<Resource, crate::PipelineError> {
|
||||
Ok(Resource)
|
||||
}
|
||||
unsafe fn destroy_render_pipeline(&self, pipeline: Resource) {}
|
||||
unsafe fn create_compute_pipeline(
|
||||
&self,
|
||||
desc: &crate::ComputePipelineDescriptor<Api>,
|
||||
) -> Result<Resource, crate::PipelineError> {
|
||||
Ok(Resource)
|
||||
}
|
||||
unsafe fn destroy_compute_pipeline(&self, pipeline: Resource) {}
|
||||
|
||||
unsafe fn create_query_set(&self, desc: &wgt::QuerySetDescriptor) -> DeviceResult<Resource> {
|
||||
Ok(Resource)
|
||||
}
|
||||
unsafe fn destroy_query_set(&self, set: Resource) {}
|
||||
unsafe fn create_fence(&self) -> DeviceResult<Resource> {
|
||||
Ok(Resource)
|
||||
}
|
||||
unsafe fn destroy_fence(&self, fence: Resource) {}
|
||||
unsafe fn get_fence_value(&self, fence: &Resource) -> DeviceResult<crate::FenceValue> {
|
||||
Ok(0)
|
||||
}
|
||||
unsafe fn wait(
|
||||
&self,
|
||||
fence: &Resource,
|
||||
value: crate::FenceValue,
|
||||
timeout_ms: u32,
|
||||
) -> DeviceResult<bool> {
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
unsafe fn start_capture(&self) -> bool {
|
||||
false
|
||||
}
|
||||
unsafe fn stop_capture(&self) {}
|
||||
}
|
||||
|
||||
impl crate::CommandBuffer<Api> for Encoder {
|
||||
unsafe fn finish(&mut self) {}
|
||||
|
||||
unsafe fn transition_buffers<'a, T>(&mut self, barriers: T)
|
||||
where
|
||||
T: Iterator<Item = crate::BufferBarrier<'a, Api>>,
|
||||
{
|
||||
}
|
||||
|
||||
unsafe fn transition_textures<'a, T>(&mut self, barriers: T)
|
||||
where
|
||||
T: Iterator<Item = crate::TextureBarrier<'a, Api>>,
|
||||
{
|
||||
}
|
||||
|
||||
unsafe fn fill_buffer(&mut self, buffer: &Resource, range: crate::MemoryRange, value: u8) {}
|
||||
|
||||
unsafe fn copy_buffer_to_buffer<T>(&mut self, src: &Resource, dst: &Resource, regions: T) {}
|
||||
|
||||
unsafe fn copy_texture_to_texture<T>(
|
||||
&mut self,
|
||||
src: &Texture,
|
||||
src_usage: crate::TextureUse,
|
||||
dst: &Texture,
|
||||
regions: T,
|
||||
) {
|
||||
}
|
||||
|
||||
unsafe fn copy_buffer_to_texture<T>(&mut self, src: &Resource, dst: &Texture, regions: T) {}
|
||||
|
||||
unsafe fn copy_texture_to_buffer<T>(
|
||||
&mut self,
|
||||
src: &Texture,
|
||||
src_usage: crate::TextureUse,
|
||||
dst: &Resource,
|
||||
regions: T,
|
||||
) {
|
||||
}
|
||||
|
||||
unsafe fn begin_query(&mut self, set: &Resource, index: u32) {}
|
||||
unsafe fn end_query(&mut self, set: &Resource, index: u32) {}
|
||||
unsafe fn write_timestamp(&mut self, set: &Resource, index: u32) {}
|
||||
unsafe fn reset_queries(&mut self, set: &Resource, range: Range<u32>) {}
|
||||
unsafe fn copy_query_results(
|
||||
&mut self,
|
||||
set: &Resource,
|
||||
range: Range<u32>,
|
||||
buffer: &Resource,
|
||||
offset: wgt::BufferAddress,
|
||||
) {
|
||||
}
|
||||
|
||||
// render
|
||||
|
||||
unsafe fn begin_render_pass(&mut self, desc: &crate::RenderPassDescriptor<Api>) {}
|
||||
unsafe fn end_render_pass(&mut self) {}
|
||||
|
||||
unsafe fn set_bind_group(
|
||||
&mut self,
|
||||
layout: &Resource,
|
||||
index: u32,
|
||||
group: &Resource,
|
||||
dynamic_offsets: &[wgt::DynamicOffset],
|
||||
) {
|
||||
}
|
||||
unsafe fn set_push_constants(
|
||||
&mut self,
|
||||
layout: &Resource,
|
||||
stages: wgt::ShaderStage,
|
||||
offset: u32,
|
||||
data: &[u32],
|
||||
) {
|
||||
}
|
||||
|
||||
unsafe fn insert_debug_marker(&mut self, label: &str) {}
|
||||
unsafe fn begin_debug_marker(&mut self, group_label: &str) {}
|
||||
unsafe fn end_debug_marker(&mut self) {}
|
||||
|
||||
unsafe fn set_render_pipeline(&mut self, pipeline: &Resource) {}
|
||||
|
||||
unsafe fn set_index_buffer<'a>(
|
||||
&mut self,
|
||||
binding: crate::BufferBinding<'a, Api>,
|
||||
format: wgt::IndexFormat,
|
||||
) {
|
||||
}
|
||||
unsafe fn set_vertex_buffer<'a>(&mut self, index: u32, binding: crate::BufferBinding<'a, Api>) {
|
||||
}
|
||||
unsafe fn set_viewport(&mut self, rect: &crate::Rect<f32>, depth_range: Range<f32>) {}
|
||||
unsafe fn set_scissor_rect(&mut self, rect: &crate::Rect<u32>) {}
|
||||
unsafe fn set_stencil_reference(&mut self, value: u32) {}
|
||||
unsafe fn set_blend_constants(&mut self, color: &wgt::Color) {}
|
||||
|
||||
unsafe fn draw(
|
||||
&mut self,
|
||||
start_vertex: u32,
|
||||
vertex_count: u32,
|
||||
start_instance: u32,
|
||||
instance_count: u32,
|
||||
) {
|
||||
}
|
||||
unsafe fn draw_indexed(
|
||||
&mut self,
|
||||
start_index: u32,
|
||||
index_count: u32,
|
||||
base_vertex: i32,
|
||||
start_instance: u32,
|
||||
instance_count: u32,
|
||||
) {
|
||||
}
|
||||
unsafe fn draw_indirect(
|
||||
&mut self,
|
||||
buffer: &Resource,
|
||||
offset: wgt::BufferAddress,
|
||||
draw_count: u32,
|
||||
) {
|
||||
}
|
||||
unsafe fn draw_indexed_indirect(
|
||||
&mut self,
|
||||
buffer: &Resource,
|
||||
offset: wgt::BufferAddress,
|
||||
draw_count: u32,
|
||||
) {
|
||||
}
|
||||
unsafe fn draw_indirect_count(
|
||||
&mut self,
|
||||
buffer: &Resource,
|
||||
offset: wgt::BufferAddress,
|
||||
count_buffer: &Resource,
|
||||
count_offset: wgt::BufferAddress,
|
||||
max_count: u32,
|
||||
) {
|
||||
}
|
||||
unsafe fn draw_indexed_indirect_count(
|
||||
&mut self,
|
||||
buffer: &Resource,
|
||||
offset: wgt::BufferAddress,
|
||||
count_buffer: &Resource,
|
||||
count_offset: wgt::BufferAddress,
|
||||
max_count: u32,
|
||||
) {
|
||||
}
|
||||
|
||||
// compute
|
||||
|
||||
unsafe fn begin_compute_pass(&mut self, desc: &crate::ComputePassDescriptor) {}
|
||||
unsafe fn end_compute_pass(&mut self) {}
|
||||
|
||||
unsafe fn set_compute_pipeline(&mut self, pipeline: &Resource) {}
|
||||
|
||||
unsafe fn dispatch(&mut self, count: [u32; 3]) {}
|
||||
unsafe fn dispatch_indirect(&mut self, buffer: &Resource, offset: wgt::BufferAddress) {}
|
||||
}
|
||||
|
||||
impl From<vk::Result> for crate::DeviceError {
|
||||
fn from(result: vk::Result) -> Self {
|
||||
match result {
|
||||
|