Mirror of https://github.com/vulkano-rs/vulkano.git, synced 2024-11-22 06:45:23 +00:00

Enable and fix pointer cast lints (#2507)

* Enable and fix pointer cast lints
* Fix for #2508
* Use coercion instead of `ptr::from_ref`
* Search-and-replace fail
* Replace `addr_of!` + `cast`

parent 18c88cd854
commit fe74e0aac8
Cargo.toml | 10
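Most of the diff below is mechanical: chained `as` pointer casts such as `x as *const _ as *const _` become `cast()`, `cast_mut()`/`cast_const()`, `ptr::from_ref`, or the fully qualified `<*const _>::cast(...)`, which can change the pointee type but never silently flip constness. As a rough standalone sketch of the shape these lints push toward (hypothetical function, not code from this commit):

    use std::ffi::c_void;

    fn as_void_ptr(x: &u32) -> *const c_void {
        // Old style, flagged by `clippy::borrow_as_ptr` / `clippy::ptr_as_ptr`:
        //     x as *const u32 as *const c_void
        // New style: coerce the reference to a raw pointer, then `cast`,
        // which can only change the pointee type, never the constness.
        <*const u32>::cast(x)
    }

    fn main() {
        let value = 7u32;
        let voidp = as_void_ptr(&value);
        // `clippy::ptr_cast_constness` prefers `cast_mut()` over `as *mut _`:
        let voidp_mut: *mut c_void = voidp.cast_mut();
        assert!(!voidp_mut.is_null());
    }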
@@ -64,10 +64,14 @@ rand = "0.8"
 ron = "0.8"

 [workspace.lints]
 rust.missing_docs = "allow" # TODO: warn eventually
 rust.rust_2018_idioms = "warn"
 rust.rust_2024_compatibility = "allow" # TODO: warn eventually
+clippy.borrow_as_ptr = "warn"
 clippy.missing_safety_doc = "allow" # TODO: warn eventually
+clippy.ptr_as_ptr = "warn"
+clippy.ptr_cast_constness = "warn"
+# clippy.ref_as_ptr = "warn" # TODO: enable once it's stable
 clippy.trivially_copy_pass_by_ref = "warn"
 # These lints are a bit too pedantic, so they're disabled here.
 # They can be removed if they no longer happen in the future.
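The keys above live in the workspace-level lint table; they only take effect in member crates that opt in. For illustration (the member manifests are not part of this hunk), the opt-in stanza in a member Cargo.toml is:

    [lints]
    workspace = true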
@@ -631,7 +631,7 @@ fn features_ffi_output(members: &[FeaturesFfiMember]) -> TokenStream {
 self.#name = Some(Default::default());
 let member = self.#name.as_mut().unwrap();
 member.p_next = head.p_next;
-head.p_next = member as *mut _ as _;
+head.p_next = <*mut _>::cast(member);
 }
 }
 },
@@ -336,7 +336,7 @@ fn properties_ffi_output(members: &[PropertiesFfiMember]) -> TokenStream {
 self.#name = Some(Default::default());
 let member = self.#name.as_mut().unwrap();
 member.p_next = head.p_next;
-head.p_next = member as *mut _ as _;
+head.p_next = <*mut _>::cast(member);
 }
 }
 },
@@ -88,7 +88,7 @@ impl RawBuffer {
 } = &create_info;

 let (sharing_mode, queue_family_index_count, p_queue_family_indices) = match sharing {
-Sharing::Exclusive => (ash::vk::SharingMode::EXCLUSIVE, 0, &[] as _),
+Sharing::Exclusive => (ash::vk::SharingMode::EXCLUSIVE, 0, ptr::null()),
 Sharing::Concurrent(queue_family_indices) => (
 ash::vk::SharingMode::CONCURRENT,
 queue_family_indices.len() as u32,
@@ -114,7 +114,7 @@ impl RawBuffer {
 });

 next.p_next = create_info_vk.p_next;
-create_info_vk.p_next = next as *const _ as *const _;
+create_info_vk.p_next = <*const _>::cast(next);
 }

 let handle = {
@@ -245,7 +245,7 @@ impl RawBuffer {
 .insert(ash::vk::MemoryDedicatedRequirements::default());

 next.p_next = memory_requirements2_vk.p_next;
-memory_requirements2_vk.p_next = next as *mut _ as *mut _;
+memory_requirements2_vk.p_next = <*mut _>::cast(next);
 }

 unsafe {
@@ -864,7 +864,7 @@ impl BufferCreateInfo {
 #[cfg(test)]
 mod tests {
 use super::{BufferCreateInfo, BufferUsage, RawBuffer};
-use crate::device::{Device, DeviceOwned};
+use crate::device::DeviceOwned;

 #[test]
 fn create() {
@@ -882,7 +882,7 @@ mod tests {

 assert!(reqs.layout.size() >= 128);
 assert_eq!(buf.size(), 128);
-assert_eq!(&**buf.device() as *const Device, &*device as *const Device);
+assert_eq!(buf.device(), &device);
 }

 /* Re-enable when sparse binding is properly implemented
@@ -21,7 +21,7 @@ use crate::{
 DeviceSize, Requires, RequiresAllOf, RequiresOneOf, ValidationError, VulkanObject,
 };
 use smallvec::SmallVec;
-use std::{mem::size_of, sync::Arc};
+use std::{mem::size_of, ptr, sync::Arc};

 /// # Commands to do operations on acceleration structures.
 impl RecordingCommandBuffer {
@@ -1568,7 +1568,7 @@ impl RawRecordingCommandBuffer {
 .collect();
 let build_range_info_pointers_vk: SmallVec<[_; 8]> = build_range_info_elements_vk
 .iter()
-.map(|element| element as *const _)
+.map(ptr::from_ref)
 .collect();

 let fns = self.device().fns();
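`std::ptr::from_ref` converts `&T` to `*const T` without an `as` cast, which is why the closure above can be replaced by the function itself. A rough standalone illustration of the same pattern with a plain `Vec` instead of the build-range structs:

    fn main() {
        let elements = vec![10u32, 20, 30];
        // Build a parallel list of raw pointers to the elements, as the
        // command-buffer code above does for its build-range infos.
        let pointers: Vec<*const u32> = elements.iter().map(std::ptr::from_ref).collect();
        assert_eq!(unsafe { *pointers[1] }, 20);
    }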
@@ -1146,7 +1146,7 @@ impl RawRecordingCommandBuffer {
 let push_size = remaining_size.min(range.offset + range.size - current_offset);
 let data_offset = (current_offset - offset) as usize;
 debug_assert!(data_offset < size as usize);
-let data = (push_constants as *const Pc as *const c_void).add(data_offset);
+let data = <*const _>::cast::<c_void>(push_constants).add(data_offset);

 (fns.v1_0.cmd_push_constants)(
 self.handle(),
@@ -1348,13 +1348,13 @@ impl RawRecordingCommandBuffer {
 }
 DescriptorWriteInfo::InlineUniformBlock(data) => {
 write_vk.descriptor_count = data.len() as u32;
-write_vk.p_next = &per_write_vk.inline_uniform_block as *const _ as _;
+write_vk.p_next = <*const _>::cast(&per_write_vk.inline_uniform_block);
 per_write_vk.inline_uniform_block.data_size = write_vk.descriptor_count;
-per_write_vk.inline_uniform_block.p_data = data.as_ptr() as *const _;
+per_write_vk.inline_uniform_block.p_data = data.as_ptr().cast();
 }
 DescriptorWriteInfo::AccelerationStructure(info) => {
 write_vk.descriptor_count = info.len() as u32;
-write_vk.p_next = &per_write_vk.acceleration_structures as *const _ as _;
+write_vk.p_next = <*const _>::cast(&per_write_vk.acceleration_structures);
 per_write_vk
 .acceleration_structures
 .acceleration_structure_count = write_vk.descriptor_count;
@@ -630,7 +630,7 @@ impl RawRecordingCommandBuffer {
 dst_buffer.buffer().handle(),
 dst_buffer.offset(),
 size_of_val(data) as DeviceSize,
-data as *const _ as *const _,
+<*const _>::cast(data),
 );

 self
@@ -172,7 +172,7 @@ impl RawRecordingCommandBuffer {
 });

 inheritance_info_vk.p_next =
-inheritance_rendering_info_vk as *const _ as *const _;
+<*const _>::cast(inheritance_rendering_info_vk);
 }
 }
 }
@@ -156,7 +156,7 @@ impl DescriptorSetLayout {
 });

 next.p_next = create_info_vk.p_next;
-create_info_vk.p_next = next as *const _ as *const _;
+create_info_vk.p_next = <*const _>::cast(next);
 }

 let handle = {
@@ -96,7 +96,7 @@ impl DescriptorPool {
 );

 next.p_next = create_info_vk.p_next;
-create_info_vk.p_next = next as *const _ as *const _;
+create_info_vk.p_next = <*const _>::cast(next);
 }

 let handle = unsafe {
@@ -287,7 +287,7 @@ impl DescriptorPool {
 descriptor_set_count: layouts_vk.len() as u32,
 p_set_layouts: layouts_vk.as_ptr(),
 p_next: if let Some(next) = variable_desc_count_alloc_info.as_ref() {
-next as *const _ as *const _
+<*const _>::cast(next)
 } else {
 ptr::null()
 },
@@ -152,13 +152,13 @@ impl RawDescriptorSet {
 }
 DescriptorWriteInfo::InlineUniformBlock(data) => {
 write_vk.descriptor_count = data.len() as u32;
-write_vk.p_next = &per_write_vk.inline_uniform_block as *const _ as _;
+write_vk.p_next = <*const _>::cast(&per_write_vk.inline_uniform_block);
 per_write_vk.inline_uniform_block.data_size = write_vk.descriptor_count;
-per_write_vk.inline_uniform_block.p_data = data.as_ptr() as *const _;
+per_write_vk.inline_uniform_block.p_data = data.as_ptr().cast();
 }
 DescriptorWriteInfo::AccelerationStructure(info) => {
 write_vk.descriptor_count = info.len() as u32;
-write_vk.p_next = &per_write_vk.acceleration_structures as *const _ as _;
+write_vk.p_next = <*const _>::cast(&per_write_vk.acceleration_structures);
 per_write_vk
 .acceleration_structures
 .acceleration_structure_count = write_vk.descriptor_count;
@@ -415,7 +415,7 @@ impl Device {
 });

 next.p_next = create_info_vk.p_next;
-create_info_vk.p_next = next as *mut _ as *mut _;
+create_info_vk.p_next = <*mut _>::cast(next);
 }

 let mut private_data_create_info_vk = None;
@@ -427,12 +427,12 @@ impl Device {
 });

 next.p_next = create_info_vk.p_next;
-create_info_vk.p_next = next as *mut _ as *mut _;
+create_info_vk.p_next = <*mut _>::cast(next);
 }

 // VUID-VkDeviceCreateInfo-pNext-00373
 if has_khr_get_physical_device_properties2 {
-create_info_vk.p_next = features_ffi.head_as_ref() as *const _ as _;
+create_info_vk.p_next = <*const _>::cast(features_ffi.head_as_ref());
 } else {
 create_info_vk.p_enabled_features = &features_ffi.head_as_ref().features;
 }
@@ -977,13 +977,13 @@ impl Device {
 });

 next.p_next = create_info_vk.p_next;
-create_info_vk.p_next = next as *const _ as *const _;
+create_info_vk.p_next = <*const _>::cast(next);

 let next = variable_descriptor_count_support_vk
 .insert(ash::vk::DescriptorSetVariableDescriptorCountLayoutSupport::default());

 next.p_next = support_vk.p_next;
-support_vk.p_next = next as *mut _ as *mut _;
+support_vk.p_next = <*mut _>::cast(next);
 }

 let fns = self.fns();
@@ -1061,7 +1061,7 @@ impl Device {
 } = &create_info;

 let (sharing_mode, queue_family_index_count, p_queue_family_indices) = match sharing {
-Sharing::Exclusive => (ash::vk::SharingMode::EXCLUSIVE, 0, &[] as _),
+Sharing::Exclusive => (ash::vk::SharingMode::EXCLUSIVE, 0, ptr::null()),
 Sharing::Concurrent(queue_family_indices) => (
 ash::vk::SharingMode::CONCURRENT,
 queue_family_indices.len() as u32,
@@ -1087,7 +1087,7 @@ impl Device {
 });

 next.p_next = create_info_vk.p_next;
-create_info_vk.p_next = next as *const _ as *const _;
+create_info_vk.p_next = <*const _>::cast(next);
 }

 let info_vk = ash::vk::DeviceBufferMemoryRequirements {
@@ -1105,7 +1105,7 @@ impl Device {
 .insert(ash::vk::MemoryDedicatedRequirements::default());

 next.p_next = memory_requirements2_vk.p_next;
-memory_requirements2_vk.p_next = next as *mut _ as *mut _;
+memory_requirements2_vk.p_next = <*mut _>::cast(next);
 }

 unsafe {
@@ -1303,7 +1303,7 @@ impl Device {
 } = &create_info;

 let (sharing_mode, queue_family_index_count, p_queue_family_indices) = match sharing {
-Sharing::Exclusive => (ash::vk::SharingMode::EXCLUSIVE, 0, &[] as _),
+Sharing::Exclusive => (ash::vk::SharingMode::EXCLUSIVE, 0, ptr::null()),
 Sharing::Concurrent(queue_family_indices) => (
 ash::vk::SharingMode::CONCURRENT,
 queue_family_indices.len() as u32,
@@ -1347,7 +1347,7 @@ impl Device {
 );

 next.p_next = create_info_vk.p_next;
-create_info_vk.p_next = next as *const _ as *const _;
+create_info_vk.p_next = <*const _>::cast(next);
 }

 if !external_memory_handle_types.is_empty() {
@@ -1357,7 +1357,7 @@ impl Device {
 });

 next.p_next = create_info_vk.p_next;
-create_info_vk.p_next = next as *const _ as *const _;
+create_info_vk.p_next = <*const _>::cast(next);
 }

 if !view_formats.is_empty() {
@@ -1374,7 +1374,7 @@ impl Device {
 });

 next.p_next = create_info_vk.p_next;
-create_info_vk.p_next = next as *const _ as *const _;
+create_info_vk.p_next = <*const _>::cast(next);
 }

 if let Some(stencil_usage) = stencil_usage {
@@ -1384,7 +1384,7 @@ impl Device {
 });

 next.p_next = create_info_vk.p_next;
-create_info_vk.p_next = next as *const _ as *const _;
+create_info_vk.p_next = <*const _>::cast(next);
 }

 // This is currently necessary because of an issue with the spec. The plane aspect should
@@ -1430,7 +1430,7 @@ impl Device {
 .insert(ash::vk::MemoryDedicatedRequirements::default());

 next.p_next = memory_requirements2_vk.p_next;
-memory_requirements2_vk.p_next = next as *mut _ as *mut _;
+memory_requirements2_vk.p_next = <*mut _>::cast(next);
 }

 unsafe {
@@ -2263,7 +2263,7 @@ pub(crate) struct DeviceOwnedDebugWrapper<T>(pub(crate) T);
 impl<T> DeviceOwnedDebugWrapper<T> {
 pub fn cast_slice_inner(slice: &[Self]) -> &[T] {
 // SAFETY: `DeviceOwnedDebugWrapper<T>` and `T` have the same layout.
-unsafe { slice::from_raw_parts(slice as *const _ as *const _, slice.len()) }
+unsafe { slice::from_raw_parts(<*const _>::cast(slice), slice.len()) }
 }
 }

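The cast above is sound only because `DeviceOwnedDebugWrapper<T>` has the same layout as `T`; `<*const _>::cast` changes the pointee type while the element count is reused unchanged. A rough standalone sketch of the same idea with a hypothetical `#[repr(transparent)]` wrapper (not vulkano's type):

    use std::slice;

    #[repr(transparent)]
    struct Wrapper(u64);

    fn cast_slice_inner(slice: &[Wrapper]) -> &[u64] {
        // SAFETY: `Wrapper` is `#[repr(transparent)]` over `u64`, so a slice of
        // wrappers has the same layout as a slice of `u64` of the same length.
        unsafe { slice::from_raw_parts(<*const _>::cast(slice.as_ptr()), slice.len()) }
    }

    fn main() {
        let wrapped = [Wrapper(1), Wrapper(2)];
        assert_eq!(cast_slice_inner(&wrapped), &[1u64, 2]);
    }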
@@ -1202,7 +1202,7 @@ impl PhysicalDevice {
 });

 next.p_next = external_semaphore_info_vk.p_next;
-external_semaphore_info_vk.p_next = next as *const _ as *const _;
+external_semaphore_info_vk.p_next = <*const _>::cast(next);
 }

 /* Output */
@@ -1285,14 +1285,14 @@ impl PhysicalDevice {
 {
 let next = format_properties3_vk.insert(ash::vk::FormatProperties3KHR::default());
 next.p_next = format_properties2_vk.p_next;
-format_properties2_vk.p_next = next as *mut _ as *mut _;
+format_properties2_vk.p_next = <*mut _>::cast(next);
 }

 if self.supported_extensions().ext_image_drm_format_modifier {
 let next = drm_format_modifier_properties_list_vk
 .insert(ash::vk::DrmFormatModifierPropertiesListEXT::default());
 next.p_next = format_properties2_vk.p_next;
-format_properties2_vk.p_next = next as *mut _ as *mut _;
+format_properties2_vk.p_next = <*mut _>::cast(next);

 if self.api_version() >= Version::V1_3
 || self.supported_extensions().khr_format_feature_flags2
@@ -1300,7 +1300,7 @@ impl PhysicalDevice {
 let next = drm_format_modifier_properties_list2_vk
 .insert(ash::vk::DrmFormatModifierPropertiesList2EXT::default());
 next.p_next = format_properties2_vk.p_next;
-format_properties2_vk.p_next = next as *mut _ as *mut _;
+format_properties2_vk.p_next = <*mut _>::cast(next);
 }
 }

@@ -1511,7 +1511,7 @@ impl PhysicalDevice {

 let (sharing_mode, queue_family_index_count, p_queue_family_indices) =
 match sharing {
-Sharing::Exclusive => (ash::vk::SharingMode::EXCLUSIVE, 0, &[] as _),
+Sharing::Exclusive => (ash::vk::SharingMode::EXCLUSIVE, 0, ptr::null()),
 Sharing::Concurrent(queue_family_indices) => (
 ash::vk::SharingMode::CONCURRENT,
 queue_family_indices.len() as u32,
@@ -1530,7 +1530,7 @@ impl PhysicalDevice {
 );

 next.p_next = info2_vk.p_next;
-info2_vk.p_next = next as *const _ as *const _;
+info2_vk.p_next = <*const _>::cast(next);
 }

 if let Some(handle_type) = external_memory_handle_type {
@@ -1541,7 +1541,7 @@ impl PhysicalDevice {
 });

 next.p_next = info2_vk.p_next;
-info2_vk.p_next = next as *const _ as *const _;
+info2_vk.p_next = <*const _>::cast(next);
 }

 if !view_formats.is_empty() {
@@ -1558,7 +1558,7 @@ impl PhysicalDevice {
 });

 next.p_next = info2_vk.p_next;
-info2_vk.p_next = next as *const _ as *const _;
+info2_vk.p_next = <*const _>::cast(next);
 }

 if let Some(image_view_type) = image_view_type {
@@ -1569,8 +1569,8 @@ impl PhysicalDevice {
 },
 );

-next.p_next = info2_vk.p_next as *mut _;
-info2_vk.p_next = next as *const _ as *const _;
+next.p_next = info2_vk.p_next.cast_mut();
+info2_vk.p_next = <*const _>::cast(next);
 }

 if let Some(stencil_usage) = stencil_usage {
@@ -1579,8 +1579,8 @@ impl PhysicalDevice {
 ..Default::default()
 });

-next.p_next = info2_vk.p_next as *mut _;
-info2_vk.p_next = next as *const _ as *const _;
+next.p_next = info2_vk.p_next.cast_mut();
+info2_vk.p_next = <*const _>::cast(next);
 }

 /* Output */
@@ -1594,7 +1594,7 @@ impl PhysicalDevice {
 .insert(ash::vk::ExternalImageFormatProperties::default());

 next.p_next = properties2_vk.p_next;
-properties2_vk.p_next = next as *mut _ as *mut _;
+properties2_vk.p_next = <*mut _>::cast(next);
 }

 if image_view_info_vk.is_some() {
@@ -1602,7 +1602,7 @@ impl PhysicalDevice {
 .insert(ash::vk::FilterCubicImageViewImageFormatPropertiesEXT::default());

 next.p_next = properties2_vk.p_next;
-properties2_vk.p_next = next as *mut _ as *mut _;
+properties2_vk.p_next = <*mut _>::cast(next);
 }

 let result = {
@@ -2066,8 +2066,8 @@ impl PhysicalDevice {
 ..Default::default()
 });

-next.p_next = info_vk.p_next as *mut _;
-info_vk.p_next = next as *const _ as *const _;
+next.p_next = info_vk.p_next.cast_mut();
+info_vk.p_next = <*const _>::cast(next);
 }

 if full_screen_exclusive != FullScreenExclusive::Default {
@@ -2077,8 +2077,8 @@ impl PhysicalDevice {
 ..Default::default()
 });

-next.p_next = info_vk.p_next as *mut _;
-info_vk.p_next = next as *const _ as *const _;
+next.p_next = info_vk.p_next.cast_mut();
+info_vk.p_next = <*const _>::cast(next);
 }

 if let Some(win32_monitor) = win32_monitor {
@@ -2089,8 +2089,8 @@ impl PhysicalDevice {
 },
 );

-next.p_next = info_vk.p_next as *mut _;
-info_vk.p_next = next as *const _ as *const _;
+next.p_next = info_vk.p_next.cast_mut();
+info_vk.p_next = <*const _>::cast(next);
 }

 /* Output */
@@ -2107,8 +2107,8 @@ impl PhysicalDevice {
 let next = capabilities_full_screen_exclusive_vk
 .insert(ash::vk::SurfaceCapabilitiesFullScreenExclusiveEXT::default());

-next.p_next = capabilities_vk.p_next as *mut _;
-capabilities_vk.p_next = next as *mut _ as *mut _;
+next.p_next = capabilities_vk.p_next.cast();
+capabilities_vk.p_next = <*mut _>::cast(next);
 }

 if present_mode.is_some() {
@@ -2121,16 +2121,16 @@ impl PhysicalDevice {
 },
 );

-next.p_next = capabilities_vk.p_next as *mut _;
-capabilities_vk.p_next = next as *mut _ as *mut _;
+next.p_next = capabilities_vk.p_next.cast();
+capabilities_vk.p_next = <*mut _>::cast(next);
 }

 {
 let next = capabilities_present_scaling_vk
 .insert(ash::vk::SurfacePresentScalingCapabilitiesEXT::default());

-next.p_next = capabilities_vk.p_next as *mut _;
-capabilities_vk.p_next = next as *mut _ as *mut _;
+next.p_next = capabilities_vk.p_next.cast();
+capabilities_vk.p_next = <*mut _>::cast(next);
 }
 }

@@ -2142,8 +2142,8 @@ impl PhysicalDevice {
 let next = capabilities_protected_vk
 .insert(ash::vk::SurfaceProtectedCapabilitiesKHR::default());

-next.p_next = capabilities_vk.p_next as *mut _;
-capabilities_vk.p_next = next as *mut _ as *mut _;
+next.p_next = capabilities_vk.p_next.cast();
+capabilities_vk.p_next = <*mut _>::cast(next);
 }

 let fns = self.instance.fns();
@@ -2469,8 +2469,8 @@ impl PhysicalDevice {
 ..Default::default()
 });

-next.p_next = info_vk.p_next as *mut _;
-info_vk.p_next = next as *const _ as *const _;
+next.p_next = info_vk.p_next.cast_mut();
+info_vk.p_next = <*const _>::cast(next);
 }

 if full_screen_exclusive != FullScreenExclusive::Default {
@@ -2481,8 +2481,8 @@ impl PhysicalDevice {
 },
 );

-next.p_next = info_vk.p_next as *mut _;
-info_vk.p_next = next as *const _ as *const _;
+next.p_next = info_vk.p_next.cast_mut();
+info_vk.p_next = <*const _>::cast(next);
 }

 if let Some(win32_monitor) = win32_monitor {
@@ -2493,8 +2493,8 @@ impl PhysicalDevice {
 },
 );

-next.p_next = info_vk.p_next as *mut _;
-info_vk.p_next = next as *const _ as *const _;
+next.p_next = info_vk.p_next.cast_mut();
+info_vk.p_next = <*const _>::cast(next);
 }

 let fns = self.instance.fns();
@@ -2748,8 +2748,8 @@ impl PhysicalDevice {
 },
 );

-next.p_next = info_vk.p_next as *mut _;
-info_vk.p_next = next as *const _ as *const _;
+next.p_next = info_vk.p_next.cast_mut();
+info_vk.p_next = <*const _>::cast(next);
 }

 if let Some(win32_monitor) = win32_monitor {
@@ -2760,8 +2760,8 @@ impl PhysicalDevice {
 },
 );

-next.p_next = info_vk.p_next as *mut _;
-info_vk.p_next = next as *const _ as *const _;
+next.p_next = info_vk.p_next.cast_mut();
+info_vk.p_next = <*const _>::cast(next);
 }

 let fns = self.instance.fns();
@@ -662,7 +662,7 @@ impl<'a> QueueGuard<'a> {
 });

 next.p_next = info_vk.p_next;
-info_vk.p_next = next as *const _ as *const _;
+info_vk.p_next = <*const _>::cast(next);
 }

 if has_present_modes {
@@ -672,8 +672,8 @@ impl<'a> QueueGuard<'a> {
 ..Default::default()
 });

-next.p_next = info_vk.p_next as _;
-info_vk.p_next = next as *const _ as *const _;
+next.p_next = info_vk.p_next.cast();
+info_vk.p_next = <*const _>::cast(next);
 }

 if has_present_regions {
@@ -691,7 +691,7 @@ impl<'a> QueueGuard<'a> {
 });

 next.p_next = info_vk.p_next;
-info_vk.p_next = next as *const _ as *const _;
+info_vk.p_next = <*const _>::cast(next);
 }

 let fns = self.queue.device().fns();
@@ -1177,7 +1177,7 @@ impl<'a> QueueGuard<'a> {
 };

 timeline_semaphore_submit_info_vk.p_next = submit_info_vk.p_next;
-submit_info_vk.p_next = timeline_semaphore_submit_info_vk as *mut _ as *mut _;
+submit_info_vk.p_next = <*mut _>::cast(timeline_semaphore_submit_info_vk);
 }
 }

@@ -194,7 +194,7 @@ impl Sampler {
 );

 next.p_next = create_info_vk.p_next;
-create_info_vk.p_next = next as *const _ as *const _;
+create_info_vk.p_next = <*const _>::cast(next);
 }

 if let Some(sampler_ycbcr_conversion) = sampler_ycbcr_conversion {
@@ -205,7 +205,7 @@ impl Sampler {
 });

 next.p_next = create_info_vk.p_next;
-create_info_vk.p_next = next as *const _ as *const _;
+create_info_vk.p_next = <*const _>::cast(next);
 }

 let handle = unsafe {
@@ -124,7 +124,7 @@ impl RawImage {
 } = &create_info;

 let (sharing_mode, queue_family_index_count, p_queue_family_indices) = match sharing {
-Sharing::Exclusive => (ash::vk::SharingMode::EXCLUSIVE, 0, &[] as _),
+Sharing::Exclusive => (ash::vk::SharingMode::EXCLUSIVE, 0, ptr::null()),
 Sharing::Concurrent(queue_family_indices) => (
 ash::vk::SharingMode::CONCURRENT,
 queue_family_indices.len() as u32,
@@ -171,7 +171,7 @@ impl RawImage {
 );

 next.p_next = create_info_vk.p_next;
-create_info_vk.p_next = next as *const _ as *const _;
+create_info_vk.p_next = <*const _>::cast(next);
 } else {
 drm_format_modifier_plane_layouts_vk = drm_format_modifier_plane_layouts
 .iter()
@@ -205,7 +205,7 @@ impl RawImage {
 );

 next.p_next = create_info_vk.p_next;
-create_info_vk.p_next = next as *const _ as *const _;
+create_info_vk.p_next = <*const _>::cast(next);
 }
 }

@@ -216,7 +216,7 @@ impl RawImage {
 });

 next.p_next = create_info_vk.p_next;
-create_info_vk.p_next = next as *const _ as *const _;
+create_info_vk.p_next = <*const _>::cast(next);
 }

 if !view_formats.is_empty() {
@@ -233,7 +233,7 @@ impl RawImage {
 });

 next.p_next = create_info_vk.p_next;
-create_info_vk.p_next = next as *const _ as *const _;
+create_info_vk.p_next = <*const _>::cast(next);
 }

 if let Some(stencil_usage) = stencil_usage {
@@ -243,7 +243,7 @@ impl RawImage {
 });

 next.p_next = create_info_vk.p_next;
-create_info_vk.p_next = next as *const _ as *const _;
+create_info_vk.p_next = <*const _>::cast(next);
 }

 let handle = {
@@ -444,7 +444,7 @@ impl RawImage {
 });

 next.p_next = info_vk.p_next;
-info_vk.p_next = next as *mut _ as *mut _;
+info_vk.p_next = <*mut _>::cast(next);
 }

 let mut memory_requirements2_vk = ash::vk::MemoryRequirements2::default();
@@ -462,7 +462,7 @@ impl RawImage {
 .insert(ash::vk::MemoryDedicatedRequirements::default());

 next.p_next = memory_requirements2_vk.p_next;
-memory_requirements2_vk.p_next = next as *mut _ as *mut _;
+memory_requirements2_vk.p_next = <*mut _>::cast(next);
 }

 unsafe {
@@ -1197,7 +1197,7 @@ impl RawImage {
 };

 for (info_vk, plane_info_vk) in infos_vk.iter_mut().zip(plane_infos_vk.iter_mut()) {
-info_vk.p_next = plane_info_vk as *mut _ as *mut _;
+info_vk.p_next = <*mut _>::cast(plane_info_vk);
 }

 if self.device.api_version() >= Version::V1_1 {
@@ -595,7 +595,7 @@ impl ImageView {
 });

 next.p_next = info_vk.p_next;
-info_vk.p_next = next as *const _ as *const _;
+info_vk.p_next = <*const _>::cast(next);
 }

 if let Some(conversion) = sampler_ycbcr_conversion {
@@ -606,7 +606,7 @@ impl ImageView {
 });

 next.p_next = info_vk.p_next;
-info_vk.p_next = next as *const _ as *const _;
+info_vk.p_next = <*const _>::cast(next);
 }

 let handle = {
@@ -109,7 +109,7 @@ impl DebugUtilsMessenger {
 message_severity: message_severity.into(),
 message_type: message_type.into(),
 pfn_user_callback: Some(trampoline),
-p_user_data: user_callback.as_ptr() as *const c_void as *mut _,
+p_user_data: user_callback.as_ptr().cast_mut().cast(),
 ..Default::default()
 };

@@ -305,7 +305,7 @@ impl DebugUtilsMessengerCallback {
 }

 pub(crate) fn as_ptr(&self) -> *const CallbackData {
-&self.0 as _
+ptr::addr_of!(self.0)
 }
 }

@@ -367,7 +367,7 @@ pub(super) unsafe extern "system" fn trampoline(
 ),
 };

-let user_callback = &*(user_data_vk as *mut CallbackData as *const CallbackData);
+let user_callback: &CallbackData = &*user_data_vk.cast_const().cast();

 user_callback(
 message_severity_vk.into(),
@@ -94,7 +94,7 @@ use parking_lot::RwLock;
 use smallvec::SmallVec;
 use std::{
 borrow::Cow,
-ffi::{c_void, CString},
+ffi::CString,
 fmt::{Debug, Error as FmtError, Formatter},
 mem::MaybeUninit,
 num::NonZeroU64,
@@ -469,7 +469,7 @@ impl Instance {
 });

 next.p_next = create_info_vk.p_next;
-create_info_vk.p_next = next as *const _ as *const _;
+create_info_vk.p_next = <*const _>::cast(next);
 }

 let mut debug_utils_messenger_create_infos_vk: Vec<_> = debug_utils_messengers
@@ -487,7 +487,7 @@ impl Instance {
 message_severity: message_severity.into(),
 message_type: message_type.into(),
 pfn_user_callback: Some(trampoline),
-p_user_data: user_callback.as_ptr() as *const c_void as *mut _,
+p_user_data: user_callback.as_ptr().cast_mut().cast(),
 ..Default::default()
 }
 })
@@ -495,11 +495,11 @@ impl Instance {

 for i in 1..debug_utils_messenger_create_infos_vk.len() {
 debug_utils_messenger_create_infos_vk[i - 1].p_next =
-&debug_utils_messenger_create_infos_vk[i] as *const _ as *const _;
+<*const _>::cast(&debug_utils_messenger_create_infos_vk[i]);
 }

 if let Some(info) = debug_utils_messenger_create_infos_vk.first() {
-create_info_vk.p_next = info as *const _ as *const _;
+create_info_vk.p_next = <*const _>::cast(info);
 }

 let handle = {
@@ -1210,7 +1210,7 @@ pub(crate) struct InstanceOwnedDebugWrapper<T>(pub(crate) T);
 impl<T> InstanceOwnedDebugWrapper<T> {
 pub fn cast_slice_inner(slice: &[Self]) -> &[T] {
 // SAFETY: `InstanceOwnedDebugWrapper<T>` and `T` have the same layout.
-unsafe { slice::from_raw_parts(slice as *const _ as *const _, slice.len()) }
+unsafe { slice::from_raw_parts(<*const _>::cast(slice), slice.len()) }
 }
 }

@@ -1454,12 +1454,14 @@ unsafe impl<S: Suballocator + Send + 'static> MemoryAllocator for GenericMemoryA
 if let Some(suballocation) = allocation.suballocation {
 let memory_type_index = allocation.device_memory.memory_type_index();
 let pool = self.pools[memory_type_index as usize].blocks.lock();
-let block_ptr = allocation.allocation_handle.0 as *mut DeviceMemoryBlock<S>;
+let block_ptr = allocation
+.allocation_handle
+.0
+.cast::<DeviceMemoryBlock<S>>();

 // TODO: Maybe do a similar check for dedicated blocks.
 debug_assert!(
-pool.iter()
-.any(|block| &**block as *const DeviceMemoryBlock<S> == block_ptr),
+pool.iter().any(|block| ptr::addr_of!(**block) == block_ptr),
 "attempted to deallocate a memory block that does not belong to this allocator",
 );

@@ -1605,7 +1607,7 @@ impl<S: Suballocator> DeviceMemoryBlock<S> {
 Ok(MemoryAlloc {
 device_memory: self.device_memory.clone(),
 suballocation: Some(suballocation),
-allocation_handle: AllocationHandle::from_ptr(self as *mut DeviceMemoryBlock<S> as _),
+allocation_handle: AllocationHandle::from_ptr(<*mut _>::cast(self)),
 })
 }

@@ -186,7 +186,7 @@ impl DeviceMemory {
 });

 next.p_next = allocate_info_vk.p_next;
-allocate_info_vk.p_next = next as *const _ as *const _;
+allocate_info_vk.p_next = <*const _>::cast(next);
 }

 if !export_handle_types.is_empty() {
@@ -196,7 +196,7 @@ impl DeviceMemory {
 });

 next.p_next = allocate_info_vk.p_next;
-allocate_info_vk.p_next = next as *const _ as *const _;
+allocate_info_vk.p_next = <*const _>::cast(next);
 }

 let imported_handle_type = import_info.as_ref().map(|import_info| match import_info {
@@ -226,7 +226,7 @@ impl DeviceMemory {
 });

 next.p_next = allocate_info_vk.p_next;
-allocate_info_vk.p_next = next as *const _ as *const _;
+allocate_info_vk.p_next = <*const _>::cast(next);
 }
 MemoryImportInfo::Win32 {
 handle_type,
@@ -241,7 +241,7 @@ impl DeviceMemory {
 );

 next.p_next = allocate_info_vk.p_next;
-allocate_info_vk.p_next = next as *const _ as *const _;
+allocate_info_vk.p_next = <*const _>::cast(next);
 }
 }
 }
@@ -253,7 +253,7 @@ impl DeviceMemory {
 });

 next.p_next = allocate_info_vk.p_next;
-allocate_info_vk.p_next = next as *const _ as *const _;
+allocate_info_vk.p_next = <*const _>::cast(next);
 }

 // VUID-vkAllocateMemory-maxMemoryAllocationCount-04101
@@ -2017,7 +2017,9 @@ impl MappedDeviceMemory {
 #[inline]
 pub unsafe fn read_unchecked(&self, range: Range<DeviceSize>) -> &[u8] {
 slice::from_raw_parts(
-self.pointer.add((range.start - self.range.start) as usize) as *const u8,
+self.pointer
+.add((range.start - self.range.start).try_into().unwrap())
+.cast(),
 (range.end - range.start) as usize,
 )
 }
@@ -2055,8 +2057,10 @@ impl MappedDeviceMemory {
 #[allow(clippy::mut_from_ref)]
 pub unsafe fn write_unchecked(&self, range: Range<DeviceSize>) -> &mut [u8] {
 slice::from_raw_parts_mut(
-self.pointer.add((range.start - self.range.start) as usize) as *mut u8,
-(range.end - range.start) as usize,
+self.pointer
+.add((range.start - self.range.start).try_into().unwrap())
+.cast::<u8>(),
+(range.end - range.start).try_into().unwrap(),
 )
 }

@@ -118,7 +118,7 @@ impl PipelineCache {
 p_initial_data: if initial_data.is_empty() {
 ptr::null()
 } else {
-initial_data.as_ptr() as _
+initial_data.as_ptr().cast()
 },
 ..Default::default()
 };
@@ -215,7 +215,7 @@ impl PipelineCache {
 self.device.handle(),
 self.handle,
 &mut count,
-data.as_mut_ptr() as *mut _,
+data.as_mut_ptr().cast(),
 );

 match result {
@@ -129,7 +129,7 @@ impl ComputePipeline {
 map_entry_count: specialization_map_entries_vk.len() as u32,
 p_map_entries: specialization_map_entries_vk.as_ptr(),
 data_size: specialization_data_vk.len(),
-p_data: specialization_data_vk.as_ptr() as *const _,
+p_data: specialization_data_vk.as_ptr().cast(),
 ..Default::default()
 };
 required_subgroup_size_create_info =
@@ -143,7 +143,7 @@ impl ComputePipeline {
 p_next: required_subgroup_size_create_info.as_ref().map_or(
 ptr::null(),
 |required_subgroup_size_create_info| {
-required_subgroup_size_create_info as *const _ as _
+<*const _>::cast(required_subgroup_size_create_info)
 },
 ),
 flags: flags.into(),
@@ -318,7 +318,7 @@ impl GraphicsPipeline {
 p_next: required_subgroup_size_create_info.as_ref().map_or(
 ptr::null(),
 |required_subgroup_size_create_info| {
-required_subgroup_size_create_info as *const _ as _
+<*const _>::cast(required_subgroup_size_create_info)
 },
 ),
 p_name: name_vk.as_ptr(),
@@ -328,7 +328,7 @@ impl GraphicsPipeline {

 *specialization_info_vk = ash::vk::SpecializationInfo {
 p_map_entries: specialization_map_entries_vk.as_ptr(),
-p_data: specialization_data_vk.as_ptr() as _,
+p_data: specialization_data_vk.as_ptr().cast(),
 ..*specialization_info_vk
 };
 }
@@ -408,7 +408,7 @@ impl GraphicsPipeline {

 // VUID-VkPipelineVertexInputDivisorStateCreateInfoEXT-vertexBindingDivisorCount-arraylength
 if !vertex_binding_divisor_descriptions_vk.is_empty() {
-vertex_input_state.p_next = vertex_binding_divisor_state_vk.insert(
+let next = vertex_binding_divisor_state_vk.insert(
 ash::vk::PipelineVertexInputDivisorStateCreateInfoEXT {
 vertex_binding_divisor_count: vertex_binding_divisor_descriptions_vk
 .len()
@@ -417,7 +417,8 @@ impl GraphicsPipeline {
 .as_ptr(),
 ..Default::default()
 },
-) as *const _ as *const _;
+);
+vertex_input_state.p_next = <*const _>::cast(next);
 }
 }
 }
@@ -465,7 +466,7 @@ impl GraphicsPipeline {

 tessellation_domain_origin_state_vk.p_next = tessellation_state_vk.p_next;
 tessellation_state_vk.p_next =
-tessellation_domain_origin_state_vk as *const _ as *const _;
+<*const _>::cast(tessellation_domain_origin_state_vk);
 }
 }

@@ -559,7 +560,7 @@ impl GraphicsPipeline {
 (ash::vk::FALSE, 1, 0)
 };

-rasterization_state.p_next = rasterization_line_state_vk.insert(
+let next = rasterization_line_state_vk.insert(
 ash::vk::PipelineRasterizationLineStateCreateInfoEXT {
 line_rasterization_mode: line_rasterization_mode.into(),
 stippled_line_enable,
@@ -567,7 +568,8 @@ impl GraphicsPipeline {
 line_stipple_pattern,
 ..Default::default()
 },
-) as *const _ as *const _;
+);
+rasterization_state.p_next = <*const _>::cast(next);
 }
 }

@@ -595,7 +597,7 @@ impl GraphicsPipeline {
 rasterization_samples: rasterization_samples.into(),
 sample_shading_enable,
 min_sample_shading,
-p_sample_mask: sample_mask as _,
+p_sample_mask: sample_mask.as_ptr(),
 alpha_to_coverage_enable: alpha_to_coverage_enable as ash::vk::Bool32,
 alpha_to_one_enable: alpha_to_one_enable as ash::vk::Bool32,
 ..Default::default()
@@ -743,12 +745,12 @@ impl GraphicsPipeline {
 },
 ));

-color_blend_state_vk.p_next =
-color_write_vk.insert(ash::vk::PipelineColorWriteCreateInfoEXT {
-attachment_count: color_write_enables_vk.len() as u32,
-p_color_write_enables: color_write_enables_vk.as_ptr(),
-..Default::default()
-}) as *const _ as *const _;
+let next = color_write_vk.insert(ash::vk::PipelineColorWriteCreateInfoEXT {
+attachment_count: color_write_enables_vk.len() as u32,
+p_color_write_enables: color_write_enables_vk.as_ptr(),
+..Default::default()
+});
+color_blend_state_vk.p_next = <*const _>::cast(next);
 }
 }

@@ -853,39 +855,39 @@ impl GraphicsPipeline {
 p_stages: stages_vk.as_ptr(),
 p_vertex_input_state: vertex_input_state_vk
 .as_ref()
-.map(|p| p as *const _)
+.map(ptr::from_ref)
 .unwrap_or(ptr::null()),
 p_input_assembly_state: input_assembly_state_vk
 .as_ref()
-.map(|p| p as *const _)
+.map(ptr::from_ref)
 .unwrap_or(ptr::null()),
 p_tessellation_state: tessellation_state_vk
 .as_ref()
-.map(|p| p as *const _)
+.map(ptr::from_ref)
 .unwrap_or(ptr::null()),
 p_viewport_state: viewport_state_vk
 .as_ref()
-.map(|p| p as *const _)
+.map(ptr::from_ref)
 .unwrap_or(ptr::null()),
 p_rasterization_state: rasterization_state_vk
 .as_ref()
-.map(|p| p as *const _)
+.map(ptr::from_ref)
 .unwrap_or(ptr::null()),
 p_multisample_state: multisample_state_vk
 .as_ref()
-.map(|p| p as *const _)
+.map(ptr::from_ref)
 .unwrap_or(ptr::null()),
 p_depth_stencil_state: depth_stencil_state_vk
 .as_ref()
-.map(|p| p as *const _)
+.map(ptr::from_ref)
 .unwrap_or(ptr::null()),
 p_color_blend_state: color_blend_state_vk
 .as_ref()
-.map(|p| p as *const _)
+.map(ptr::from_ref)
 .unwrap_or(ptr::null()),
 p_dynamic_state: dynamic_state_vk
 .as_ref()
-.map(|s| s as *const _)
+.map(ptr::from_ref)
 .unwrap_or(ptr::null()),
 layout: layout.handle(),
 render_pass: render_pass_vk,
@@ -899,17 +901,17 @@ impl GraphicsPipeline {

 if let Some(info) = discard_rectangle_state_vk.as_mut() {
 info.p_next = create_info_vk.p_next;
-create_info_vk.p_next = info as *const _ as *const _;
+create_info_vk.p_next = <*const _>::cast(info);
 }

 if let Some(info) = conservative_rasterization_state_vk.as_mut() {
 info.p_next = create_info_vk.p_next;
-create_info_vk.p_next = info as *const _ as *const _;
+create_info_vk.p_next = <*const _>::cast(info);
 }

 if let Some(info) = rendering_create_info_vk.as_mut() {
 info.p_next = create_info_vk.p_next;
-create_info_vk.p_next = info as *const _ as *const _;
+create_info_vk.p_next = <*const _>::cast(info);
 }

 let cache_handle = match cache.as_ref() {
|
@ -40,21 +40,22 @@ macro_rules! impl_vertex {
         #[inline] fn f<T: VertexMember>(_: &T) -> Format { T::format() }
         let format = f(&dummy.$member);
         let field_size = {
-            let p = unsafe {
-                core::ptr::addr_of!((*(&dummy as *const _ as *const $out)).$member)
+            let dummy_ptr: *const $out = <*const _>::cast(&dummy);
+            let member_ptr = unsafe {
+                core::ptr::addr_of!((*dummy_ptr).$member)
             };
             const fn size_of_raw<T>(_: *const T) -> usize {
                 core::mem::size_of::<T>()
             }
-            size_of_raw(p)
+            size_of_raw(member_ptr)
         } as u32;
         let format_size = format.block_size() as u32;
         let num_elements = field_size / format_size;
         let remainder = field_size % format_size;
         assert!(remainder == 0, "struct field `{}` size does not fit multiple of format size", stringify!($member));
 
-        let dummy_ptr = (&dummy) as *const _;
-        let member_ptr = (&dummy.$member) as *const _;
+        let dummy_ptr = core::ptr::addr_of!(dummy);
+        let member_ptr = core::ptr::addr_of!(dummy.$member);
 
         members.insert(stringify!($member).to_string(), VertexMemberInfo {
             offset: u32::try_from(member_ptr as usize - dummy_ptr as usize).unwrap(),
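The `impl_vertex!` change keeps the existing `addr_of!` trick for measuring a field without ever creating a `&` reference to it; only the `as` casts are replaced. A self-contained sketch of the idiom on a hypothetical `MyVertex` type (the offsets assume `#[repr(C)]` layout):

```rust
use core::mem;
use core::ptr;

// Demo vertex type; the real macro operates on the caller's vertex struct.
#[repr(C)]
struct MyVertex {
    position: [f32; 3],
    uv: [f32; 2],
}

// Mirrors the helper in the macro: the pointee's size is picked up from the
// raw pointer's type, without ever dereferencing it.
const fn size_of_raw<T>(_: *const T) -> usize {
    mem::size_of::<T>()
}

fn main() {
    // All-zero floats are a valid value, so zeroing is fine for this demo.
    let dummy: MyVertex = unsafe { mem::zeroed() };
    assert_eq!(dummy.position, [0.0; 3]);

    // `addr_of!` yields raw pointers without intermediate references, which is
    // what the lint-clean version of the macro relies on.
    let dummy_ptr = ptr::addr_of!(dummy);
    let member_ptr = unsafe { ptr::addr_of!((*dummy_ptr).uv) };

    let field_size = size_of_raw(member_ptr) as u32;
    let offset = u32::try_from(member_ptr as usize - dummy_ptr as usize).unwrap();

    assert_eq!(field_size, 8); // size of `[f32; 2]`
    assert_eq!(offset, 12); // follows the 12-byte `position` field
}
```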
@ -13,7 +13,6 @@ use crate::{
     VulkanError, VulkanObject,
 };
 use std::{
-    ffi::c_void,
     mem::{size_of_val, MaybeUninit},
     num::NonZeroU64,
     ops::Range,
@ -286,7 +285,7 @@ impl QueryPool {
                 range.start,
                 range.len() as u32,
                 size_of_val(destination),
-                destination.as_mut_ptr() as *mut c_void,
+                destination.as_mut_ptr().cast(),
                 stride,
                 ash::vk::QueryResultFlags::from(flags) | T::FLAG,
             )
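Replacing `as *mut c_void` with `.cast()` lets the target type be inferred from the FFI parameter, which is also why the `ffi::c_void` import above becomes unused. A rough sketch with a made-up `write_results` function standing in for the ash call that `QueryPool` drives:

```rust
use std::ffi::c_void;

// Stand-in for an FFI call that takes an untyped output buffer, loosely shaped
// like `vkGetQueryPoolResults`; the real signature lives in ash.
unsafe fn write_results(p_data: *mut c_void, len: usize) {
    let bytes = p_data.cast::<u8>();
    for i in 0..len {
        unsafe { bytes.add(i).write(0) };
    }
}

fn main() {
    let mut destination = [0u64; 4];

    // `.cast()` converts `*mut u64` to `*mut c_void`, with the target type taken
    // from the parameter instead of spelled out in an `as` cast.
    unsafe {
        write_results(
            destination.as_mut_ptr().cast(),
            std::mem::size_of_val(&destination),
        );
    }

    assert_eq!(destination, [0; 4]);
}
```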
@ -78,8 +78,8 @@ impl RenderPass {
             let PerAttachment { stencil_layout_vk } = per_attachment_vk;
 
             if let Some(next) = stencil_layout_vk {
-                next.p_next = attachment_vk.p_next as *mut _;
-                attachment_vk.p_next = next as *const _ as *const _;
+                next.p_next = attachment_vk.p_next.cast_mut();
+                attachment_vk.p_next = <*const _>::cast(next);
             }
         }
 
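`clippy::ptr_cast_constness` is satisfied by `cast_mut`, which spells out the const-to-mut conversion that `as *mut _` performed implicitly. A minimal sketch of the stencil-layout splice, with hypothetical `#[repr(C)]` stand-ins for the ash attachment structs (the extension struct's `p_next` is `*mut`, which is why the conversion is needed at all):

```rust
use std::ffi::c_void;
use std::ptr;

#[repr(C)]
struct AttachmentDescription2Vk {
    p_next: *const c_void,
}

#[repr(C)]
struct AttachmentStencilLayoutVk {
    p_next: *mut c_void, // this struct's chain pointer is mutable
}

fn main() {
    let mut attachment_vk = AttachmentDescription2Vk { p_next: ptr::null() };
    let mut next = AttachmentStencilLayoutVk { p_next: ptr::null_mut() };

    // `cast_mut` makes the `*const c_void` -> `*mut c_void` step explicit,
    // replacing the old `as *mut _` spelling.
    next.p_next = attachment_vk.p_next.cast_mut();
    attachment_vk.p_next = <*const _>::cast(&next);

    // The base struct's chain now points at `next`.
    let expected: *const c_void = <*const _>::cast(&next);
    assert_eq!(attachment_vk.p_next, expected);
}
```

The Swapchain hunks below use the same `cast_mut` plus `<*const _>::cast` pair for their pNext splices.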
@ -350,8 +350,8 @@ impl RenderPass {
                 let PerAttachmentReferenceVk { stencil_layout_vk } = per_input_attachment_vk;
 
                 if let Some(stencil_layout_vk) = stencil_layout_vk {
-                    stencil_layout_vk.p_next = input_attachment_vk.p_next as *mut _;
-                    input_attachment_vk.p_next = stencil_layout_vk as *const _ as *const _;
+                    stencil_layout_vk.p_next = input_attachment_vk.p_next.cast_mut();
+                    input_attachment_vk.p_next = <*const _>::cast(stencil_layout_vk);
                 }
             }
 
@ -360,8 +360,8 @@ impl RenderPass {
                     per_depth_stencil_attachment_vk;
 
                 if let Some(stencil_layout_vk) = stencil_layout_vk {
-                    stencil_layout_vk.p_next = depth_stencil_attachment_vk.p_next as *mut _;
-                    depth_stencil_attachment_vk.p_next = stencil_layout_vk as *const _ as *const _;
+                    stencil_layout_vk.p_next = depth_stencil_attachment_vk.p_next.cast_mut();
+                    depth_stencil_attachment_vk.p_next = <*const _>::cast(stencil_layout_vk);
                 }
             }
 
@ -370,9 +370,10 @@ impl RenderPass {
                     per_depth_stencil_resolve_attachment_vk;
 
                 if let Some(stencil_layout_vk) = stencil_layout_vk {
-                    stencil_layout_vk.p_next = depth_stencil_resolve_attachment_vk.p_next as *mut _;
-                    depth_stencil_resolve_attachment_vk.p_next =
-                        stencil_layout_vk as *const _ as *const _;
+                    stencil_layout_vk.p_next =
+                        depth_stencil_resolve_attachment_vk.p_next.cast_mut();
+                    depth_stencil_resolve_attachment_vk.p_next =
+                        <*const _>::cast(stencil_layout_vk);
                 }
             }
 
@ -405,7 +406,7 @@ impl RenderPass {
                 };
 
                 depth_stencil_resolve_vk.p_next = subpass_vk.p_next;
-                subpass_vk.p_next = depth_stencil_resolve_vk as *const _ as *const _;
+                subpass_vk.p_next = <*const _>::cast(depth_stencil_resolve_vk);
             }
         }
 
@ -465,7 +466,7 @@ impl RenderPass {
 
             if let Some(next) = memory_barrier_vk {
                 next.p_next = dependency_vk.p_next;
-                dependency_vk.p_next = next as *const _ as *const _;
+                dependency_vk.p_next = <*const _>::cast(next);
             }
         }
 
@ -838,7 +839,7 @@ impl RenderPass {
             );
 
             next.p_next = create_info_vk.p_next;
-            create_info_vk.p_next = next as *const _ as *const _;
+            create_info_vk.p_next = <*const _>::cast(next);
         }
     }
 
@ -871,7 +872,7 @@ impl RenderPass {
             });
 
             next.p_next = create_info_vk.p_next;
-            create_info_vk.p_next = next as *const _ as *const _;
+            create_info_vk.p_next = <*const _>::cast(next);
         }
 
         Ok({
@ -1056,7 +1056,7 @@ impl Swapchain {
            });
 
            next.p_next = create_info_vk.p_next;
-           create_info_vk.p_next = next as *const _ as *const _;
+           create_info_vk.p_next = <*const _>::cast(next);
        }
 
        if full_screen_exclusive != FullScreenExclusive::Default {
@ -1066,8 +1066,8 @@ impl Swapchain {
                ..Default::default()
            });
 
-           next.p_next = create_info_vk.p_next as *mut _;
-           create_info_vk.p_next = next as *const _ as *const _;
+           next.p_next = create_info_vk.p_next.cast_mut();
+           create_info_vk.p_next = <*const _>::cast(next);
        }
 
        if let Some(Win32Monitor(hmonitor)) = win32_monitor {
@ -1078,8 +1078,8 @@ impl Swapchain {
                },
            );
 
-           next.p_next = create_info_vk.p_next as *mut _;
-           create_info_vk.p_next = next as *const _ as *const _;
+           next.p_next = create_info_vk.p_next.cast_mut();
+           create_info_vk.p_next = <*const _>::cast(next);
        }
 
        if !present_modes.is_empty() {
@ -1091,8 +1091,8 @@ impl Swapchain {
                ..Default::default()
            });
 
-           next.p_next = create_info_vk.p_next as *mut _;
-           create_info_vk.p_next = next as *const _ as *const _;
+           next.p_next = create_info_vk.p_next.cast_mut();
+           create_info_vk.p_next = <*const _>::cast(next);
        }
 
        if scaling_behavior.is_some() || present_gravity.is_some() {
@ -1106,8 +1106,8 @@ impl Swapchain {
                ..Default::default()
            });
 
-           next.p_next = create_info_vk.p_next as *mut _;
-           create_info_vk.p_next = next as *const _ as *const _;
+           next.p_next = create_info_vk.p_next.cast_mut();
+           create_info_vk.p_next = <*const _>::cast(next);
        }
 
        let fns = device.fns();
@ -105,31 +105,27 @@ impl Surface {
 
         match (window_handle.as_raw(), display_handle.as_raw()) {
             (RawWindowHandle::AndroidNdk(window), RawDisplayHandle::Android(_display)) => {
-                Self::from_android(
-                    instance,
-                    window.a_native_window.as_ptr() as *mut ash::vk::ANativeWindow,
-                    None,
-                )
+                Self::from_android(instance, window.a_native_window.as_ptr().cast(), None)
             }
             #[cfg(target_os = "macos")]
             (RawWindowHandle::AppKit(window), RawDisplayHandle::AppKit(_display)) => {
                 // Ensure the layer is `CAMetalLayer`.
-                let metal_layer = get_metal_layer_macos(window.ns_view.as_ptr() as *mut c_void);
+                let metal_layer = get_metal_layer_macos(window.ns_view.as_ptr().cast());
 
-                Self::from_mac_os(instance, metal_layer as *const c_void, None)
+                Self::from_mac_os(instance, metal_layer.cast(), None)
             }
             #[cfg(target_os = "ios")]
             (RawWindowHandle::UiKit(window), RawDisplayHandle::UiKit(_display)) => {
                 // Ensure the layer is `CAMetalLayer`.
-                let metal_layer = get_metal_layer_ios(window.ui_view.as_ptr() as *mut c_void);
+                let metal_layer = get_metal_layer_ios(window.ui_view.as_ptr().cast());
 
-                Self::from_ios(instance, metal_layer.render_layer.0 as *const c_void, None)
+                Self::from_ios(instance, metal_layer.render_layer.0.cast(), None)
             }
             (RawWindowHandle::Wayland(window), RawDisplayHandle::Wayland(display)) => {
                 Self::from_wayland(
                     instance,
-                    display.display.as_ptr() as *mut ash::vk::wl_display,
-                    window.surface.as_ptr() as *mut ash::vk::wl_surface,
+                    display.display.as_ptr().cast(),
+                    window.surface.as_ptr().cast(),
                     None,
                 )
             }
@ -143,13 +139,13 @@ impl Surface {
             }
             (RawWindowHandle::Xcb(window), RawDisplayHandle::Xcb(display)) => Self::from_xcb(
                 instance,
-                display.connection.unwrap().as_ptr() as *mut ash::vk::xcb_connection_t,
+                display.connection.unwrap().as_ptr().cast(),
                 window.window.get() as ash::vk::xcb_window_t,
                 None,
             ),
             (RawWindowHandle::Xlib(window), RawDisplayHandle::Xlib(display)) => Self::from_xlib(
                 instance,
-                display.display.unwrap().as_ptr() as *mut ash::vk::Display,
+                display.display.unwrap().as_ptr().cast(),
                 window.window as ash::vk::Window,
                 None,
             ),
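The surface constructors now rely on `.cast()` to go from the untyped pointer handed out by `raw-window-handle` to whatever platform-specific pointer type the `from_*` constructor expects. A sketch under that assumption, with a hypothetical `from_native_window` in place of the real constructor:

```rust
use std::ffi::c_void;
use std::ptr::NonNull;

// Opaque stand-in for a platform window type such as `ash::vk::ANativeWindow`;
// only the pointer type matters here.
#[repr(C)]
struct NativeWindow {
    _opaque: [u8; 0],
}

// Stand-in for a surface constructor that takes a typed raw pointer,
// loosely shaped like `Surface::from_android`.
fn from_native_window(window: *mut NativeWindow) -> bool {
    !window.is_null()
}

fn main() {
    // Models a window handle whose pointer is exposed as `NonNull<c_void>`,
    // as the raw-window-handle types do.
    let mut backing = 0u8;
    let handle: NonNull<c_void> = NonNull::from(&mut backing).cast();

    // `.as_ptr().cast()` converts `*mut c_void` to `*mut NativeWindow`, with the
    // target type inferred from the parameter instead of written as an `as` cast.
    let created = from_native_window(handle.as_ptr().cast());
    assert!(created);
}
```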
@ -108,7 +108,7 @@ impl Fence {
 
         if let Some(info) = export_fence_create_info_vk.as_mut() {
             info.p_next = create_info_vk.p_next;
-            create_info_vk.p_next = info as *const _ as *const _;
+            create_info_vk.p_next = <*const _>::cast(info);
         }
 
         let handle = {
@ -140,7 +140,7 @@ impl Semaphore {
            });
 
            next.p_next = create_info_vk.p_next;
-           create_info_vk.p_next = next as *const _ as *const _;
+           create_info_vk.p_next = <*const _>::cast(next);
        }
 
        if !export_handle_types.is_empty() {
@ -150,7 +150,7 @@ impl Semaphore {
            });
 
            next.p_next = create_info_vk.p_next;
-           create_info_vk.p_next = next as *const _ as *const _;
+           create_info_vk.p_next = <*const _>::cast(next);
        }
 
        let handle = {