Use pub(crate) in more places (#705)

This commit is contained in:
tomaka 2017-07-31 07:50:47 +02:00 committed by GitHub
parent 5e78de73e2
commit a2f7f0db80
21 changed files with 131 additions and 161 deletions

View File

@ -32,7 +32,6 @@ use std::ptr;
use std::sync::Arc;
use buffer::BufferUsage;
use buffer::usage::usage_to_bits;
use device::Device;
use device::DeviceOwned;
use memory::DeviceMemory;
@ -79,7 +78,7 @@ impl UnsafeBuffer {
size
};
let usage_bits = usage_to_bits(usage);
let usage_bits = usage.to_vulkan_bits();
// Checking sparse features.
assert!(sparse.sparse || !sparse.sparse_residency,
@ -156,7 +155,7 @@ impl UnsafeBuffer {
debug_assert!(output.memoryRequirements.size >= size as u64);
debug_assert!(output.memoryRequirements.memoryTypeBits != 0);
let mut out: MemoryRequirements = output.memoryRequirements.into();
let mut out = MemoryRequirements::from_vulkan_reqs(output.memoryRequirements);
if let Some(output2) = output2 {
debug_assert_eq!(output2.requiresDedicatedAllocation, 0);
out.prefer_dedicated = output2.prefersDedicatedAllocation != 0;
@ -168,7 +167,7 @@ impl UnsafeBuffer {
vk.GetBufferMemoryRequirements(device.internal_object(), buffer, &mut output);
debug_assert!(output.size >= size as u64);
debug_assert!(output.memoryTypeBits != 0);
output.into()
MemoryRequirements::from_vulkan_reqs(output)
};
// We have to manually enforce some additional requirements for some buffer types.

View File

@ -30,6 +30,39 @@ pub struct BufferUsage {
}
impl BufferUsage {
/// Turns this `BufferUsage` into raw Vulkan bits.
pub(crate) fn to_vulkan_bits(&self) -> vk::BufferUsageFlagBits {
    // Table-driven translation: pair each boolean flag with its Vulkan bit,
    // then OR together the bits of every flag that is enabled.
    let flag_bits = [
        (self.transfer_source, vk::BUFFER_USAGE_TRANSFER_SRC_BIT),
        (self.transfer_destination, vk::BUFFER_USAGE_TRANSFER_DST_BIT),
        (self.uniform_texel_buffer, vk::BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT),
        (self.storage_texel_buffer, vk::BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT),
        (self.uniform_buffer, vk::BUFFER_USAGE_UNIFORM_BUFFER_BIT),
        (self.storage_buffer, vk::BUFFER_USAGE_STORAGE_BUFFER_BIT),
        (self.index_buffer, vk::BUFFER_USAGE_INDEX_BUFFER_BIT),
        (self.vertex_buffer, vk::BUFFER_USAGE_VERTEX_BUFFER_BIT),
        (self.indirect_buffer, vk::BUFFER_USAGE_INDIRECT_BUFFER_BIT),
    ];
    flag_bits
        .iter()
        .filter(|&&(enabled, _)| enabled)
        .fold(0, |bits, &(_, bit)| bits | bit)
}
/// Builds a `BufferUsage` with all values set to false.
#[inline]
pub fn none() -> BufferUsage {
@ -178,37 +211,3 @@ impl BitOr for BufferUsage {
}
}
}
/// Turns a `BufferUsage` into raw bits.
#[inline]
pub fn usage_to_bits(usage: BufferUsage) -> vk::BufferUsageFlagBits {
    // For each (flag, bit) pair below, the bit is OR-ed into the result
    // whenever the corresponding flag of `usage` is set.
    let mappings = [
        (usage.transfer_source, vk::BUFFER_USAGE_TRANSFER_SRC_BIT),
        (usage.transfer_destination, vk::BUFFER_USAGE_TRANSFER_DST_BIT),
        (usage.uniform_texel_buffer, vk::BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT),
        (usage.storage_texel_buffer, vk::BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT),
        (usage.uniform_buffer, vk::BUFFER_USAGE_UNIFORM_BUFFER_BIT),
        (usage.storage_buffer, vk::BUFFER_USAGE_STORAGE_BUFFER_BIT),
        (usage.index_buffer, vk::BUFFER_USAGE_INDEX_BUFFER_BIT),
        (usage.vertex_buffer, vk::BUFFER_USAGE_VERTEX_BUFFER_BIT),
        (usage.indirect_buffer, vk::BUFFER_USAGE_INDIRECT_BUFFER_BIT),
    ];
    mappings
        .iter()
        .filter(|&&(enabled, _)| enabled)
        .fold(0, |bits, &(_, bit)| bits | bit)
}

View File

@ -141,10 +141,10 @@ impl<'a> SubmitCommandBufferBuilder<'a> {
///
#[inline]
pub unsafe fn add_wait_semaphore(&mut self, semaphore: &'a Semaphore, stages: PipelineStages) {
debug_assert!(Into::<vk::PipelineStageFlagBits>::into(stages) != 0);
debug_assert!(stages.into_vulkan_bits() != 0);
// TODO: debug assert that the device supports the stages
self.wait_semaphores.push(semaphore.internal_object());
self.destination_stages.push(stages.into());
self.destination_stages.push(stages.into_vulkan_bits());
}
/// Adds a command buffer that is executed as part of this command.

View File

@ -1102,7 +1102,7 @@ impl<P> UnsafeCommandBufferBuilder<P> {
vk.CmdPushConstants(cmd,
pipeline_layout.sys().internal_object(),
stages.into(),
stages.into_vulkan_bits(),
offset as u32,
size as u32,
data as *const D as *const _);
@ -1117,7 +1117,7 @@ impl<P> UnsafeCommandBufferBuilder<P> {
debug_assert!(!stages.host);
debug_assert_ne!(stages, PipelineStages::none());
vk.CmdResetEvent(cmd, event.internal_object(), stages.into());
vk.CmdResetEvent(cmd, event.internal_object(), stages.into_vulkan_bits());
}
/// Calls `vkCmdSetBlendConstants` on the builder.
@ -1156,7 +1156,7 @@ impl<P> UnsafeCommandBufferBuilder<P> {
debug_assert!(!stages.host);
debug_assert_ne!(stages, PipelineStages::none());
vk.CmdSetEvent(cmd, event.internal_object(), stages.into());
vk.CmdSetEvent(cmd, event.internal_object(), stages.into_vulkan_bits());
}
/// Calls `vkCmdSetLineWidth` on the builder.
@ -1203,7 +1203,7 @@ impl<P> UnsafeCommandBufferBuilder<P> {
where I: Iterator<Item = Scissor>
{
let scissors = scissors
.map(|v| v.clone().into())
.map(|v| v.clone().into_vulkan_rect())
.collect::<SmallVec<[_; 16]>>();
if scissors.is_empty() {
return;
@ -1230,7 +1230,7 @@ impl<P> UnsafeCommandBufferBuilder<P> {
where I: Iterator<Item = Viewport>
{
let viewports = viewports
.map(|v| v.clone().into())
.map(|v| v.clone().into_vulkan_viewport())
.collect::<SmallVec<[_; 16]>>();
if viewports.is_empty() {
return;
@ -1490,8 +1490,8 @@ impl UnsafeCommandBufferBuilderPipelineBarrier {
debug_assert_ne!(source, PipelineStages::none());
debug_assert_ne!(destination, PipelineStages::none());
self.src_stage_mask |= Into::<vk::PipelineStageFlags>::into(source);
self.dst_stage_mask |= Into::<vk::PipelineStageFlags>::into(destination);
self.src_stage_mask |= source.into_vulkan_bits();
self.dst_stage_mask |= destination.into_vulkan_bits();
}
/// Adds a memory barrier. This means that all the memory writes by the given source stages
@ -1516,8 +1516,8 @@ impl UnsafeCommandBufferBuilderPipelineBarrier {
self.memory_barriers.push(vk::MemoryBarrier {
sType: vk::STRUCTURE_TYPE_MEMORY_BARRIER,
pNext: ptr::null(),
srcAccessMask: source_access.into(),
dstAccessMask: destination_access.into(),
srcAccessMask: source_access.into_vulkan_bits(),
dstAccessMask: destination_access.into_vulkan_bits(),
});
}
@ -1565,8 +1565,8 @@ impl UnsafeCommandBufferBuilderPipelineBarrier {
self.buffer_barriers.push(vk::BufferMemoryBarrier {
sType: vk::STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
pNext: ptr::null(),
srcAccessMask: source_access.into(),
dstAccessMask: destination_access.into(),
srcAccessMask: source_access.into_vulkan_bits(),
dstAccessMask: destination_access.into_vulkan_bits(),
srcQueueFamilyIndex: src_queue,
dstQueueFamilyIndex: dest_queue,
buffer: buffer.internal_object(),
@ -1637,8 +1637,8 @@ impl UnsafeCommandBufferBuilderPipelineBarrier {
self.image_barriers.push(vk::ImageMemoryBarrier {
sType: vk::STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
pNext: ptr::null(),
srcAccessMask: source_access.into(),
dstAccessMask: destination_access.into(),
srcAccessMask: source_access.into_vulkan_bits(),
dstAccessMask: destination_access.into_vulkan_bits(),
oldLayout: current_layout as u32,
newLayout: new_layout as u32,
srcQueueFamilyIndex: src_queue,

View File

@ -493,6 +493,30 @@ impl ShaderStages {
(self.geometry && other.geometry) || (self.fragment && other.fragment) ||
(self.compute && other.compute)
}
#[inline]
pub(crate) fn into_vulkan_bits(self) -> vk::ShaderStageFlags {
    // Build the Vulkan stage mask by OR-ing the bit of every shader stage
    // that is enabled on this `ShaderStages` value.
    let stage_bits = [
        (self.vertex, vk::SHADER_STAGE_VERTEX_BIT),
        (self.tessellation_control, vk::SHADER_STAGE_TESSELLATION_CONTROL_BIT),
        (self.tessellation_evaluation, vk::SHADER_STAGE_TESSELLATION_EVALUATION_BIT),
        (self.geometry, vk::SHADER_STAGE_GEOMETRY_BIT),
        (self.fragment, vk::SHADER_STAGE_FRAGMENT_BIT),
        (self.compute, vk::SHADER_STAGE_COMPUTE_BIT),
    ];
    stage_bits
        .iter()
        .filter(|&&(enabled, _)| enabled)
        .fold(0, |mask, &(_, bit)| mask | bit)
}
}
impl BitOr for ShaderStages {
@ -525,30 +549,3 @@ impl From<ShaderStages> for PipelineStages {
}
}
}
#[doc(hidden)]
impl Into<vk::ShaderStageFlags> for ShaderStages {
    /// Converts this `ShaderStages` into the raw Vulkan stage mask by
    /// OR-ing the bit of every enabled stage.
    #[inline]
    fn into(self) -> vk::ShaderStageFlags {
        let stage_bits = [
            (self.vertex, vk::SHADER_STAGE_VERTEX_BIT),
            (self.tessellation_control, vk::SHADER_STAGE_TESSELLATION_CONTROL_BIT),
            (self.tessellation_evaluation, vk::SHADER_STAGE_TESSELLATION_EVALUATION_BIT),
            (self.geometry, vk::SHADER_STAGE_GEOMETRY_BIT),
            (self.fragment, vk::SHADER_STAGE_FRAGMENT_BIT),
            (self.compute, vk::SHADER_STAGE_COMPUTE_BIT),
        ];
        stage_bits
            .iter()
            .filter(|&&(enabled, _)| enabled)
            .fold(0, |mask, &(_, bit)| mask | bit)
    }
}

View File

@ -68,7 +68,7 @@ impl UnsafeDescriptorSetLayout {
binding: binding as u32,
descriptorType: ty as u32,
descriptorCount: desc.array_count,
stageFlags: desc.stages.into(),
stageFlags: desc.stages.into_vulkan_bits(),
pImmutableSamplers: ptr::null(), // FIXME: not yet implemented
})
})

View File

@ -113,7 +113,7 @@ impl<L> PipelineLayout<L>
}
out.push(vk::PushConstantRange {
stageFlags: stages.into(),
stageFlags: stages.into_vulkan_bits(),
offset: offset as u32,
size: size as u32,
});

View File

@ -263,7 +263,7 @@ impl Device {
// Note that if we ever remove this, don't forget to adjust the change in
// `Device`'s construction below.
let features = {
let mut features: vk::PhysicalDeviceFeatures = requested_features.clone().into();
let mut features = requested_features.clone().into_vulkan_features();
features.robustBufferAccess = vk::TRUE;
features
};

View File

@ -100,22 +100,16 @@ macro_rules! features {
)+
}
}
}
#[doc(hidden)]
impl From<vk::PhysicalDeviceFeatures> for Features {
fn from(features: vk::PhysicalDeviceFeatures) -> Features {
pub(crate) fn from_vulkan_features(features: vk::PhysicalDeviceFeatures) -> Features {
Features {
$(
$name: features.$vk != 0,
)+
}
}
}
#[doc(hidden)]
impl Into<vk::PhysicalDeviceFeatures> for Features {
fn into(self) -> vk::PhysicalDeviceFeatures {
pub(crate) fn into_vulkan_features(self) -> vk::PhysicalDeviceFeatures {
vk::PhysicalDeviceFeatures {
$(
$vk: if self.$name { vk::TRUE } else { vk::FALSE },

View File

@ -200,8 +200,7 @@ macro_rules! formats {
}
/// Returns the `Format` corresponding to a Vulkan constant.
#[doc(hidden)]
pub fn from_num(val: u32) -> Option<Format> {
pub(crate) fn from_vulkan_num(val: u32) -> Option<Format> {
match val {
$(
vk::$vk => Some(Format::$name),

View File

@ -305,10 +305,10 @@ impl<D> RenderPass<D>
vk::SubpassDependency {
srcSubpass: dependency.source_subpass as u32,
dstSubpass: dependency.destination_subpass as u32,
srcStageMask: dependency.source_stages.into(),
dstStageMask: dependency.destination_stages.into(),
srcAccessMask: dependency.source_access.into(),
dstAccessMask: dependency.destination_access.into(),
srcStageMask: dependency.source_stages.into_vulkan_bits(),
dstStageMask: dependency.destination_stages.into_vulkan_bits(),
srcAccessMask: dependency.source_access.into_vulkan_bits(),
dstAccessMask: dependency.destination_access.into_vulkan_bits(),
dependencyFlags: if dependency.by_region {
vk::DEPENDENCY_BY_REGION_BIT
} else {

View File

@ -547,7 +547,7 @@ impl UnsafeImage {
vk.GetImageMemoryRequirements2KHR(device.internal_object(), &infos, &mut output);
debug_assert!(output.memoryRequirements.memoryTypeBits != 0);
let mut out: MemoryRequirements = output.memoryRequirements.into();
let mut out = MemoryRequirements::from_vulkan_reqs(output.memoryRequirements);
if let Some(output2) = output2 {
debug_assert_eq!(output2.requiresDedicatedAllocation, 0);
out.prefer_dedicated = output2.prefersDedicatedAllocation != 0;
@ -558,7 +558,7 @@ impl UnsafeImage {
let mut output: vk::MemoryRequirements = mem::uninitialized();
vk.GetImageMemoryRequirements(device.internal_object(), image, &mut output);
debug_assert!(output.memoryTypeBits != 0);
output.into()
MemoryRequirements::from_vulkan_reqs(output)
};
let image = UnsafeImage {

View File

@ -93,10 +93,8 @@ impl ImageUsage {
}
}
// TODO: these functions shouldn't be public-hidden
#[doc(hidden)]
#[inline]
pub fn to_usage_bits(&self) -> vk::ImageUsageFlagBits {
pub(crate) fn to_usage_bits(&self) -> vk::ImageUsageFlagBits {
let mut result = 0;
if self.transfer_source {
result |= vk::IMAGE_USAGE_TRANSFER_SRC_BIT;
@ -125,10 +123,8 @@ impl ImageUsage {
result
}
// TODO: these functions shouldn't be public-hidden
#[inline]
#[doc(hidden)]
pub fn from_bits(val: u32) -> ImageUsage {
pub(crate) fn from_bits(val: u32) -> ImageUsage {
ImageUsage {
transfer_source: (val & vk::IMAGE_USAGE_TRANSFER_SRC_BIT) != 0,
transfer_destination: (val & vk::IMAGE_USAGE_TRANSFER_DST_BIT) != 0,

View File

@ -330,7 +330,7 @@ impl Instance {
properties: properties,
memory: memory,
queue_families: queue_families,
available_features: Features::from(available_features),
available_features: Features::from_vulkan_features(available_features),
});
}
output
@ -404,7 +404,7 @@ impl Instance {
properties: properties,
memory: memory,
queue_families: queue_families,
available_features: Features::from(available_features),
available_features: Features::from_vulkan_features(available_features),
});
}
output

View File

@ -182,8 +182,8 @@ enum Success {
/// panic for error codes that aren't supposed to happen.
#[derive(Debug, Copy, Clone)]
#[repr(u32)]
#[doc(hidden)] // TODO: this is necessary because of the stupid visibility rules in rustc
pub enum Error {
// TODO: being pub is necessary because of the weird visibility rules in rustc
pub(crate) enum Error {
OutOfHostMemory = vk::ERROR_OUT_OF_HOST_MEMORY,
OutOfDeviceMemory = vk::ERROR_OUT_OF_DEVICE_MEMORY,
InitializationFailed = vk::ERROR_INITIALIZATION_FAILED,

View File

@ -126,10 +126,9 @@ pub struct MemoryRequirements {
pub prefer_dedicated: bool,
}
#[doc(hidden)]
impl From<vk::MemoryRequirements> for MemoryRequirements {
impl MemoryRequirements {
#[inline]
fn from(reqs: vk::MemoryRequirements) -> MemoryRequirements {
pub(crate) fn from_vulkan_reqs(reqs: vk::MemoryRequirements) -> MemoryRequirements {
MemoryRequirements {
size: reqs.size as usize,
alignment: reqs.alignment as usize,

View File

@ -149,12 +149,9 @@ impl AttachmentBlend {
mask_alpha: true,
}
}
}
#[doc(hidden)]
impl Into<vk::PipelineColorBlendAttachmentState> for AttachmentBlend {
#[inline]
fn into(self) -> vk::PipelineColorBlendAttachmentState {
pub(crate) fn into_vulkan_state(self) -> vk::PipelineColorBlendAttachmentState {
vk::PipelineColorBlendAttachmentState {
blendEnable: if self.enabled { vk::TRUE } else { vk::FALSE },
srcColorBlendFactor: self.color_source as u32,

View File

@ -931,17 +931,17 @@ impl<Vdef, L, Rp> GraphicsPipeline<Vdef, L, Rp>
let (vp_vp, vp_sc, vp_num) = match params.viewport {
ViewportsState::Fixed { ref data } => (data.iter()
.map(|e| e.0.clone().into())
.map(|e| e.0.clone().into_vulkan_viewport())
.collect::<SmallVec<[vk::Viewport; 4]>>(),
data.iter()
.map(|e| e.1.clone().into())
.map(|e| e.1.clone().into_vulkan_rect())
.collect::<SmallVec<[vk::Rect2D; 4]>>(),
data.len() as u32),
ViewportsState::DynamicViewports { ref scissors } => {
let num = scissors.len() as u32;
let scissors = scissors
.iter()
.map(|e| e.clone().into())
.map(|e| e.clone().into_vulkan_rect())
.collect::<SmallVec<[vk::Rect2D; 4]>>();
dynamic_states.push(vk::DYNAMIC_STATE_VIEWPORT);
(SmallVec::new(), scissors, num)
@ -950,7 +950,7 @@ impl<Vdef, L, Rp> GraphicsPipeline<Vdef, L, Rp>
let num = viewports.len() as u32;
let viewports = viewports
.iter()
.map(|e| e.clone().into())
.map(|e| e.clone().into_vulkan_viewport())
.collect::<SmallVec<[vk::Viewport; 4]>>();
dynamic_states.push(vk::DYNAMIC_STATE_SCISSOR);
(viewports, SmallVec::new(), num)
@ -1231,7 +1231,7 @@ impl<Vdef, L, Rp> GraphicsPipeline<Vdef, L, Rp>
match params.blend.attachments {
AttachmentsBlend::Collective(blend) => {
(0 .. num_atch).map(|_| blend.clone().into()).collect()
(0 .. num_atch).map(|_| blend.clone().into_vulkan_state()).collect()
},
AttachmentsBlend::Individual(blend) => {
if blend.len() != num_atch as usize {
@ -1242,7 +1242,7 @@ impl<Vdef, L, Rp> GraphicsPipeline<Vdef, L, Rp>
return Err(GraphicsPipelineCreationError::IndependentBlendFeatureNotEnabled);
}
blend.iter().map(|b| b.clone().into()).collect()
blend.iter().map(|b| b.clone().into_vulkan_state()).collect()
},
}
};

View File

@ -141,10 +141,9 @@ pub struct Viewport {
pub depth_range: Range<f32>,
}
#[doc(hidden)]
impl Into<vk::Viewport> for Viewport {
impl Viewport {
#[inline]
fn into(self) -> vk::Viewport {
pub(crate) fn into_vulkan_viewport(self) -> vk::Viewport {
vk::Viewport {
x: self.origin[0],
y: self.origin[1],
@ -178,19 +177,9 @@ impl Scissor {
dimensions: [0x7fffffff, 0x7fffffff],
}
}
}
/// The default scissor is `Scissor::irrelevant()`, i.e. a rectangle large
/// enough that it effectively never clips anything.
impl Default for Scissor {
#[inline]
fn default() -> Scissor {
Scissor::irrelevant()
}
}
#[doc(hidden)]
impl Into<vk::Rect2D> for Scissor {
#[inline]
fn into(self) -> vk::Rect2D {
pub(crate) fn into_vulkan_rect(self) -> vk::Rect2D {
vk::Rect2D {
offset: vk::Offset2D {
x: self.origin[0],
@ -203,3 +192,10 @@ impl Into<vk::Rect2D> for Scissor {
}
}
}
/// The default scissor is `Scissor::irrelevant()`, i.e. a rectangle large
/// enough that it effectively never clips anything.
impl Default for Scissor {
#[inline]
fn default() -> Scissor {
Scissor::irrelevant()
}
}

View File

@ -561,7 +561,7 @@ impl Surface {
usage
},
supported_formats: formats.into_iter().map(|f| {
(Format::from_num(f.format).unwrap(), capabilities::color_space_from_num(f.colorSpace))
(Format::from_vulkan_num(f.format).unwrap(), capabilities::color_space_from_num(f.colorSpace))
}).collect(),
present_modes: modes,
})

View File

@ -29,6 +29,15 @@ macro_rules! pipeline_stages {
)+
}
}
/// Turns this `PipelineStages` into the corresponding raw Vulkan bitmask.
///
/// Expanded by the surrounding macro: `$elem` is each boolean stage field
/// and `$val` its matching raw Vulkan bit.
#[inline]
pub(crate) fn into_vulkan_bits(self) -> vk::PipelineStageFlagBits {
// OR together the bit of every stage field that is set to `true`.
let mut result = 0;
$(
if self.$elem { result |= $val }
)+
result
}
}
impl ops::BitOr for PipelineStages {
@ -52,18 +61,6 @@ macro_rules! pipeline_stages {
)+
}
}
// Conversion to the raw Vulkan bitmask; hidden from the public docs because
// it only exists for internal use.
#[doc(hidden)]
impl Into<vk::PipelineStageFlagBits> for PipelineStages {
#[inline]
fn into(self) -> vk::PipelineStageFlagBits {
// OR together the bit (`$val`) of every stage field (`$elem`) that is
// set to `true`; both come from the surrounding macro expansion.
let mut result = 0;
$(
if self.$elem { result |= $val }
)+
result
}
}
);
}
@ -115,6 +112,15 @@ macro_rules! access_flags {
)+
}
}
/// Turns this `AccessFlagBits` into the corresponding raw Vulkan bitmask.
///
/// Expanded by the surrounding macro: `$elem` is each boolean access field
/// and `$val` its matching raw Vulkan bit.
#[inline]
pub(crate) fn into_vulkan_bits(self) -> vk::AccessFlagBits {
// OR together the bit of every access field that is set to `true`.
let mut result = 0;
$(
if self.$elem { result |= $val }
)+
result
}
}
impl ops::BitOr for AccessFlagBits {
@ -138,18 +144,6 @@ macro_rules! access_flags {
)+
}
}
// Conversion to the raw Vulkan bitmask; hidden from the public docs because
// it only exists for internal use.
#[doc(hidden)]
impl Into<vk::AccessFlagBits> for AccessFlagBits {
#[inline]
fn into(self) -> vk::AccessFlagBits {
// OR together the bit (`$val`) of every access field (`$elem`) that is
// set to `true`; both come from the surrounding macro expansion.
let mut result = 0;
$(
if self.$elem { result |= $val }
)+
result
}
}
);
}