Merge pull request #171 from tomaka/inner

Rename all inner-like functions to inner
tomaka 2016-07-07 19:39:49 +02:00 committed by GitHub
commit 5373fbe45f
24 changed files with 116 additions and 122 deletions

View File

@@ -242,7 +242,7 @@ unsafe impl<T: ?Sized, A> Buffer for CpuAccessibleBuffer<T, A>
where T: 'static + Send + Sync, A: MemoryPool
{
#[inline]
- fn inner_buffer(&self) -> &UnsafeBuffer {
+ fn inner(&self) -> &UnsafeBuffer {
&self.inner
}

View File

@@ -169,7 +169,7 @@ unsafe impl<T: ?Sized, A> Buffer for DeviceLocalBuffer<T, A>
where T: 'static + Send + Sync, A: MemoryPool
{
#[inline]
- fn inner_buffer(&self) -> &UnsafeBuffer {
+ fn inner(&self) -> &UnsafeBuffer {
&self.inner
}

View File

@@ -165,7 +165,7 @@ unsafe impl<T: ?Sized, A> Buffer for ImmutableBuffer<T, A>
where T: 'static + Send + Sync, A: MemoryPool
{
#[inline]
- fn inner_buffer(&self) -> &UnsafeBuffer {
+ fn inner(&self) -> &UnsafeBuffer {
&self.inner
}

View File

@@ -17,8 +17,7 @@ use sync::Semaphore;
pub unsafe trait Buffer: 'static + Send + Sync {
/// Returns the inner buffer.
- // TODO: should be named "inner()" after https://github.com/rust-lang/rust/issues/12808 is fixed
- fn inner_buffer(&self) -> &UnsafeBuffer;
+ fn inner(&self) -> &UnsafeBuffer;
/// Returns whether accessing a range of this buffer should signal a fence.
fn needs_fence(&self, write: bool, Range<usize>) -> Option<bool>;
@@ -58,7 +57,7 @@ pub unsafe trait Buffer: 'static + Send + Sync {
#[inline]
fn size(&self) -> usize {
- self.inner_buffer().size()
+ self.inner().size()
}
}
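After this change, every buffer wrapper hands out its raw `UnsafeBuffer` through the same `inner()` entry point, and provided methods such as `size()` above are written once against it. A minimal sketch of an implementor after the rename; `MyBuffer` is a hypothetical stand-in, and the remaining required methods of `Buffer`, such as `needs_fence()`, are elided:

struct MyBuffer {
    inner: UnsafeBuffer,
}

unsafe impl Buffer for MyBuffer {
    #[inline]
    fn inner(&self) -> &UnsafeBuffer {
        // Hand out the raw buffer; the provided size() above
        // now resolves through this single accessor.
        &self.inner
    }

    // needs_fence() and the other required methods are elided.
}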

View File

@@ -83,13 +83,13 @@ impl<F, B> BufferView<F, B> where B: Buffer {
where S: Into<BufferSlice<'a, T, B>>, B: 'static, T: 'static, F: FormatDesc + 'static
{
let buffer = buffer.into();
- let device = buffer.resource.inner_buffer().device();
+ let device = buffer.resource.inner().device();
let format = format.format();
// TODO: check minTexelBufferOffsetAlignment
- if !buffer.buffer().inner_buffer().usage_uniform_texel_buffer() &&
- !buffer.buffer().inner_buffer().usage_storage_texel_buffer()
+ if !buffer.buffer().inner().usage_uniform_texel_buffer() &&
+ !buffer.buffer().inner().usage_storage_texel_buffer()
{
return Err(BufferViewCreationError::WrongBufferUsage);
}
@@ -102,13 +102,13 @@ impl<F, B> BufferView<F, B> where B: Buffer {
output.bufferFeatures
};
- if buffer.buffer().inner_buffer().usage_uniform_texel_buffer() {
+ if buffer.buffer().inner().usage_uniform_texel_buffer() {
if (format_props & vk::FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT) == 0 {
return Err(BufferViewCreationError::UnsupportedFormat);
}
}
- if buffer.buffer().inner_buffer().usage_storage_texel_buffer() {
+ if buffer.buffer().inner().usage_storage_texel_buffer() {
if (format_props & vk::FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT) == 0 {
return Err(BufferViewCreationError::UnsupportedFormat);
}
@@ -118,7 +118,7 @@ impl<F, B> BufferView<F, B> where B: Buffer {
sType: vk::STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
pNext: ptr::null(),
flags: 0, // reserved,
- buffer: buffer.resource.inner_buffer().internal_object(),
+ buffer: buffer.resource.inner().internal_object(),
format: format as u32,
offset: buffer.offset as u64,
range: buffer.size as u64,
@@ -144,13 +144,13 @@ impl<F, B> BufferView<F, B> where B: Buffer {
/// Returns true if the buffer view can be used as a uniform texel buffer.
#[inline]
pub fn uniform_texel_buffer(&self) -> bool {
- self.buffer.inner_buffer().usage_uniform_texel_buffer()
+ self.buffer.inner().usage_uniform_texel_buffer()
}
/// Returns true if the buffer view can be used as a storage texel buffer.
#[inline]
pub fn storage_texel_buffer(&self) -> bool {
- self.buffer.inner_buffer().usage_storage_texel_buffer()
+ self.buffer.inner().usage_storage_texel_buffer()
}
/// Returns true if the buffer view can be used as a storage texel buffer with atomic accesses.
@@ -173,8 +173,8 @@ impl<F, B> Drop for BufferView<F, B> where B: Buffer {
#[inline]
fn drop(&mut self) {
unsafe {
- let vk = self.buffer.inner_buffer().device().pointers();
- vk.DestroyBufferView(self.buffer.inner_buffer().device().internal_object(), self.view,
+ let vk = self.buffer.inner().device().pointers();
+ vk.DestroyBufferView(self.buffer.inner().device().internal_object(), self.view,
ptr::null());
}
}

View File

@@ -152,7 +152,7 @@ impl<P> InnerCommandBufferBuilder<P> where P: CommandPool {
let (rp, sp) = if let Some(ref sp) = secondary_cont {
keep_alive.push(sp.render_pass().clone() as Arc<_>);
- (sp.render_pass().render_pass().internal_object(), sp.index())
+ (sp.render_pass().inner().internal_object(), sp.index())
} else {
(0, 0)
};
@@ -346,7 +346,7 @@ impl<P> InnerCommandBufferBuilder<P> where P: CommandPool {
assert!(buffer.size() <= 65536);
assert!(buffer.offset() % 4 == 0);
assert!(buffer.size() % 4 == 0);
- assert!(buffer.buffer().inner_buffer().usage_transfer_dest());
+ assert!(buffer.buffer().inner().usage_transfer_dest());
// FIXME: check queue family of the buffer
@@ -358,7 +358,7 @@ impl<P> InnerCommandBufferBuilder<P> where P: CommandPool {
{
let buffer_offset = buffer.offset() as vk::DeviceSize;
let buffer_size = buffer.size() as vk::DeviceSize;
- let buffer = buffer.buffer().inner_buffer().internal_object();
+ let buffer = buffer.buffer().inner().internal_object();
let mut data = Some(data.clone()); // TODO: meh for Cloning, but I guess there's no other choice
self.staging_commands.push(Box::new(move |vk, cmd| {
@@ -395,7 +395,7 @@ impl<P> InnerCommandBufferBuilder<P> where P: CommandPool {
assert!(offset + size <= buffer.size());
assert!(offset % 4 == 0);
assert!(size % 4 == 0);
- assert!(buffer.inner_buffer().usage_transfer_dest());
+ assert!(buffer.inner().usage_transfer_dest());
self.add_buffer_resource_outside(buffer.clone() as Arc<_>, true, offset .. offset + size,
vk::PIPELINE_STAGE_TRANSFER_BIT,
@@ -407,7 +407,7 @@ impl<P> InnerCommandBufferBuilder<P> where P: CommandPool {
{
let buffer = buffer.clone();
self.staging_commands.push(Box::new(move |vk, cmd| {
- vk.CmdFillBuffer(cmd, buffer.inner_buffer().internal_object(),
+ vk.CmdFillBuffer(cmd, buffer.inner().internal_object(),
offset as vk::DeviceSize, size as vk::DeviceSize, data);
}));
}
@@ -437,10 +437,10 @@ impl<P> InnerCommandBufferBuilder<P> where P: CommandPool {
{
debug_assert!(self.render_pass_staging_commands.is_empty());
- assert_eq!(&**source.inner_buffer().device() as *const _,
- &**destination.inner_buffer().device() as *const _);
- assert!(source.inner_buffer().usage_transfer_src());
- assert!(destination.inner_buffer().usage_transfer_dest());
+ assert_eq!(&**source.inner().device() as *const _,
+ &**destination.inner().device() as *const _);
+ assert!(source.inner().usage_transfer_src());
+ assert!(destination.inner().usage_transfer_dest());
self.add_buffer_resource_outside(source.clone() as Arc<_>, false, 0 .. source.size(),
vk::PIPELINE_STAGE_TRANSFER_BIT,
@@ -451,8 +451,8 @@ impl<P> InnerCommandBufferBuilder<P> where P: CommandPool {
{
let source_size = source.size() as u64; // FIXME: what is destination is too small?
- let source = source.inner_buffer().internal_object();
- let destination = destination.inner_buffer().internal_object();
+ let source = source.inner().internal_object();
+ let destination = destination.inner().internal_object();
self.staging_commands.push(Box::new(move |vk, cmd| {
let copy = vk::BufferCopy {
@@ -486,7 +486,7 @@ impl<P> InnerCommandBufferBuilder<P> where P: CommandPool {
let color = image.decode(color).unwrap(); /* FIXME: error */
{
- let image = image.inner_image().internal_object();
+ let image = image.inner().internal_object();
self.staging_commands.push(Box::new(move |vk, cmd| {
let color = match color {
@@ -546,8 +546,8 @@ impl<P> InnerCommandBufferBuilder<P> where P: CommandPool {
{
let source_offset = source.offset() as vk::DeviceSize;
- let source = source.buffer().inner_buffer().internal_object();
- let image = image.inner_image().internal_object();
+ let source = source.buffer().inner().internal_object();
+ let image = image.inner().internal_object();
self.staging_commands.push(Box::new(move |vk, cmd| {
let region = vk::BufferImageCopy {
@@ -615,8 +615,8 @@ impl<P> InnerCommandBufferBuilder<P> where P: CommandPool {
{
let dest_offset = dest.offset() as vk::DeviceSize;
- let dest = dest.buffer().inner_buffer().internal_object();
- let image = image.inner_image().internal_object();
+ let dest = dest.buffer().inner().internal_object();
+ let image = image.inner().internal_object();
self.staging_commands.push(Box::new(move |vk, cmd| {
let region = vk::BufferImageCopy {
@@ -678,8 +678,8 @@ impl<P> InnerCommandBufferBuilder<P> where P: CommandPool {
vk::ACCESS_TRANSFER_WRITE_BIT);
{
- let source = source.inner_image().internal_object();
- let destination = destination.inner_image().internal_object();
+ let source = source.inner().internal_object();
+ let destination = destination.inner().internal_object();
self.staging_commands.push(Box::new(move |vk, cmd| {
let region = vk::ImageBlit {
@@ -762,11 +762,11 @@ impl<P> InnerCommandBufferBuilder<P> where P: CommandPool {
let offsets = (0 .. vertices.0.len()).map(|_| 0).collect::<SmallVec<[_; 8]>>();
let ids = vertices.0.map(|b| {
- assert!(b.inner_buffer().usage_vertex_buffer());
+ assert!(b.inner().usage_vertex_buffer());
self.add_buffer_resource_inside(b.clone(), false, 0 .. b.size(),
vk::PIPELINE_STAGE_VERTEX_INPUT_BIT,
vk::ACCESS_VERTEX_ATTRIBUTE_READ_BIT);
- b.inner_buffer().internal_object()
+ b.inner().internal_object()
}).collect::<SmallVec<[_; 8]>>();
{
@@ -809,14 +809,14 @@ impl<P> InnerCommandBufferBuilder<P> where P: CommandPool {
let offsets = (0 .. vertices.0.len()).map(|_| 0).collect::<SmallVec<[_; 8]>>();
let ids = vertices.0.map(|b| {
- assert!(b.inner_buffer().usage_vertex_buffer());
+ assert!(b.inner().usage_vertex_buffer());
self.add_buffer_resource_inside(b.clone(), false, 0 .. b.size(),
vk::PIPELINE_STAGE_VERTEX_INPUT_BIT,
vk::ACCESS_VERTEX_ATTRIBUTE_READ_BIT);
- b.inner_buffer().internal_object()
+ b.inner().internal_object()
}).collect::<SmallVec<[_; 8]>>();
- assert!(indices.buffer().inner_buffer().usage_index_buffer());
+ assert!(indices.buffer().inner().usage_index_buffer());
self.add_buffer_resource_inside(indices.buffer().clone() as Arc<_>, false,
indices.offset() .. indices.offset() + indices.size(),
@@ -829,7 +829,7 @@ impl<P> InnerCommandBufferBuilder<P> where P: CommandPool {
let indices_offset = indices.offset() as u64;
let indices_len = indices.len() as u32;
let indices_ty = I::ty() as u32;
- let indices = indices.buffer().inner_buffer().internal_object();
+ let indices = indices.buffer().inner().internal_object();
let num_instances = vertices.2 as u32;
self.render_pass_staging_commands.push(Box::new(move |vk, cmd| {
@@ -862,11 +862,11 @@ impl<P> InnerCommandBufferBuilder<P> where P: CommandPool {
let offsets = (0 .. vertices.0.len()).map(|_| 0).collect::<SmallVec<[_; 8]>>();
let ids = vertices.0.map(|b| {
- assert!(b.inner_buffer().usage_vertex_buffer());
+ assert!(b.inner().usage_vertex_buffer());
self.add_buffer_resource_inside(b.clone(), false, 0 .. b.size(),
vk::PIPELINE_STAGE_VERTEX_INPUT_BIT,
vk::ACCESS_VERTEX_ATTRIBUTE_READ_BIT);
- b.inner_buffer().internal_object()
+ b.inner().internal_object()
}).collect::<SmallVec<[_; 8]>>();
self.add_buffer_resource_inside(buffer.clone(), false, 0 .. buffer.size(),
@@ -876,7 +876,7 @@ impl<P> InnerCommandBufferBuilder<P> where P: CommandPool {
{
let mut ids = Some(ids);
let mut offsets = Some(offsets);
- let buffer_internal = buffer.inner_buffer().internal_object();
+ let buffer_internal = buffer.inner().internal_object();
let buffer_draw_count = buffer.len() as u32;
let buffer_size = buffer.size() as u32;
@@ -915,12 +915,12 @@ impl<P> InnerCommandBufferBuilder<P> where P: CommandPool {
let mut descriptor_sets = DescriptorSetsCollection::list(&sets).collect::<SmallVec<[_; 32]>>();
for set in descriptor_sets.iter() {
- for &(ref img, block, layout) in set.inner_descriptor_set().images_list().iter() {
+ for &(ref img, block, layout) in set.inner().images_list().iter() {
self.add_image_resource_outside(img.clone(), 0 .. 1 /* FIXME */, 0 .. 1 /* FIXME */,
false, layout, vk::PIPELINE_STAGE_ALL_COMMANDS_BIT /* FIXME */,
vk::ACCESS_SHADER_READ_BIT | vk::ACCESS_UNIFORM_READ_BIT /* TODO */);
}
- for buffer in set.inner_descriptor_set().buffers_list().iter() {
+ for buffer in set.inner().buffers_list().iter() {
self.add_buffer_resource_outside(buffer.clone(), false, 0 .. buffer.size() /* TODO */,
vk::PIPELINE_STAGE_ALL_COMMANDS_BIT /* FIXME */,
vk::ACCESS_SHADER_READ_BIT | vk::ACCESS_UNIFORM_READ_BIT /* TODO */);
@@ -928,11 +928,11 @@ impl<P> InnerCommandBufferBuilder<P> where P: CommandPool {
}
for d in descriptor_sets.iter() { self.keep_alive.push(mem::transmute(d.clone()) /* FIXME: */); }
- let mut descriptor_sets = Some(descriptor_sets.into_iter().map(|set| set.inner_descriptor_set().internal_object()).collect::<SmallVec<[_; 32]>>());
+ let mut descriptor_sets = Some(descriptor_sets.into_iter().map(|set| set.inner().internal_object()).collect::<SmallVec<[_; 32]>>());
// TODO: shouldn't rebind everything every time
if !descriptor_sets.as_ref().unwrap().is_empty() {
- let pipeline = PipelineLayout::inner_pipeline_layout(&**pipeline.layout()).internal_object();
+ let pipeline = PipelineLayout::inner(&**pipeline.layout()).internal_object();
self.staging_commands.push(Box::new(move |vk, cmd| {
let descriptor_sets = descriptor_sets.take().unwrap();
vk.CmdBindDescriptorSets(cmd, vk::PIPELINE_BIND_POINT_COMPUTE,
@@ -942,7 +942,7 @@ impl<P> InnerCommandBufferBuilder<P> where P: CommandPool {
}
if mem::size_of_val(push_constants) >= 1 {
- let pipeline = PipelineLayout::inner_pipeline_layout(&**pipeline.layout()).internal_object();
+ let pipeline = PipelineLayout::inner(&**pipeline.layout()).internal_object();
let size = mem::size_of_val(push_constants);
let push_constants = push_constants.clone();
assert!((size % 4) == 0);
@@ -1016,22 +1016,22 @@ impl<P> InnerCommandBufferBuilder<P> where P: CommandPool {
let mut descriptor_sets = DescriptorSetsCollection::list(&sets).collect::<SmallVec<[_; 32]>>();
for set in descriptor_sets.iter() {
- for &(ref img, block, layout) in set.inner_descriptor_set().images_list().iter() {
+ for &(ref img, block, layout) in set.inner().images_list().iter() {
self.add_image_resource_inside(img.clone(), 0 .. 1 /* FIXME */, 0 .. 1 /* FIXME */,
false, layout, layout, vk::PIPELINE_STAGE_ALL_COMMANDS_BIT /* FIXME */,
vk::ACCESS_SHADER_READ_BIT | vk::ACCESS_UNIFORM_READ_BIT /* TODO */);
}
- for buffer in set.inner_descriptor_set().buffers_list().iter() {
+ for buffer in set.inner().buffers_list().iter() {
self.add_buffer_resource_inside(buffer.clone(), false, 0 .. buffer.size() /* TODO */,
vk::PIPELINE_STAGE_ALL_COMMANDS_BIT /* FIXME */,
vk::ACCESS_SHADER_READ_BIT | vk::ACCESS_UNIFORM_READ_BIT /* TODO */);
}
}
for d in descriptor_sets.iter() { self.keep_alive.push(mem::transmute(d.clone()) /* FIXME: */); }
- let mut descriptor_sets = Some(descriptor_sets.into_iter().map(|set| set.inner_descriptor_set().internal_object()).collect::<SmallVec<[_; 32]>>());
+ let mut descriptor_sets = Some(descriptor_sets.into_iter().map(|set| set.inner().internal_object()).collect::<SmallVec<[_; 32]>>());
if mem::size_of_val(push_constants) >= 1 {
- let pipeline = PipelineLayout::inner_pipeline_layout(&**pipeline.layout()).internal_object();
+ let pipeline = PipelineLayout::inner(&**pipeline.layout()).internal_object();
let size = mem::size_of_val(push_constants);
let push_constants = push_constants.clone();
assert!((size % 4) == 0);
@@ -1047,7 +1047,7 @@ impl<P> InnerCommandBufferBuilder<P> where P: CommandPool {
// TODO: shouldn't rebind everything every time
if !descriptor_sets.as_ref().unwrap().is_empty() {
- let pipeline = PipelineLayout::inner_pipeline_layout(&**pipeline.layout()).internal_object();
+ let pipeline = PipelineLayout::inner(&**pipeline.layout()).internal_object();
self.render_pass_staging_commands.push(Box::new(move |vk, cmd| {
let descriptor_sets = descriptor_sets.take().unwrap();
vk.CmdBindDescriptorSets(cmd, vk::PIPELINE_BIND_POINT_GRAPHICS, pipeline,
@@ -1125,7 +1125,7 @@ impl<P> InnerCommandBufferBuilder<P> where P: CommandPool {
{
let mut clear_values = Some(clear_values);
- let render_pass = render_pass.render_pass().internal_object();
+ let render_pass = render_pass.inner().internal_object();
let (fw, fh) = (framebuffer.width(), framebuffer.height());
let framebuffer = framebuffer.internal_object();
@@ -1448,7 +1448,7 @@ impl<P> InnerCommandBufferBuilder<P> where P: CommandPool {
dstAccessMask: access.accesses,
srcQueueFamilyIndex: vk::QUEUE_FAMILY_IGNORED,
dstQueueFamilyIndex: vk::QUEUE_FAMILY_IGNORED,
- buffer: (buffer.0).0.inner_buffer().internal_object(),
+ buffer: (buffer.0).0.inner().internal_object(),
offset: range.start as u64,
size: (range.end - range.start) as u64,
});
@@ -1474,7 +1474,7 @@ impl<P> InnerCommandBufferBuilder<P> where P: CommandPool {
dstAccessMask: access.accesses,
srcQueueFamilyIndex: vk::QUEUE_FAMILY_IGNORED,
dstQueueFamilyIndex: vk::QUEUE_FAMILY_IGNORED,
- buffer: (buffer.0).0.inner_buffer().internal_object(),
+ buffer: (buffer.0).0.inner().internal_object(),
offset: range.start as u64,
size: (range.end - range.start) as u64,
});
@@ -1523,7 +1523,7 @@ impl<P> InnerCommandBufferBuilder<P> where P: CommandPool {
newLayout: access.old_layout as u32,
srcQueueFamilyIndex: vk::QUEUE_FAMILY_IGNORED,
dstQueueFamilyIndex: vk::QUEUE_FAMILY_IGNORED,
- image: (image.0).0.inner_image().internal_object(),
+ image: (image.0).0.inner().internal_object(),
subresourceRange: vk::ImageSubresourceRange {
aspectMask: access.aspects,
baseMipLevel: range_mipmaps.start,
@@ -1564,7 +1564,7 @@ impl<P> InnerCommandBufferBuilder<P> where P: CommandPool {
newLayout: access.old_layout as u32,
srcQueueFamilyIndex: vk::QUEUE_FAMILY_IGNORED,
dstQueueFamilyIndex: vk::QUEUE_FAMILY_IGNORED,
- image: (image.0).0.inner_image().internal_object(),
+ image: (image.0).0.inner().internal_object(),
subresourceRange: vk::ImageSubresourceRange {
aspectMask: access.aspects,
baseMipLevel: range_mipmaps.start,
@@ -2240,7 +2240,7 @@ fn transition_cb<P>(pool: P, image: Arc<Image>, block: (u32, u32),
newLayout: new_layout as u32,
srcQueueFamilyIndex: vk::QUEUE_FAMILY_IGNORED,
dstQueueFamilyIndex: vk::QUEUE_FAMILY_IGNORED,
- image: image.inner_image().internal_object(),
+ image: image.inner().internal_object(),
subresourceRange: vk::ImageSubresourceRange {
aspectMask: aspect_mask,
baseMipLevel: range_mipmaps.start,

View File

@@ -123,7 +123,7 @@ impl<P> UnsafeCommandBufferBuilder<P> where P: CommandPool {
};
let (rp, sp) = if let Kind::SecondaryRenderPass { subpass, .. } = kind {
- (subpass.render_pass().render_pass().internal_object(), subpass.index())
+ (subpass.render_pass().inner().internal_object(), subpass.index())
} else {
(0, 0)
};
@@ -229,7 +229,7 @@ impl<P> UnsafeCommandBufferBuilder<P> where P: CommandPool {
pub unsafe fn fill_buffer<T: ?Sized, B>(&mut self, buffer: BufferSlice<T, B>, data: u32)
where B: Buffer
{
- assert_eq!(buffer.buffer().inner_buffer().device().internal_object(),
+ assert_eq!(buffer.buffer().inner().device().internal_object(),
self.device.internal_object());
debug_assert_eq!(buffer.offset() % 4, 0);
@@ -243,7 +243,7 @@ impl<P> UnsafeCommandBufferBuilder<P> where P: CommandPool {
let vk = self.device.pointers();
let cmd = self.cmd.take().unwrap();
- vk.CmdFillBuffer(cmd, buffer.buffer().inner_buffer().internal_object(),
+ vk.CmdFillBuffer(cmd, buffer.buffer().inner().internal_object(),
buffer.offset() as vk::DeviceSize,
buffer.size() as vk::DeviceSize, data);
}
@@ -272,7 +272,7 @@ impl<P> UnsafeCommandBufferBuilder<P> where P: CommandPool {
data: &D)
where B: Buffer, D: Copy + 'static
{
- assert_eq!(buffer.buffer().inner_buffer().device().internal_object(),
+ assert_eq!(buffer.buffer().inner().device().internal_object(),
self.device.internal_object());
let size = cmp::min(buffer.size(), mem::size_of_val(data));
@@ -283,7 +283,7 @@ impl<P> UnsafeCommandBufferBuilder<P> where P: CommandPool {
let vk = self.device.pointers();
let cmd = self.cmd.take().unwrap();
- vk.CmdUpdateBuffer(cmd, buffer.buffer().inner_buffer().internal_object(),
+ vk.CmdUpdateBuffer(cmd, buffer.buffer().inner().internal_object(),
buffer.offset() as vk::DeviceSize, size as vk::DeviceSize,
data as *const D as *const _);
}
@@ -314,10 +314,10 @@ impl<P> UnsafeCommandBufferBuilder<P> where P: CommandPool {
Bd: Buffer,
I: IntoIterator<Item = BufferCopyRegion>
{
- assert_eq!(src.inner_buffer().device().internal_object(),
+ assert_eq!(src.inner().device().internal_object(),
self.device.internal_object());
- assert_eq!(src.inner_buffer().device().internal_object(),
- dest.inner_buffer().device().internal_object());
+ assert_eq!(src.inner().device().internal_object(),
+ dest.inner().device().internal_object());
let regions: SmallVec<[_; 4]> = {
let mut res = SmallVec::new();
@@ -339,8 +339,8 @@ impl<P> UnsafeCommandBufferBuilder<P> where P: CommandPool {
let vk = self.device.pointers();
let cmd = self.cmd.take().unwrap();
- vk.CmdCopyBuffer(cmd, src.inner_buffer().internal_object(),
- dest.inner_buffer().internal_object(), regions.len() as u32,
+ vk.CmdCopyBuffer(cmd, src.inner().internal_object(),
+ dest.inner().internal_object(), regions.len() as u32,
regions.as_ptr());
}
}

View File

@@ -25,8 +25,7 @@ mod unsafe_layout;
/// Objects of this type can be passed when submitting a draw command.
pub unsafe trait DescriptorSet: 'static + Send + Sync {
/// Returns the inner `UnsafeDescriptorSet`.
- // TODO: should be named "inner()" after https://github.com/rust-lang/rust/issues/12808 is fixed
- fn inner_descriptor_set(&self) -> &UnsafeDescriptorSet;
+ fn inner(&self) -> &UnsafeDescriptorSet;
}
/// Trait for objects that describe the layout of the descriptors of a set.
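Generic code can now reach into any descriptor set through the one uniformly named accessor. A sketch of a consumer, mirroring the loops in the command-buffer builder hunks above; `dump_buffers` is a hypothetical helper, not part of vulkano:

// Walk the buffers bound to a descriptor set, whatever its concrete type.
fn dump_buffers<S>(set: &S) where S: DescriptorSet {
    for buffer in set.inner().buffers_list().iter() {
        println!("bound buffer of {} bytes", buffer.size());
    }
}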

View File

@@ -124,20 +124,20 @@ impl UnsafeDescriptorSet {
match write.inner {
DescriptorWriteInner::UniformBuffer { ref buffer, offset, size } |
DescriptorWriteInner::DynamicUniformBuffer { ref buffer, offset, size } => {
- assert!(buffer.inner_buffer().usage_uniform_buffer());
+ assert!(buffer.inner().usage_uniform_buffer());
self_resources_buffers.push(buffer.clone());
Some(vk::DescriptorBufferInfo {
- buffer: buffer.inner_buffer().internal_object(),
+ buffer: buffer.inner().internal_object(),
offset: offset as u64,
range: size as u64,
})
},
DescriptorWriteInner::StorageBuffer { ref buffer, offset, size } |
DescriptorWriteInner::DynamicStorageBuffer { ref buffer, offset, size } => {
- assert!(buffer.inner_buffer().usage_storage_buffer());
+ assert!(buffer.inner().usage_storage_buffer());
self_resources_buffers.push(buffer.clone());
Some(vk::DescriptorBufferInfo {
- buffer: buffer.inner_buffer().internal_object(),
+ buffer: buffer.inner().internal_object(),
offset: offset as u64,
range: size as u64,
})
@@ -157,7 +157,7 @@ impl UnsafeDescriptorSet {
})
},
DescriptorWriteInner::CombinedImageSampler(ref sampler, ref view, ref image, ref blocks) => {
- assert!(view.inner_view().usage_sampled());
+ assert!(view.inner().usage_sampled());
let layout = view.descriptor_set_combined_image_sampler_layout();
self_resources_samplers.push(sampler.clone());
self_resources_image_views.push(view.clone());
@@ -166,12 +166,12 @@ impl UnsafeDescriptorSet {
}
Some(vk::DescriptorImageInfo {
sampler: sampler.internal_object(),
- imageView: view.inner_view().internal_object(),
+ imageView: view.inner().internal_object(),
imageLayout: layout as u32,
})
},
DescriptorWriteInner::StorageImage(ref view, ref image, ref blocks) => {
- assert!(view.inner_view().usage_storage());
+ assert!(view.inner().usage_storage());
assert!(view.identity_swizzle());
let layout = view.descriptor_set_storage_image_layout();
self_resources_image_views.push(view.clone());
@@ -180,12 +180,12 @@ impl UnsafeDescriptorSet {
}
Some(vk::DescriptorImageInfo {
sampler: 0,
- imageView: view.inner_view().internal_object(),
+ imageView: view.inner().internal_object(),
imageLayout: layout as u32,
})
},
DescriptorWriteInner::SampledImage(ref view, ref image, ref blocks) => {
- assert!(view.inner_view().usage_sampled());
+ assert!(view.inner().usage_sampled());
let layout = view.descriptor_set_sampled_image_layout();
self_resources_image_views.push(view.clone());
for &block in blocks.iter() {
@@ -193,12 +193,12 @@ impl UnsafeDescriptorSet {
}
Some(vk::DescriptorImageInfo {
sampler: 0,
- imageView: view.inner_view().internal_object(),
+ imageView: view.inner().internal_object(),
imageLayout: layout as u32,
})
},
DescriptorWriteInner::InputAttachment(ref view, ref image, ref blocks) => {
- assert!(view.inner_view().usage_input_attachment());
+ assert!(view.inner().usage_input_attachment());
assert!(view.identity_swizzle());
let layout = view.descriptor_set_input_attachment_layout();
self_resources_image_views.push(view.clone());
@@ -207,7 +207,7 @@ impl UnsafeDescriptorSet {
}
Some(vk::DescriptorImageInfo {
sampler: 0,
- imageView: view.inner_view().internal_object(),
+ imageView: view.inner().internal_object(),
imageLayout: layout as u32,
})
},

View File

@@ -76,7 +76,7 @@ macro_rules! pipeline_layout {
#[allow(unsafe_code)]
unsafe impl PipelineLayout for CustomPipeline {
#[inline]
- fn inner_pipeline_layout(&self) -> &UnsafePipelineLayout {
+ fn inner(&self) -> &UnsafePipelineLayout {
&self.inner
}
}
@@ -174,7 +174,7 @@ macro_rules! pipeline_layout {
{
#![allow(unsafe_code)]
unsafe {
- let layout = layout.inner_pipeline_layout().descriptor_set_layout($num).unwrap();
+ let layout = layout.inner().descriptor_set_layout($num).unwrap();
let mut set = try!(UnsafeDescriptorSet::uninitialized_raw(pool, layout));
set.write(descriptors.writes());
Ok(Set { inner: set })
@@ -195,7 +195,7 @@ macro_rules! pipeline_layout {
#[allow(unsafe_code)]
unsafe impl DescriptorSet for Set {
#[inline]
- fn inner_descriptor_set(&self) -> &UnsafeDescriptorSet {
+ fn inner(&self) -> &UnsafeDescriptorSet {
&self.inner
}
}

View File

@@ -38,7 +38,7 @@ impl EmptyPipeline {
unsafe impl PipelineLayout for EmptyPipeline {
#[inline]
- fn inner_pipeline_layout(&self) -> &UnsafePipelineLayout {
+ fn inner(&self) -> &UnsafePipelineLayout {
&self.inner
}
}

View File

@@ -14,8 +14,7 @@ use descriptor::pipeline_layout::UnsafePipelineLayout;
/// Trait for objects that describe the layout of the descriptors and push constants of a pipeline.
pub unsafe trait PipelineLayout: PipelineLayoutDesc + 'static + Send + Sync {
/// Returns the inner `UnsafePipelineLayout`.
- // TODO: should be named "inner()" after https://github.com/rust-lang/rust/issues/12808 is fixed
- fn inner_pipeline_layout(&self) -> &UnsafePipelineLayout;
+ fn inner(&self) -> &UnsafePipelineLayout;
}
/// Trait for objects that describe the layout of the descriptors and push constants of a pipeline.
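Because several traits in the crate now share the method name `inner()`, a number of call sites in this commit use fully qualified syntax, as in the `PipelineLayout::inner(&**pipeline.layout())` calls in the command-buffer hunks above. The two spellings are equivalent whenever method resolution is unambiguous; a sketch, assuming `layout` is some value implementing `PipelineLayout`:

// Method syntax, fine when only one applicable inner() is in scope:
let raw = layout.inner();

// Fully qualified syntax, as used by the builder code above,
// which pins down exactly which trait's inner() is meant:
let raw = PipelineLayout::inner(&layout);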

View File

@@ -70,7 +70,7 @@ impl EmptySinglePassRenderPass {
unsafe impl RenderPass for EmptySinglePassRenderPass {
#[inline]
- fn render_pass(&self) -> &UnsafeRenderPass {
+ fn inner(&self) -> &UnsafeRenderPass {
&self.render_pass
}
}

View File

@@ -54,15 +54,15 @@ impl<L> Framebuffer<L> {
attachments: A) -> Result<Arc<Framebuffer<L>>, FramebufferCreationError>
where L: RenderPass + RenderPassAttachmentsList<A>
{
- let vk = render_pass.render_pass().device().pointers();
- let device = render_pass.render_pass().device().clone();
+ let vk = render_pass.inner().device().pointers();
+ let device = render_pass.inner().device().clone();
let attachments = try!(render_pass.convert_attachments_list(attachments))
.collect::<SmallVec<[_; 8]>>();
// Checking the dimensions against the limits.
{
- let limits = render_pass.render_pass().device().physical_device().limits();
+ let limits = render_pass.inner().device().physical_device().limits();
let limits = [limits.max_framebuffer_width(), limits.max_framebuffer_height(),
limits.max_framebuffer_layers()];
if dimensions[0] > limits[0] || dimensions[1] > limits[1] ||
@@ -86,7 +86,7 @@ impl<L> Framebuffer<L> {
return Err(FramebufferCreationError::AttachmentTooSmall);
}
- ids.push(a.inner_view().internal_object());
+ ids.push(a.inner().internal_object());
}
ids
@@ -97,7 +97,7 @@ impl<L> Framebuffer<L> {
sType: vk::STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
pNext: ptr::null(),
flags: 0, // reserved
- renderPass: render_pass.render_pass().internal_object(),
+ renderPass: render_pass.inner().internal_object(),
attachmentCount: ids.len() as u32,
pAttachments: ids.as_ptr(),
width: dimensions[0],
@@ -126,8 +126,8 @@ impl<L> Framebuffer<L> {
where R: RenderPass,
L: RenderPass + RenderPassCompatible<R>
{
- (&*self.render_pass.render_pass() as *const UnsafeRenderPass as usize ==
- &*render_pass.render_pass() as *const UnsafeRenderPass as usize) ||
+ (&*self.render_pass.inner() as *const UnsafeRenderPass as usize ==
+ &*render_pass.inner() as *const UnsafeRenderPass as usize) ||
self.render_pass.is_compatible_with(render_pass)
}

View File

@@ -114,7 +114,7 @@ macro_rules! ordered_passes_renderpass {
unsafe impl RenderPass for CustomRenderPass {
#[inline]
- fn render_pass(&self) -> &UnsafeRenderPass {
+ fn inner(&self) -> &UnsafeRenderPass {
&self.render_pass
}
}

View File

@@ -316,7 +316,7 @@ unsafe impl VulkanObject for UnsafeRenderPass {
unsafe impl RenderPass for UnsafeRenderPass {
#[inline]
- fn render_pass(&self) -> &UnsafeRenderPass {
+ fn inner(&self) -> &UnsafeRenderPass {
self
}
}

View File

@@ -34,8 +34,7 @@ use vk;
///
pub unsafe trait RenderPass: 'static + Send + Sync {
/// Returns the underlying `UnsafeRenderPass`. Used by vulkano's internals.
- // TODO: should be named "inner()" after https://github.com/rust-lang/rust/issues/12808 is fixed
- fn render_pass(&self) -> &UnsafeRenderPass;
+ fn inner(&self) -> &UnsafeRenderPass;
}
pub unsafe trait RenderPassDesc {
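Every render pass object can now be lowered to its raw Vulkan handle the same way, whether it is a macro-generated type, `EmptySinglePassRenderPass`, or `UnsafeRenderPass` itself, which returns `self` from `inner()` as shown above. A sketch under those assumptions; `raw_handle` is a hypothetical helper, and the `vk::RenderPass` return type is assumed from the generated bindings:

// Lower any RenderPass implementor to the underlying Vulkan handle.
// internal_object() comes from the VulkanObject impl on UnsafeRenderPass.
fn raw_handle<R>(render_pass: &R) -> vk::RenderPass where R: RenderPass {
    render_pass.inner().internal_object()
}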

View File

@@ -202,7 +202,7 @@ impl<F, A> AttachmentImage<F, A> where A: MemoryPool {
unsafe impl<F, A> Image for AttachmentImage<F, A> where F: 'static + Send + Sync, A: MemoryPool {
#[inline]
- fn inner_image(&self) -> &UnsafeImage {
+ fn inner(&self) -> &UnsafeImage {
&self.image
}
@@ -307,7 +307,7 @@ unsafe impl<F, A> ImageView for AttachmentImage<F, A>
}
#[inline]
- fn inner_view(&self) -> &UnsafeImageView {
+ fn inner(&self) -> &UnsafeImageView {
&self.view
}

View File

@@ -131,7 +131,7 @@ impl<F, A> ImmutableImage<F, A> where A: MemoryPool {
unsafe impl<F, A> Image for ImmutableImage<F, A> where F: 'static + Send + Sync, A: MemoryPool {
#[inline]
- fn inner_image(&self) -> &UnsafeImage {
+ fn inner(&self) -> &UnsafeImage {
&self.image
}
@@ -235,7 +235,7 @@ unsafe impl<F: 'static, A> ImageView for ImmutableImage<F, A>
}
#[inline]
- fn inner_view(&self) -> &UnsafeImageView {
+ fn inner(&self) -> &UnsafeImageView {
&self.view
}

View File

@@ -159,7 +159,7 @@ impl<F, A> StorageImage<F, A> where A: MemoryPool {
unsafe impl<F, A> Image for StorageImage<F, A> where F: 'static + Send + Sync, A: MemoryPool {
#[inline]
- fn inner_image(&self) -> &UnsafeImage {
+ fn inner(&self) -> &UnsafeImage {
&self.image
}
@@ -287,7 +287,7 @@ unsafe impl<F, A> ImageView for StorageImage<F, A>
}
#[inline]
- fn inner_view(&self) -> &UnsafeImageView {
+ fn inner(&self) -> &UnsafeImageView {
&self.view
}

View File

@@ -105,7 +105,7 @@ impl SwapchainImage {
unsafe impl Image for SwapchainImage {
#[inline]
- fn inner_image(&self) -> &UnsafeImage {
+ fn inner(&self) -> &UnsafeImage {
&self.image
}
@@ -208,7 +208,7 @@ unsafe impl ImageView for SwapchainImage {
}
#[inline]
- fn inner_view(&self) -> &UnsafeImageView {
+ fn inner(&self) -> &UnsafeImageView {
&self.view
}

View File

@@ -23,27 +23,26 @@ use sync::Semaphore;
/// Trait for types that represent images.
pub unsafe trait Image: 'static + Send + Sync {
/// Returns the inner unsafe image object used by this image.
- // TODO: should be named "inner()" after https://github.com/rust-lang/rust/issues/12808 is fixed
- fn inner_image(&self) -> &UnsafeImage;
+ fn inner(&self) -> &UnsafeImage;
//fn align(&self, subresource_range: ) -> ;
/// Returns the format of this image.
#[inline]
fn format(&self) -> Format {
- self.inner_image().format()
+ self.inner().format()
}
/// Returns the number of samples of this image.
#[inline]
fn samples(&self) -> u32 {
- self.inner_image().samples()
+ self.inner().samples()
}
/// Returns the dimensions of the image.
#[inline]
fn dimensions(&self) -> Dimensions {
- self.inner_image().dimensions()
+ self.inner().dimensions()
}
/// Given a range, returns the list of blocks which each range is contained in.
@@ -106,13 +105,13 @@ pub unsafe trait Image: 'static + Send + Sync {
/// Returns true if the image can be used as a source for blits.
#[inline]
fn supports_blit_source(&self) -> bool {
- self.inner_image().supports_blit_source()
+ self.inner().supports_blit_source()
}
/// Returns true if the image can be used as a destination for blits.
#[inline]
fn supports_blit_destination(&self) -> bool {
- self.inner_image().supports_blit_destination()
+ self.inner().supports_blit_destination()
}
}
@@ -135,8 +134,7 @@ pub unsafe trait ImageView: 'static + Send + Sync {
fn parent_arc(&Arc<Self>) -> Arc<Image> where Self: Sized;
/// Returns the inner unsafe image view object used by this image view.
- // TODO: should be named "inner()" after https://github.com/rust-lang/rust/issues/12808 is fixed
- fn inner_view(&self) -> &UnsafeImageView;
+ fn inner(&self) -> &UnsafeImageView;
/// Returns the blocks of the parent image this image view overlaps.
fn blocks(&self) -> Vec<(u32, u32)>;
@@ -144,7 +142,7 @@ pub unsafe trait ImageView: 'static + Send + Sync {
/// Returns the format of this view. This can be different from the parent's format.
#[inline]
fn format(&self) -> Format {
- self.inner_view().format()
+ self.inner().format()
}
#[inline]
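This file is where the shared name actually collides: types such as `AttachmentImage` and `SwapchainImage` implement both `Image` and `ImageView`, so after this commit they expose two methods named `inner()` with different return types. The removed TODO lines had deferred the rename pending https://github.com/rust-lang/rust/issues/12808; where plain method syntax is ambiguous, fully qualified syntax selects the trait. A sketch, assuming `img` is one of those dual-role images:

// img.inner() alone would be ambiguous here, since two traits supply inner():
let raw_image: &UnsafeImage = Image::inner(&img);
let raw_view: &UnsafeImageView = ImageView::inner(&img);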

View File

@@ -80,7 +80,7 @@ impl<Pl> ComputePipeline<Pl> {
pNext: ptr::null(),
flags: 0,
stage: stage,
- layout: PipelineLayout::inner_pipeline_layout(&**pipeline_layout).internal_object(),
+ layout: PipelineLayout::inner(&**pipeline_layout).internal_object(),
basePipelineHandle: 0,
basePipelineIndex: 0,
};

View File

@@ -844,8 +844,8 @@ impl<Vdef, L, Rp> GraphicsPipeline<Vdef, L, Rp>
pDepthStencilState: &depth_stencil,
pColorBlendState: &blend,
pDynamicState: &dynamic_states,
- layout: PipelineLayout::inner_pipeline_layout(&**params.layout).internal_object(),
- renderPass: params.render_pass.render_pass().render_pass().internal_object(),
+ layout: PipelineLayout::inner(&**params.layout).internal_object(),
+ renderPass: params.render_pass.render_pass().inner().internal_object(),
subpass: params.render_pass.index(),
basePipelineHandle: 0, // TODO:
basePipelineIndex: -1, // TODO: