Bind group resource transitions

Dzmitry Malyshau 2019-01-21 06:36:39 -05:00
parent dee685aa0c
commit 4f939bc32f
5 changed files with 200 additions and 112 deletions

View File

@@ -1,84 +1,78 @@
use crate::registry::{HUB, Items};
use crate::{
B, Stored, WeaklyStored,
B, BindGroup, Stored, WeaklyStored,
BindGroupId, BindGroupLayoutId, PipelineLayoutId,
};
use hal;
pub struct BindGroupPair {
layout_id: WeaklyStored<BindGroupLayoutId>,
group_id: Stored<BindGroupId>,
}
#[derive(Clone, Default)]
struct BindGroupEntry {
layout: Option<WeaklyStored<BindGroupLayoutId>>,
data: Option<Stored<BindGroupId>>,
#[derive(Default)]
pub struct BindGroupEntry {
expected_layout_id: Option<WeaklyStored<BindGroupLayoutId>>,
provided: Option<BindGroupPair>,
}
impl BindGroupEntry {
fn provide(&mut self, bind_group_id: BindGroupId, bind_group: &BindGroup<B>) -> bool {
if let Some(BindGroupPair { ref layout_id, ref group_id }) = self.provided {
if group_id.value == bind_group_id {
assert_eq!(*layout_id, bind_group.layout_id);
return false
}
}
self.provided = Some(BindGroupPair {
layout_id: bind_group.layout_id.clone(),
group_id: Stored {
value: bind_group_id,
ref_count: bind_group.life_guard.ref_count.clone(),
},
});
self.expected_layout_id == Some(bind_group.layout_id.clone())
}
pub fn expect_layout(
&mut self, bind_group_layout_id: BindGroupLayoutId,
) -> Option<BindGroupId> {
let some = Some(WeaklyStored(bind_group_layout_id));
if self.expected_layout_id != some {
self.expected_layout_id = some;
match self.provided {
Some(BindGroupPair { ref layout_id, ref group_id })
if layout_id.0 == bind_group_layout_id => Some(group_id.value),
Some(_) | None => None,
}
} else {
None
}
}
}
#[derive(Default)]
pub struct Binder {
pipeline_layout_id: Option<WeaklyStored<PipelineLayoutId>>, //TODO: strongly `Stored`
entries: Vec<BindGroupEntry>,
pub(crate) pipeline_layout_id: Option<WeaklyStored<PipelineLayoutId>>, //TODO: strongly `Stored`
pub(crate) entries: Vec<BindGroupEntry>,
}
//Note: we can probably make this much better than passing an `FnMut`
impl Binder {
pub fn bind_group<F>(&mut self, index: usize, bind_group_id: BindGroupId, mut fun: F)
where
F: FnMut(&<B as hal::Backend>::PipelineLayout, &<B as hal::Backend>::DescriptorSet),
{
let bind_group_guard = HUB.bind_groups.read();
let bind_group = bind_group_guard.get(bind_group_id);
while self.entries.len() <= index {
pub fn ensure_length(&mut self, length: usize) {
while self.entries.len() < length {
self.entries.push(BindGroupEntry::default());
}
*self.entries.get_mut(index).unwrap() = BindGroupEntry {
layout: Some(bind_group.layout_id.clone()),
data: Some(Stored {
value: bind_group_id,
ref_count: bind_group.life_guard.ref_count.clone(),
}),
};
if let Some(WeaklyStored(pipeline_layout_id)) = self.pipeline_layout_id {
//TODO: we can cache the group layout ids of the current pipeline in `Binder` itself
let pipeline_layout_guard = HUB.pipeline_layouts.read();
let pipeline_layout = pipeline_layout_guard.get(pipeline_layout_id);
if pipeline_layout.bind_group_layout_ids[index] == bind_group.layout_id {
fun(&pipeline_layout.raw, &bind_group.raw);
}
}
}
pub fn change_layout<F>(&mut self, pipeline_layout_id: PipelineLayoutId, mut fun: F)
where
F: FnMut(&<B as hal::Backend>::PipelineLayout, usize, &<B as hal::Backend>::DescriptorSet),
{
if self.pipeline_layout_id == Some(WeaklyStored(pipeline_layout_id)) {
return
}
self.pipeline_layout_id = Some(WeaklyStored(pipeline_layout_id));
let pipeline_layout_guard = HUB.pipeline_layouts.read();
let pipeline_layout = pipeline_layout_guard.get(pipeline_layout_id);
let bind_group_guard = HUB.bind_groups.read();
while self.entries.len() < pipeline_layout.bind_group_layout_ids.len() {
self.entries.push(BindGroupEntry::default());
}
for (index, (entry, bgl_id)) in self.entries
.iter_mut()
.zip(&pipeline_layout.bind_group_layout_ids)
.enumerate()
{
if entry.layout == Some(bgl_id.clone()) {
continue
}
entry.layout = Some(bgl_id.clone());
if let Some(ref bg_id) = entry.data {
let bind_group = bind_group_guard.get(bg_id.value);
fun(&pipeline_layout.raw, index, &bind_group.raw);
}
pub(crate) fn provide_entry(
&mut self, index: usize, bind_group_id: BindGroupId, bind_group: &BindGroup<B>
) -> Option<PipelineLayoutId> {
self.ensure_length(index + 1);
if self.entries[index].provide(bind_group_id, bind_group) {
self.pipeline_layout_id.as_ref().map(|&WeaklyStored(id)| id)
} else {
None
}
}
}
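
The rewritten command/bind.rs reconciles bind groups and pipeline layouts in both directions: BindGroupEntry::provide records a group at a slot and reports whether it can be bound straight away, while expect_layout records the layout a newly set pipeline requires and hands back a previously provided group that now matches. Below is a minimal standalone sketch of that bookkeeping, using plain integer ids in place of the Stored/WeaklyStored handles; the Entry type and the main function are illustrative stand-ins, not the real wgpu types.

// Minimal model of command/bind.rs: each slot tracks the layout the current
// pipeline expects and the bind group the user last provided.
#[derive(Default)]
struct Entry {
    expected_layout: Option<u32>,  // layout id required by the current pipeline
    provided: Option<(u32, u32)>,  // (layout id, bind group id) last set by the user
}

impl Entry {
    // set_bind_group path: remember the group; return true when the descriptor
    // set can be bound right away because the pipeline already expects this layout.
    fn provide(&mut self, layout: u32, group: u32) -> bool {
        if self.provided == Some((layout, group)) {
            return false; // same group already recorded for this slot
        }
        self.provided = Some((layout, group));
        self.expected_layout == Some(layout)
    }

    // set_pipeline path: remember the newly expected layout; return a previously
    // provided group id that can be (re)bound now that the layouts match.
    fn expect_layout(&mut self, layout: u32) -> Option<u32> {
        if self.expected_layout == Some(layout) {
            return None; // layout unchanged, the current binding stays valid
        }
        self.expected_layout = Some(layout);
        match self.provided {
            Some((l, g)) if l == layout => Some(g),
            _ => None,
        }
    }
}

fn main() {
    let mut slot = Entry::default();
    // Bind group set before any pipeline: nothing can be bound yet.
    assert!(!slot.provide(7, 42));
    // A pipeline whose layout matches picks the group up retroactively.
    assert_eq!(slot.expect_layout(7), Some(42));
    // Re-providing the same group is a no-op; a different group rebinds immediately.
    assert!(!slot.provide(7, 42));
    assert!(slot.provide(7, 43));
}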

View File

@@ -1,10 +1,12 @@
use crate::command::bind::Binder;
use crate::command::bind::{Binder};
use crate::registry::{Items, HUB};
use crate::track::{BufferTracker, TextureTracker};
use crate::{
Stored,
Stored, CommandBuffer,
BindGroupId, CommandBufferId, ComputePassId, ComputePipelineId,
};
use hal;
use hal::command::RawCommandBuffer;
use std::iter;
@@ -14,6 +16,8 @@ pub struct ComputePass<B: hal::Backend> {
raw: B::CommandBuffer,
cmb_id: Stored<CommandBufferId>,
binder: Binder,
buffer_tracker: BufferTracker,
texture_tracker: TextureTracker,
}
impl<B: hal::Backend> ComputePass<B> {
@@ -22,6 +26,8 @@ impl<B: hal::Backend> ComputePass<B> {
raw,
cmb_id,
binder: Binder::default(),
buffer_tracker: BufferTracker::new(),
texture_tracker: TextureTracker::new(),
}
}
}
@@ -56,16 +62,30 @@ pub extern "C" fn wgpu_compute_pass_set_bind_group(
bind_group_id: BindGroupId,
) {
let mut pass_guard = HUB.compute_passes.write();
let ComputePass { ref mut raw, ref mut binder, .. } = *pass_guard.get_mut(pass_id);
let pass = pass_guard.get_mut(pass_id);
let bind_group_guard = HUB.bind_groups.read();
let bind_group = bind_group_guard.get(bind_group_id);
binder.bind_group(index as usize, bind_group_id, |pipeline_layout, desc_set| unsafe {
raw.bind_compute_descriptor_sets(
pipeline_layout,
index as usize,
iter::once(desc_set),
&[],
);
});
CommandBuffer::insert_barriers(
&mut pass.raw,
pass.buffer_tracker.consume_by_replace(&bind_group.used_buffers),
pass.texture_tracker.consume_by_replace(&bind_group.used_textures),
&*HUB.buffers.read(),
&*HUB.textures.read(),
);
if let Some(pipeline_layout_id) = pass.binder.provide_entry(index as usize, bind_group_id, bind_group) {
let pipeline_layout_guard = HUB.pipeline_layouts.read();
let pipeline_layout = pipeline_layout_guard.get(pipeline_layout_id);
unsafe {
pass.raw.bind_compute_descriptor_sets(
&pipeline_layout.raw,
index as usize,
iter::once(&bind_group.raw),
&[],
);
}
}
}
#[no_mangle]
@@ -74,20 +94,40 @@ pub extern "C" fn wgpu_compute_pass_set_pipeline(
pipeline_id: ComputePipelineId,
) {
let mut pass_guard = HUB.compute_passes.write();
let ComputePass { ref mut raw, ref mut binder, .. } = *pass_guard.get_mut(pass_id);
let pass = pass_guard.get_mut(pass_id);
let pipeline_guard = HUB.compute_pipelines.read();
let pipeline = pipeline_guard.get(pipeline_id);
unsafe {
raw.bind_compute_pipeline(&pipeline.raw);
pass.raw.bind_compute_pipeline(&pipeline.raw);
}
if pass.binder.pipeline_layout_id == Some(pipeline.layout_id.clone()) {
return
}
let pipeline_layout_guard = HUB.pipeline_layouts.read();
let pipeline_layout = pipeline_layout_guard.get(pipeline.layout_id.0);
let bind_group_guard = HUB.bind_groups.read();
pass.binder.pipeline_layout_id = Some(pipeline.layout_id.clone());
pass.binder.ensure_length(pipeline_layout.bind_group_layout_ids.len());
for (index, (entry, bgl_id)) in pass.binder.entries
.iter_mut()
.zip(&pipeline_layout.bind_group_layout_ids)
.enumerate()
{
if let Some(bg_id) = entry.expect_layout(bgl_id.0) {
let bind_group = bind_group_guard.get(bg_id);
unsafe {
pass.raw.bind_compute_descriptor_sets(
&pipeline_layout.raw,
index,
iter::once(&bind_group.raw),
&[]
);
}
}
}
binder.change_layout(pipeline.layout_id.0, |pipeline_layout, index, desc_set| unsafe {
raw.bind_compute_descriptor_sets(
pipeline_layout,
index,
iter::once(desc_set),
&[],
);
});
}
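
On the compute side, wgpu_compute_pass_set_bind_group now folds the group's used_buffers/used_textures into the pass-local trackers and records the resulting transitions as barriers before the descriptor set is bound, which is what consume_by_replace provides. Here is a rough standalone sketch of that replace-and-diff step; the flat HashMap, the Usage alias, and the constants are invented simplifications of the real trackers and usage flags.

use std::collections::HashMap;

// Simplified stand-in for the per-pass tracker: resource id -> last known usage.
// Replacing the usage yields an old -> new transition that the compute pass
// immediately turns into a pipeline barrier.
type Usage = u32;
const STORAGE_READ: Usage = 1;
const STORAGE_WRITE: Usage = 2;

fn consume_by_replace(
    tracker: &mut HashMap<u32, Usage>,
    group_uses: &HashMap<u32, Usage>,
) -> Vec<(u32, (Usage, Usage))> {
    let mut transitions = Vec::new();
    for (&id, &new) in group_uses {
        match tracker.insert(id, new) {
            Some(old) if old != new => transitions.push((id, (old, new))), // barrier needed
            _ => {}                                                        // first use or unchanged
        }
    }
    transitions
}

fn main() {
    let mut pass_tracker = HashMap::new();
    let write_use = HashMap::from([(1, STORAGE_WRITE)]);
    let read_use = HashMap::from([(1, STORAGE_READ)]);
    // First bind of the resource: no prior state, so no transition is produced.
    assert!(consume_by_replace(&mut pass_tracker, &write_use).is_empty());
    // Rebinding it for reading produces a write -> read transition (a barrier).
    assert_eq!(
        consume_by_replace(&mut pass_tracker, &read_use),
        vec![(1, (STORAGE_WRITE, STORAGE_READ))]
    );
}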

View File

@@ -45,8 +45,8 @@ pub extern "C" fn wgpu_render_pass_end_pass(pass_id: RenderPassId) -> CommandBuf
if let Some(ref mut last) = cmb.raw.last_mut() {
CommandBuffer::insert_barriers(
last,
cmb.buffer_tracker.consume(&pass.buffer_tracker),
cmb.texture_tracker.consume(&pass.texture_tracker),
cmb.buffer_tracker.consume_by_replace(&pass.buffer_tracker),
cmb.texture_tracker.consume_by_replace(&pass.texture_tracker),
&*HUB.buffers.read(),
&*HUB.textures.read(),
);
@@ -165,16 +165,29 @@ pub extern "C" fn wgpu_render_pass_set_bind_group(
bind_group_id: BindGroupId,
) {
let mut pass_guard = HUB.render_passes.write();
let RenderPass { ref mut raw, ref mut binder, .. } = *pass_guard.get_mut(pass_id);
let pass = pass_guard.get_mut(pass_id);
let bind_group_guard = HUB.bind_groups.read();
let bind_group = bind_group_guard.get(bind_group_id);
binder.bind_group(index as usize, bind_group_id, |pipeline_layout, desc_set| unsafe {
raw.bind_compute_descriptor_sets(
pipeline_layout,
index as usize,
iter::once(desc_set),
&[],
);
});
pass.buffer_tracker
.consume_by_extend(&bind_group.used_buffers)
.unwrap();
pass.texture_tracker
.consume_by_extend(&bind_group.used_textures)
.unwrap();
if let Some(pipeline_layout_id) = pass.binder.provide_entry(index as usize, bind_group_id, bind_group) {
let pipeline_layout_guard = HUB.pipeline_layouts.read();
let pipeline_layout = pipeline_layout_guard.get(pipeline_layout_id);
unsafe {
pass.raw.bind_graphics_descriptor_sets(
&pipeline_layout.raw,
index as usize,
iter::once(&bind_group.raw),
&[],
);
}
}
}
#[no_mangle]
@@ -183,20 +196,40 @@ pub extern "C" fn wgpu_render_pass_set_pipeline(
pipeline_id: RenderPipelineId,
) {
let mut pass_guard = HUB.render_passes.write();
let RenderPass { ref mut raw, ref mut binder, .. } = *pass_guard.get_mut(pass_id);
let pass = pass_guard.get_mut(pass_id);
let pipeline_guard = HUB.render_pipelines.read();
let pipeline = pipeline_guard.get(pipeline_id);
unsafe {
raw.bind_graphics_pipeline(&pipeline.raw);
pass.raw.bind_graphics_pipeline(&pipeline.raw);
}
if pass.binder.pipeline_layout_id == Some(pipeline.layout_id.clone()) {
return
}
let pipeline_layout_guard = HUB.pipeline_layouts.read();
let pipeline_layout = pipeline_layout_guard.get(pipeline.layout_id.0);
let bind_group_guard = HUB.bind_groups.read();
pass.binder.pipeline_layout_id = Some(pipeline.layout_id.clone());
pass.binder.ensure_length(pipeline_layout.bind_group_layout_ids.len());
for (index, (entry, bgl_id)) in pass.binder.entries
.iter_mut()
.zip(&pipeline_layout.bind_group_layout_ids)
.enumerate()
{
if let Some(bg_id) = entry.expect_layout(bgl_id.0) {
let bind_group = bind_group_guard.get(bg_id);
unsafe {
pass.raw.bind_graphics_descriptor_sets(
&pipeline_layout.raw,
index,
iter::once(&bind_group.raw),
&[]
);
}
}
}
binder.change_layout(pipeline.layout_id.0, |pipeline_layout, index, desc_set| unsafe {
raw.bind_graphics_descriptor_sets(
pipeline_layout,
index,
iter::once(desc_set),
&[],
);
});
}

View File

@@ -705,8 +705,8 @@ pub extern "C" fn wgpu_queue_submit(
//TODO: fix the consume
command::CommandBuffer::insert_barriers(
&mut transit,
buffer_tracker.consume(&comb.buffer_tracker),
texture_tracker.consume(&comb.texture_tracker),
buffer_tracker.consume_by_replace(&comb.buffer_tracker),
texture_tracker.consume_by_replace(&comb.texture_tracker),
&*buffer_guard,
&*texture_guard,
);

View File

@@ -70,7 +70,7 @@ pub type BufferTracker = Tracker<BufferId, BufferUsageFlags>;
pub type TextureTracker = Tracker<TextureId, TextureUsageFlags>;
impl<I: Clone + Hash + Eq, U: Copy + GenericUsage + BitOr<Output = U> + PartialEq> Tracker<I, U> {
pub(crate) fn new() -> Self {
pub fn new() -> Self {
Tracker {
map: HashMap::new(),
}
@@ -132,7 +132,7 @@ impl<I: Clone + Hash + Eq, U: Copy + GenericUsage + BitOr<Output = U> + PartialE
}
/// Consume another tracker, adding its transitions to `self`.
pub fn consume<'a>(&'a mut self, other: &'a Self) -> impl 'a + Iterator<Item = (I, Range<U>)> {
pub fn consume_by_replace<'a>(&'a mut self, other: &'a Self) -> impl 'a + Iterator<Item = (I, Range<U>)> {
other.map.iter().flat_map(move |(id, new)| {
match self.map.entry(WeaklyStored(id.0.clone())) {
Entry::Vacant(e) => {
@@ -151,6 +151,27 @@ impl<I: Clone + Hash + Eq, U: Copy + GenericUsage + BitOr<Output = U> + PartialE
})
}
pub fn consume_by_extend<'a>(&'a mut self, other: &'a Self) -> Result<(), (I, Range<U>)> {
for (id, new) in other.map.iter() {
match self.map.entry(WeaklyStored(id.0.clone())) {
Entry::Vacant(e) => {
e.insert(new.clone());
}
Entry::Occupied(mut e) => {
let old = e.get().last;
if old != new.last {
let extended = old | new.last;
if extended.is_exclusive() {
return Err((id.0.clone(), old..new.last));
}
e.get_mut().last = extended;
}
}
}
}
Ok(())
}
/// Return an iterator over used resources keys.
pub fn used<'a>(&'a self) -> impl 'a + Iterator<Item = I> {
self.map.keys().map(|&WeaklyStored(ref id)| id.clone())
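
Render passes cannot rewrite their resource state mid-pass, so wgpu_render_pass_set_bind_group accumulates usage through the new consume_by_extend above: the old and new usages are OR-ed together and the call fails if the combination becomes exclusive, leaving the actual barriers to end_pass. A hedged sketch of that merge rule follows, with a made-up three-flag Usage type standing in for BufferUsageFlags/TextureUsageFlags and a plain tuple error instead of the (id, Range) pair.

use std::collections::HashMap;

type Usage = u32;
const SAMPLED: Usage = 1;       // read-only usages can be combined freely
const UNIFORM: Usage = 2;
const STORAGE_WRITE: Usage = 4; // any combination that includes a write is exclusive

fn is_exclusive(usage: Usage) -> bool {
    usage & STORAGE_WRITE != 0
}

fn consume_by_extend(
    tracker: &mut HashMap<u32, Usage>,
    group_uses: &HashMap<u32, Usage>,
) -> Result<(), (u32, Usage, Usage)> {
    for (&id, &new) in group_uses {
        let entry = tracker.entry(id).or_insert(new);
        if *entry != new {
            let extended = *entry | new;
            if is_exclusive(extended) {
                return Err((id, *entry, new)); // incompatible usages within one pass
            }
            *entry = extended;
        }
    }
    Ok(())
}

fn main() {
    let mut tracker = HashMap::new();
    // Read-only usages of the same resource merge without needing a barrier.
    consume_by_extend(&mut tracker, &HashMap::from([(1, SAMPLED)])).unwrap();
    consume_by_extend(&mut tracker, &HashMap::from([(1, UNIFORM)])).unwrap();
    assert_eq!(tracker[&1], SAMPLED | UNIFORM);
    // Extending an already-read resource with a write is rejected.
    assert!(consume_by_extend(&mut tracker, &HashMap::from([(1, STORAGE_WRITE)])).is_err());
}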