mirror of https://github.com/gfx-rs/wgpu.git
Introduced separate MemoryInitTracker
parent 076aecd150
commit 5133b0da94
@@ -89,21 +89,25 @@ impl<B: GfxBackend> CommandBuffer<B> {
         // TODO This is not optimal since buffer usage may be target of a copy operation.
         for id in head.buffers.used() {
             let buffer = &mut buffer_guard[id];
-            let uninitialized_ranges: Vec<std::ops::Range<wgt::BufferAddress>> =
-                buffer.uninitialized_ranges().collect();
-            for uninitialized_range in uninitialized_ranges {
+            for uninitialized_segment in
+                buffer
+                    .initialization_status
+                    .drain_uninitialized_segments(hal::memory::Segment {
+                        offset: 0,
+                        size: None,
+                    })
+            {
                 // TODO this itself needs resource barriers. How can we make this work?
                 unsafe {
                     raw.fill_buffer(
                         &buffer.raw.as_ref().unwrap().0,
                         hal::buffer::SubRange {
-                            offset: uninitialized_range.start,
-                            size: Some(uninitialized_range.end - uninitialized_range.start),
+                            offset: uninitialized_segment.offset,
+                            size: uninitialized_segment.size,
                         },
                         0,
                     );
                 }
-                buffer.mark_initialized(uninitialized_range);
             }
         }
@@ -8,7 +8,9 @@ use crate::{
     hub::{
         GfxBackend, Global, GlobalIdentityHandlerFactory, Hub, Input, InvalidId, Storage, Token,
     },
-    id, instance, pipeline, resource, span, swap_chain,
+    id, instance,
+    memory_init_tracker::MemoryInitTracker,
+    pipeline, resource, span, swap_chain,
     track::{BufferState, TextureSelector, TextureState, TrackerSet},
     validation::{self, check_buffer_usage, check_texture_usage},
     FastHashMap, FastHashSet, Label, LabelHelpers, LifeGuard, MultiRefCount, PrivateFeatures,
@@ -207,17 +209,27 @@ fn map_buffer<B: hal::Backend>(
     // If this is a write mapping zeroing out the memory here is the only reasonable way as all data is pushed to GPU anyways.
     let zero_init_needs_flush_now = !block.is_coherent() && buffer.sync_mapped_writes.is_none(); // No need to flush if it is flushed later anyways.

-    let uninitialized_ranges: Vec<Range<BufferAddress>> =
-        buffer.uninitialized_ranges_in_range(Range {
-            start: offset,
-            end: offset + size,
-        });
-    for range in uninitialized_ranges {
-        let size = (range.end - range.start) as usize;
-        unsafe { ptr::write_bytes(ptr.as_ptr().offset(range.start as isize), 0, size) };
-        // (technically the buffer is only initialized when we're done unmapping but we know it can't be used otherwise meanwhile)
+    for uninitialized_segment in
+        buffer
+            .initialization_status
+            .drain_uninitialized_segments(hal::memory::Segment {
+                offset,
+                size: Some(size),
+            })
+    {
+        unsafe {
+            ptr::write_bytes(
+                ptr.as_ptr().offset(uninitialized_segment.offset as isize),
+                0,
+                uninitialized_segment.size.unwrap_or(size) as usize,
+            )
+        };
         if zero_init_needs_flush_now {
-            block.flush_range(raw, range.start, Some(size))?;
+            block.flush_range(
+                raw,
+                uninitialized_segment.offset,
+                uninitialized_segment.size,
+            )?;
         }
     }

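The flush condition in the hunk above can be restated as a tiny predicate. The following is an illustrative sketch only, not code from the commit; the function name and parameters are made up. It captures the rule behind zero_init_needs_flush_now: a zero-fill written through the mapping needs an immediate flush only when the memory block is not host-coherent and no flush is already scheduled for unmap (i.e. sync_mapped_writes is None).

// Hypothetical restatement of the flush decision; not part of the commit.
fn zero_init_needs_flush_now(is_coherent: bool, flush_scheduled_for_unmap: bool) -> bool {
    !is_coherent && !flush_scheduled_for_unmap
}

fn main() {
    assert!(zero_init_needs_flush_now(false, false)); // non-coherent, no later flush: flush immediately
    assert!(!zero_init_needs_flush_now(true, false)); // coherent memory needs no explicit flush
    assert!(!zero_init_needs_flush_now(false, true)); // the unmap-time flush will cover it anyway
}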
@@ -559,13 +571,6 @@ impl<B: GfxBackend> Device<B> {
             .allocate(&self.raw, requirements, mem_usage)?;
         block.bind_buffer(&self.raw, &mut buffer)?;

-        let mut uninitialized_ranges = range_alloc::RangeAllocator::new(Range {
-            start: 0,
-            end: desc.size,
-        });
-        // Mark buffer as uninitialized
-        let _ = uninitialized_ranges.allocate_range(desc.size);
-
         Ok(resource::Buffer {
             raw: Some((buffer, block)),
             device_id: Stored {
@@ -574,7 +579,7 @@ impl<B: GfxBackend> Device<B> {
             },
             usage: desc.usage,
             size: desc.size,
-            uninitialized_ranges,
+            initialization_status: MemoryInitTracker::new(desc.size),
             sync_mapped_writes: None,
             map_state: resource::BufferMapState::Idle,
             life_guard: LifeGuard::new(desc.label.borrow_or_default()),
@@ -274,16 +274,13 @@ impl<G: GlobalIdentityHandlerFactory> Global<G> {
         // Ensure the overwritten bytes are marked as initialized so they don't need to be nulled prior to mapping or binding.
         {
             let dst = buffer_guard.get_mut(buffer_id).unwrap();
-            for range in dst
-                .uninitialized_ranges_in_range::<Vec<std::ops::Range<wgt::BufferAddress>>>(
-                    std::ops::Range {
-                        start: buffer_offset,
-                        end: buffer_offset + data_size,
-                    },
-                )
-            {
-                dst.mark_initialized(range);
-            }
+            for _ in dst
+                .initialization_status
+                .drain_uninitialized_segments(hal::memory::Segment {
+                    offset: buffer_offset,
+                    size: Some(buffer_offset + data_size),
+                })
+            {}
         }

         Ok(())
@@ -36,6 +36,7 @@ pub mod device;
 pub mod hub;
 pub mod id;
 pub mod instance;
+mod memory_init_tracker;
 pub mod pipeline;
 pub mod resource;
 pub mod swap_chain;
wgpu-core/src/memory_init_tracker.rs (new file, 65 lines added)
@@ -0,0 +1,65 @@
+use std::ops::Range;
+
+use hal::memory::Segment;
+
+/// Tracks initialization status of a linear range
+#[derive(Debug)]
+pub(crate) struct MemoryInitTracker {
+    // TODO: Use a more fitting data structure!
+    // An allocated range in this allocator means that the range in question is NOT yet initialized.
+    uninitialized_ranges: range_alloc::RangeAllocator<wgt::BufferAddress>,
+}
+
+impl MemoryInitTracker {
+    pub(crate) fn new(size: wgt::BufferAddress) -> Self {
+        let mut uninitialized_ranges =
+            range_alloc::RangeAllocator::<wgt::BufferAddress>::new(0..size);
+        let _ = uninitialized_ranges.allocate_range(size);
+
+        Self {
+            uninitialized_ranges,
+        }
+    }
+
+    pub(crate) fn drain_uninitialized_segments<'a>(
+        &'a mut self,
+        segment: Segment,
+    ) -> impl Iterator<Item = Segment> + 'a {
+        let range = match segment.size {
+            Some(size) => segment.offset..(segment.offset + size),
+            None => segment.offset..self.uninitialized_ranges.initial_range().end,
+        };
+
+        let mut uninitialized_ranges: Vec<Range<wgt::BufferAddress>> = self
+            .uninitialized_ranges
+            .allocated_ranges()
+            .filter_map(|r: Range<wgt::BufferAddress>| {
+                if r.end > range.start && r.start < range.end {
+                    Some(Range {
+                        start: range.start.max(r.start),
+                        end: range.end.min(r.end),
+                    })
+                } else {
+                    None
+                }
+            })
+            .collect();
+
+        std::iter::from_fn(move || {
+            let range: Option<Range<wgt::BufferAddress>> =
+                uninitialized_ranges.last().map(|r| r.clone());
+            match range {
+                Some(range) => {
+                    uninitialized_ranges.pop();
+                    let result = Some(Segment {
+                        offset: range.start,
+                        size: Some(range.end - range.start),
+                    });
+                    self.uninitialized_ranges.free_range(range);
+                    result
+                }
+                None => None,
+            }
+        })
+    }
+}
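To make the intended usage concrete, here is a small hypothetical sketch, not part of the commit, driving the new tracker through the two patterns the diff relies on: marking a sub-range initialized by draining it with an empty loop body (as the write_buffer hunk does), and zeroing whatever is still uninitialized (as the command-buffer and map_buffer hunks do). It assumes the crate-internal MemoryInitTracker above plus hal::memory::Segment from gfx-hal; the buffer size, offsets, and the demo function itself are made up for illustration, and since the type is pub(crate) such code would have to live inside wgpu-core.

use hal::memory::Segment;

// Hypothetical, crate-internal example (MemoryInitTracker is pub(crate)).
fn memory_init_tracker_demo() {
    // A 256-byte buffer starts out fully uninitialized.
    let mut tracker = MemoryInitTracker::new(256);
    let mut contents = vec![0xAAu8; 256];

    // Pattern 1 (as in the write_buffer hunk): draining with an empty loop body is
    // enough to mark a sub-range initialized, because drain_uninitialized_segments
    // frees each segment from the "uninitialized" allocator as it is yielded.
    for _ in tracker.drain_uninitialized_segments(Segment {
        offset: 64,
        size: Some(64),
    }) {}

    // Pattern 2 (as in the command-buffer and map_buffer hunks): zero out whatever
    // is still uninitialized. A size of None queries up to the end of the buffer.
    for segment in tracker.drain_uninitialized_segments(Segment {
        offset: 0,
        size: None,
    }) {
        let start = segment.offset as usize;
        // Drained segments always carry a concrete size.
        let end = start + segment.size.unwrap() as usize;
        for byte in &mut contents[start..end] {
            *byte = 0;
        }
    }

    // The range marked by the empty drain kept its data; the rest was zeroed.
    assert!(contents[64..128].iter().all(|&b| b == 0xAA));
    assert!(contents[..64].iter().all(|&b| b == 0));
    assert!(contents[128..].iter().all(|&b| b == 0));

    // Nothing is left to initialize.
    assert_eq!(
        tracker
            .drain_uninitialized_segments(Segment {
                offset: 0,
                size: None,
            })
            .count(),
        0
    );
}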
@@ -6,6 +6,7 @@ use crate::{
     device::{alloc::MemoryBlock, DeviceError, HostMap},
     hub::Resource,
     id::{DeviceId, SwapChainId, TextureId},
+    memory_init_tracker::MemoryInitTracker,
     track::{TextureSelector, DUMMY_SELECTOR},
     validation::MissingBufferUsageError,
     Label, LifeGuard, RefCount, Stored,
@@ -160,47 +161,12 @@ pub struct Buffer<B: hal::Backend> {
     pub(crate) device_id: Stored<DeviceId>,
     pub(crate) usage: wgt::BufferUsage,
     pub(crate) size: wgt::BufferAddress,
-    // An allocated range in this allocator means that the range in question is NOT yet initialized.
-    pub(crate) uninitialized_ranges: range_alloc::RangeAllocator<wgt::BufferAddress>,
+    pub(crate) initialization_status: MemoryInitTracker,
     pub(crate) sync_mapped_writes: Option<hal::memory::Segment>,
     pub(crate) life_guard: LifeGuard,
    pub(crate) map_state: BufferMapState<B>,
 }
-
-impl<B: hal::Backend> Buffer<B> {
-    pub(crate) fn uninitialized_ranges_in_range<
-        R: std::iter::FromIterator<Range<wgt::BufferAddress>>,
-    >(
-        &self,
-        range: Range<wgt::BufferAddress>,
-    ) -> R {
-        self.uninitialized_ranges
-            .allocated_ranges()
-            .filter_map(|r: Range<wgt::BufferAddress>| {
-                if r.end > range.start && r.start < range.end {
-                    Some(Range {
-                        start: range.start.max(r.start),
-                        end: range.end.min(r.end),
-                    })
-                } else {
-                    None
-                }
-            })
-            .collect::<R>()
-    }
-
-    pub(crate) fn uninitialized_ranges<'a>(
-        &'a self,
-    ) -> impl 'a + Iterator<Item = Range<wgt::BufferAddress>> {
-        self.uninitialized_ranges.allocated_ranges()
-    }
-
-    // Range must be continuous previously uninitialized section.
-    pub(crate) fn mark_initialized(&mut self, range: Range<wgt::BufferAddress>) {
-        self.uninitialized_ranges.free_range(range);
-    }
-}

 #[derive(Clone, Debug, Error)]
 pub enum CreateBufferError {
     #[error(transparent)]