Minimize moves by introducing WeakVec

This commit is contained in:
teoxoy 2024-10-17 14:10:14 +02:00 committed by Teodor Tanasoaia
parent 1b2ef8612d
commit 8ba5c82831
4 changed files with 120 additions and 58 deletions

View File

@ -31,6 +31,7 @@ use crate::{
UsageScopePool,
},
validation::{self, validate_color_attachment_bytes_per_sample},
weak_vec::WeakVec,
FastHashMap, LabelHelpers, PreHashedKey, PreHashedMap,
};
@ -42,7 +43,7 @@ use wgt::{
use std::{
borrow::Cow,
mem::ManuallyDrop,
mem::{self, ManuallyDrop},
num::NonZeroU32,
sync::{
atomic::{AtomicBool, AtomicU64, Ordering},
@ -150,8 +151,8 @@ pub struct Device {
}
pub(crate) enum DeferredDestroy {
TextureView(Weak<TextureView>),
BindGroup(Weak<BindGroup>),
TextureViews(WeakVec<TextureView>),
BindGroups(WeakVec<BindGroup>),
}
impl std::fmt::Debug for Device {
@ -384,36 +385,42 @@ impl Device {
/// implementation of a reference-counted structure).
/// The snatch lock must not be held while this function is called.
pub(crate) fn deferred_resource_destruction(&self) {
while let Some(item) = self.deferred_destroy.lock().pop() {
let deferred_destroy = mem::take(&mut *self.deferred_destroy.lock());
for item in deferred_destroy {
match item {
DeferredDestroy::TextureView(view) => {
let Some(view) = view.upgrade() else {
continue;
};
let Some(raw_view) = view.raw.snatch(&mut self.snatchable_lock.write()) else {
continue;
};
DeferredDestroy::TextureViews(views) => {
for view in views {
let Some(view) = view.upgrade() else {
continue;
};
let Some(raw_view) = view.raw.snatch(&mut self.snatchable_lock.write())
else {
continue;
};
resource_log!("Destroy raw {}", view.error_ident());
resource_log!("Destroy raw {}", view.error_ident());
unsafe {
self.raw().destroy_texture_view(raw_view);
unsafe {
self.raw().destroy_texture_view(raw_view);
}
}
}
DeferredDestroy::BindGroup(bind_group) => {
let Some(bind_group) = bind_group.upgrade() else {
continue;
};
let Some(raw_bind_group) =
bind_group.raw.snatch(&mut self.snatchable_lock.write())
else {
continue;
};
DeferredDestroy::BindGroups(bind_groups) => {
for bind_group in bind_groups {
let Some(bind_group) = bind_group.upgrade() else {
continue;
};
let Some(raw_bind_group) =
bind_group.raw.snatch(&mut self.snatchable_lock.write())
else {
continue;
};
resource_log!("Destroy raw {}", bind_group.error_ident());
resource_log!("Destroy raw {}", bind_group.error_ident());
unsafe {
self.raw().destroy_bind_group(raw_bind_group);
unsafe {
self.raw().destroy_bind_group(raw_bind_group);
}
}
}
}
@ -638,7 +645,7 @@ impl Device {
map_state: Mutex::new(rank::BUFFER_MAP_STATE, resource::BufferMapState::Idle),
label: desc.label.to_string(),
tracking_data: TrackingData::new(self.tracker_indices.buffers.clone()),
bind_groups: Mutex::new(rank::BUFFER_BIND_GROUPS, Vec::new()),
bind_groups: Mutex::new(rank::BUFFER_BIND_GROUPS, WeakVec::new()),
#[cfg(feature = "indirect-validation")]
raw_indirect_validation_bind_group,
};
@ -753,7 +760,7 @@ impl Device {
map_state: Mutex::new(rank::BUFFER_MAP_STATE, resource::BufferMapState::Idle),
label: desc.label.to_string(),
tracking_data: TrackingData::new(self.tracker_indices.buffers.clone()),
bind_groups: Mutex::new(rank::BUFFER_BIND_GROUPS, Vec::new()),
bind_groups: Mutex::new(rank::BUFFER_BIND_GROUPS, WeakVec::new()),
#[cfg(feature = "indirect-validation")]
raw_indirect_validation_bind_group,
};
@ -1386,10 +1393,6 @@ impl Device {
{
let mut views = texture.views.lock();
// Remove stale weak references
views.retain(|view| view.strong_count() > 0);
views.push(Arc::downgrade(&view));
}
@ -2379,18 +2382,10 @@ impl Device {
let weak_ref = Arc::downgrade(&bind_group);
for range in &bind_group.used_texture_ranges {
let mut bind_groups = range.texture.bind_groups.lock();
// Remove stale weak references
bind_groups.retain(|bg| bg.strong_count() > 0);
bind_groups.push(weak_ref.clone());
}
for range in &bind_group.used_buffer_ranges {
let mut bind_groups = range.buffer.bind_groups.lock();
// Remove stale weak references
bind_groups.retain(|bg| bg.strong_count() > 0);
bind_groups.push(weak_ref.clone());
}

View File

@ -81,6 +81,7 @@ pub mod resource;
mod snatch;
pub mod storage;
mod track;
mod weak_vec;
// This is public for users who pre-compile shaders while still wanting to
// preserve all run-time checks that `wgpu-core` does.
// See <https://github.com/gfx-rs/wgpu/issues/3103>, after which this can be

View File

@ -14,6 +14,7 @@ use crate::{
resource_log,
snatch::{SnatchGuard, Snatchable},
track::{SharedTrackerIndexAllocator, TextureSelector, TrackerIndex},
weak_vec::WeakVec,
Label, LabelHelpers,
};
@ -26,7 +27,7 @@ use std::{
mem::{self, ManuallyDrop},
ops::Range,
ptr::NonNull,
sync::{Arc, Weak},
sync::Arc,
};
/// Information about the wgpu-core resource.
@ -474,7 +475,7 @@ pub struct Buffer {
pub(crate) label: String,
pub(crate) tracking_data: TrackingData,
pub(crate) map_state: Mutex<BufferMapState>,
pub(crate) bind_groups: Mutex<Vec<Weak<BindGroup>>>,
pub(crate) bind_groups: Mutex<WeakVec<BindGroup>>,
#[cfg(feature = "indirect-validation")]
pub(crate) raw_indirect_validation_bind_group: Snatchable<Box<dyn hal::DynBindGroup>>,
}
@ -824,7 +825,7 @@ pub struct DestroyedBuffer {
raw: ManuallyDrop<Box<dyn hal::DynBuffer>>,
device: Arc<Device>,
label: String,
bind_groups: Vec<Weak<BindGroup>>,
bind_groups: WeakVec<BindGroup>,
#[cfg(feature = "indirect-validation")]
raw_indirect_validation_bind_group: Option<Box<dyn hal::DynBindGroup>>,
}
@ -838,9 +839,9 @@ impl DestroyedBuffer {
impl Drop for DestroyedBuffer {
fn drop(&mut self) {
let mut deferred = self.device.deferred_destroy.lock();
for bind_group in self.bind_groups.drain(..) {
deferred.push(DeferredDestroy::BindGroup(bind_group));
}
deferred.push(DeferredDestroy::BindGroups(mem::take(
&mut self.bind_groups,
)));
drop(deferred);
#[cfg(feature = "indirect-validation")]
@ -1060,8 +1061,8 @@ pub struct Texture {
pub(crate) label: String,
pub(crate) tracking_data: TrackingData,
pub(crate) clear_mode: TextureClearMode,
pub(crate) views: Mutex<Vec<Weak<TextureView>>>,
pub(crate) bind_groups: Mutex<Vec<Weak<BindGroup>>>,
pub(crate) views: Mutex<WeakVec<TextureView>>,
pub(crate) bind_groups: Mutex<WeakVec<BindGroup>>,
}
impl Texture {
@ -1095,8 +1096,8 @@ impl Texture {
label: desc.label.to_string(),
tracking_data: TrackingData::new(device.tracker_indices.textures.clone()),
clear_mode,
views: Mutex::new(rank::TEXTURE_VIEWS, Vec::new()),
bind_groups: Mutex::new(rank::TEXTURE_BIND_GROUPS, Vec::new()),
views: Mutex::new(rank::TEXTURE_VIEWS, WeakVec::new()),
bind_groups: Mutex::new(rank::TEXTURE_BIND_GROUPS, WeakVec::new()),
}
}
/// Checks that the given texture usage contains the required texture usage,
@ -1430,8 +1431,8 @@ impl Global {
#[derive(Debug)]
pub struct DestroyedTexture {
raw: ManuallyDrop<Box<dyn hal::DynTexture>>,
views: Vec<Weak<TextureView>>,
bind_groups: Vec<Weak<BindGroup>>,
views: WeakVec<TextureView>,
bind_groups: WeakVec<BindGroup>,
device: Arc<Device>,
label: String,
}
@ -1447,12 +1448,10 @@ impl Drop for DestroyedTexture {
let device = &self.device;
let mut deferred = device.deferred_destroy.lock();
for view in self.views.drain(..) {
deferred.push(DeferredDestroy::TextureView(view));
}
for bind_group in self.bind_groups.drain(..) {
deferred.push(DeferredDestroy::BindGroup(bind_group));
}
deferred.push(DeferredDestroy::TextureViews(mem::take(&mut self.views)));
deferred.push(DeferredDestroy::BindGroups(mem::take(
&mut self.bind_groups,
)));
drop(deferred);
resource_log!("Destroy raw Texture (destroyed) {:?}", self.label());

67
wgpu-core/src/weak_vec.rs Normal file
View File

@ -0,0 +1,67 @@
//! Module containing the [`WeakVec`] API.

use std::sync::Weak;

/// A container that holds Weak references of T.
///
/// On `push` it scans its contents for weak references with no strong references still alive and drops them.
#[derive(Debug)]
pub(crate) struct WeakVec<T> {
    // Slots become `None` once a dead reference has been dropped; `push`
    // reuses those slots instead of growing the vector.
    inner: Vec<Option<Weak<T>>>,
}

impl<T> Default for WeakVec<T> {
    fn default() -> Self {
        Self {
            inner: Default::default(),
        }
    }
}

impl<T> WeakVec<T> {
    /// Creates an empty `WeakVec`.
    pub(crate) fn new() -> Self {
        Self { inner: Vec::new() }
    }

    /// Pushes a new element to this collection, dropping older elements that no longer have
    /// a strong reference to them.
    ///
    /// NOTE: The length and capacity of this collection do not change when old elements are
    /// dropped.
    pub(crate) fn push(&mut self, value: Weak<T>) {
        // Single pass: `value` goes into the first free (empty or dead) slot;
        // every later dead slot is cleared to `None` along the way.
        let mut to_insert = Some(value);
        for slot in &mut self.inner {
            let is_free = match slot {
                Some(weak) => weak.strong_count() == 0,
                None => true,
            };
            if is_free {
                // Inserts the new value on the first free slot; once
                // `to_insert` has been taken this writes `None`, dropping
                // the stale weak reference that occupied the slot.
                *slot = to_insert.take();
            }
        }
        if let Some(to_insert) = to_insert {
            self.inner.push(Some(to_insert));
        }
    }
}

/// Iterator over the weak references stored in a [`WeakVec`], skipping
/// slots that have already been cleared.
#[derive(Debug)]
pub(crate) struct WeakVecIter<T> {
    inner: std::iter::Flatten<std::vec::IntoIter<Option<Weak<T>>>>,
}

impl<T> Iterator for WeakVecIter<T> {
    type Item = Weak<T>;

    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next()
    }

    // Delegate to the inner iterator so adapters like `collect` can
    // preallocate from the upper bound.
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}

impl<T> IntoIterator for WeakVec<T> {
    type Item = Weak<T>;
    type IntoIter = WeakVecIter<T>;

    fn into_iter(self) -> Self::IntoIter {
        WeakVecIter {
            inner: self.inner.into_iter().flatten(),
        }
    }
}