Mirror of https://github.com/gfx-rs/wgpu.git, synced 2024-11-24 15:54:00 +00:00
[wgpu-core] Replace usage of PreHashedMap with FastHashMap (#6541)
PreHashedMap was introduced as a performance optimization. It is, however, potentially unsound, as it cannot distinguish between keys that are not equal yet have the same hash. This patch replaces its usage with a simple FastHashMap, which should be good enough. If this is found to cause real-life performance issues down the line, we can figure out a better solution.
This commit is contained in:
parent d0fd6a231c
commit 4fb32170bf
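To illustrate the soundness concern, here is a standalone sketch (not wgpu code). `PreHashedKey` compared keys only by their 64-bit FxHash, so any two unequal keys that happened to collide would be treated as the same map entry. The demo below substitutes a deliberately colliding hasher for a (rare) FxHash collision; `PreHashed` and `pre_hash` are hypothetical stand-ins for the removed types.

```rust
use std::collections::HashMap;
use std::hash::{Hash, Hasher};

// Stand-in for a (rare) FxHash collision: this demo hasher maps every key to
// the same 64-bit value.
#[derive(Default)]
struct CollidingHasher;

impl Hasher for CollidingHasher {
    fn write(&mut self, _bytes: &[u8]) {}
    fn finish(&self) -> u64 {
        42
    }
}

// Simplified stand-in for the removed `PreHashedKey`: equality and hashing are
// defined on the stored hash alone, so the original key is never consulted.
#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)]
struct PreHashed(u64);

fn pre_hash<K: Hash>(key: &K) -> PreHashed {
    let mut h = CollidingHasher::default();
    key.hash(&mut h);
    PreHashed(h.finish())
}

fn main() {
    let mut map: HashMap<PreHashed, &str> = HashMap::new();
    map.insert(pre_hash(&"layout for pipeline A"), "bind group layout A");

    // A different key that happens to share the hash silently hits A's entry.
    let looked_up = map.get(&pre_hash(&"layout for pipeline B"));
    assert_eq!(looked_up, Some(&"bind group layout A"));
}
```

With `FastHashMap` the full key is stored and compared with `Eq`, so a hash collision degrades into an extra probe rather than returning the wrong value.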
@@ -128,6 +128,7 @@ By @ErichDonGubler in [#6456](https://github.com/gfx-rs/wgpu/pull/6456), [#6148]
 - Check for device mismatches when beginning render and compute passes. By @ErichDonGubler in [#6497](https://github.com/gfx-rs/wgpu/pull/6497).
 - Lower `QUERY_SET_MAX_QUERIES` (and enforced limits) from 8192 to 4096 to match WebGPU spec. By @ErichDonGubler in [#6525](https://github.com/gfx-rs/wgpu/pull/6525).
 - Allow non-filterable float on texture bindings never used with samplers when using a derived bind group layout. By @ErichDonGubler in [#6531](https://github.com/gfx-rs/wgpu/pull/6531/).
+- Replace potentially unsound usage of `PreHashedMap` with `FastHashMap`. By @jamienicol in [#6541](https://github.com/gfx-rs/wgpu/pull/6541).

 #### Naga
@@ -32,7 +32,7 @@ use crate::{
     },
     validation::{self, validate_color_attachment_bytes_per_sample},
     weak_vec::WeakVec,
-    FastHashMap, LabelHelpers, PreHashedKey, PreHashedMap,
+    FastHashMap, LabelHelpers,
 };

 use arrayvec::ArrayVec;
@@ -2712,18 +2712,18 @@ impl Device {
             derived_group_layouts.pop();
         }

-        let mut unique_bind_group_layouts = PreHashedMap::default();
+        let mut unique_bind_group_layouts = FastHashMap::default();

         let bind_group_layouts = derived_group_layouts
             .into_iter()
             .map(|mut bgl_entry_map| {
                 bgl_entry_map.sort();
-                match unique_bind_group_layouts.entry(PreHashedKey::from_key(&bgl_entry_map)) {
+                match unique_bind_group_layouts.entry(bgl_entry_map) {
                     std::collections::hash_map::Entry::Occupied(v) => Ok(Arc::clone(v.get())),
                     std::collections::hash_map::Entry::Vacant(e) => {
                         match self.create_bind_group_layout(
                             &None,
-                            bgl_entry_map,
+                            e.key().clone(),
                             bgl::Origin::Derived,
                         ) {
                             Ok(bgl) => {
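The hunk above deduplicates derived bind group layouts. For context, here is a simplified sketch of that pattern after the change; `EntryMap`, `BindGroupLayout`, and `create_bind_group_layout` are hypothetical stand-ins, the `rustc_hash` crate is assumed, and only the map/entry handling mirrors the real code (error handling and the actual wgpu types are omitted).

```rust
use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::hash::BuildHasherDefault;
use std::sync::Arc;

// Mirrors wgpu-core's `FastHashMap` alias: a std HashMap with the FxHash hasher.
type FastHashMap<K, V> = HashMap<K, V, BuildHasherDefault<rustc_hash::FxHasher>>;

// Hypothetical stand-ins for the real types in this hunk.
#[derive(Clone, PartialEq, Eq, Hash)]
struct EntryMap(Vec<u32>); // plays the role of the sorted `bgl_entry_map`
struct BindGroupLayout;

fn create_bind_group_layout(_entries: EntryMap) -> Arc<BindGroupLayout> {
    Arc::new(BindGroupLayout)
}

// Deduplicate derived layouts: equal (sorted) entry maps share one Arc.
fn dedup(derived: Vec<EntryMap>) -> Vec<Arc<BindGroupLayout>> {
    let mut unique: FastHashMap<EntryMap, Arc<BindGroupLayout>> = FastHashMap::default();
    derived
        .into_iter()
        .map(|mut entries| {
            entries.0.sort(); // normalize so equal layouts hash and compare equal
            match unique.entry(entries) {
                Entry::Occupied(v) => Arc::clone(v.get()),
                Entry::Vacant(e) => {
                    // The key now lives in the map, so clone it back out for
                    // creation, as the patch does with `e.key().clone()`.
                    let bgl = create_bind_group_layout(e.key().clone());
                    e.insert(Arc::clone(&bgl));
                    bgl
                }
            }
        })
        .collect()
}
```

Because the sorted entry map itself is now the map key, the vacant arm has to clone it out of the map before creating the layout; that clone, plus hashing the full key, is the kind of extra work the commit message accepts as "good enough".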
@@ -12,75 +12,3 @@ pub type FastHashSet<K> =
 /// IndexMap using a fast, non-cryptographic hash algorithm.
 pub type FastIndexMap<K, V> =
     indexmap::IndexMap<K, V, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
-
-/// HashMap that uses pre-hashed keys and an identity hasher.
-///
-/// This is useful when you only need the key to lookup the value, and don't need to store the key,
-/// particularly when the key is large.
-pub type PreHashedMap<K, V> =
-    std::collections::HashMap<PreHashedKey<K>, V, std::hash::BuildHasherDefault<IdentityHasher>>;
-
-/// A pre-hashed key using FxHash which allows the hashing operation to be disconnected
-/// from the storage in the map.
-pub struct PreHashedKey<K>(u64, std::marker::PhantomData<fn() -> K>);
-
-impl<K> std::fmt::Debug for PreHashedKey<K> {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        f.debug_tuple("PreHashedKey").field(&self.0).finish()
-    }
-}
-
-impl<K> Copy for PreHashedKey<K> {}
-
-impl<K> Clone for PreHashedKey<K> {
-    fn clone(&self) -> Self {
-        *self
-    }
-}
-
-impl<K> PartialEq for PreHashedKey<K> {
-    fn eq(&self, other: &Self) -> bool {
-        self.0 == other.0
-    }
-}
-
-impl<K> Eq for PreHashedKey<K> {}
-
-impl<K> std::hash::Hash for PreHashedKey<K> {
-    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
-        self.0.hash(state);
-    }
-}
-
-impl<K: std::hash::Hash> PreHashedKey<K> {
-    pub fn from_key(key: &K) -> Self {
-        use std::hash::Hasher;
-
-        let mut hasher = rustc_hash::FxHasher::default();
-        key.hash(&mut hasher);
-        Self(hasher.finish(), std::marker::PhantomData)
-    }
-}
-
-/// A hasher which does nothing. Useful for when you want to use a map with pre-hashed keys.
-///
-/// When hashing with this hasher, you must provide exactly 8 bytes. Multiple calls to `write`
-/// will overwrite the previous value.
-#[derive(Default)]
-pub struct IdentityHasher {
-    hash: u64,
-}
-
-impl std::hash::Hasher for IdentityHasher {
-    fn write(&mut self, bytes: &[u8]) {
-        self.hash = u64::from_ne_bytes(
-            bytes
-                .try_into()
-                .expect("identity hasher must be given exactly 8 bytes"),
-        );
-    }
-
-    fn finish(&self) -> u64 {
-        self.hash
-    }
-}
@@ -7,16 +7,13 @@ use std::{
 use once_cell::sync::OnceCell;

 use crate::lock::{rank, Mutex};
-use crate::{PreHashedKey, PreHashedMap};
+use crate::FastHashMap;

 type SlotInner<V> = Weak<V>;
 type ResourcePoolSlot<V> = Arc<OnceCell<SlotInner<V>>>;

 pub struct ResourcePool<K, V> {
-    // We use a pre-hashed map as we never actually need to read the keys.
-    //
-    // This additionally allows us to not need to hash more than once on get_or_init.
-    inner: Mutex<PreHashedMap<K, ResourcePoolSlot<V>>>,
+    inner: Mutex<FastHashMap<K, ResourcePoolSlot<V>>>,
 }

 impl<K: Clone + Eq + Hash, V> ResourcePool<K, V> {
@@ -35,9 +32,6 @@ impl<K: Clone + Eq + Hash, V> ResourcePool<K, V> {
     where
         F: FnOnce(K) -> Result<Arc<V>, E>,
     {
-        // Hash the key outside of the lock.
-        let hashed_key = PreHashedKey::from_key(&key);
-
         // We can't prove at compile time that these will only ever be consumed once,
         // so we need to do the check at runtime.
         let mut key = Some(key);
@@ -46,7 +40,7 @@ impl<K: Clone + Eq + Hash, V> ResourcePool<K, V> {
         'race: loop {
             let mut map_guard = self.inner.lock();

-            let entry = match map_guard.entry(hashed_key) {
+            let entry = match map_guard.entry(key.clone().unwrap()) {
                 // An entry exists for this resource.
                 //
                 // We know that either:
@@ -86,9 +80,11 @@ impl<K: Clone + Eq + Hash, V> ResourcePool<K, V> {
                 return Ok(strong);
             }

-            // The resource is in the process of being dropped, because upgrade failed. The entry still exists in the map, but it points to nothing.
+            // The resource is in the process of being dropped, because upgrade failed.
+            // The entry still exists in the map, but it points to nothing.
             //
-            // We're in a race with the drop implementation of the resource, so lets just go around again. When we go around again:
+            // We're in a race with the drop implementation of the resource,
+            // so lets just go around again. When we go around again:
             // - If the entry exists, we might need to go around a few more times.
             // - If the entry doesn't exist, we'll create a new one.
             continue 'race;
@@ -101,13 +97,11 @@ impl<K: Clone + Eq + Hash, V> ResourcePool<K, V> {
     ///
     /// [`BindGroupLayout`]: crate::binding_model::BindGroupLayout
     pub fn remove(&self, key: &K) {
-        let hashed_key = PreHashedKey::from_key(key);
-
         let mut map_guard = self.inner.lock();

         // Weak::upgrade will be failing long before this code is called. All threads trying to access the resource will be spinning,
         // waiting for the entry to be removed. It is safe to remove the entry from the map.
-        map_guard.remove(&hashed_key);
+        map_guard.remove(key);
     }
 }
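Putting the pool hunks together, here is a condensed, standalone sketch of the get-or-init/remove flow after this patch. It is a sketch under simplifying assumptions: `std::sync::Mutex` stands in for wgpu-core's ranked lock, error plumbing is dropped, and the `once_cell` and `rustc_hash` crates are assumed as dependencies.

```rust
use std::collections::HashMap;
use std::hash::{BuildHasherDefault, Hash};
use std::sync::{Arc, Mutex, Weak};

use once_cell::sync::OnceCell;

// Mirrors wgpu-core's alias: a std HashMap with the FxHash hasher.
type FastHashMap<K, V> = HashMap<K, V, BuildHasherDefault<rustc_hash::FxHasher>>;
type Slot<V> = Arc<OnceCell<Weak<V>>>;

pub struct ResourcePool<K, V> {
    // Keyed on the full key `K` now, not a pre-hashed surrogate.
    inner: Mutex<FastHashMap<K, Slot<V>>>,
}

impl<K: Clone + Eq + Hash, V> ResourcePool<K, V> {
    pub fn get_or_init<F>(&self, key: K, constructor: F) -> Arc<V>
    where
        F: FnOnce(K) -> Arc<V>,
    {
        let mut constructor = Some(constructor);
        loop {
            // Grab (or create) the slot for this key, then release the map lock.
            // `entry(key.clone())` hashes and may clone the whole key; the old
            // code looked up an 8-byte pre-hashed key instead.
            let slot = {
                let mut guard = self.inner.lock().unwrap();
                Arc::clone(guard.entry(key.clone()).or_default())
            };

            // Only one thread runs the constructor; keep its Arc alive via `strong`.
            let mut strong = None;
            let weak = slot.get_or_init(|| {
                let ctor = constructor.take().expect("constructor reused");
                let s = ctor(key.clone());
                let w = Arc::downgrade(&s);
                strong = Some(s);
                w
            });

            if let Some(strong) = strong {
                return strong; // we initialized the slot ourselves
            }
            if let Some(strong) = weak.upgrade() {
                return strong; // someone else initialized it and it is still alive
            }
            // Upgrade failed: the resource is being dropped concurrently and its
            // drop impl will call `remove`, so go around and try again.
        }
    }

    pub fn remove(&self, key: &K) {
        self.inner.lock().unwrap().remove(key);
    }
}
```

The trade-off the patch accepts is visible in `entry(key.clone())`: the full key is hashed (and possibly cloned) on each lookup, where the old code hashed once into an 8-byte `PreHashedKey`, but equal hashes for unequal keys can no longer conflate two resources.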