//! This module defines various operations and types that are implemented in
//! one way for the serial compiler, and another way for the parallel compiler.
//!
//! Operations
//! ----------
//! The parallel versions of operations use Rayon to execute code in parallel,
//! while the serial versions degenerate straightforwardly to serial execution.
//! The operations include `join`, `parallel`, `par_iter`, and `par_for_each`.
//!
//! Types
//! -----
//! The parallel versions of types provide various kinds of synchronization,
//! while the serial compiler versions do not.
//!
//! The following table shows how the types are implemented internally. Except
//! where noted otherwise, the type in column one is defined as a
//! newtype around the type from column two or three.
//!
//! | Type                    | Serial version      | Parallel version                |
//! | ----------------------- | ------------------- | ------------------------------- |
//! | `Lrc<T>`                | `rc::Rc<T>`         | `sync::Arc<T>`                  |
//! | `Weak<T>`               | `rc::Weak<T>`       | `sync::Weak<T>`                 |
//! | `LRef<'a, T>` [^2]      | `&'a mut T`         | `&'a T`                         |
//! |                         |                     |                                 |
//! | `AtomicBool`            | `Cell<bool>`        | `atomic::AtomicBool`            |
//! | `AtomicU32`             | `Cell<u32>`         | `atomic::AtomicU32`             |
//! | `AtomicU64`             | `Cell<u64>`         | `atomic::AtomicU64`             |
//! | `AtomicUsize`           | `Cell<usize>`       | `atomic::AtomicUsize`           |
//! |                         |                     |                                 |
//! | `Lock<T>`               | `RefCell<T>`        | `RefCell<T>` or                 |
//! |                         |                     | `parking_lot::Mutex<T>`         |
//! | `RwLock<T>`             | `RefCell<T>`        | `parking_lot::RwLock<T>`        |
//! | `MTLock<T>` [^1]        | `T`                 | `Lock<T>`                       |
//! | `MTLockRef<'a, T>` [^2] | `&'a mut MTLock<T>` | `&'a MTLock<T>`                 |
//! |                         |                     |                                 |
//! | `ParallelIterator`      | `Iterator`          | `rayon::iter::ParallelIterator` |
//!
//! [^1]: `MTLock` is similar to `Lock`, but the serial version avoids the cost
//! of a `RefCell`. This is appropriate when interior mutability is not
//! required.
//!
//! [^2]: `LRef` and `MTLockRef` are type aliases.
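//!
//! For example, shared state can be reference-counted with `Lrc` and guarded
//! by a `Lock`, while `join` runs two closures (in parallel when the parallel
//! compiler is enabled). This is a minimal illustrative sketch, not code from
//! the compiler:
//!
//! ```ignore (illustrative)
//! let shared = Lrc::new(Lock::new(Vec::new()));
//! let (a, b) = join(|| 1, || 2);
//! shared.lock().push(a);
//! shared.lock().push(b);
//! ```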

use std::collections::HashMap;
use std::hash::{BuildHasher, Hash};

pub use crate::marker::*;

mod lock;
#[doc(no_inline)]
pub use lock::{Lock, LockGuard, Mode};

mod worker_local;
pub use worker_local::{Registry, WorkerLocal};

mod parallel;
pub use parallel::{join, par_for_each_in, par_map, parallel_guard, scope, try_par_for_each_in};

mod vec;
pub use vec::{AppendOnlyIndexVec, AppendOnlyVec};

mod freeze;
pub use freeze::{FreezeLock, FreezeReadGuard, FreezeWriteGuard};

mod mode {
    use std::sync::atomic::{AtomicU8, Ordering};

    const UNINITIALIZED: u8 = 0;
    const DYN_NOT_THREAD_SAFE: u8 = 1;
    const DYN_THREAD_SAFE: u8 = 2;

    static DYN_THREAD_SAFE_MODE: AtomicU8 = AtomicU8::new(UNINITIALIZED);

    /// Whether thread safety is enabled (due to running under multiple threads).
    #[inline]
    pub fn is_dyn_thread_safe() -> bool {
        match DYN_THREAD_SAFE_MODE.load(Ordering::Relaxed) {
            DYN_NOT_THREAD_SAFE => false,
            DYN_THREAD_SAFE => true,
            _ => panic!("uninitialized dyn_thread_safe mode!"),
        }
    }

    /// Whether thread safety might be enabled.
    #[inline]
    pub fn might_be_dyn_thread_safe() -> bool {
        DYN_THREAD_SAFE_MODE.load(Ordering::Relaxed) != DYN_NOT_THREAD_SAFE
    }

    /// Only set by the `-Z threads` compile option.
    pub fn set_dyn_thread_safe_mode(mode: bool) {
        let set: u8 = if mode { DYN_THREAD_SAFE } else { DYN_NOT_THREAD_SAFE };
        let previous = DYN_THREAD_SAFE_MODE.compare_exchange(
            UNINITIALIZED,
            set,
            Ordering::Relaxed,
            Ordering::Relaxed,
        );

        // Check that the mode was either uninitialized or was already set to the requested mode.
        assert!(previous.is_ok() || previous == Err(set));
    }
}
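
// A minimal sketch of how this switch is meant to be used (illustrative;
// the real call site lives in the compiler driver, not in this module):
//
//     // At startup, before any of the synchronized types are created:
//     set_dyn_thread_safe_mode(true); // e.g. because `-Z threads=8` was passed
//
//     // Afterwards, primitives can branch on the mode:
//     if is_dyn_thread_safe() {
//         // thread-safe representation (e.g. `parking_lot::Mutex`)
//     } else {
//         // cheaper single-threaded representation (e.g. `RefCell`)
//     }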

// FIXME(parallel_compiler): Get rid of these aliases across the compiler.

pub use std::marker::{Send, Sync};
pub use std::sync::atomic::{AtomicBool, AtomicU32, AtomicUsize};
// Use the native `AtomicU64` where the target has 64-bit atomics, and the
// portable fallback otherwise.
#[cfg(target_has_atomic = "64")]
pub use std::sync::atomic::AtomicU64;
#[cfg(not(target_has_atomic = "64"))]
pub use portable_atomic::AtomicU64;
pub use std::sync::{Arc as Lrc, OnceLock, Weak};

pub use mode::{is_dyn_thread_safe, set_dyn_thread_safe_mode};
pub use parking_lot::{
    MappedMutexGuard as MappedLockGuard, MappedRwLockReadGuard as MappedReadGuard,
    MappedRwLockWriteGuard as MappedWriteGuard, RwLockReadGuard as ReadGuard,
    RwLockWriteGuard as WriteGuard,
};

pub type LRef<'a, T> = &'a T;
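
// `Lrc` is simply `std::sync::Arc` under a historical name, so it is used
// like any other `Arc` (illustrative):
//
//     let shared: Lrc<String> = Lrc::new(String::from("shared"));
//     let handle = Lrc::clone(&shared);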

#[derive(Debug, Default)]
pub struct MTLock<T>(Lock<T>);

impl<T> MTLock<T> {
    #[inline(always)]
    pub fn new(inner: T) -> Self {
        MTLock(Lock::new(inner))
    }

    #[inline(always)]
    pub fn into_inner(self) -> T {
        self.0.into_inner()
    }

    #[inline(always)]
    pub fn get_mut(&mut self) -> &mut T {
        self.0.get_mut()
    }

    #[inline(always)]
    pub fn lock(&self) -> LockGuard<'_, T> {
        self.0.lock()
    }

    #[inline(always)]
    pub fn lock_mut(&self) -> LockGuard<'_, T> {
        self.lock()
    }
}
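
// A minimal usage sketch for `MTLock` (illustrative; `stats` is a
// hypothetical name):
//
//     let stats = MTLock::new(Vec::new());
//     stats.lock().push(1usize);
//     assert_eq!(stats.into_inner(), vec![1]);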

use parking_lot::RwLock as InnerRwLock;

/// When `true`, this makes the locks below panic if they are already held,
/// instead of blocking. It is only useful when running in a single thread.
const ERROR_CHECKING: bool = false;

pub type MTLockRef<'a, T> = LRef<'a, MTLock<T>>;

/// Aligns the wrapped value to a 64-byte boundary, matching the cache line
/// size on most common architectures, so that adjacent values do not share a
/// cache line and cause false sharing between threads.
#[derive(Default)]
#[repr(align(64))]
pub struct CacheAligned<T>(pub T);
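
// Illustrative use of `CacheAligned`: giving each shard of a sharded table
// its own cache line so concurrent writers do not contend (hypothetical
// struct, not part of this module):
//
//     struct Sharded {
//         shards: [CacheAligned<Lock<u64>>; 16],
//     }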

pub trait HashMapExt<K, V> {
    /// Same as `HashMap::insert`, but it may panic if there's already an
    /// entry for `key` with a value not equal to `value`.
    fn insert_same(&mut self, key: K, value: V);
}

impl<K: Eq + Hash, V: Eq, S: BuildHasher> HashMapExt<K, V> for HashMap<K, V, S> {
    fn insert_same(&mut self, key: K, value: V) {
        self.entry(key).and_modify(|old| assert!(*old == value)).or_insert(value);
    }
}
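
// A minimal sketch of `insert_same` (illustrative):
//
//     let mut map = HashMap::new();
//     map.insert_same("key", 1); // inserts the entry
//     map.insert_same("key", 1); // fine: the value matches
//     // map.insert_same("key", 2); // would panic: conflicting value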

#[derive(Debug, Default)]
pub struct RwLock<T>(InnerRwLock<T>);

impl<T> RwLock<T> {
    #[inline(always)]
    pub fn new(inner: T) -> Self {
        RwLock(InnerRwLock::new(inner))
    }

    #[inline(always)]
    pub fn into_inner(self) -> T {
        self.0.into_inner()
    }

    #[inline(always)]
    pub fn get_mut(&mut self) -> &mut T {
        self.0.get_mut()
    }

    #[inline(always)]
    pub fn read(&self) -> ReadGuard<'_, T> {
        if ERROR_CHECKING {
            self.0.try_read().expect("lock was already held")
        } else {
            self.0.read()
        }
    }

    #[inline(always)]
    #[track_caller]
    pub fn with_read_lock<F: FnOnce(&T) -> R, R>(&self, f: F) -> R {
        f(&*self.read())
    }

    #[inline(always)]
    pub fn try_write(&self) -> Result<WriteGuard<'_, T>, ()> {
        self.0.try_write().ok_or(())
    }

    #[inline(always)]
    pub fn write(&self) -> WriteGuard<'_, T> {
        if ERROR_CHECKING {
            self.0.try_write().expect("lock was already held")
        } else {
            self.0.write()
        }
    }

    #[inline(always)]
    #[track_caller]
    pub fn with_write_lock<F: FnOnce(&mut T) -> R, R>(&self, f: F) -> R {
        f(&mut *self.write())
    }

    #[inline(always)]
    #[track_caller]
    pub fn borrow(&self) -> ReadGuard<'_, T> {
        self.read()
    }

    #[inline(always)]
    #[track_caller]
    pub fn borrow_mut(&self) -> WriteGuard<'_, T> {
        self.write()
    }

    /// Returns a reference to the data, leaking the read guard so that the
    /// lock stays read-locked for the rest of the program.
    #[inline(always)]
    pub fn leak(&self) -> &T {
        let guard = self.read();
        let ret = unsafe { &*(&raw const *guard) };
        std::mem::forget(guard);
        ret
    }
}
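
// A minimal usage sketch for `RwLock` (illustrative; `cache` is a
// hypothetical name):
//
//     let cache = RwLock::new(Vec::new());
//     cache.with_write_lock(|v| v.push(7u32));
//     assert_eq!(cache.with_read_lock(|v| v.len()), 1);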

// FIXME: Probably a bad idea
impl<T: Clone> Clone for RwLock<T> {
    #[inline]
    fn clone(&self) -> Self {
        RwLock::new(self.borrow().clone())
    }
}