introduce DynSend and DynSync auto trait

commit b9746ce039 (parent 963e5c0eff)
@@ -3299,6 +3299,7 @@ dependencies = [
 "rustc-hash",
 "rustc-rayon",
 "rustc-rayon-core",
 "rustc_arena",
 "rustc_graphviz",
 "rustc_index",
 "rustc_macros",
@@ -48,14 +48,15 @@ pub enum TokenTree {
 Delimited(DelimSpan, Delimiter, TokenStream),
 }

- // Ensure all fields of `TokenTree` is `Send` and `Sync`.
+ // Ensure all fields of `TokenTree` is `DynSend` and `DynSync`.
 #[cfg(parallel_compiler)]
 fn _dummy()
 where
-     Token: Send + Sync,
-     DelimSpan: Send + Sync,
-     Delimiter: Send + Sync,
-     TokenStream: Send + Sync,
+     Token: sync::DynSend + sync::DynSync,
+     Spacing: sync::DynSend + sync::DynSync,
+     DelimSpan: sync::DynSend + sync::DynSync,
+     Delimiter: sync::DynSend + sync::DynSync,
+     TokenStream: sync::DynSend + sync::DynSync,
 {
 }

@@ -118,7 +119,7 @@ where
 }
 }

- pub trait ToAttrTokenStream: sync::Send + sync::Sync {
+ pub trait ToAttrTokenStream: sync::DynSend + sync::DynSync {
 fn to_attr_token_stream(&self) -> AttrTokenStream;
 }
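
The `_dummy` function above is never called; its `where` clause simply forces the compiler to prove the listed bounds, so the build fails as soon as a field of `TokenTree` stops being thread-safe. A minimal standalone sketch of the same trick, using plain `Send`/`Sync` and stand-in types rather than rustc's own:

struct Token;
struct DelimSpan;

// Never called; exists only so the bounds below are checked at compile time.
#[allow(dead_code)]
fn _assert_thread_safe()
where
    Token: Send + Sync,
    DelimSpan: Send + Sync,
{
}

fn main() {}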
@@ -17,10 +17,7 @@ use rustc_ast::expand::allocator::AllocatorKind;
 use rustc_attr as attr;
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_data_structures::profiling::{get_resident_set_size, print_time_passes_entry};
- use rustc_data_structures::sync::par_iter;
- #[cfg(parallel_compiler)]
- use rustc_data_structures::sync::ParallelIterator;
+ use rustc_data_structures::sync::par_map;
 use rustc_hir as hir;
 use rustc_hir::def_id::{DefId, LOCAL_CRATE};
 use rustc_hir::lang_items::LangItem;
@@ -689,7 +686,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
 // This likely is a temporary measure. Once we don't have to support the
 // non-parallel compiler anymore, we can compile CGUs end-to-end in
 // parallel and get rid of the complicated scheduling logic.
- let mut pre_compiled_cgus = if cfg!(parallel_compiler) {
+ let mut pre_compiled_cgus = if tcx.sess.threads() > 1 {
 tcx.sess.time("compile_first_CGU_batch", || {
 // Try to find one CGU to compile per thread.
 let cgus: Vec<_> = cgu_reuse
@@ -702,12 +699,10 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
 // Compile the found CGUs in parallel.
 let start_time = Instant::now();

- let pre_compiled_cgus = par_iter(cgus)
-     .map(|(i, _)| {
-         let module = backend.compile_codegen_unit(tcx, codegen_units[i].name());
-         (i, module)
-     })
-     .collect();
+ let pre_compiled_cgus = par_map(cgus, |(i, _)| {
+     let module = backend.compile_codegen_unit(tcx, codegen_units[i].name());
+     (i, module)
+ });

 total_codegen_time += start_time.elapsed();
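
`par_map` is the helper this commit adds to `rustc_data_structures::sync` (shown further down); it folds the explicit map-and-collect into one call. A hedged, purely sequential stand-in with the same call shape, using a hypothetical `compile_one` closure in place of `compile_codegen_unit`:

// Sequential stand-in; the real helper dispatches to rayon when parallel mode is active.
fn par_map<T, R, C: FromIterator<R>>(items: Vec<T>, f: impl Fn(T) -> R) -> C {
    items.into_iter().map(f).collect()
}

fn main() {
    let compile_one = |i: usize| (i, format!("module-{i}"));
    let modules: Vec<(usize, String)> = par_map(vec![0, 1, 2], compile_one);
    assert_eq!(modules.len(), 3);
}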
@@ -22,6 +22,7 @@ use rustc_target::spec::Target;

 pub use rustc_data_structures::sync::MetadataRef;

+ use rustc_data_structures::sync::{DynSend, DynSync};
 use std::any::Any;

 pub trait BackendTypes {
@@ -117,7 +118,9 @@ pub trait CodegenBackend {
 ) -> Result<(), ErrorGuaranteed>;
 }

- pub trait ExtraBackendMethods: CodegenBackend + WriteBackendMethods + Sized + Send + Sync {
+ pub trait ExtraBackendMethods:
+     CodegenBackend + WriteBackendMethods + Sized + Send + Sync + DynSend + DynSync
+ {
 fn codegen_allocator<'tcx>(
 &self,
 tcx: TyCtxt<'tcx>,

@@ -16,6 +16,7 @@ libc = "0.2"
 measureme = "10.0.0"
 rustc-rayon-core = { version = "0.5.0", optional = true }
 rustc-rayon = { version = "0.5.0", optional = true }
 rustc_arena = { path = "../rustc_arena" }
 rustc_graphviz = { path = "../rustc_graphviz" }
 rustc-hash = "1.1.0"
 rustc_index = { path = "../rustc_index", package = "rustc_index" }
@@ -26,9 +26,11 @@
 #![feature(test)]
 #![feature(thread_id_value)]
 #![feature(vec_into_raw_parts)]
 #![feature(allocator_api)]
 #![feature(get_mut_unchecked)]
 #![feature(lint_reasons)]
 #![feature(unwrap_infallible)]
 #![feature(const_mut_refs)]
 #![feature(const_trait_impl)]
 #![feature(strict_provenance)]
 #![feature(ptr_alignment_type)]
 #![feature(macro_metavar_expr)]
@@ -77,6 +79,7 @@ pub mod sorted_map;
 pub mod stable_hasher;
 mod atomic_ref;
 pub mod fingerprint;
+ pub mod marker;
 pub mod profiling;
 pub mod sharded;
 pub mod stack;
compiler/rustc_data_structures/src/marker.rs (new file, 268 lines)
@@ -0,0 +1,268 @@
cfg_if!(
    if #[cfg(not(parallel_compiler))] {
        pub auto trait DynSend {}
        pub auto trait DynSync {}

        impl<T> DynSend for T {}
        impl<T> DynSync for T {}
    } else {
        #[rustc_on_unimplemented(
            message = "`{Self}` doesn't implement `DynSend`. \
            Add it to `rustc_data_structures::marker` or use `IntoDyn` if it's already `Send`",
            label = "`{Self}` doesn't implement `DynSend`. \
            Add it to `rustc_data_structures::marker` or use `IntoDyn` if it's already `Send`"
        )]
        // Ensure data structures is `Send` if `sync::active()` is true.
        // `sync::active()` should be checked before using these data structures.
        // Note: Ensure that the data structure **will not break**
        // thread safety after being created.
        //
        // `sync::active()` should be checked when downcasting these data structures
        // to `Send` via `FromDyn`.
        pub unsafe auto trait DynSend {}

        #[rustc_on_unimplemented(
            message = "`{Self}` doesn't implement `DynSync`. \
            Add it to `rustc_data_structures::marker` or use `IntoDyn` if it's already `Sync`",
            label = "`{Self}` doesn't implement `DynSync`. \
            Add it to `rustc_data_structures::marker` or use `IntoDyn` if it's already `Sync`"
        )]
        // Ensure data structures is `Sync` if `sync::active()` is true.
        // Note: Ensure that the data structure **will not break**
        // thread safety after being checked.
        //
        // `sync::active()` should be checked when downcasting these data structures
        // to `Send` via `FromDyn`.
        pub unsafe auto trait DynSync {}

        // Same with `Sync` and `Send`.
        unsafe impl<T: DynSync + ?Sized> DynSend for &T {}

        macro_rules! impls_dyn_send_neg {
            ($([$t1: ty $(where $($generics1: tt)*)?])*) => {
                $(impl$(<$($generics1)*>)? !DynSend for $t1 {})*
            };
        }

        // Consistent with `std`
        impls_dyn_send_neg!(
            [std::env::Args]
            [std::env::ArgsOs]
            [*const T where T: ?Sized]
            [*mut T where T: ?Sized]
            [std::ptr::NonNull<T> where T: ?Sized]
            [std::rc::Rc<T> where T: ?Sized]
            [std::rc::Weak<T> where T: ?Sized]
            [std::sync::MutexGuard<'_, T> where T: ?Sized]
            [std::sync::RwLockReadGuard<'_, T> where T: ?Sized]
            [std::sync::RwLockWriteGuard<'_, T> where T: ?Sized]
            [std::io::StdoutLock<'_>]
            [std::io::StderrLock<'_>]
        );
        cfg_if!(
            // Consistent with `std`
            // `os_imp::Env` is `!Send` in these platforms
            if #[cfg(any(unix, target_os = "hermit", target_os = "wasi", target_os = "solid_asp3"))] {
                impl !DynSend for std::env::VarsOs {}
            }
        );

        macro_rules! already_send {
            ($([$ty: ty])*) => {
                $(unsafe impl DynSend for $ty where $ty: Send {})*
            };
        }

        // These structures are already `Send`.
        already_send!(
            [std::backtrace::Backtrace]
            [std::io::Stdout]
            [std::io::Stderr]
            [std::io::Error]
            [std::fs::File]
            [rustc_arena::DroplessArena]
            [crate::memmap::Mmap]
            [crate::profiling::SelfProfiler]
        );

        macro_rules! impl_dyn_send {
            ($($($attr: meta)* [$ty: ty where $($generics2: tt)*])*) => {
                $(unsafe impl<$($generics2)*> DynSend for $ty {})*
            };
        }

        impl_dyn_send!(
            [std::sync::atomic::AtomicPtr<T> where T]
            [std::sync::Mutex<T> where T: ?Sized+ DynSend]
            [std::sync::mpsc::Sender<T> where T: DynSend]
            [std::sync::Arc<T> where T: ?Sized + DynSync + DynSend]
            [std::sync::LazyLock<T, F> where T: DynSend, F: DynSend]
            [std::collections::HashSet<K, S> where K: DynSend, S: DynSend]
            [std::collections::HashMap<K, V, S> where K: DynSend, V: DynSend, S: DynSend]
            [std::collections::BTreeMap<K, V, A> where K: DynSend, V: DynSend, A: std::alloc::Allocator + Clone + DynSend]
            [Vec<T, A> where T: DynSend, A: std::alloc::Allocator + DynSend]
            [Box<T, A> where T: ?Sized + DynSend, A: std::alloc::Allocator + DynSend]
            [crate::sync::Lock<T> where T: DynSend]
            [crate::sync::RwLock<T> where T: DynSend]
            [rustc_arena::TypedArena<T> where T: DynSend]
            [indexmap::IndexSet<V, S> where V: DynSend, S: DynSend]
            [indexmap::IndexMap<K, V, S> where K: DynSend, V: DynSend, S: DynSend]
            [thin_vec::ThinVec<T> where T: DynSend]
            [smallvec::SmallVec<A> where A: smallvec::Array + DynSend]

            // We use `Send` here to omit some extra code, since they are only
            // used in `Send` situations now.
            [crate::owning_ref::OwningRef<O, T> where O: Send, T: ?Sized + Send]
            [crate::owning_ref::OwningRefMut<O, T> where O: Send, T: ?Sized + Send]
        );

        macro_rules! impls_dyn_sync_neg {
            ($([$t1: ty $(where $($generics1: tt)*)?])*) => {
                $(impl$(<$($generics1)*>)? !DynSync for $t1 {})*
            };
        }

        // Consistent with `std`
        impls_dyn_sync_neg!(
            [std::env::Args]
            [std::env::ArgsOs]
            [*const T where T: ?Sized]
            [*mut T where T: ?Sized]
            [std::cell::Cell<T> where T: ?Sized]
            [std::cell::RefCell<T> where T: ?Sized]
            [std::cell::UnsafeCell<T> where T: ?Sized]
            [std::ptr::NonNull<T> where T: ?Sized]
            [std::rc::Rc<T> where T: ?Sized]
            [std::rc::Weak<T> where T: ?Sized]
            [std::cell::OnceCell<T> where T]
            [std::sync::mpsc::Receiver<T> where T]
            [std::sync::mpsc::Sender<T> where T]
        );
        cfg_if!(
            // Consistent with `std`
            // `os_imp::Env` is `!Sync` in these platforms
            if #[cfg(any(unix, target_os = "hermit", target_os = "wasi", target_os = "solid_asp3"))] {
                impl !DynSync for std::env::VarsOs {}
            }
        );

        macro_rules! already_sync {
            ($([$ty: ty])*) => {
                $(unsafe impl DynSync for $ty where $ty: Sync {})*
            };
        }

        // These structures are already `Sync`.
        already_sync!(
            [std::sync::atomic::AtomicBool]
            [std::sync::atomic::AtomicUsize]
            [std::sync::atomic::AtomicU8]
            [std::sync::atomic::AtomicU32]
            [std::sync::atomic::AtomicU64]
            [std::backtrace::Backtrace]
            [std::io::Error]
            [std::fs::File]
            [jobserver_crate::Client]
            [crate::memmap::Mmap]
            [crate::profiling::SelfProfiler]
        );

        macro_rules! impl_dyn_sync {
            ($($($attr: meta)* [$ty: ty where $($generics2: tt)*])*) => {
                $(unsafe impl<$($generics2)*> DynSync for $ty {})*
            };
        }

        impl_dyn_sync!(
            [std::sync::atomic::AtomicPtr<T> where T]
            [std::sync::OnceLock<T> where T: DynSend + DynSync]
            [std::sync::Mutex<T> where T: ?Sized + DynSend]
            [std::sync::Arc<T> where T: ?Sized + DynSync + DynSend]
            [std::sync::LazyLock<T, F> where T: DynSend + DynSync, F: DynSend]
            [std::collections::HashSet<K, S> where K: DynSync, S: DynSync]
            [std::collections::HashMap<K, V, S> where K: DynSync, V: DynSync, S: DynSync]
            [std::collections::BTreeMap<K, V, A> where K: DynSync, V: DynSync, A: std::alloc::Allocator + Clone + DynSync]
            [Vec<T, A> where T: DynSync, A: std::alloc::Allocator + DynSync]
            [Box<T, A> where T: ?Sized + DynSync, A: std::alloc::Allocator + DynSync]
            [crate::sync::Lock<T> where T: DynSend]
            [crate::sync::RwLock<T> where T: DynSend + DynSync]
            [crate::sync::OneThread<T> where T]
            [crate::sync::WorkerLocal<T> where T: DynSend]
            [crate::intern::Interned<'a, T> where 'a, T: DynSync]
            [parking_lot::lock_api::Mutex<R, T> where R: DynSync, T: ?Sized + DynSend]
            [parking_lot::lock_api::RwLock<R, T> where R: DynSync, T: ?Sized + DynSend + DynSync]
            [indexmap::IndexSet<V, S> where V: DynSync, S: DynSync]
            [indexmap::IndexMap<K, V, S> where K: DynSync, V: DynSync, S: DynSync]
            [smallvec::SmallVec<A> where A: smallvec::Array + DynSync]
            [thin_vec::ThinVec<T> where T: DynSync]

            // We use `Sync` here to omit some extra code, since they are only
            // used in `Sync` situations now.
            [crate::owning_ref::OwningRef<O, T> where O: Sync, T: ?Sized + Sync]
            [crate::owning_ref::OwningRefMut<O, T> where O: Sync, T: ?Sized + Sync]
        );
    }
);

pub fn assert_dyn_sync<T: ?Sized + DynSync>() {}
pub fn assert_dyn_send<T: ?Sized + DynSend>() {}
pub fn assert_dyn_send_val<T: ?Sized + DynSend>(_t: &T) {}
pub fn assert_dyn_send_sync_val<T: ?Sized + DynSync + DynSend>(_t: &T) {}

#[derive(Copy, Clone)]
pub struct FromDyn<T>(T);

impl<T> FromDyn<T> {
    // Check `sync::active()` when creating this structure
    // and downcasting to `Send`. So we can ensure it is
    // thread-safe.
    #[inline(always)]
    pub fn from(val: T) -> Self {
        #[cfg(parallel_compiler)]
        assert!(crate::sync::active());
        FromDyn(val)
    }

    #[inline(always)]
    pub fn into_inner(self) -> T {
        self.0
    }
}

// `FromDyn` is `Send` if `T` is `DynSend`, since it check when created.
#[cfg(parallel_compiler)]
unsafe impl<T: DynSend> Send for FromDyn<T> {}

// `FromDyn` is `Sync` if `T` is `DynSync`, since it check when created.
#[cfg(parallel_compiler)]
unsafe impl<T: DynSync> Sync for FromDyn<T> {}

impl<T> const std::ops::Deref for FromDyn<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

#[derive(Copy, Clone)]
pub struct IntoDyn<T: ?Sized>(pub T);

#[cfg(parallel_compiler)]
unsafe impl<T: ?Sized + Send> DynSend for IntoDyn<T> {}
#[cfg(parallel_compiler)]
unsafe impl<T: ?Sized + Sync> DynSync for IntoDyn<T> {}

impl<T> const std::ops::Deref for IntoDyn<T> {
    type Target = T;

    fn deref(&self) -> &T {
        &self.0
    }
}

impl<T> const std::ops::DerefMut for IntoDyn<T> {
    fn deref_mut(&mut self) -> &mut T {
        &mut self.0
    }
}
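
A hedged sketch of the core idea of this new file, outside rustc (nightly only, since auto traits and negative impls are unstable); `Diagnostics` is a stand-in type, not something from the compiler:

#![feature(auto_traits, negative_impls)]

// "Dynamically Send": every type opts in automatically unless it (or one of
// its fields) is explicitly opted out, mirroring the parallel_compiler branch
// of marker.rs above.
pub unsafe auto trait DynSend {}

impl<T: ?Sized> !DynSend for std::rc::Rc<T> {}

// A stand-in type that is not thread-safe only because it holds an Rc.
#[allow(dead_code)]
struct Diagnostics {
    shared: std::rc::Rc<String>,
}

fn assert_dyn_send<T: ?Sized + DynSend>() {}

fn main() {
    assert_dyn_send::<Vec<u8>>();
    // assert_dyn_send::<Diagnostics>(); // would fail: Rc<String> is !DynSend
}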
@@ -39,6 +39,7 @@
 //!
 //! [^2] `MTLockRef` is a typedef.

+ pub use crate::marker::*;
 use crate::owned_slice::OwnedSlice;
 use std::collections::HashMap;
 use std::hash::{BuildHasher, Hash};
@@ -55,6 +56,37 @@ pub use vec::{AppendOnlyIndexVec, AppendOnlyVec};

 mod vec;

+ mod mode {
+     use super::Ordering;
+     use std::sync::atomic::AtomicU8;
+
+     const UNINITIALIZED: u8 = 0;
+     const INACTIVE: u8 = 1;
+     const ACTIVE: u8 = 2;
+
+     static MODE: AtomicU8 = AtomicU8::new(UNINITIALIZED);
+
+     #[inline]
+     pub fn active() -> bool {
+         match MODE.load(Ordering::Relaxed) {
+             INACTIVE => false,
+             ACTIVE => true,
+             _ => panic!("uninitialized parallel mode!"),
+         }
+     }
+
+     // Only set by the `-Z threads` compile option
+     pub fn set(parallel: bool) {
+         let set: u8 = if parallel { ACTIVE } else { INACTIVE };
+         let previous =
+             MODE.compare_exchange(UNINITIALIZED, set, Ordering::Relaxed, Ordering::Relaxed);
+
+         // Check that the mode was either uninitialized or was already set to the requested mode.
+         assert!(previous.is_ok() || previous == Err(set));
+     }
+ }
+
+ pub use mode::{active, set};
 cfg_if! {
 if #[cfg(not(parallel_compiler))] {
 pub unsafe auto trait Send {}
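
A hedged standalone restatement of the `mode` switch above, plus the usage rule it enforces: `set` may run once (repeat calls must agree) and `active` panics if queried before the mode is chosen, which is why the drivers later in this commit call `set` before building anything that takes locks:

use std::sync::atomic::{AtomicU8, Ordering};

const UNINITIALIZED: u8 = 0;
const INACTIVE: u8 = 1;
const ACTIVE: u8 = 2;

static MODE: AtomicU8 = AtomicU8::new(UNINITIALIZED);

fn set(parallel: bool) {
    let target = if parallel { ACTIVE } else { INACTIVE };
    // Only the first call wins; later calls must request the same mode.
    let previous = MODE.compare_exchange(UNINITIALIZED, target, Ordering::Relaxed, Ordering::Relaxed);
    assert!(previous.is_ok() || previous == Err(target));
}

fn active() -> bool {
    match MODE.load(Ordering::Relaxed) {
        INACTIVE => false,
        ACTIVE => true,
        _ => panic!("parallel mode queried before it was set"),
    }
}

fn main() {
    set(true);     // e.g. when more than one thread was requested
    assert!(active());
    set(true);     // accepted: same value as before
    // set(false); // would panic: the mode cannot change after initialization
}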
@@ -149,7 +181,7 @@ cfg_if! {

 #[macro_export]
 macro_rules! parallel {
- ($($blocks:tt),*) => {
+ ($($blocks:block),*) => {{
 // We catch panics here ensuring that all the blocks execute.
 // This makes behavior consistent with the parallel compiler.
 let mut panic = None;
@@ -165,13 +197,7 @@ cfg_if! {
 if let Some(panic) = panic {
 ::std::panic::resume_unwind(panic);
 }
- }
+ }}
 }

- pub use Iterator as ParallelIterator;
-
- pub fn par_iter<T: IntoIterator>(t: T) -> T::IntoIter {
-     t.into_iter()
- }
 }
@@ -190,6 +216,29 @@ cfg_if! {
 }
 }

+ pub fn par_map<T: IntoIterator, R, C: FromIterator<R>>(
+     t: T,
+     mut map: impl FnMut(<<T as IntoIterator>::IntoIter as Iterator>::Item) -> R,
+ ) -> C {
+     // We catch panics here ensuring that all the loop iterations execute.
+     let mut panic = None;
+     let r = t.into_iter().filter_map(|i| {
+         match catch_unwind(AssertUnwindSafe(|| map(i))) {
+             Ok(r) => Some(r),
+             Err(p) => {
+                 if panic.is_none() {
+                     panic = Some(p);
+                 }
+                 None
+             }
+         }
+     }).collect();
+     if let Some(panic) = panic {
+         resume_unwind(panic);
+     }
+     r
+ }
+
 pub type MetadataRef = OwnedSlice;

 pub use std::rc::Rc as Lrc;
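
The sequential fallback above encodes a small invariant: even if one closure panics, every remaining item is still processed, and the first captured panic is re-raised only at the end. A hedged standalone sketch of that pattern in plain std, with stand-in names:

use std::panic::{catch_unwind, resume_unwind, AssertUnwindSafe};

fn map_all<T, R>(items: Vec<T>, f: impl Fn(T) -> R) -> Vec<R> {
    let mut first_panic = None;
    let results: Vec<R> = items
        .into_iter()
        .filter_map(|item| match catch_unwind(AssertUnwindSafe(|| f(item))) {
            Ok(r) => Some(r),
            Err(p) => {
                // Remember only the first panic; keep processing the rest.
                if first_panic.is_none() {
                    first_panic = Some(p);
                }
                None
            }
        })
        .collect();
    if let Some(p) = first_panic {
        resume_unwind(p);
    }
    results
}

fn main() {
    assert_eq!(map_all(vec![1, 2, 3], |x| x * 2), vec![2, 4, 6]);
}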
@@ -302,46 +351,159 @@ cfg_if! {
 use parking_lot::RwLock as InnerRwLock;

 use std::thread;
- pub use rayon::{join, scope};
+
+ #[inline]
+ pub fn join<A, B, RA: DynSend, RB: DynSend>(oper_a: A, oper_b: B) -> (RA, RB)
+ where
+     A: FnOnce() -> RA + DynSend,
+     B: FnOnce() -> RB + DynSend,
+ {
+     if mode::active() {
+         let oper_a = FromDyn::from(oper_a);
+         let oper_b = FromDyn::from(oper_b);
+         let (a, b) = rayon::join(move || FromDyn::from(oper_a.into_inner()()), move || FromDyn::from(oper_b.into_inner()()));
+         (a.into_inner(), b.into_inner())
+     } else {
+         (oper_a(), oper_b())
+     }
+ }
+
+ pub fn scope<'scope, OP, R>(op: OP) -> R
+ where
+     OP: FnOnce(&rayon::Scope<'scope>) -> R + DynSend,
+     R: DynSend,
+ {
+     let op = FromDyn::from(op);
+     rayon::scope(|s| FromDyn::from(op.into_inner()(s))).into_inner()
+ }
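
`join` and `scope` above only require `DynSend`, so before handing the closures to rayon (whose API wants real `Send`) they are wrapped in `FromDyn`, which asserts that parallel mode is active and then vouches for thread safety. A hedged sketch of that wrapper idea using plain std, with no mode check:

// A wrapper that is unsafely asserted to be Send; the real FromDyn::from also
// asserts sync::active() so the claim is only made in parallel mode.
struct FromDyn<T>(T);

impl<T> FromDyn<T> {
    fn from(val: T) -> Self {
        FromDyn(val)
    }
    fn into_inner(self) -> T {
        self.0
    }
}

// SAFETY: sound only because the caller vouched for thread safety on wrapping.
unsafe impl<T> Send for FromDyn<T> {}

fn main() {
    let task = FromDyn::from(|| 21 * 2);
    let answer = std::thread::scope(|s| s.spawn(move || task.into_inner()()).join().unwrap());
    assert_eq!(answer, 42);
}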
 /// Runs a list of blocks in parallel. The first block is executed immediately on
 /// the current thread. Use that for the longest running block.
 #[macro_export]
 macro_rules! parallel {
 (impl $fblock:tt [$($c:tt,)*] [$block:tt $(, $rest:tt)*]) => {
 parallel!(impl $fblock [$block, $($c,)*] [$($rest),*])
 ($fblock:block [$($c:expr,)*] [$block:expr $(, $rest:expr)*]) => {
 parallel!($fblock [$block, $($c,)*] [$($rest),*])
 };
 (impl $fblock:tt [$($blocks:tt,)*] []) => {
 ::rustc_data_structures::sync::scope(|s| {
 ($fblock:block [$($blocks:expr,)*] []) => {
 {
 ::rustc_data_structures::sync::scope(|s| {
 $(let block = rustc_data_structures::sync::FromDyn::from(|| $blocks);
 s.spawn(move |_| block.into_inner()());)*
 (|| $fblock)();
 });
 }
 };
 ($fblock:block, $($blocks:block),*) => {
 if rustc_data_structures::sync::active() {
 // Reverse the order of the later blocks since Rayon executes them in reverse order
 // when using a single thread. This ensures the execution order matches that
 // of a single threaded rustc
 parallel!($fblock [] [$($blocks),*]);
 } else {
 // We catch panics here ensuring that all the blocks execute.
 // This makes behavior consistent with the parallel compiler.
 let mut panic = None;
 $(
 s.spawn(|_| $blocks);
 if let Err(p) = ::std::panic::catch_unwind(
 ::std::panic::AssertUnwindSafe(|| $blocks)
 ) {
 if panic.is_none() {
 panic = Some(p);
 }
 }
 )*
 $fblock;
 })
 };
 ($fblock:tt, $($blocks:tt),*) => {
 // Reverse the order of the later blocks since Rayon executes them in reverse order
 // when using a single thread. This ensures the execution order matches that
 // of a single threaded rustc
 parallel!(impl $fblock [] [$($blocks),*]);
 if let Some(panic) = panic {
 ::std::panic::resume_unwind(panic);
 }
 }
 };
 }
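
Roughly, an invocation like `parallel!(block_a, block_b, block_c)` spawns the later blocks onto the thread pool and runs the first block on the calling thread. A hedged sketch of that shape, with `std::thread::scope` standing in for rayon and `work` standing in for a compiler pass:

fn work(label: &str) -> usize {
    // Stand-in for a compiler pass.
    println!("running {label}");
    label.len()
}

fn main() {
    std::thread::scope(|s| {
        let b = s.spawn(|| work("block_b"));
        let c = s.spawn(|| work("block_c"));
        let a = work("block_a"); // the first (longest) block stays on this thread
        let (b, c) = (b.join().unwrap(), c.join().unwrap());
        assert_eq!(a + b + c, 3 * "block_x".len());
    });
}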
- pub use rayon::iter::ParallelIterator;
- use rayon::iter::IntoParallelIterator;
+ use rayon::iter::{FromParallelIterator, IntoParallelIterator, ParallelIterator};

 pub fn par_iter<T: IntoParallelIterator>(t: T) -> T::Iter {
 t.into_par_iter()
 }

- pub fn par_for_each_in<T: IntoParallelIterator>(
+ pub fn par_for_each_in<I, T: IntoIterator<Item = I> + IntoParallelIterator<Item = I>>(
 t: T,
- for_each: impl Fn(T::Item) + Sync + Send,
+ for_each: impl Fn(I) + DynSync + DynSend
 ) {
- let ps: Vec<_> = t.into_par_iter().map(|i| catch_unwind(AssertUnwindSafe(|| for_each(i)))).collect();
- ps.into_iter().for_each(|p| if let Err(panic) = p {
-     resume_unwind(panic)
- });
+ if mode::active() {
+     let for_each = FromDyn::from(for_each);
+     let panic: Lock<Option<_>> = Lock::new(None);
+     t.into_par_iter().for_each(|i| if let Err(p) = catch_unwind(AssertUnwindSafe(|| for_each(i))) {
+         let mut l = panic.lock();
+         if l.is_none() {
+             *l = Some(p)
+         }
+     });
+
+     if let Some(panic) = panic.into_inner() {
+         resume_unwind(panic);
+     }
+ } else {
+     // We catch panics here ensuring that all the loop iterations execute.
+     // This makes behavior consistent with the parallel compiler.
+     let mut panic = None;
+     t.into_iter().for_each(|i| {
+         if let Err(p) = catch_unwind(AssertUnwindSafe(|| for_each(i))) {
+             if panic.is_none() {
+                 panic = Some(p);
+             }
+         }
+     });
+     if let Some(panic) = panic {
+         resume_unwind(panic);
+     }
+ }
 }
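
In the parallel branch above, each iteration traps its own panic into a shared `Lock<Option<_>>` slot so every item is still visited, and the first panic is resumed after the loop. A hedged standalone sketch of that behaviour using scoped threads and a `Mutex` in place of rayon and `Lock`:

use std::panic::{catch_unwind, resume_unwind, AssertUnwindSafe};
use std::sync::Mutex;

fn for_each_parallel<T: Send>(items: Vec<T>, f: impl Fn(T) + Sync) {
    let first_panic = Mutex::new(None);
    std::thread::scope(|s| {
        let f = &f;
        let first_panic = &first_panic;
        for item in items {
            s.spawn(move || {
                if let Err(p) = catch_unwind(AssertUnwindSafe(|| f(item))) {
                    // Keep only the first panic; the other tasks still run.
                    let mut slot = first_panic.lock().unwrap();
                    if slot.is_none() {
                        *slot = Some(p);
                    }
                }
            });
        }
    });
    if let Some(p) = first_panic.into_inner().unwrap() {
        resume_unwind(p);
    }
}

fn main() {
    for_each_parallel(vec![1, 2, 3], |n| println!("visiting {n}"));
}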
+ pub fn par_map<
+     I,
+     T: IntoIterator<Item = I> + IntoParallelIterator<Item = I>,
+     R: std::marker::Send,
+     C: FromIterator<R> + FromParallelIterator<R>
+ >(
+     t: T,
+     map: impl Fn(I) -> R + DynSync + DynSend
+ ) -> C {
+     if mode::active() {
+         let panic: Lock<Option<_>> = Lock::new(None);
+         let map = FromDyn::from(map);
+         // We catch panics here ensuring that all the loop iterations execute.
+         let r = t.into_par_iter().filter_map(|i| {
+             match catch_unwind(AssertUnwindSafe(|| map(i))) {
+                 Ok(r) => Some(r),
+                 Err(p) => {
+                     let mut l = panic.lock();
+                     if l.is_none() {
+                         *l = Some(p);
+                     }
+                     None
+                 },
+             }
+         }).collect();
+
+         if let Some(panic) = panic.into_inner() {
+             resume_unwind(panic);
+         }
+         r
+     } else {
+         // We catch panics here ensuring that all the loop iterations execute.
+         let mut panic = None;
+         let r = t.into_iter().filter_map(|i| {
+             match catch_unwind(AssertUnwindSafe(|| map(i))) {
+                 Ok(r) => Some(r),
+                 Err(p) => {
+                     if panic.is_none() {
+                         panic = Some(p);
+                     }
+                     None
+                 }
+             }
+         }).collect();
+         if let Some(panic) = panic {
+             resume_unwind(panic);
+         }
+         r
+     }
+ }

 pub type MetadataRef = OwnedSlice;
@@ -352,11 +514,6 @@ cfg_if! {
 }
 }

- pub fn assert_sync<T: ?Sized + Sync>() {}
- pub fn assert_send<T: ?Sized + Send>() {}
- pub fn assert_send_val<T: ?Sized + Send>(_t: &T) {}
- pub fn assert_send_sync_val<T: ?Sized + Sync + Send>(_t: &T) {}
-
 #[derive(Default)]
 #[cfg_attr(parallel_compiler, repr(align(64)))]
 pub struct CacheAligned<T>(pub T);
@@ -255,6 +255,9 @@ fn run_compiler(

 let sopts = config::build_session_options(&matches);

+ // Set parallel mode before thread pool creation as the session will already create locks.
+ interface::set_parallel_mode(&sopts.unstable_opts);
+
 if let Some(ref code) = matches.opt_str("explain") {
 handle_explain(diagnostics_registry(), code, sopts.error_format);
 return Ok(());

@@ -11,7 +11,7 @@ extern crate tracing;
 use fluent_bundle::FluentResource;
 use fluent_syntax::parser::ParserError;
 use icu_provider_adapters::fallback::{LocaleFallbackProvider, LocaleFallbacker};
- use rustc_data_structures::sync::Lrc;
+ use rustc_data_structures::sync::{IntoDyn, Lrc};
 use rustc_fluent_macro::fluent_messages;
 use rustc_macros::{Decodable, Encodable};
 use rustc_span::Span;
@@ -37,16 +37,17 @@ pub use unic_langid::{langid, LanguageIdentifier};

 fluent_messages! { "../messages.ftl" }

- pub type FluentBundle = fluent_bundle::bundle::FluentBundle<FluentResource, IntlLangMemoizer>;
-
- #[cfg(parallel_compiler)]
- fn new_bundle(locales: Vec<LanguageIdentifier>) -> FluentBundle {
-     FluentBundle::new_concurrent(locales)
- }
+ pub type FluentBundle =
+     IntoDyn<fluent_bundle::bundle::FluentBundle<FluentResource, IntlLangMemoizer>>;

 #[cfg(not(parallel_compiler))]
 fn new_bundle(locales: Vec<LanguageIdentifier>) -> FluentBundle {
- FluentBundle::new(locales)
+ IntoDyn(fluent_bundle::bundle::FluentBundle::new(locales))
 }

+ #[cfg(parallel_compiler)]
+ fn new_bundle(locales: Vec<LanguageIdentifier>) -> FluentBundle {
+     IntoDyn(fluent_bundle::bundle::FluentBundle::new_concurrent(locales))
+ }
+
 #[derive(Debug)]
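
`IntoDyn` is only a transparent wrapper: it carries the unsafe `DynSend`/`DynSync` impls for a value that is already known to be thread-safe, and `Deref` keeps existing call sites compiling unchanged. A hedged sketch of just that wrapper shape in plain std (the `Vec` stands in for a fluent bundle):

use std::ops::Deref;

struct IntoDyn<T: ?Sized>(pub T);

impl<T> Deref for IntoDyn<T> {
    type Target = T;
    fn deref(&self) -> &T {
        &self.0
    }
}

fn main() {
    let bundle = IntoDyn(vec!["en-US"]);
    // Existing call sites keep working through Deref.
    assert_eq!(bundle.len(), 1);
}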
@@ -32,7 +32,7 @@ use emitter::{is_case_difference, Emitter, EmitterWriter};
 use registry::Registry;
 use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet};
 use rustc_data_structures::stable_hasher::{Hash128, StableHasher};
- use rustc_data_structures::sync::{self, Lock, Lrc};
+ use rustc_data_structures::sync::{self, IntoDyn, Lock, Lrc};
 use rustc_data_structures::AtomicRef;
 pub use rustc_error_messages::{
 fallback_fluent_bundle, fluent_bundle, DelayDm, DiagnosticMessage, FluentBundle,
@@ -409,7 +409,7 @@ struct HandlerInner {
 err_count: usize,
 warn_count: usize,
 deduplicated_err_count: usize,
- emitter: Box<dyn Emitter + sync::Send>,
+ emitter: IntoDyn<Box<dyn Emitter + sync::Send>>,
 delayed_span_bugs: Vec<DelayedDiagnostic>,
 delayed_good_path_bugs: Vec<DelayedDiagnostic>,
 /// This flag indicates that an expected diagnostic was emitted and suppressed.
@@ -605,7 +605,7 @@ impl Handler {
 warn_count: 0,
 deduplicated_err_count: 0,
 deduplicated_warn_count: 0,
- emitter,
+ emitter: IntoDyn(emitter),
 delayed_span_bugs: Vec::new(),
 delayed_good_path_bugs: Vec::new(),
 suppressed_expected_diag: false,

@@ -2,7 +2,7 @@ use crate::error::{TranslateError, TranslateErrorKind};
 use crate::fluent_bundle::*;
 use crate::translation::Translate;
 use crate::FluentBundle;
- use rustc_data_structures::sync::Lrc;
+ use rustc_data_structures::sync::{IntoDyn, Lrc};
 use rustc_error_messages::fluent_bundle::resolver::errors::{ReferenceKind, ResolverError};
 use rustc_error_messages::langid;
 use rustc_error_messages::DiagnosticMessage;
@@ -27,10 +27,12 @@ fn make_dummy(ftl: &'static str) -> Dummy {
 let langid_en = langid!("en-US");

 #[cfg(parallel_compiler)]
- let mut bundle = FluentBundle::new_concurrent(vec![langid_en]);
+ let mut bundle: FluentBundle =
+     IntoDyn(crate::fluent_bundle::bundle::FluentBundle::new_concurrent(vec![langid_en]));

 #[cfg(not(parallel_compiler))]
- let mut bundle = FluentBundle::new(vec![langid_en]);
+ let mut bundle: FluentBundle =
+     IntoDyn(crate::fluent_bundle::bundle::FluentBundle::new(vec![langid_en]));

 bundle.add_resource(resource).expect("Failed to add FTL resources to the bundle.");
@@ -653,13 +653,13 @@ pub enum SyntaxExtensionKind {
 /// A token-based function-like macro.
 Bang(
 /// An expander with signature TokenStream -> TokenStream.
- Box<dyn BangProcMacro + sync::Sync + sync::Send>,
+ Box<dyn BangProcMacro + sync::DynSync + sync::DynSend>,
 ),

 /// An AST-based function-like macro.
 LegacyBang(
 /// An expander with signature TokenStream -> AST.
- Box<dyn TTMacroExpander + sync::Sync + sync::Send>,
+ Box<dyn TTMacroExpander + sync::DynSync + sync::DynSend>,
 ),

 /// A token-based attribute macro.
@@ -667,7 +667,7 @@ pub enum SyntaxExtensionKind {
 /// An expander with signature (TokenStream, TokenStream) -> TokenStream.
 /// The first TokenSteam is the attribute itself, the second is the annotated item.
 /// The produced TokenSteam replaces the input TokenSteam.
- Box<dyn AttrProcMacro + sync::Sync + sync::Send>,
+ Box<dyn AttrProcMacro + sync::DynSync + sync::DynSend>,
 ),

 /// An AST-based attribute macro.
@@ -675,7 +675,7 @@ pub enum SyntaxExtensionKind {
 /// An expander with signature (AST, AST) -> AST.
 /// The first AST fragment is the attribute itself, the second is the annotated item.
 /// The produced AST fragment replaces the input AST fragment.
- Box<dyn MultiItemModifier + sync::Sync + sync::Send>,
+ Box<dyn MultiItemModifier + sync::DynSync + sync::DynSend>,
 ),

 /// A trivial attribute "macro" that does nothing,
@@ -692,14 +692,14 @@ pub enum SyntaxExtensionKind {
 /// is handled identically to `LegacyDerive`. It should be migrated to
 /// a token-based representation like `Bang` and `Attr`, instead of
 /// using `MultiItemModifier`.
- Box<dyn MultiItemModifier + sync::Sync + sync::Send>,
+ Box<dyn MultiItemModifier + sync::DynSync + sync::DynSend>,
 ),

 /// An AST-based derive macro.
 LegacyDerive(
 /// An expander with signature AST -> AST.
 /// The produced AST fragment is appended to the input AST fragment.
- Box<dyn MultiItemModifier + sync::Sync + sync::Send>,
+ Box<dyn MultiItemModifier + sync::DynSync + sync::DynSend>,
 ),
 }
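
The change above only swaps the marker bounds on the boxed expanders; stacking auto/marker traits onto a trait object is ordinary Rust. A hedged sketch with a stand-in `Expander` trait and real `Send`/`Sync` instead of the dynamic markers:

trait Expander {
    fn expand(&self, input: &str) -> String;
}

struct Upper;
impl Expander for Upper {
    fn expand(&self, input: &str) -> String {
        input.to_uppercase()
    }
}

fn main() {
    // The trait object itself demands thread safety in its type.
    let boxed: Box<dyn Expander + Send + Sync> = Box::new(Upper);
    assert_eq!(boxed.expand("tt"), "TT");
}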
@@ -60,6 +60,11 @@ impl Compiler {
 }
 }

+ #[allow(rustc::bad_opt_access)]
+ pub fn set_parallel_mode(sopts: &config::UnstableOptions) {
+     rustc_data_structures::sync::set(sopts.threads > 1);
+ }
+
 /// Converts strings provided as `--cfg [cfgspec]` into a `crate_cfg`.
 pub fn parse_cfgspecs(cfgspecs: Vec<String>) -> FxHashSet<(String, Option<String>)> {
 rustc_span::create_default_session_if_not_set_then(move |_| {

@@ -49,9 +49,9 @@ use std::cell::Cell;
 use std::iter;
 use std::slice;

- type EarlyLintPassFactory = dyn Fn() -> EarlyLintPassObject + sync::Send + sync::Sync;
+ type EarlyLintPassFactory = dyn Fn() -> EarlyLintPassObject + sync::DynSend + sync::DynSync;
 type LateLintPassFactory =
- dyn for<'tcx> Fn(TyCtxt<'tcx>) -> LateLintPassObject<'tcx> + sync::Send + sync::Sync;
+ dyn for<'tcx> Fn(TyCtxt<'tcx>) -> LateLintPassObject<'tcx> + sync::DynSend + sync::DynSync;

 /// Information about the registered lints.
 ///
@@ -169,7 +169,7 @@ impl LintStore {

 pub fn register_early_pass(
 &mut self,
- pass: impl Fn() -> EarlyLintPassObject + 'static + sync::Send + sync::Sync,
+ pass: impl Fn() -> EarlyLintPassObject + 'static + sync::DynSend + sync::DynSync,
 ) {
 self.early_passes.push(Box::new(pass));
 }
@@ -182,7 +182,7 @@ impl LintStore {
 /// * See [rust-clippy#5518](https://github.com/rust-lang/rust-clippy/pull/5518)
 pub fn register_pre_expansion_pass(
 &mut self,
- pass: impl Fn() -> EarlyLintPassObject + 'static + sync::Send + sync::Sync,
+ pass: impl Fn() -> EarlyLintPassObject + 'static + sync::DynSend + sync::DynSync,
 ) {
 self.pre_expansion_passes.push(Box::new(pass));
 }
@@ -191,8 +191,8 @@ impl LintStore {
 &mut self,
 pass: impl for<'tcx> Fn(TyCtxt<'tcx>) -> LateLintPassObject<'tcx>
 + 'static
- + sync::Send
- + sync::Sync,
+ + sync::DynSend
+ + sync::DynSync,
 ) {
 self.late_passes.push(Box::new(pass));
 }
@@ -201,8 +201,8 @@ impl LintStore {
 &mut self,
 pass: impl for<'tcx> Fn(TyCtxt<'tcx>) -> LateLintPassObject<'tcx>
 + 'static
- + sync::Send
- + sync::Sync,
+ + sync::DynSend
+ + sync::DynSync,
 ) {
 self.late_module_passes.push(Box::new(pass));
 }

@@ -16,7 +16,7 @@
 use crate::{passes::LateLintPassObject, LateContext, LateLintPass, LintStore};
 use rustc_ast as ast;
- use rustc_data_structures::sync::join;
+ use rustc_data_structures::sync::{join, DynSend};
 use rustc_hir as hir;
 use rustc_hir::def_id::LocalDefId;
 use rustc_hir::intravisit as hir_visit;
@@ -429,7 +429,7 @@ fn late_lint_crate_inner<'tcx, T: LateLintPass<'tcx>>(
 /// Performs lint checking on a crate.
 pub fn check_crate<'tcx, T: LateLintPass<'tcx> + 'tcx>(
 tcx: TyCtxt<'tcx>,
- builtin_lints: impl FnOnce() -> T + Send,
+ builtin_lints: impl FnOnce() -> T + Send + DynSend,
 ) {
 join(
 || {
@@ -8,7 +8,7 @@ use rustc_data_structures::fingerprint::Fingerprint;
 use rustc_data_structures::fx::{FxHashMap, FxIndexSet};
 use rustc_data_structures::memmap::{Mmap, MmapMut};
 use rustc_data_structures::stable_hasher::{Hash128, HashStable, StableHasher};
- use rustc_data_structures::sync::{join, par_iter, Lrc, ParallelIterator};
+ use rustc_data_structures::sync::{join, par_for_each_in, Lrc};
 use rustc_data_structures::temp_dir::MaybeTempDir;
 use rustc_hir as hir;
 use rustc_hir::def::DefKind;
@@ -2125,7 +2125,7 @@ fn prefetch_mir(tcx: TyCtxt<'_>) {
 return;
 }

- par_iter(tcx.mir_keys(())).for_each(|&def_id| {
+ par_for_each_in(tcx.mir_keys(()), |&def_id| {
 let (encode_const, encode_opt) = should_encode_mir(tcx, def_id);

 if encode_const {

@@ -5,7 +5,7 @@ use rustc_ast as ast;
 use rustc_data_structures::fingerprint::Fingerprint;
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
 use rustc_data_structures::svh::Svh;
- use rustc_data_structures::sync::{par_for_each_in, Send, Sync};
+ use rustc_data_structures::sync::{par_for_each_in, DynSend, DynSync};
 use rustc_hir::def::{DefKind, Res};
 use rustc_hir::def_id::{DefId, LocalDefId, CRATE_DEF_ID, LOCAL_CRATE};
 use rustc_hir::definitions::{DefKey, DefPath, DefPathData, DefPathHash};
@@ -150,11 +150,6 @@ impl<'hir> Map<'hir> {
 self.tcx.hir_module_items(module).items()
 }

- #[inline]
- pub fn par_for_each_item(self, f: impl Fn(ItemId) + Sync + Send) {
-     par_for_each_in(&self.tcx.hir_crate_items(()).items[..], |id| f(*id));
- }
-
 pub fn def_key(self, def_id: LocalDefId) -> DefKey {
 // Accessing the DefKey is ok, since it is part of DefPathHash.
 self.tcx.definitions_untracked().def_key(def_id)
@@ -502,7 +497,7 @@ impl<'hir> Map<'hir> {
 }

 #[inline]
- pub fn par_body_owners(self, f: impl Fn(LocalDefId) + Sync + Send) {
+ pub fn par_body_owners(self, f: impl Fn(LocalDefId) + DynSend + DynSync) {
 par_for_each_in(&self.tcx.hir_crate_items(()).body_owners[..], |&def_id| f(def_id));
 }

@@ -640,7 +635,7 @@ impl<'hir> Map<'hir> {
 }

 #[inline]
- pub fn par_for_each_module(self, f: impl Fn(LocalDefId) + Sync + Send) {
+ pub fn par_for_each_module(self, f: impl Fn(LocalDefId) + DynSend + DynSync) {
 let crate_items = self.tcx.hir_crate_items(());
 par_for_each_in(&crate_items.submodules[..], |module| f(module.def_id))
 }

@@ -9,7 +9,7 @@ pub mod place;
 use crate::ty::query::Providers;
 use crate::ty::{EarlyBinder, ImplSubject, TyCtxt};
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
- use rustc_data_structures::sync::{par_for_each_in, Send, Sync};
+ use rustc_data_structures::sync::{par_for_each_in, DynSend, DynSync};
 use rustc_hir::def_id::{DefId, LocalDefId};
 use rustc_hir::*;
 use rustc_query_system::ich::StableHashingContext;
@@ -77,19 +77,19 @@ impl ModuleItems {
 self.owners().map(|id| id.def_id)
 }

- pub fn par_items(&self, f: impl Fn(ItemId) + Send + Sync) {
+ pub fn par_items(&self, f: impl Fn(ItemId) + DynSend + DynSync) {
 par_for_each_in(&self.items[..], |&id| f(id))
 }

- pub fn par_trait_items(&self, f: impl Fn(TraitItemId) + Send + Sync) {
+ pub fn par_trait_items(&self, f: impl Fn(TraitItemId) + DynSend + DynSync) {
 par_for_each_in(&self.trait_items[..], |&id| f(id))
 }

- pub fn par_impl_items(&self, f: impl Fn(ImplItemId) + Send + Sync) {
+ pub fn par_impl_items(&self, f: impl Fn(ImplItemId) + DynSend + DynSync) {
 par_for_each_in(&self.impl_items[..], |&id| f(id))
 }

- pub fn par_foreign_items(&self, f: impl Fn(ForeignItemId) + Send + Sync) {
+ pub fn par_foreign_items(&self, f: impl Fn(ForeignItemId) + DynSend + DynSync) {
 par_for_each_in(&self.foreign_items[..], |&id| f(id))
 }
 }
@@ -496,7 +496,7 @@ pub struct GlobalCtxt<'tcx> {
 ///
 /// FIXME(Centril): consider `dyn LintStoreMarker` once
 /// we can upcast to `Any` for some additional type safety.
- pub lint_store: Lrc<dyn Any + sync::Sync + sync::Send>,
+ pub lint_store: Lrc<dyn Any + sync::DynSync + sync::DynSend>,

 pub dep_graph: DepGraph,

@@ -648,7 +648,7 @@ impl<'tcx> TyCtxt<'tcx> {
 /// reference to the context, to allow formatting values that need it.
 pub fn create_global_ctxt(
 s: &'tcx Session,
- lint_store: Lrc<dyn Any + sync::Send + sync::Sync>,
+ lint_store: Lrc<dyn Any + sync::DynSend + sync::DynSync>,
 arena: &'tcx WorkerLocal<Arena<'tcx>>,
 hir_arena: &'tcx WorkerLocal<hir::Arena<'tcx>>,
 untracked: Untracked,

@@ -94,8 +94,8 @@ where
 f(None)
 } else {
 // We could get an `ImplicitCtxt` pointer from another thread.
- // Ensure that `ImplicitCtxt` is `Sync`.
- sync::assert_sync::<ImplicitCtxt<'_, '_>>();
+ // Ensure that `ImplicitCtxt` is `DynSync`.
+ sync::assert_dyn_sync::<ImplicitCtxt<'_, '_>>();

 unsafe { f(Some(downcast(context))) }
 }

@@ -199,6 +199,12 @@ impl<'a, T: Copy> IntoIterator for &'a List<T> {

 unsafe impl<T: Sync> Sync for List<T> {}

+ // We need this since `List` uses extern type `OpaqueListContents`
+ #[cfg(parallel_compiler)]
+ use rustc_data_structures::sync::DynSync;
+ #[cfg(parallel_compiler)]
+ unsafe impl<T: DynSync> DynSync for List<T> {}
+
 // Safety:
 // Layouts of `Equivalent<T>` and `List<T>` are the same, modulo opaque tail,
 // thus aligns of `Equivalent<T>` and `List<T>` must be the same.

@@ -52,9 +52,8 @@ use rustc_data_structures::fingerprint::Fingerprint;
 use rustc_data_structures::fx::{FxHashMap, FxIndexMap, FxIndexSet};
 use rustc_data_structures::steal::Steal;
 use rustc_data_structures::svh::Svh;
- use rustc_data_structures::sync::AtomicU64;
- use rustc_data_structures::sync::Lrc;
- use rustc_data_structures::sync::WorkerLocal;
+ use rustc_data_structures::sync::AtomicU64;
+ use rustc_data_structures::sync::WorkerLocal;
+ use rustc_data_structures::sync::{self, Lrc};
 use rustc_data_structures::unord::UnordSet;
 use rustc_errors::ErrorGuaranteed;
 use rustc_hir as hir;

@@ -207,7 +207,7 @@ pub trait MetadataLoader: std::fmt::Debug {
 fn get_dylib_metadata(&self, target: &Target, filename: &Path) -> Result<MetadataRef, String>;
 }

- pub type MetadataLoaderDyn = dyn MetadataLoader + Send + Sync;
+ pub type MetadataLoaderDyn = dyn MetadataLoader + Send + Sync + sync::DynSend + sync::DynSync;

 /// A store of Rust crates, through which their metadata can be accessed.
 ///
@@ -252,7 +252,7 @@ pub trait CrateStore: std::fmt::Debug {
 fn import_source_files(&self, sess: &Session, cnum: CrateNum);
 }

- pub type CrateStoreDyn = dyn CrateStore + sync::Sync + sync::Send;
+ pub type CrateStoreDyn = dyn CrateStore + sync::DynSync + sync::DynSend;

 pub struct Untracked {
 pub cstore: RwLock<Box<CrateStoreDyn>>,

@@ -14,7 +14,7 @@ pub use crate::*;

 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::stable_hasher::{Hash128, Hash64, StableHasher};
- use rustc_data_structures::sync::{AtomicU32, Lrc, MappedReadGuard, ReadGuard, RwLock};
+ use rustc_data_structures::sync::{AtomicU32, IntoDyn, Lrc, MappedReadGuard, ReadGuard, RwLock};
 use std::cmp;
 use std::hash::Hash;
 use std::path::{self, Path, PathBuf};
@@ -176,7 +176,7 @@ pub struct SourceMap {
 used_address_space: AtomicU32,

 files: RwLock<SourceMapFiles>,
- file_loader: Box<dyn FileLoader + Sync + Send>,
+ file_loader: IntoDyn<Box<dyn FileLoader + Sync + Send>>,
 // This is used to apply the file path remapping as specified via
 // `--remap-path-prefix` to all `SourceFile`s allocated within this `SourceMap`.
 path_mapping: FilePathMapping,
@@ -202,7 +202,7 @@ impl SourceMap {
 SourceMap {
 used_address_space: AtomicU32::new(0),
 files: Default::default(),
- file_loader,
+ file_loader: IntoDyn(file_loader),
 path_mapping,
 hash_kind,
 }

@@ -730,6 +730,9 @@ fn main_args(at_args: &[String]) -> MainResult {
 }
 };

+ // Set parallel mode early as the error handler will already create locks.
+ interface::set_parallel_mode(&options.unstable_opts);
+
 let diag = core::new_handler(
 options.error_format,
 None,