Make librustc_query_system compile.

Camille GILLOT 2020-03-18 10:25:22 +01:00
parent a7e2641b9a
commit 6624dc4045
12 changed files with 360 additions and 606 deletions

View File

@ -3116,6 +3116,7 @@ dependencies = [
"rustc_hir",
"rustc_index",
"rustc_macros",
"rustc_query_system",
"rustc_session",
"rustc_span",
"rustc_target",
@ -4021,6 +4022,22 @@ dependencies = [
"rustc_typeck",
]
[[package]]
name = "rustc_query_system"
version = "0.0.0"
dependencies = [
"log",
"parking_lot 0.9.0",
"rustc_ast",
"rustc_data_structures",
"rustc_errors",
"rustc_hir",
"rustc_index",
"rustc_macros",
"serialize",
"smallvec 1.0.0",
]
[[package]]
name = "rustc_resolve"
version = "0.0.0"

View File

@ -25,6 +25,7 @@ rustc_hir = { path = "../librustc_hir" }
rustc_target = { path = "../librustc_target" }
rustc_macros = { path = "../librustc_macros" }
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_query_system = { path = "../librustc_query_system" }
rustc_errors = { path = "../librustc_errors" }
rustc_index = { path = "../librustc_index" }
rustc_serialize = { path = "../libserialize", package = "serialize" }

View File

@ -0,0 +1,22 @@
[package]
authors = ["The Rust Project Developers"]
name = "rustc_query_system"
version = "0.0.0"
edition = "2018"
[lib]
name = "rustc_query_system"
path = "lib.rs"
doctest = false
[dependencies]
log = { version = "0.4", features = ["release_max_level_info", "std"] }
rustc_ast = { path = "../librustc_ast" }
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_errors = { path = "../librustc_errors" }
rustc_hir = { path = "../librustc_hir" }
rustc_index = { path = "../librustc_index" }
rustc_macros = { path = "../librustc_macros" }
rustc_serialize = { path = "../libserialize", package = "serialize" }
parking_lot = "0.9"
smallvec = { version = "1.0", features = ["union", "may_dangle"] }

View File

@ -1,6 +1,6 @@
//! Code for debugging the dep-graph.
use super::dep_node::DepNode;
use super::{DepKind, DepNode};
use std::error::Error;
/// A dep-node filter goes from a user-defined string to a query over
@ -26,7 +26,7 @@ impl DepNodeFilter {
}
/// Tests whether `node` meets the filter, returning true if so.
pub fn test(&self, node: &DepNode) -> bool {
pub fn test<K: DepKind>(&self, node: &DepNode<K>) -> bool {
let debug_str = format!("{:?}", node);
self.text.split('&').map(|s| s.trim()).all(|f| debug_str.contains(f))
}
@ -52,7 +52,7 @@ impl EdgeFilter {
}
}
pub fn test(&self, source: &DepNode, target: &DepNode) -> bool {
pub fn test<K: DepKind>(&self, source: &DepNode<K>, target: &DepNode<K>) -> bool {
self.source.test(source) && self.target.test(target)
}
}
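As an aside, the filter semantics in `DepNodeFilter::test` above boil down to plain substring matching over the node's `Debug` output. A minimal self-contained sketch of the same rule (the free function and the sample strings are illustrative, not part of the crate):

// Every '&'-separated fragment of the filter must occur in the node's
// debug representation for the node to match.
fn matches(filter: &str, node_debug_str: &str) -> bool {
    filter.split('&').map(|s| s.trim()).all(|f| node_debug_str.contains(f))
}

fn main() {
    assert!(matches("typeck & foo", "typeck(foo::bar)"));
    assert!(!matches("typeck & baz", "typeck(foo::bar)"));
}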

View File

@ -26,10 +26,10 @@
//! could not be instantiated because the current compilation session
//! contained no `DefId` for things that had been removed.
//!
//! `DepNode` definition happens in the `define_dep_nodes!()` macro. This macro
//! defines the `DepKind` enum and a corresponding `DepConstructor` enum. The
//! `DepConstructor` enum links a `DepKind` to the parameters that are needed at
//! runtime in order to construct a valid `DepNode` fingerprint.
//! `DepNode` definition happens in `librustc` with the `define_dep_nodes!()` macro.
//! This macro defines the `DepKind` enum and a corresponding `DepConstructor` enum. The
//! `DepConstructor` enum links a `DepKind` to the parameters that are needed at runtime in order
//! to construct a valid `DepNode` fingerprint.
//!
//! Because the macro sees what parameters a given `DepKind` requires, it can
//! "infer" some properties for each kind of `DepNode`:
@ -41,326 +41,50 @@
//! in which case it is possible to map the node's fingerprint back to the
//! `DefId` it was computed from. In other cases, too much information gets
//! lost during fingerprint computation.
//!
//! The `DepConstructor` enum, together with `DepNode::new()` ensures that only
//! valid `DepNode` instances can be constructed. For example, the API does not
//! allow for constructing parameterless `DepNode`s with anything other
//! than a zeroed out fingerprint. More generally speaking, it relieves the
//! user of the `DepNode` API of having to know how to compute the expected
//! fingerprint for a given set of node parameters.
use crate::hir::map::DefPathHash;
use crate::ich::{Fingerprint, StableHashingContext};
use crate::mir;
use crate::mir::interpret::{GlobalId, LitToConstInput};
use crate::traits;
use crate::traits::query::{
CanonicalPredicateGoal, CanonicalProjectionGoal, CanonicalTyGoal,
CanonicalTypeOpAscribeUserTypeGoal, CanonicalTypeOpEqGoal, CanonicalTypeOpNormalizeGoal,
CanonicalTypeOpProvePredicateGoal, CanonicalTypeOpSubtypeGoal,
};
use crate::ty::subst::SubstsRef;
use crate::ty::{self, ParamEnvAnd, Ty, TyCtxt};
use super::{DepContext, DepKind};
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX};
use rustc_hir::HirId;
use rustc_span::symbol::Symbol;
use rustc_macros::HashStable_Generic;
use std::fmt;
use std::hash::Hash;
// erase!() just makes tokens go away. It's used to specify which macro argument
// is repeated (i.e., which sub-expression of the macro we are in) but we don't need
// to actually use any of the arguments.
macro_rules! erase {
($x:tt) => {{}};
}
macro_rules! is_anon_attr {
(anon) => {
true
};
($attr:ident) => {
false
};
}
macro_rules! is_eval_always_attr {
(eval_always) => {
true
};
($attr:ident) => {
false
};
}
macro_rules! contains_anon_attr {
($($attr:ident $(($($attr_args:tt)*))* ),*) => ({$(is_anon_attr!($attr) | )* false});
}
macro_rules! contains_eval_always_attr {
($($attr:ident $(($($attr_args:tt)*))* ),*) => ({$(is_eval_always_attr!($attr) | )* false});
}
macro_rules! define_dep_nodes {
(<$tcx:tt>
$(
[$($attrs:tt)*]
$variant:ident $(( $tuple_arg_ty:ty $(,)? ))*
,)*
) => (
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash,
RustcEncodable, RustcDecodable)]
#[allow(non_camel_case_types)]
pub enum DepKind {
$($variant),*
}
impl DepKind {
#[allow(unreachable_code)]
pub fn can_reconstruct_query_key<$tcx>(&self) -> bool {
match *self {
$(
DepKind :: $variant => {
if contains_anon_attr!($($attrs)*) {
return false;
}
// tuple args
$({
return <$tuple_arg_ty as DepNodeParams>
::CAN_RECONSTRUCT_QUERY_KEY;
})*
true
}
)*
}
}
pub fn is_anon(&self) -> bool {
match *self {
$(
DepKind :: $variant => { contains_anon_attr!($($attrs)*) }
)*
}
}
pub fn is_eval_always(&self) -> bool {
match *self {
$(
DepKind :: $variant => { contains_eval_always_attr!($($attrs)*) }
)*
}
}
#[allow(unreachable_code)]
pub fn has_params(&self) -> bool {
match *self {
$(
DepKind :: $variant => {
// tuple args
$({
erase!($tuple_arg_ty);
return true;
})*
false
}
)*
}
}
}
pub struct DepConstructor;
#[allow(non_camel_case_types)]
impl DepConstructor {
$(
#[inline(always)]
#[allow(unreachable_code, non_snake_case)]
pub fn $variant(_tcx: TyCtxt<'_>, $(arg: $tuple_arg_ty)*) -> DepNode {
// tuple args
$({
erase!($tuple_arg_ty);
let hash = DepNodeParams::to_fingerprint(&arg, _tcx);
let dep_node = DepNode {
kind: DepKind::$variant,
hash
};
#[cfg(debug_assertions)]
{
if !dep_node.kind.can_reconstruct_query_key() &&
(_tcx.sess.opts.debugging_opts.incremental_info ||
_tcx.sess.opts.debugging_opts.query_dep_graph)
{
_tcx.dep_graph.register_dep_node_debug_str(dep_node, || {
arg.to_debug_str(_tcx)
});
}
}
return dep_node;
})*
DepNode {
kind: DepKind::$variant,
hash: Fingerprint::ZERO,
}
}
)*
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash,
RustcEncodable, RustcDecodable)]
pub struct DepNode {
pub kind: DepKind,
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
pub struct DepNode<K> {
pub kind: K,
pub hash: Fingerprint,
}
impl DepNode {
/// Construct a DepNode from the given DepKind and DefPathHash. This
/// method will assert that the given DepKind actually requires a
/// single DefId/DefPathHash parameter.
pub fn from_def_path_hash(def_path_hash: DefPathHash,
kind: DepKind)
-> DepNode {
debug_assert!(kind.can_reconstruct_query_key() && kind.has_params());
DepNode {
kind,
hash: def_path_hash.0,
}
}
impl<K: DepKind> DepNode<K> {
/// Creates a new, parameterless DepNode. This method will assert
/// that the DepNode corresponding to the given DepKind actually
/// does not require any parameters.
pub fn new_no_params(kind: DepKind) -> DepNode {
pub fn new_no_params(kind: K) -> DepNode<K> {
debug_assert!(!kind.has_params());
DepNode {
kind,
hash: Fingerprint::ZERO,
DepNode { kind, hash: Fingerprint::ZERO }
}
}
/// Extracts the DefId corresponding to this DepNode. This will work
/// if two conditions are met:
///
/// 1. The Fingerprint of the DepNode actually is a DefPathHash, and
/// 2. the item that the DefPath refers to exists in the current tcx.
///
/// Condition (1) is determined by the DepKind variant of the
/// DepNode. Condition (2) might not be fulfilled if a DepNode
/// refers to something from the previous compilation session that
/// has been removed.
pub fn extract_def_id(&self, tcx: TyCtxt<'_>) -> Option<DefId> {
if self.kind.can_reconstruct_query_key() {
let def_path_hash = DefPathHash(self.hash);
tcx.def_path_hash_to_def_id.as_ref()?
.get(&def_path_hash).cloned()
} else {
None
}
}
/// Used in testing
pub fn from_label_string(label: &str,
def_path_hash: DefPathHash)
-> Result<DepNode, ()> {
let kind = match label {
$(
stringify!($variant) => DepKind::$variant,
)*
_ => return Err(()),
};
if !kind.can_reconstruct_query_key() {
return Err(());
}
if kind.has_params() {
Ok(DepNode::from_def_path_hash(def_path_hash, kind))
} else {
Ok(DepNode::new_no_params(kind))
}
}
/// Used in testing
pub fn has_label_string(label: &str) -> bool {
match label {
$(
stringify!($variant) => true,
)*
_ => false,
}
}
}
/// Contains variant => str representations for constructing
/// DepNode groups for tests.
#[allow(dead_code, non_upper_case_globals)]
pub mod label_strs {
$(
pub const $variant: &str = stringify!($variant);
)*
}
);
}
impl fmt::Debug for DepNode {
impl<K: DepKind> fmt::Debug for DepNode<K> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{:?}", self.kind)?;
if !self.kind.has_params() && !self.kind.is_anon() {
return Ok(());
}
write!(f, "(")?;
crate::ty::tls::with_opt(|opt_tcx| {
if let Some(tcx) = opt_tcx {
if let Some(def_id) = self.extract_def_id(tcx) {
write!(f, "{}", tcx.def_path_debug_str(def_id))?;
} else if let Some(ref s) = tcx.dep_graph.dep_node_debug_str(*self) {
write!(f, "{}", s)?;
} else {
write!(f, "{}", self.hash)?;
}
} else {
write!(f, "{}", self.hash)?;
}
Ok(())
})?;
write!(f, ")")
K::debug_node(self, f)
}
}
rustc_dep_node_append!([define_dep_nodes!][ <'tcx>
// We use this for most things when incr. comp. is turned off.
[] Null,
// Represents metadata from an extern crate.
[eval_always] CrateMetadata(CrateNum),
[anon] TraitSelect,
[] CompileCodegenUnit(Symbol),
]);
pub(crate) trait DepNodeParams<'tcx>: fmt::Debug + Sized {
pub trait DepNodeParams<Ctxt: DepContext>: fmt::Debug + Sized {
const CAN_RECONSTRUCT_QUERY_KEY: bool;
/// This method turns the parameters of a DepNodeConstructor into an opaque
/// Fingerprint to be used in DepNode.
/// Not all DepNodeParams support being turned into a Fingerprint (they
/// don't need to if the corresponding DepNode is anonymous).
fn to_fingerprint(&self, _: TyCtxt<'tcx>) -> Fingerprint {
fn to_fingerprint(&self, _: Ctxt) -> Fingerprint {
panic!("Not implemented. Accidentally called on anonymous node?")
}
fn to_debug_str(&self, _: TyCtxt<'tcx>) -> String {
fn to_debug_str(&self, _: Ctxt) -> String {
format!("{:?}", self)
}
@ -370,16 +94,16 @@ pub(crate) trait DepNodeParams<'tcx>: fmt::Debug + Sized {
/// `CAN_RECONSTRUCT_QUERY_KEY` is `true`.
/// It is always valid to return `None` here, in which case incremental
/// compilation will treat the query as having changed instead of forcing it.
fn recover(tcx: TyCtxt<'tcx>, dep_node: &DepNode) -> Option<Self>;
fn recover(tcx: Ctxt, dep_node: &DepNode<Ctxt::DepKind>) -> Option<Self>;
}
impl<'tcx, T> DepNodeParams<'tcx> for T
impl<Ctxt: DepContext, T> DepNodeParams<Ctxt> for T
where
T: HashStable<StableHashingContext<'tcx>> + fmt::Debug,
T: HashStable<Ctxt::StableHashingContext> + fmt::Debug,
{
default const CAN_RECONSTRUCT_QUERY_KEY: bool = false;
default fn to_fingerprint(&self, tcx: TyCtxt<'tcx>) -> Fingerprint {
default fn to_fingerprint(&self, tcx: Ctxt) -> Fingerprint {
let mut hcx = tcx.create_stable_hashing_context();
let mut hasher = StableHasher::new();
@ -388,102 +112,15 @@ where
hasher.finish()
}
default fn to_debug_str(&self, _: TyCtxt<'tcx>) -> String {
default fn to_debug_str(&self, _: Ctxt) -> String {
format!("{:?}", *self)
}
default fn recover(_: TyCtxt<'tcx>, _: &DepNode) -> Option<Self> {
default fn recover(_: Ctxt, _: &DepNode<Ctxt::DepKind>) -> Option<Self> {
None
}
}
impl<'tcx> DepNodeParams<'tcx> for DefId {
const CAN_RECONSTRUCT_QUERY_KEY: bool = true;
fn to_fingerprint(&self, tcx: TyCtxt<'_>) -> Fingerprint {
tcx.def_path_hash(*self).0
}
fn to_debug_str(&self, tcx: TyCtxt<'tcx>) -> String {
tcx.def_path_str(*self)
}
fn recover(tcx: TyCtxt<'tcx>, dep_node: &DepNode) -> Option<Self> {
dep_node.extract_def_id(tcx)
}
}
impl<'tcx> DepNodeParams<'tcx> for DefIndex {
const CAN_RECONSTRUCT_QUERY_KEY: bool = true;
fn to_fingerprint(&self, tcx: TyCtxt<'_>) -> Fingerprint {
tcx.hir().definitions().def_path_hash(*self).0
}
fn to_debug_str(&self, tcx: TyCtxt<'tcx>) -> String {
tcx.def_path_str(DefId::local(*self))
}
fn recover(tcx: TyCtxt<'tcx>, dep_node: &DepNode) -> Option<Self> {
dep_node.extract_def_id(tcx).map(|id| id.index)
}
}
impl<'tcx> DepNodeParams<'tcx> for CrateNum {
const CAN_RECONSTRUCT_QUERY_KEY: bool = true;
fn to_fingerprint(&self, tcx: TyCtxt<'_>) -> Fingerprint {
let def_id = DefId { krate: *self, index: CRATE_DEF_INDEX };
tcx.def_path_hash(def_id).0
}
fn to_debug_str(&self, tcx: TyCtxt<'tcx>) -> String {
tcx.crate_name(*self).to_string()
}
fn recover(tcx: TyCtxt<'tcx>, dep_node: &DepNode) -> Option<Self> {
dep_node.extract_def_id(tcx).map(|id| id.krate)
}
}
impl<'tcx> DepNodeParams<'tcx> for (DefId, DefId) {
const CAN_RECONSTRUCT_QUERY_KEY: bool = false;
// We actually would not need to specialize the implementation of this
// method but it's faster to combine the hashes than to instantiate a full
// hashing context and stable-hashing state.
fn to_fingerprint(&self, tcx: TyCtxt<'_>) -> Fingerprint {
let (def_id_0, def_id_1) = *self;
let def_path_hash_0 = tcx.def_path_hash(def_id_0);
let def_path_hash_1 = tcx.def_path_hash(def_id_1);
def_path_hash_0.0.combine(def_path_hash_1.0)
}
fn to_debug_str(&self, tcx: TyCtxt<'tcx>) -> String {
let (def_id_0, def_id_1) = *self;
format!("({}, {})", tcx.def_path_debug_str(def_id_0), tcx.def_path_debug_str(def_id_1))
}
}
impl<'tcx> DepNodeParams<'tcx> for HirId {
const CAN_RECONSTRUCT_QUERY_KEY: bool = false;
// We actually would not need to specialize the implementation of this
// method but it's faster to combine the hashes than to instantiate a full
// hashing context and stable-hashing state.
fn to_fingerprint(&self, tcx: TyCtxt<'_>) -> Fingerprint {
let HirId { owner, local_id } = *self;
let def_path_hash = tcx.def_path_hash(DefId::local(owner));
let local_id = Fingerprint::from_smaller_hash(local_id.as_u32().into());
def_path_hash.0.combine(local_id)
}
}
/// A "work product" corresponds to a `.o` (or other) file that we
/// save in between runs. These IDs do not have a `DefId` but rather
/// some independent path or string that persists between runs without
@ -500,7 +137,7 @@ impl<'tcx> DepNodeParams<'tcx> for HirId {
Hash,
RustcEncodable,
RustcDecodable,
HashStable
HashStable_Generic
)]
pub struct WorkProductId {
hash: Fingerprint,
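With `DepNode` now generic over its kind, the concrete dependency-node type lives wherever a `DepKind` enum is defined; the docs above say this happens in `librustc` via `define_dep_nodes!()`. A hedged sketch of how that crate can keep its old concrete names on top of the generic types (these aliases are an illustration, not part of this commit):

// Hypothetical glue in librustc's dep_graph module: the DepKind enum
// generated by define_dep_nodes!() instantiates the generic structures.
pub type DepNode = rustc_query_system::dep_graph::DepNode<DepKind>;
pub type DepGraph = rustc_query_system::dep_graph::DepGraph<DepKind>;
pub type TaskDeps = rustc_query_system::dep_graph::TaskDeps<DepKind>;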

View File

@ -1,32 +1,33 @@
use crate::ty::{self, TyCtxt};
use parking_lot::{Condvar, Mutex};
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::profiling::QueryInvocationId;
use rustc_data_structures::sharded::{self, Sharded};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::{AtomicU32, AtomicU64, Lock, Lrc, Ordering};
use rustc_data_structures::unlikely;
use rustc_errors::Diagnostic;
use rustc_hir::def_id::DefId;
use rustc_index::vec::{Idx, IndexVec};
use smallvec::SmallVec;
use parking_lot::{Condvar, Mutex};
use smallvec::{smallvec, SmallVec};
use std::collections::hash_map::Entry;
use std::env;
use std::hash::Hash;
use std::mem;
use std::panic as bug;
use std::sync::atomic::Ordering::Relaxed;
use crate::ich::{Fingerprint, StableHashingContext, StableHashingContextProvider};
use super::debug::EdgeFilter;
use super::dep_node::{DepKind, DepNode, WorkProductId};
use super::prev::PreviousDepGraph;
use super::query::DepGraphQuery;
use super::safe::DepGraphSafe;
use super::serialized::{SerializedDepGraph, SerializedDepNodeIndex};
use super::{DepContext, DepKind, DepNode, WorkProductId};
use crate::{HashStableContext, HashStableContextProvider};
#[derive(Clone)]
pub struct DepGraph {
data: Option<Lrc<DepGraphData>>,
pub struct DepGraph<K: DepKind> {
data: Option<Lrc<DepGraphData<K>>>,
/// This field is used for assigning DepNodeIndices when running in
/// non-incremental mode. Even in non-incremental mode we make sure that
@ -65,16 +66,16 @@ impl DepNodeColor {
}
}
struct DepGraphData {
struct DepGraphData<K: DepKind> {
/// The new encoding of the dependency graph, optimized for red/green
/// tracking. The `current` field is the dependency graph of only the
/// current compilation session: We don't merge the previous dep-graph into
/// the current one anymore.
current: CurrentDepGraph,
current: CurrentDepGraph<K>,
/// The dep-graph from the previous compilation session. It contains all
/// nodes and edges as well as all fingerprints of nodes that have them.
previous: PreviousDepGraph,
previous: PreviousDepGraph<K>,
colors: DepNodeColorMap,
@ -90,12 +91,12 @@ struct DepGraphData {
/// this map. We can later look for and extract that data.
previous_work_products: FxHashMap<WorkProductId, WorkProduct>,
dep_node_debug: Lock<FxHashMap<DepNode, String>>,
dep_node_debug: Lock<FxHashMap<DepNode<K>, String>>,
}
pub fn hash_result<R>(hcx: &mut StableHashingContext<'_>, result: &R) -> Option<Fingerprint>
pub fn hash_result<HashCtxt, R>(hcx: &mut HashCtxt, result: &R) -> Option<Fingerprint>
where
R: for<'a> HashStable<StableHashingContext<'a>>,
R: HashStable<HashCtxt>,
{
let mut stable_hasher = StableHasher::new();
result.hash_stable(hcx, &mut stable_hasher);
@ -103,11 +104,11 @@ where
Some(stable_hasher.finish())
}
impl DepGraph {
impl<K: DepKind> DepGraph<K> {
pub fn new(
prev_graph: PreviousDepGraph,
prev_graph: PreviousDepGraph<K>,
prev_work_products: FxHashMap<WorkProductId, WorkProduct>,
) -> DepGraph {
) -> DepGraph<K> {
let prev_graph_node_count = prev_graph.node_count();
DepGraph {
@ -124,7 +125,7 @@ impl DepGraph {
}
}
pub fn new_disabled() -> DepGraph {
pub fn new_disabled() -> DepGraph<K> {
DepGraph { data: None, virtual_dep_node_index: Lrc::new(AtomicU32::new(0)) }
}
@ -134,7 +135,7 @@ impl DepGraph {
self.data.is_some()
}
pub fn query(&self) -> DepGraphQuery {
pub fn query(&self) -> DepGraphQuery<K> {
let data = self.data.as_ref().unwrap().current.data.lock();
let nodes: Vec<_> = data.iter().map(|n| n.node).collect();
let mut edges = Vec::new();
@ -150,10 +151,7 @@ impl DepGraph {
pub fn assert_ignored(&self) {
if let Some(..) = self.data {
ty::tls::with_context_opt(|icx| {
let icx = if let Some(icx) = icx { icx } else { return };
assert!(icx.task_deps.is_none(), "expected no task dependency tracking");
})
K::assert_ignored();
}
}
@ -161,11 +159,7 @@ impl DepGraph {
where
OP: FnOnce() -> R,
{
ty::tls::with_context(|icx| {
let icx = ty::tls::ImplicitCtxt { task_deps: None, ..icx.clone() };
ty::tls::enter_context(&icx, |_| op())
})
K::with_ignore_deps(op)
}
/// Starts a new dep-graph task. Dep-graph tasks are specified
@ -195,16 +189,17 @@ impl DepGraph {
/// `arg` parameter.
///
/// [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/incremental-compilation.html
pub fn with_task<'a, C, A, R>(
pub fn with_task<H, C, A, R>(
&self,
key: DepNode,
key: DepNode<K>,
cx: C,
arg: A,
task: fn(C, A) -> R,
hash_result: impl FnOnce(&mut StableHashingContext<'_>, &R) -> Option<Fingerprint>,
hash_result: impl FnOnce(&mut H, &R) -> Option<Fingerprint>,
) -> (R, DepNodeIndex)
where
C: DepGraphSafe + StableHashingContextProvider<'a>,
C: DepGraphSafe + HashStableContextProvider<H>,
H: HashStableContext,
{
self.with_task_impl(
key,
@ -218,6 +213,7 @@ impl DepGraph {
node: Some(_key),
reads: SmallVec::new(),
read_set: Default::default(),
phantom_data: std::marker::PhantomData,
})
},
|data, key, fingerprint, task| data.complete_task(key, task.unwrap(), fingerprint),
@ -225,24 +221,25 @@ impl DepGraph {
)
}
fn with_task_impl<'a, C, A, R>(
fn with_task_impl<H, C, A, R>(
&self,
key: DepNode,
key: DepNode<K>,
cx: C,
arg: A,
no_tcx: bool,
task: fn(C, A) -> R,
create_task: fn(DepNode) -> Option<TaskDeps>,
create_task: fn(DepNode<K>) -> Option<TaskDeps<K>>,
finish_task_and_alloc_depnode: fn(
&CurrentDepGraph,
DepNode,
&CurrentDepGraph<K>,
DepNode<K>,
Fingerprint,
Option<TaskDeps>,
Option<TaskDeps<K>>,
) -> DepNodeIndex,
hash_result: impl FnOnce(&mut StableHashingContext<'_>, &R) -> Option<Fingerprint>,
hash_result: impl FnOnce(&mut H, &R) -> Option<Fingerprint>,
) -> (R, DepNodeIndex)
where
C: DepGraphSafe + StableHashingContextProvider<'a>,
C: DepGraphSafe + HashStableContextProvider<H>,
H: HashStableContext,
{
if let Some(ref data) = self.data {
let task_deps = create_task(key).map(Lock::new);
@ -257,12 +254,7 @@ impl DepGraph {
let result = if no_tcx {
task(cx, arg)
} else {
ty::tls::with_context(|icx| {
let icx =
ty::tls::ImplicitCtxt { task_deps: task_deps.as_ref(), ..icx.clone() };
ty::tls::enter_context(&icx, |_| task(cx, arg))
})
K::with_deps(task_deps.as_ref(), || task(cx, arg))
};
let current_fingerprint = hash_result(&mut hcx, &result);
@ -274,7 +266,7 @@ impl DepGraph {
task_deps.map(|lock| lock.into_inner()),
);
let print_status = cfg!(debug_assertions) && hcx.sess().opts.debugging_opts.dep_tasks;
let print_status = cfg!(debug_assertions) && hcx.debug_dep_tasks();
// Determine the color of the new DepNode.
if let Some(prev_index) = data.previous.node_to_index_opt(&key) {
@ -322,22 +314,16 @@ impl DepGraph {
/// Executes something within an "anonymous" task, that is, a task the
/// `DepNode` of which is determined by the list of inputs it read from.
pub fn with_anon_task<OP, R>(&self, dep_kind: DepKind, op: OP) -> (R, DepNodeIndex)
pub fn with_anon_task<OP, R>(&self, dep_kind: K, op: OP) -> (R, DepNodeIndex)
where
OP: FnOnce() -> R,
{
if let Some(ref data) = self.data {
let (result, task_deps) = ty::tls::with_context(|icx| {
let task_deps = Lock::new(TaskDeps::default());
let r = {
let icx = ty::tls::ImplicitCtxt { task_deps: Some(&task_deps), ..icx.clone() };
let result = K::with_deps(Some(&task_deps), op);
let task_deps = task_deps.into_inner();
ty::tls::enter_context(&icx, |_| op())
};
(r, task_deps.into_inner())
});
let dep_node_index = data.current.complete_anon_task(dep_kind, task_deps);
(result, dep_node_index)
} else {
@ -347,16 +333,17 @@ impl DepGraph {
/// Executes something within an "eval-always" task which is a task
/// that runs whenever anything changes.
pub fn with_eval_always_task<'a, C, A, R>(
pub fn with_eval_always_task<H, C, A, R>(
&self,
key: DepNode,
key: DepNode<K>,
cx: C,
arg: A,
task: fn(C, A) -> R,
hash_result: impl FnOnce(&mut StableHashingContext<'_>, &R) -> Option<Fingerprint>,
hash_result: impl FnOnce(&mut H, &R) -> Option<Fingerprint>,
) -> (R, DepNodeIndex)
where
C: DepGraphSafe + StableHashingContextProvider<'a>,
C: DepGraphSafe + HashStableContextProvider<H>,
H: HashStableContext,
{
self.with_task_impl(
key,
@ -371,7 +358,7 @@ impl DepGraph {
}
#[inline]
pub fn read(&self, v: DepNode) {
pub fn read(&self, v: DepNode<K>) {
if let Some(ref data) = self.data {
let map = data.current.node_to_node_index.get_shard_by_value(&v).lock();
if let Some(dep_node_index) = map.get(&v).copied() {
@ -391,7 +378,7 @@ impl DepGraph {
}
#[inline]
pub fn dep_node_index_of(&self, dep_node: &DepNode) -> DepNodeIndex {
pub fn dep_node_index_of(&self, dep_node: &DepNode<K>) -> DepNodeIndex {
self.data
.as_ref()
.unwrap()
@ -405,7 +392,7 @@ impl DepGraph {
}
#[inline]
pub fn dep_node_exists(&self, dep_node: &DepNode) -> bool {
pub fn dep_node_exists(&self, dep_node: &DepNode<K>) -> bool {
if let Some(ref data) = self.data {
data.current
.node_to_node_index
@ -423,12 +410,12 @@ impl DepGraph {
data[dep_node_index].fingerprint
}
pub fn prev_fingerprint_of(&self, dep_node: &DepNode) -> Option<Fingerprint> {
pub fn prev_fingerprint_of(&self, dep_node: &DepNode<K>) -> Option<Fingerprint> {
self.data.as_ref().unwrap().previous.fingerprint_of(dep_node)
}
#[inline]
pub fn prev_dep_node_index_of(&self, dep_node: &DepNode) -> SerializedDepNodeIndex {
pub fn prev_dep_node_index_of(&self, dep_node: &DepNode<K>) -> SerializedDepNodeIndex {
self.data.as_ref().unwrap().previous.node_to_index(dep_node)
}
@ -445,7 +432,7 @@ impl DepGraph {
}
#[inline(always)]
pub fn register_dep_node_debug_str<F>(&self, dep_node: DepNode, debug_str_gen: F)
pub fn register_dep_node_debug_str<F>(&self, dep_node: DepNode<K>, debug_str_gen: F)
where
F: FnOnce() -> String,
{
@ -458,7 +445,7 @@ impl DepGraph {
dep_node_debug.borrow_mut().insert(dep_node, debug_str);
}
pub(super) fn dep_node_debug_str(&self, dep_node: DepNode) -> Option<String> {
pub fn dep_node_debug_str(&self, dep_node: DepNode<K>) -> Option<String> {
self.data.as_ref()?.dep_node_debug.borrow().get(&dep_node).cloned()
}
@ -475,7 +462,7 @@ impl DepGraph {
}
}
pub fn serialize(&self) -> SerializedDepGraph {
pub fn serialize(&self) -> SerializedDepGraph<K> {
let data = self.data.as_ref().unwrap().current.data.lock();
let fingerprints: IndexVec<SerializedDepNodeIndex, _> =
@ -503,7 +490,7 @@ impl DepGraph {
SerializedDepGraph { nodes, fingerprints, edge_list_indices, edge_list_data }
}
pub fn node_color(&self, dep_node: &DepNode) -> Option<DepNodeColor> {
pub fn node_color(&self, dep_node: &DepNode<K>) -> Option<DepNodeColor> {
if let Some(ref data) = self.data {
if let Some(prev_index) = data.previous.node_to_index_opt(dep_node) {
return data.colors.get(prev_index);
@ -521,10 +508,10 @@ impl DepGraph {
/// A node will have an index, when it's already been marked green, or when we can mark it
/// green. This function will mark the current task as a reader of the specified node, when
/// a node index can be found for that node.
pub fn try_mark_green_and_read(
pub fn try_mark_green_and_read<Ctxt: DepContext<DepKind = K>>(
&self,
tcx: TyCtxt<'_>,
dep_node: &DepNode,
tcx: Ctxt,
dep_node: &DepNode<K>,
) -> Option<(SerializedDepNodeIndex, DepNodeIndex)> {
self.try_mark_green(tcx, dep_node).map(|(prev_index, dep_node_index)| {
debug_assert!(self.is_green(&dep_node));
@ -533,10 +520,10 @@ impl DepGraph {
})
}
pub fn try_mark_green(
pub fn try_mark_green<Ctxt: DepContext<DepKind = K>>(
&self,
tcx: TyCtxt<'_>,
dep_node: &DepNode,
tcx: Ctxt,
dep_node: &DepNode<K>,
) -> Option<(SerializedDepNodeIndex, DepNodeIndex)> {
debug_assert!(!dep_node.kind.is_eval_always());
@ -561,12 +548,12 @@ impl DepGraph {
}
/// Try to mark a dep-node which existed in the previous compilation session as green.
fn try_mark_previous_green<'tcx>(
fn try_mark_previous_green<Ctxt: DepContext<DepKind = K>>(
&self,
tcx: TyCtxt<'tcx>,
data: &DepGraphData,
tcx: Ctxt,
data: &DepGraphData<K>,
prev_dep_node_index: SerializedDepNodeIndex,
dep_node: &DepNode,
dep_node: &DepNode<K>,
) -> Option<DepNodeIndex> {
debug!("try_mark_previous_green({:?}) - BEGIN", dep_node);
@ -649,49 +636,7 @@ impl DepGraph {
continue;
}
} else {
// FIXME: This match is just a workaround for incremental bugs and should
// be removed. https://github.com/rust-lang/rust/issues/62649 is one such
// bug that must be fixed before removing this.
match dep_dep_node.kind {
DepKind::hir_owner
| DepKind::hir_owner_nodes
| DepKind::CrateMetadata => {
if let Some(def_id) = dep_dep_node.extract_def_id(tcx) {
if def_id_corresponds_to_hir_dep_node(tcx, def_id) {
if dep_dep_node.kind == DepKind::CrateMetadata {
// The `DefPath` has corresponding node,
// and that node should have been marked
// either red or green in `data.colors`.
bug!(
"DepNode {:?} should have been \
pre-marked as red or green but wasn't.",
dep_dep_node
);
}
} else {
// This `DefPath` does not have a
// corresponding `DepNode` (e.g. a
// struct field), and the `DefPath`
// collided with the `DefPath` of a
// proper item that existed in the
// previous compilation session.
//
// Since the given `DefPath` does not
// denote the item that previously
// existed, we just fail to mark green.
return None;
}
} else {
// If the node does not exist anymore, we
// just fail to mark green.
return None;
}
}
_ => {
// For other kinds of nodes it's OK to be
// forced.
}
}
tcx.ensure_node_can_be_forced(dep_dep_node)?;
}
// We failed to mark it green, so we try to force the query.
@ -700,7 +645,7 @@ impl DepGraph {
dependency {:?}",
dep_node, dep_dep_node
);
if crate::ty::query::force_from_dep_node(tcx, dep_dep_node) {
if tcx.force_from_dep_node(dep_dep_node) {
let dep_dep_node_color = data.colors.get(dep_dep_node_index);
match dep_dep_node_color {
@ -721,7 +666,7 @@ impl DepGraph {
return None;
}
None => {
if !tcx.sess.has_errors_or_delayed_span_bugs() {
if !tcx.has_errors_or_delayed_span_bugs() {
bug!(
"try_mark_previous_green() - Forcing the DepNode \
should have set its color"
@ -779,7 +724,7 @@ impl DepGraph {
// FIXME: Store the fact that a node has diagnostics in a bit in the dep graph somewhere
// Maybe store a list on disk and encode this fact in the DepNodeState
let diagnostics = tcx.queries.on_disk_cache.load_diagnostics(tcx, prev_dep_node_index);
let diagnostics = tcx.load_diagnostics(prev_dep_node_index);
#[cfg(not(parallel_compiler))]
debug_assert!(
@ -805,10 +750,10 @@ impl DepGraph {
/// This may be called concurrently on multiple threads for the same dep node.
#[cold]
#[inline(never)]
fn emit_diagnostics<'tcx>(
fn emit_diagnostics<Ctxt: DepContext<DepKind = K>>(
&self,
tcx: TyCtxt<'tcx>,
data: &DepGraphData,
tcx: Ctxt,
data: &DepGraphData<K>,
dep_node_index: DepNodeIndex,
prev_dep_node_index: SerializedDepNodeIndex,
diagnostics: Vec<Diagnostic>,
@ -827,9 +772,9 @@ impl DepGraph {
mem::drop(emitting);
// Promote the previous diagnostics to the current session.
tcx.queries.on_disk_cache.store_diagnostics(dep_node_index, diagnostics.clone().into());
tcx.store_diagnostics(dep_node_index, diagnostics.clone().into());
let handle = tcx.sess.diagnostic();
let handle = tcx.diagnostic();
for diagnostic in diagnostics {
handle.emit_diagnostic(&diagnostic);
@ -858,7 +803,7 @@ impl DepGraph {
// Returns true if the given node has been marked as green during the
// current compilation session. Used in various assertions
pub fn is_green(&self, dep_node: &DepNode) -> bool {
pub fn is_green(&self, dep_node: &DepNode<K>) -> bool {
self.node_color(dep_node).map(|c| c.is_green()).unwrap_or(false)
}
@ -870,15 +815,15 @@ impl DepGraph {
//
// This method will only load queries that will end up in the disk cache.
// Other queries will not be executed.
pub fn exec_cache_promotions(&self, tcx: TyCtxt<'_>) {
let _prof_timer = tcx.prof.generic_activity("incr_comp_query_cache_promotion");
pub fn exec_cache_promotions<Ctxt: DepContext<DepKind = K>>(&self, tcx: Ctxt) {
let _prof_timer = tcx.profiler().generic_activity("incr_comp_query_cache_promotion");
let data = self.data.as_ref().unwrap();
for prev_index in data.colors.values.indices() {
match data.colors.get(prev_index) {
Some(DepNodeColor::Green(_)) => {
let dep_node = data.previous.index_to_node(prev_index);
dep_node.try_load_from_on_disk_cache(tcx);
tcx.try_load_from_on_disk_cache(&dep_node);
}
None | Some(DepNodeColor::Red) => {
// We can skip red nodes because a node can only be marked
@ -895,11 +840,6 @@ impl DepGraph {
}
}
fn def_id_corresponds_to_hir_dep_node(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
def_id.index == hir_id.owner.local_def_index
}
/// A "work product" is an intermediate result that we save into the
/// incremental directory for later re-use. The primary examples are
/// the object files that we save for each partition at code
@ -946,8 +886,8 @@ pub enum WorkProductFileKind {
}
#[derive(Clone)]
struct DepNodeData {
node: DepNode,
struct DepNodeData<K> {
node: DepNode<K>,
edges: EdgesVec,
fingerprint: Fingerprint,
}
@ -967,9 +907,9 @@ struct DepNodeData {
/// The only operation that must manipulate both locks is adding new nodes, in which case
/// we first acquire the `node_to_node_index` lock and then, once a new node is to be inserted,
/// acquire the lock on `data`.
pub(super) struct CurrentDepGraph {
data: Lock<IndexVec<DepNodeIndex, DepNodeData>>,
node_to_node_index: Sharded<FxHashMap<DepNode, DepNodeIndex>>,
pub(super) struct CurrentDepGraph<K> {
data: Lock<IndexVec<DepNodeIndex, DepNodeData<K>>>,
node_to_node_index: Sharded<FxHashMap<DepNode<K>, DepNodeIndex>>,
/// Used to trap when a specific edge is added to the graph.
/// This is used for debug purposes and is only active with `debug_assertions`.
@ -995,8 +935,8 @@ pub(super) struct CurrentDepGraph {
total_duplicate_read_count: AtomicU64,
}
impl CurrentDepGraph {
fn new(prev_graph_node_count: usize) -> CurrentDepGraph {
impl<K: DepKind> CurrentDepGraph<K> {
fn new(prev_graph_node_count: usize) -> CurrentDepGraph<K> {
use std::time::{SystemTime, UNIX_EPOCH};
let duration = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
@ -1039,14 +979,14 @@ impl CurrentDepGraph {
fn complete_task(
&self,
node: DepNode,
task_deps: TaskDeps,
node: DepNode<K>,
task_deps: TaskDeps<K>,
fingerprint: Fingerprint,
) -> DepNodeIndex {
self.alloc_node(node, task_deps.reads, fingerprint)
}
fn complete_anon_task(&self, kind: DepKind, task_deps: TaskDeps) -> DepNodeIndex {
fn complete_anon_task(&self, kind: K, task_deps: TaskDeps<K>) -> DepNodeIndex {
debug_assert!(!kind.is_eval_always());
let mut hasher = StableHasher::new();
@ -1072,7 +1012,7 @@ impl CurrentDepGraph {
fn alloc_node(
&self,
dep_node: DepNode,
dep_node: DepNode<K>,
edges: EdgesVec,
fingerprint: Fingerprint,
) -> DepNodeIndex {
@ -1084,7 +1024,7 @@ impl CurrentDepGraph {
fn intern_node(
&self,
dep_node: DepNode,
dep_node: DepNode<K>,
edges: EdgesVec,
fingerprint: Fingerprint,
) -> DepNodeIndex {
@ -1101,12 +1041,11 @@ impl CurrentDepGraph {
}
}
impl DepGraphData {
impl<K: DepKind> DepGraphData<K> {
#[inline(never)]
fn read_index(&self, source: DepNodeIndex) {
ty::tls::with_context_opt(|icx| {
let icx = if let Some(icx) = icx { icx } else { return };
if let Some(task_deps) = icx.task_deps {
K::read_deps(|task_deps| {
if let Some(task_deps) = task_deps {
let mut task_deps = task_deps.lock();
let task_deps = &mut *task_deps;
if cfg!(debug_assertions) {
@ -1151,12 +1090,25 @@ impl DepGraphData {
/// The capacity of the `reads` field `SmallVec`
const TASK_DEPS_READS_CAP: usize = 8;
type EdgesVec = SmallVec<[DepNodeIndex; TASK_DEPS_READS_CAP]>;
#[derive(Default)]
pub struct TaskDeps {
pub struct TaskDeps<K> {
#[cfg(debug_assertions)]
node: Option<DepNode>,
node: Option<DepNode<K>>,
reads: EdgesVec,
read_set: FxHashSet<DepNodeIndex>,
phantom_data: std::marker::PhantomData<DepNode<K>>,
}
impl<K> Default for TaskDeps<K> {
fn default() -> Self {
Self {
#[cfg(debug_assertions)]
node: None,
reads: EdgesVec::new(),
read_set: FxHashSet::default(),
phantom_data: std::marker::PhantomData,
}
}
}
// A data structure that stores Option<DepNodeColor> values as a contiguous
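The comment on `CurrentDepGraph` above spells out the locking rule: `node_to_node_index` is taken first, and `data` is locked only once a genuinely new node must be appended. A self-contained miniature of that discipline, with plain `std` types standing in for `Sharded`/`Lock` (names and types here are illustrative only):

use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::sync::Mutex;

struct MiniGraph {
    node_to_index: Mutex<HashMap<&'static str, usize>>,
    data: Mutex<Vec<&'static str>>,
}

impl MiniGraph {
    fn intern(&self, node: &'static str) -> usize {
        // Lock the index map first; the `data` lock is only taken in the
        // vacant case, matching the documented lock order.
        match self.node_to_index.lock().unwrap().entry(node) {
            Entry::Occupied(e) => *e.get(),
            Entry::Vacant(e) => {
                let mut data = self.data.lock().unwrap();
                let index = data.len();
                data.push(node);
                e.insert(index);
                index
            }
        }
    }
}

fn main() {
    let g = MiniGraph { node_to_index: Mutex::new(HashMap::new()), data: Mutex::new(Vec::new()) };
    assert_eq!(g.intern("typeck"), 0);
    assert_eq!(g.intern("typeck"), 0);
    assert_eq!(g.intern("mir_built"), 1);
}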

View File

@ -6,12 +6,94 @@ mod query;
mod safe;
mod serialized;
pub(crate) use self::dep_node::DepNodeParams;
pub use self::dep_node::{label_strs, DepConstructor, DepKind, DepNode, WorkProductId};
pub use self::graph::WorkProductFileKind;
pub use self::graph::{hash_result, DepGraph, DepNodeColor, DepNodeIndex, TaskDeps, WorkProduct};
pub use self::prev::PreviousDepGraph;
pub use self::query::DepGraphQuery;
pub use self::safe::AssertDepGraphSafe;
pub use self::safe::DepGraphSafe;
pub use self::serialized::{SerializedDepGraph, SerializedDepNodeIndex};
pub use dep_node::{DepNode, DepNodeParams, WorkProductId};
pub use graph::WorkProductFileKind;
pub use graph::{hash_result, DepGraph, DepNodeColor, DepNodeIndex, TaskDeps, WorkProduct};
pub use prev::PreviousDepGraph;
pub use query::DepGraphQuery;
pub use safe::AssertDepGraphSafe;
pub use safe::DepGraphSafe;
pub use serialized::{SerializedDepGraph, SerializedDepNodeIndex};
use rustc_data_structures::profiling::SelfProfilerRef;
use rustc_data_structures::sync::Lock;
use rustc_data_structures::thin_vec::ThinVec;
use rustc_errors::Diagnostic;
use rustc_hir::def_id::DefId;
use std::fmt;
use std::hash::Hash;
pub trait DepContext: Copy {
type DepKind: self::DepKind;
type StableHashingContext: crate::HashStableContext;
/// Create a hashing context for hashing new results.
fn create_stable_hashing_context(&self) -> Self::StableHashingContext;
/// Force the execution of a query given the associated `DepNode`.
fn force_from_dep_node(&self, node: &DepNode<Self::DepKind>) -> bool;
/// Extracts the DefId corresponding to this DepNode. This will work
/// if two conditions are met:
///
/// 1. The Fingerprint of the DepNode actually is a DefPathHash, and
/// 2. the item that the DefPath refers to exists in the current tcx.
///
/// Condition (1) is determined by the DepKind variant of the
/// DepNode. Condition (2) might not be fulfilled if a DepNode
/// refers to something from the previous compilation session that
/// has been removed.
fn extract_def_id(&self, node: &DepNode<Self::DepKind>) -> Option<DefId>;
/// Check the legality of forcing this node.
fn ensure_node_can_be_forced(&self, dep_dep_node: &DepNode<Self::DepKind>) -> Option<()>;
/// Return whether the current session is tainted by errors.
fn has_errors_or_delayed_span_bugs(&self) -> bool;
/// Return the diagnostic handler.
fn diagnostic(&self) -> &rustc_errors::Handler;
/// Load data from the on-disk cache.
fn try_load_from_on_disk_cache(&self, dep_node: &DepNode<Self::DepKind>);
/// Load the diagnostics associated with this node in the previous session.
fn load_diagnostics(&self, prev_dep_node_index: SerializedDepNodeIndex) -> Vec<Diagnostic>;
/// Register diagnostics for the given node, for use in the next session.
fn store_diagnostics(&self, dep_node_index: DepNodeIndex, diagnostics: ThinVec<Diagnostic>);
/// Access the profiler.
fn profiler(&self) -> &SelfProfilerRef;
}
/// Describe the different families of dependency nodes.
pub trait DepKind: Copy + fmt::Debug + Eq + Ord + Hash {
/// Return whether this kind always requires evaluation.
fn is_eval_always(&self) -> bool;
/// Return whether this kind requires additional parameters to be executed.
fn has_params(&self) -> bool;
/// Implementation of `std::fmt::Debug` for `DepNode`.
fn debug_node(node: &DepNode<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result;
/// Assert the current implicit context does not track any dependency.
fn assert_ignored();
/// Execute the operation ignoring the dependencies.
fn with_ignore_deps<OP, R>(op: OP) -> R
where
OP: FnOnce() -> R;
/// Execute the operation with provided dependencies.
fn with_deps<OP, R>(deps: Option<&Lock<TaskDeps<Self>>>, op: OP) -> R
where
OP: FnOnce() -> R;
/// Access dependencies from the current implicit context.
fn read_deps<OP>(op: OP) -> ()
where
OP: for<'a> FnOnce(Option<&'a Lock<TaskDeps<Self>>>) -> ();
}
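These two traits are the seam between the generic graph and the compiler: `DepContext` is what `TyCtxt` is expected to implement, and `DepKind` is where the `ty::tls` handling deleted from `graph.rs` above resurfaces. A rough, non-standalone sketch of that glue on the `librustc` side, reusing the removed code (paths and the exact follow-up are assumptions, not part of this commit):

impl rustc_query_system::dep_graph::DepKind for DepKind {
    // `is_eval_always`, `has_params` and `debug_node` forward to the
    // inherent methods generated by `define_dep_nodes!()` (sketch only).

    fn assert_ignored() {
        ty::tls::with_context_opt(|icx| {
            let icx = if let Some(icx) = icx { icx } else { return };
            assert!(icx.task_deps.is_none(), "expected no task dependency tracking");
        })
    }

    fn with_deps<OP, R>(task_deps: Option<&Lock<TaskDeps<Self>>>, op: OP) -> R
    where
        OP: FnOnce() -> R,
    {
        ty::tls::with_context(|icx| {
            let icx = ty::tls::ImplicitCtxt { task_deps, ..icx.clone() };
            ty::tls::enter_context(&icx, |_| op())
        })
    }

    fn read_deps<OP>(op: OP)
    where
        OP: for<'a> FnOnce(Option<&'a Lock<TaskDeps<Self>>>),
    {
        ty::tls::with_context_opt(|icx| match icx {
            Some(icx) => op(icx.task_deps),
            None => op(None),
        })
    }

    // `with_ignore_deps` follows the same pattern with `task_deps: None`.
}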

View File

@ -1,16 +1,22 @@
use super::dep_node::DepNode;
use super::serialized::{SerializedDepGraph, SerializedDepNodeIndex};
use crate::ich::Fingerprint;
use super::{DepKind, DepNode};
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::fx::FxHashMap;
#[derive(Debug, RustcEncodable, RustcDecodable, Default)]
pub struct PreviousDepGraph {
data: SerializedDepGraph,
index: FxHashMap<DepNode, SerializedDepNodeIndex>,
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct PreviousDepGraph<K: DepKind> {
data: SerializedDepGraph<K>,
index: FxHashMap<DepNode<K>, SerializedDepNodeIndex>,
}
impl PreviousDepGraph {
pub fn new(data: SerializedDepGraph) -> PreviousDepGraph {
impl<K: DepKind> Default for PreviousDepGraph<K> {
fn default() -> Self {
PreviousDepGraph { data: Default::default(), index: Default::default() }
}
}
impl<K: DepKind> PreviousDepGraph<K> {
pub fn new(data: SerializedDepGraph<K>) -> PreviousDepGraph<K> {
let index: FxHashMap<_, _> =
data.nodes.iter_enumerated().map(|(idx, &dep_node)| (dep_node, idx)).collect();
PreviousDepGraph { data, index }
@ -25,22 +31,22 @@ impl PreviousDepGraph {
}
#[inline]
pub fn index_to_node(&self, dep_node_index: SerializedDepNodeIndex) -> DepNode {
pub fn index_to_node(&self, dep_node_index: SerializedDepNodeIndex) -> DepNode<K> {
self.data.nodes[dep_node_index]
}
#[inline]
pub fn node_to_index(&self, dep_node: &DepNode) -> SerializedDepNodeIndex {
pub fn node_to_index(&self, dep_node: &DepNode<K>) -> SerializedDepNodeIndex {
self.index[dep_node]
}
#[inline]
pub fn node_to_index_opt(&self, dep_node: &DepNode) -> Option<SerializedDepNodeIndex> {
pub fn node_to_index_opt(&self, dep_node: &DepNode<K>) -> Option<SerializedDepNodeIndex> {
self.index.get(dep_node).cloned()
}
#[inline]
pub fn fingerprint_of(&self, dep_node: &DepNode) -> Option<Fingerprint> {
pub fn fingerprint_of(&self, dep_node: &DepNode<K>) -> Option<Fingerprint> {
self.index.get(dep_node).map(|&node_index| self.data.fingerprints[node_index])
}

View File

@ -3,15 +3,15 @@ use rustc_data_structures::graph::implementation::{
Direction, Graph, NodeIndex, INCOMING, OUTGOING,
};
use super::DepNode;
use super::{DepKind, DepNode};
pub struct DepGraphQuery {
pub graph: Graph<DepNode, ()>,
pub indices: FxHashMap<DepNode, NodeIndex>,
pub struct DepGraphQuery<K> {
pub graph: Graph<DepNode<K>, ()>,
pub indices: FxHashMap<DepNode<K>, NodeIndex>,
}
impl DepGraphQuery {
pub fn new(nodes: &[DepNode], edges: &[(DepNode, DepNode)]) -> DepGraphQuery {
impl<K: DepKind> DepGraphQuery<K> {
pub fn new(nodes: &[DepNode<K>], edges: &[(DepNode<K>, DepNode<K>)]) -> DepGraphQuery<K> {
let mut graph = Graph::with_capacity(nodes.len(), edges.len());
let mut indices = FxHashMap::default();
for node in nodes {
@ -27,15 +27,15 @@ impl DepGraphQuery {
DepGraphQuery { graph, indices }
}
pub fn contains_node(&self, node: &DepNode) -> bool {
pub fn contains_node(&self, node: &DepNode<K>) -> bool {
self.indices.contains_key(&node)
}
pub fn nodes(&self) -> Vec<&DepNode> {
pub fn nodes(&self) -> Vec<&DepNode<K>> {
self.graph.all_nodes().iter().map(|n| &n.data).collect()
}
pub fn edges(&self) -> Vec<(&DepNode, &DepNode)> {
pub fn edges(&self) -> Vec<(&DepNode<K>, &DepNode<K>)> {
self.graph
.all_edges()
.iter()
@ -44,7 +44,7 @@ impl DepGraphQuery {
.collect()
}
fn reachable_nodes(&self, node: &DepNode, direction: Direction) -> Vec<&DepNode> {
fn reachable_nodes(&self, node: &DepNode<K>, direction: Direction) -> Vec<&DepNode<K>> {
if let Some(&index) = self.indices.get(node) {
self.graph.depth_traverse(index, direction).map(|s| self.graph.node_data(s)).collect()
} else {
@ -54,17 +54,17 @@ impl DepGraphQuery {
/// All nodes reachable from `node`. In other words, things that
/// will have to be recomputed if `node` changes.
pub fn transitive_successors(&self, node: &DepNode) -> Vec<&DepNode> {
pub fn transitive_successors(&self, node: &DepNode<K>) -> Vec<&DepNode<K>> {
self.reachable_nodes(node, OUTGOING)
}
/// All nodes that can reach `node`.
pub fn transitive_predecessors(&self, node: &DepNode) -> Vec<&DepNode> {
pub fn transitive_predecessors(&self, node: &DepNode<K>) -> Vec<&DepNode<K>> {
self.reachable_nodes(node, INCOMING)
}
/// Just the outgoing edges from `node`.
pub fn immediate_successors(&self, node: &DepNode) -> Vec<&DepNode> {
pub fn immediate_successors(&self, node: &DepNode<K>) -> Vec<&DepNode<K>> {
if let Some(&index) = self.indices.get(&node) {
self.graph.successor_nodes(index).map(|s| self.graph.node_data(s)).collect()
} else {

View File

@ -1,7 +1,5 @@
//! The `DepGraphSafe` trait
use crate::ty::TyCtxt;
use rustc_ast::ast::NodeId;
use rustc_hir::def_id::DefId;
use rustc_hir::BodyId;
@ -28,10 +26,6 @@ impl DepGraphSafe for NodeId {}
/// on-demand queries, all of which create reads.
impl DepGraphSafe for DefId {}
/// The type context itself can be used to access all kinds of tracked
/// state, but those accesses should always generate read events.
impl<'tcx> DepGraphSafe for TyCtxt<'tcx> {}
/// Tuples make it easy to build up state.
impl<A, B> DepGraphSafe for (A, B)
where

View File

@ -1,7 +1,7 @@
//! The data that we will serialize and deserialize.
use crate::dep_graph::DepNode;
use crate::ich::Fingerprint;
use super::{DepKind, DepNode};
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_index::vec::IndexVec;
rustc_index::newtype_index! {
@ -9,10 +9,10 @@ rustc_index::newtype_index! {
}
/// Data for use when recompiling the **current crate**.
#[derive(Debug, RustcEncodable, RustcDecodable, Default)]
pub struct SerializedDepGraph {
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct SerializedDepGraph<K: DepKind> {
/// The set of all DepNodes in the graph
pub nodes: IndexVec<SerializedDepNodeIndex, DepNode>,
pub nodes: IndexVec<SerializedDepNodeIndex, DepNode<K>>,
/// The set of all Fingerprints in the graph. Each Fingerprint corresponds to
/// the DepNode at the same index in the nodes vector.
pub fingerprints: IndexVec<SerializedDepNodeIndex, Fingerprint>,
@ -25,7 +25,18 @@ pub struct SerializedDepGraph {
pub edge_list_data: Vec<SerializedDepNodeIndex>,
}
impl SerializedDepGraph {
impl<K: DepKind> Default for SerializedDepGraph<K> {
fn default() -> Self {
SerializedDepGraph {
nodes: Default::default(),
fingerprints: Default::default(),
edge_list_indices: Default::default(),
edge_list_data: Default::default(),
}
}
}
impl<K: DepKind> SerializedDepGraph<K> {
#[inline]
pub fn edge_targets_from(&self, source: SerializedDepNodeIndex) -> &[SerializedDepNodeIndex] {
let targets = self.edge_list_indices[source];
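For reference, the edge storage is a compressed adjacency layout: `edge_list_indices` keeps a per-node `(start, end)` range into the flat `edge_list_data` vector, and `edge_targets_from` just slices that range. A tiny worked example with plain integers standing in for `SerializedDepNodeIndex` (values are made up for illustration):

fn main() {
    // Hypothetical graph: node 0 -> {2, 3}, node 1 -> {0}, node 2 -> {}.
    let edge_list_indices: Vec<(u32, u32)> = vec![(0, 2), (2, 3), (3, 3)];
    let edge_list_data: Vec<u32> = vec![2, 3, 0];

    // Equivalent of `edge_targets_from(1)`.
    let (start, end) = edge_list_indices[1];
    let targets = &edge_list_data[start as usize..end as usize];
    assert_eq!(targets.len(), 1);
    assert_eq!(targets[0], 0);
}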

View File

@ -0,0 +1,32 @@
#![feature(const_fn)]
#![feature(const_if_match)]
#![feature(const_panic)]
#![feature(core_intrinsics)]
#![feature(specialization)]
#![feature(stmt_expr_attributes)]
#[macro_use]
extern crate log;
pub mod dep_graph;
pub trait HashStableContext {
fn debug_dep_tasks(&self) -> bool;
}
/// Something that can provide a stable hashing context.
pub trait HashStableContextProvider<Ctxt> {
fn get_stable_hashing_context(&self) -> Ctxt;
}
impl<Ctxt, T: HashStableContextProvider<Ctxt>> HashStableContextProvider<Ctxt> for &T {
fn get_stable_hashing_context(&self) -> Ctxt {
(**self).get_stable_hashing_context()
}
}
impl<Ctxt, T: HashStableContextProvider<Ctxt>> HashStableContextProvider<Ctxt> for &mut T {
fn get_stable_hashing_context(&self) -> Ctxt {
(**self).get_stable_hashing_context()
}
}
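The blanket impls above let a hashing-context provider be passed either by value or behind a reference. A self-contained toy showing why that matters for generic callers such as `DepGraph::with_task` (the `Session` and `Hcx` types are made up for illustration):

use rustc_query_system::HashStableContextProvider;

struct Hcx;
struct Session;

impl HashStableContextProvider<Hcx> for Session {
    fn get_stable_hashing_context(&self) -> Hcx {
        Hcx
    }
}

fn hash_something<H, P: HashStableContextProvider<H>>(provider: P) -> H {
    provider.get_stable_hashing_context()
}

fn main() {
    let session = Session;
    // The `&T` blanket impl makes a borrow work just as well as a move.
    let _hcx: Hcx = hash_something(&session);
    let _hcx: Hcx = hash_something(session);
}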