Auto merge of #44696 - michaelwoerister:fingerprints-in-dep-graph-3, r=nikomatsakis

incr.comp.: Move task result fingerprinting into DepGraph.

This PR
- makes the DepGraph store all `Fingerprints` of task results,
- allows `DepNode` to be marked as input nodes,
- makes HIR node hashing use the regular fingerprinting infrastructure,
- removes the now unused `IncrementalHashesMap`, and
- makes sure that `traits_in_scope_map` fingerprints are stable.

r? @nikomatsakis
cc @alexcrichton
This commit is contained in:
bors 2017-09-22 17:24:29 +00:00
commit 14039a42ac
26 changed files with 416 additions and 514 deletions

View File

@ -80,14 +80,28 @@ macro_rules! erase {
($x:tt) => ({})
}
macro_rules! anon_attr_to_bool {
(anon) => (true)
macro_rules! is_anon_attr {
(anon) => (true);
($attr:ident) => (false);
}
// Expands to `true` when the attribute token is exactly `input`, and to
// `false` for any other identifier. Used (via `contains_input_attr!`) to
// decide whether a DepNode variant was declared with the `[input]` attribute.
macro_rules! is_input_attr {
(input) => (true);
($attr:ident) => (false);
}
// Expands to a boolean expression that is `true` iff any of the listed
// attribute tokens is `anon`. The trailing `| false` makes the expansion
// well-formed for an empty attribute list.
macro_rules! contains_anon_attr {
($($attr:ident),*) => ({$(is_anon_attr!($attr) | )* false});
}
// Expands to a boolean expression that is `true` iff any of the listed
// attribute tokens is `input`. The trailing `| false` makes the expansion
// well-formed for an empty attribute list.
macro_rules! contains_input_attr {
($($attr:ident),*) => ({$(is_input_attr!($attr) | )* false});
}
macro_rules! define_dep_nodes {
(<$tcx:tt>
$(
[$($anon:ident)*]
[$($attr:ident),* ]
$variant:ident $(( $($tuple_arg:tt),* ))*
$({ $($struct_arg_name:ident : $struct_arg_ty:ty),* })*
,)*
@ -105,7 +119,9 @@ macro_rules! define_dep_nodes {
match *self {
$(
DepKind :: $variant => {
$(return !anon_attr_to_bool!($anon);)*
if contains_anon_attr!($($attr),*) {
return false;
}
// tuple args
$({
@ -126,15 +142,20 @@ macro_rules! define_dep_nodes {
}
}
#[allow(unreachable_code)]
#[inline]
pub fn is_anon<$tcx>(&self) -> bool {
pub fn is_anon(&self) -> bool {
match *self {
$(
DepKind :: $variant => {
$(return anon_attr_to_bool!($anon);)*
false
}
DepKind :: $variant => { contains_anon_attr!($($attr),*) }
)*
}
}
#[inline]
pub fn is_input(&self) -> bool {
match *self {
$(
DepKind :: $variant => { contains_input_attr!($($attr),*) }
)*
}
}
@ -366,6 +387,17 @@ impl DefId {
}
}
impl DepKind {
    /// Returns `true` for the node kinds whose task-result fingerprint is
    /// needed when computing the overall crate hash (`Krate` and `HirBody`);
    /// all other kinds return `false`.
    #[inline]
    pub fn fingerprint_needed_for_crate_hash(self) -> bool {
        matches!(self, DepKind::HirBody | DepKind::Krate)
    }
}
define_dep_nodes!( <'tcx>
// Represents the `Krate` as a whole (the `hir::Krate` value) (as
// distinct from the krate module). This is basically a hash of
@ -378,18 +410,17 @@ define_dep_nodes!( <'tcx>
// suitable wrapper, you can use `tcx.dep_graph.ignore()` to gain
// access to the krate, but you must remember to add suitable
// edges yourself for the individual items that you read.
[] Krate,
// Represents the HIR node with the given node-id
[] Hir(DefId),
[input] Krate,
// Represents the body of a function or method. The def-id is that of the
// function/method.
[] HirBody(DefId),
[input] HirBody(DefId),
// Represents the metadata for a given HIR node, typically found
// in an extern crate.
[] MetaData(DefId),
// Represents the HIR node with the given node-id
[input] Hir(DefId),
// Represents metadata from an extern crate.
[input] MetaData(DefId),
// Represents some artifact that we save to disk. Note that these
// do not have a def-id as part of their identifier.
@ -529,7 +560,7 @@ define_dep_nodes!( <'tcx>
[] ExternCrate(DefId),
[] LintLevels,
[] Specializes { impl1: DefId, impl2: DefId },
[] InScopeTraits(DefIndex),
[input] InScopeTraits(DefIndex),
[] ModuleExports(DefId),
[] IsSanitizerRuntime(CrateNum),
[] IsProfilerRuntime(CrateNum),

View File

@ -123,6 +123,7 @@ impl DepGraphEdges {
reads
} = popped_node {
debug_assert_eq!(node, key);
debug_assert!(!node.kind.is_input() || reads.is_empty());
let target_id = self.get_or_create_node(node);

View File

@ -26,7 +26,16 @@ use super::edges::{DepGraphEdges, DepNodeIndex};
#[derive(Clone)]
pub struct DepGraph {
data: Option<Rc<DepGraphData>>
data: Option<Rc<DepGraphData>>,
// At the moment we are using DepNode as key here. In the future it might
// be possible to use an IndexVec<DepNodeIndex, _> here. At the moment there
// are a few problems with that:
// - Some fingerprints are needed even if incr. comp. is disabled -- yet
// we need to have a dep-graph to generate DepNodeIndices.
// - The architecture is still in flux and it's not clear how to best
// implement things.
fingerprints: Rc<RefCell<FxHashMap<DepNode, Fingerprint>>>
}
struct DepGraphData {
@ -57,7 +66,8 @@ impl DepGraph {
}))
} else {
None
}
},
fingerprints: Rc::new(RefCell::new(FxHashMap())),
}
}
@ -139,11 +149,27 @@ impl DepGraph {
let mut stable_hasher = StableHasher::new();
result.hash_stable(&mut hcx, &mut stable_hasher);
let _: Fingerprint = stable_hasher.finish();
assert!(self.fingerprints
.borrow_mut()
.insert(key, stable_hasher.finish())
.is_none());
(result, dep_node_index)
} else {
(task(cx, arg), DepNodeIndex::INVALID)
if key.kind.fingerprint_needed_for_crate_hash() {
let mut hcx = cx.create_stable_hashing_context();
let result = task(cx, arg);
let mut stable_hasher = StableHasher::new();
result.hash_stable(&mut hcx, &mut stable_hasher);
assert!(self.fingerprints
.borrow_mut()
.insert(key, stable_hasher.finish())
.is_none());
(result, DepNodeIndex::INVALID)
} else {
(task(cx, arg), DepNodeIndex::INVALID)
}
}
}
@ -195,6 +221,10 @@ impl DepGraph {
}
}
pub fn fingerprint_of(&self, dep_node: &DepNode) -> Option<Fingerprint> {
self.fingerprints.borrow().get(dep_node).cloned()
}
/// Indicates that a previous work product exists for `v`. This is
/// invoked during initial start-up based on what nodes are clean
/// (and what files exist in the incr. directory).

View File

@ -16,6 +16,9 @@ use std::iter::repeat;
use syntax::ast::{NodeId, CRATE_NODE_ID};
use syntax_pos::Span;
use ich::StableHashingContext;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult};
/// A Visitor that walks over the HIR and collects Nodes into a HIR map
pub(super) struct NodeCollector<'a, 'hir> {
/// The crate
@ -25,37 +28,104 @@ pub(super) struct NodeCollector<'a, 'hir> {
/// The parent of this node
parent_node: NodeId,
// These fields keep track of the currently relevant DepNodes during
// the visitor's traversal.
current_dep_node_owner: DefIndex,
current_dep_node_index: DepNodeIndex,
current_signature_dep_index: DepNodeIndex,
current_full_dep_index: DepNodeIndex,
currently_in_body: bool,
dep_graph: &'a DepGraph,
definitions: &'a definitions::Definitions,
hcx: StableHashingContext<'a>,
// We are collecting DepNode::HirBody hashes here so we can compute the
// crate hash from them later on.
hir_body_nodes: Vec<DefPathHash>,
}
impl<'a, 'hir> NodeCollector<'a, 'hir> {
pub(super) fn root(krate: &'hir Crate,
dep_graph: &'a DepGraph,
definitions: &'a definitions::Definitions)
dep_graph: &'a DepGraph,
definitions: &'a definitions::Definitions,
hcx: StableHashingContext<'a>)
-> NodeCollector<'a, 'hir> {
let root_mod_def_path_hash = definitions.def_path_hash(CRATE_DEF_INDEX);
let root_mod_dep_node = root_mod_def_path_hash.to_dep_node(DepKind::Hir);
let root_mod_dep_node_index = dep_graph.alloc_input_node(root_mod_dep_node);
// Allocate DepNodes for the root module
let (root_mod_sig_dep_index, root_mod_full_dep_index);
{
let Crate {
ref module,
// Crate attributes are not copied over to the root `Mod`, so hash
// them explicitly here.
ref attrs,
span,
// These fields are handled separately:
exported_macros: _,
items: _,
trait_items: _,
impl_items: _,
bodies: _,
trait_impls: _,
trait_default_impl: _,
body_ids: _,
} = *krate;
root_mod_sig_dep_index = dep_graph.with_task(
root_mod_def_path_hash.to_dep_node(DepKind::Hir),
&hcx,
HirItemLike { item_like: (module, attrs, span), hash_bodies: false },
identity_fn
).1;
root_mod_full_dep_index = dep_graph.with_task(
root_mod_def_path_hash.to_dep_node(DepKind::HirBody),
&hcx,
HirItemLike { item_like: (module, attrs, span), hash_bodies: true },
identity_fn
).1;
}
let hir_body_nodes = vec![root_mod_def_path_hash];
let mut collector = NodeCollector {
krate,
map: vec![],
parent_node: CRATE_NODE_ID,
current_dep_node_index: root_mod_dep_node_index,
current_signature_dep_index: root_mod_sig_dep_index,
current_full_dep_index: root_mod_full_dep_index,
current_dep_node_owner: CRATE_DEF_INDEX,
currently_in_body: false,
dep_graph,
definitions,
hcx,
hir_body_nodes,
};
collector.insert_entry(CRATE_NODE_ID, RootCrate(root_mod_dep_node_index));
collector.insert_entry(CRATE_NODE_ID, RootCrate(root_mod_sig_dep_index));
collector
}
pub(super) fn into_map(self) -> Vec<MapEntry<'hir>> {
pub(super) fn finalize_and_compute_crate_hash(self,
crate_disambiguator: &str)
-> Vec<MapEntry<'hir>> {
let mut node_hashes: Vec<_> = self
.hir_body_nodes
.iter()
.map(|&def_path_hash| {
let dep_node = def_path_hash.to_dep_node(DepKind::HirBody);
(def_path_hash, self.dep_graph.fingerprint_of(&dep_node))
})
.collect();
node_hashes.sort_unstable_by(|&(ref d1, _), &(ref d2, _)| d1.cmp(d2));
self.dep_graph.with_task(DepNode::new_no_params(DepKind::Krate),
&self.hcx,
(node_hashes, crate_disambiguator),
identity_fn);
self.map
}
@ -70,7 +140,11 @@ impl<'a, 'hir> NodeCollector<'a, 'hir> {
fn insert(&mut self, id: NodeId, node: Node<'hir>) {
let parent = self.parent_node;
let dep_node_index = self.current_dep_node_index;
let dep_node_index = if self.currently_in_body {
self.current_full_dep_index
} else {
self.current_signature_dep_index
};
let entry = match node {
NodeItem(n) => EntryItem(parent, dep_node_index, n),
@ -91,6 +165,7 @@ impl<'a, 'hir> NodeCollector<'a, 'hir> {
NodeTyParam(n) => EntryTyParam(parent, dep_node_index, n),
NodeVisibility(n) => EntryVisibility(parent, dep_node_index, n),
NodeLocal(n) => EntryLocal(parent, dep_node_index, n),
NodeMacroDef(n) => EntryMacroDef(dep_node_index, n),
};
// Make sure that the DepNode of some node coincides with the HirId
@ -127,22 +202,41 @@ impl<'a, 'hir> NodeCollector<'a, 'hir> {
self.parent_node = parent_node;
}
fn with_dep_node_owner<F: FnOnce(&mut Self)>(&mut self,
fn with_dep_node_owner<T: HashStable<StableHashingContext<'a>>,
F: FnOnce(&mut Self)>(&mut self,
dep_node_owner: DefIndex,
item_like: &T,
f: F) {
let prev_owner = self.current_dep_node_owner;
let prev_index = self.current_dep_node_index;
let prev_signature_dep_index = self.current_signature_dep_index;
let prev_full_dep_index = self.current_signature_dep_index;
let prev_in_body = self.currently_in_body;
let def_path_hash = self.definitions.def_path_hash(dep_node_owner);
self.current_signature_dep_index = self.dep_graph.with_task(
def_path_hash.to_dep_node(DepKind::Hir),
&self.hcx,
HirItemLike { item_like, hash_bodies: false },
identity_fn
).1;
self.current_full_dep_index = self.dep_graph.with_task(
def_path_hash.to_dep_node(DepKind::HirBody),
&self.hcx,
HirItemLike { item_like, hash_bodies: true },
identity_fn
).1;
self.hir_body_nodes.push(def_path_hash);
// When we enter a new owner (item, impl item, or trait item), we always
// start out again with DepKind::Hir.
let new_dep_node = self.definitions
.def_path_hash(dep_node_owner)
.to_dep_node(DepKind::Hir);
self.current_dep_node_index = self.dep_graph.alloc_input_node(new_dep_node);
self.current_dep_node_owner = dep_node_owner;
self.currently_in_body = false;
f(self);
self.current_dep_node_index = prev_index;
self.currently_in_body = prev_in_body;
self.current_dep_node_owner = prev_owner;
self.current_full_dep_index = prev_full_dep_index;
self.current_signature_dep_index = prev_signature_dep_index;
}
}
@ -169,24 +263,17 @@ impl<'a, 'hir> Visitor<'hir> for NodeCollector<'a, 'hir> {
}
fn visit_nested_body(&mut self, id: BodyId) {
// When we enter a body, we switch to DepKind::HirBody.
// Note that current_dep_node_index might already be DepKind::HirBody,
// e.g. when entering the body of a closure that is already part of a
// surrounding body. That's expected and not a problem.
let prev_index = self.current_dep_node_index;
let new_dep_node = self.definitions
.def_path_hash(self.current_dep_node_owner)
.to_dep_node(DepKind::HirBody);
self.current_dep_node_index = self.dep_graph.alloc_input_node(new_dep_node);
let prev_in_body = self.currently_in_body;
self.currently_in_body = true;
self.visit_body(self.krate.body(id));
self.current_dep_node_index = prev_index;
self.currently_in_body = prev_in_body;
}
fn visit_item(&mut self, i: &'hir Item) {
debug!("visit_item: {:?}", i);
debug_assert_eq!(i.hir_id.owner,
self.definitions.opt_def_index(i.id).unwrap());
self.with_dep_node_owner(i.hir_id.owner, |this| {
self.with_dep_node_owner(i.hir_id.owner, i, |this| {
this.insert(i.id, NodeItem(i));
this.with_parent(i.id, |this| {
match i.node {
@ -222,7 +309,7 @@ impl<'a, 'hir> Visitor<'hir> for NodeCollector<'a, 'hir> {
fn visit_trait_item(&mut self, ti: &'hir TraitItem) {
debug_assert_eq!(ti.hir_id.owner,
self.definitions.opt_def_index(ti.id).unwrap());
self.with_dep_node_owner(ti.hir_id.owner, |this| {
self.with_dep_node_owner(ti.hir_id.owner, ti, |this| {
this.insert(ti.id, NodeTraitItem(ti));
this.with_parent(ti.id, |this| {
@ -234,7 +321,7 @@ impl<'a, 'hir> Visitor<'hir> for NodeCollector<'a, 'hir> {
fn visit_impl_item(&mut self, ii: &'hir ImplItem) {
debug_assert_eq!(ii.hir_id.owner,
self.definitions.opt_def_index(ii.id).unwrap());
self.with_dep_node_owner(ii.hir_id.owner, |this| {
self.with_dep_node_owner(ii.hir_id.owner, ii, |this| {
this.insert(ii.id, NodeImplItem(ii));
this.with_parent(ii.id, |this| {
@ -328,7 +415,11 @@ impl<'a, 'hir> Visitor<'hir> for NodeCollector<'a, 'hir> {
}
fn visit_macro_def(&mut self, macro_def: &'hir MacroDef) {
self.insert_entry(macro_def.id, NotPresent);
let def_index = self.definitions.opt_def_index(macro_def.id).unwrap();
self.with_dep_node_owner(def_index, macro_def, |this| {
this.insert(macro_def.id, NodeMacroDef(macro_def));
});
}
fn visit_variant(&mut self, v: &'hir Variant, g: &'hir Generics, item_id: NodeId) {
@ -375,3 +466,28 @@ impl<'a, 'hir> Visitor<'hir> for NodeCollector<'a, 'hir> {
self.visit_nested_impl_item(id);
}
}
// Passed as the "task" to DepGraph::with_task() when recording input nodes:
// inputs are never computed from other dep-graph data, so the task simply
// hands its argument back unchanged (the hashing context is unused).
fn identity_fn<T>(_: &StableHashingContext, value: T) -> T {
    value
}
// Wrapper that controls whether the bodies nested inside the wrapped
// item-like value are included when computing its stable hash (via
// `while_hashing_hir_bodies`). Hashing the same item once with and once
// without bodies yields two distinct fingerprints for it.
struct HirItemLike<T> {
// The HIR item-like value whose stable hash is being computed.
item_like: T,
// If false, nested bodies are skipped while hashing (signature-only hash).
hash_bodies: bool,
}
impl<'hir, T> HashStable<StableHashingContext<'hir>> for HirItemLike<T>
where T: HashStable<StableHashingContext<'hir>>
{
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'hir>,
hasher: &mut StableHasher<W>) {
// Toggle body-hashing in the context only for the duration of this hash.
hcx.while_hashing_hir_bodies(self.hash_bodies, |hcx| {
self.item_like.hash_stable(hcx, hasher);
});
}
}

View File

@ -57,6 +57,7 @@ pub enum Node<'hir> {
NodePat(&'hir Pat),
NodeBlock(&'hir Block),
NodeLocal(&'hir Local),
NodeMacroDef(&'hir MacroDef),
/// NodeStructCtor represents a tuple struct.
NodeStructCtor(&'hir VariantData),
@ -93,6 +94,8 @@ enum MapEntry<'hir> {
EntryVisibility(NodeId, DepNodeIndex, &'hir Visibility),
EntryLocal(NodeId, DepNodeIndex, &'hir Local),
EntryMacroDef(DepNodeIndex, &'hir MacroDef),
/// Roots for node trees. The DepNodeIndex is the dependency node of the
/// crate's root module.
RootCrate(DepNodeIndex),
@ -127,6 +130,7 @@ impl<'hir> MapEntry<'hir> {
EntryLocal(id, _, _) => id,
NotPresent |
EntryMacroDef(..) |
RootCrate(_) => return None,
})
}
@ -151,6 +155,7 @@ impl<'hir> MapEntry<'hir> {
EntryTyParam(_, _, n) => NodeTyParam(n),
EntryVisibility(_, _, n) => NodeVisibility(n),
EntryLocal(_, _, n) => NodeLocal(n),
EntryMacroDef(_, n) => NodeMacroDef(n),
NotPresent |
RootCrate(_) => return None
@ -285,20 +290,12 @@ impl<'hir> Map<'hir> {
EntryVisibility(_, dep_node_index, _) |
EntryExpr(_, dep_node_index, _) |
EntryLocal(_, dep_node_index, _) |
EntryMacroDef(dep_node_index, _) |
RootCrate(dep_node_index) => {
self.dep_graph.read_index(dep_node_index);
}
NotPresent => {
// Some nodes, notably macro definitions, are not
// present in the map for whatever reason, but
// they *do* have def-ids. So if we encounter an
// empty hole, check for that case.
if let Some(def_index) = self.definitions.opt_def_index(id) {
let def_path_hash = self.definitions.def_path_hash(def_index);
self.dep_graph.read(def_path_hash.to_dep_node(DepKind::Hir));
} else {
bug!("called HirMap::read() with invalid NodeId")
}
bug!("called HirMap::read() with invalid NodeId")
}
}
}
@ -875,6 +872,7 @@ impl<'hir> Map<'hir> {
Some(EntryVisibility(_, _, &Visibility::Restricted { ref path, .. })) => path.span,
Some(EntryVisibility(_, _, v)) => bug!("unexpected Visibility {:?}", v),
Some(EntryLocal(_, _, local)) => local.span,
Some(EntryMacroDef(_, macro_def)) => macro_def.span,
Some(RootCrate(_)) => self.forest.krate.span,
Some(NotPresent) | None => {
@ -1012,15 +1010,22 @@ impl Named for StructField { fn name(&self) -> Name { self.name } }
impl Named for TraitItem { fn name(&self) -> Name { self.name } }
impl Named for ImplItem { fn name(&self) -> Name { self.name } }
pub fn map_crate<'hir>(forest: &'hir mut Forest,
pub fn map_crate<'hir>(sess: &::session::Session,
cstore: &::middle::cstore::CrateStore,
forest: &'hir mut Forest,
definitions: &'hir Definitions)
-> Map<'hir> {
let map = {
let hcx = ::ich::StableHashingContext::new(sess, &forest.krate, definitions, cstore);
let mut collector = NodeCollector::root(&forest.krate,
&forest.dep_graph,
&definitions);
&definitions,
hcx);
intravisit::walk_crate(&mut collector, &forest.krate);
collector.into_map()
let crate_disambiguator = sess.local_crate_disambiguator().as_str();
collector.finalize_and_compute_crate_hash(&crate_disambiguator)
};
if log_enabled!(::log::LogLevel::Debug) {
@ -1103,6 +1108,7 @@ impl<'a> print::State<'a> {
// printing.
NodeStructCtor(_) => bug!("cannot print isolated StructCtor"),
NodeLocal(a) => self.print_local_decl(&a),
NodeMacroDef(_) => bug!("cannot print MacroDef"),
}
}
}
@ -1219,6 +1225,9 @@ fn node_id_to_string(map: &Map, id: NodeId, include_id: bool) -> String {
Some(NodeVisibility(ref vis)) => {
format!("visibility {:?}{}", vis, id_str)
}
Some(NodeMacroDef(_)) => {
format!("macro {}{}", path_str(), id_str)
}
None => {
format!("unknown node{}", id_str)
}

View File

@ -22,6 +22,7 @@ struct CacheEntry {
file_index: usize,
}
#[derive(Clone)]
pub struct CachingCodemapView<'cm> {
codemap: &'cm CodeMap,
line_cache: [CacheEntry; 3],

View File

@ -43,6 +43,7 @@ thread_local!(static IGNORED_ATTR_NAMES: RefCell<FxHashSet<Symbol>> =
/// enough information to transform DefIds and HirIds into stable DefPaths (i.e.
/// a reference to the TyCtxt) and it holds a few caches for speeding up various
/// things (e.g. each DefId/DefPath is only hashed once).
#[derive(Clone)]
pub struct StableHashingContext<'gcx> {
sess: &'gcx Session,
definitions: &'gcx Definitions,
@ -168,6 +169,11 @@ impl<'gcx> StableHashingContext<'gcx> {
self.definitions.def_path_hash(def_index)
}
#[inline]
pub fn node_to_hir_id(&self, node_id: ast::NodeId) -> hir::HirId {
self.definitions.node_to_hir_id(node_id)
}
#[inline]
pub fn hash_spans(&self) -> bool {
self.hash_spans
@ -259,6 +265,18 @@ impl<'a, 'gcx, 'lcx> StableHashingContextProvider for TyCtxt<'a, 'gcx, 'lcx> {
}
}
impl<'gcx> StableHashingContextProvider for StableHashingContext<'gcx> {
type ContextType = StableHashingContext<'gcx>;
fn create_stable_hashing_context(&self) -> Self::ContextType {
self.clone()
}
}
impl<'gcx> ::dep_graph::DepGraphSafe for StableHashingContext<'gcx> {
}
impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::BodyId {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'gcx>,

View File

@ -698,7 +698,7 @@ impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::TraitItem {
hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let hir::TraitItem {
id,
id: _,
hir_id: _,
name,
ref attrs,
@ -707,7 +707,6 @@ impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::TraitItem {
} = *self;
hcx.hash_hir_item_like(attrs, |hcx| {
id.hash_stable(hcx, hasher);
name.hash_stable(hcx, hasher);
attrs.hash_stable(hcx, hasher);
node.hash_stable(hcx, hasher);
@ -732,7 +731,7 @@ impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::ImplItem {
hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let hir::ImplItem {
id,
id: _,
hir_id: _,
name,
ref vis,
@ -743,7 +742,6 @@ impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::ImplItem {
} = *self;
hcx.hash_hir_item_like(attrs, |hcx| {
id.hash_stable(hcx, hasher);
name.hash_stable(hcx, hasher);
vis.hash_stable(hcx, hasher);
defaultness.hash_stable(hcx, hasher);
@ -1167,6 +1165,25 @@ for hir::TraitCandidate {
}
}
// A TraitCandidate's stable sort/hash key: the candidate trait's DefPathHash,
// plus -- when the candidate came in through an import -- the importing
// node's position expressed stably as (owner DefPathHash, ItemLocalId).
// NodeIds themselves are not stable across compilations, hence the mapping
// through HirId and def-path hashes.
impl<'gcx> ToStableHashKey<StableHashingContext<'gcx>> for hir::TraitCandidate {
type KeyType = (DefPathHash, Option<(DefPathHash, hir::ItemLocalId)>);
fn to_stable_hash_key(&self,
hcx: &StableHashingContext<'gcx>)
-> Self::KeyType {
let hir::TraitCandidate {
def_id,
import_id,
} = *self;
// Convert the import's NodeId into a stable (owner hash, local id) pair.
let import_id = import_id.map(|node_id| hcx.node_to_hir_id(node_id))
.map(|hir_id| (hcx.local_def_path_hash(hir_id.owner),
hir_id.local_id));
(hcx.def_path_hash(def_id), import_id)
}
}
impl_stable_hash_for!(struct hir::Freevar {
def,
span

View File

@ -310,7 +310,8 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> {
hir_map::NodeVariant(_) |
hir_map::NodeStructCtor(_) |
hir_map::NodeField(_) |
hir_map::NodeTy(_) => {}
hir_map::NodeTy(_) |
hir_map::NodeMacroDef(_) => {}
_ => {
bug!("found unexpected thingy in worklist: {}",
self.tcx.hir.node_to_string(search_item))

View File

@ -50,8 +50,8 @@ use util::nodemap::{NodeMap, NodeSet, DefIdSet, ItemLocalMap};
use util::nodemap::{FxHashMap, FxHashSet};
use rustc_data_structures::accumulate_vec::AccumulateVec;
use rustc_data_structures::stable_hasher::{HashStable, hash_stable_hashmap,
StableHasher, StableHasherResult};
StableHasher, StableHasherResult,
StableVec};
use arena::{TypedArena, DroplessArena};
use rustc_const_math::{ConstInt, ConstUsize};
use rustc_data_structures::indexed_vec::IndexVec;
@ -828,7 +828,9 @@ pub struct GlobalCtxt<'tcx> {
/// Map indicating what traits are in scope for places where this
/// is relevant; generated by resolve.
trait_map: FxHashMap<DefIndex, Rc<FxHashMap<ItemLocalId, Rc<Vec<TraitCandidate>>>>>,
trait_map: FxHashMap<DefIndex,
Rc<FxHashMap<ItemLocalId,
Rc<StableVec<TraitCandidate>>>>>,
/// Export map produced by name resolution.
export_map: FxHashMap<DefId, Rc<Vec<Export>>>,
@ -1081,15 +1083,14 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
None
};
// FIXME(mw): Each of the Vecs in the trait_map should be brought into
// a deterministic order here. Otherwise we might end up with
// unnecessarily unstable incr. comp. hashes.
let mut trait_map = FxHashMap();
for (k, v) in resolutions.trait_map {
let hir_id = hir.node_to_hir_id(k);
let map = trait_map.entry(hir_id.owner)
.or_insert_with(|| Rc::new(FxHashMap()));
Rc::get_mut(map).unwrap().insert(hir_id.local_id, Rc::new(v));
Rc::get_mut(map).unwrap()
.insert(hir_id.local_id,
Rc::new(StableVec::new(v)));
}
let mut defs = FxHashMap();
for (k, v) in named_region_map.defs {
@ -1235,6 +1236,16 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
self.hir.definitions(),
self.cstore)
}
// This method exercises the `in_scope_traits_map` query for all possible
// values so that we have their fingerprints available in the DepGraph.
// This is only required as long as we still use the old dependency tracking
// which needs to have the fingerprints of all input nodes beforehand.
pub fn precompute_in_scope_traits_hashes(self) {
for &def_index in self.trait_map.keys() {
self.in_scope_traits_map(def_index);
}
}
}
impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
@ -2103,7 +2114,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
lint::struct_lint_level(self.sess, lint, level, src, None, msg)
}
pub fn in_scope_traits(self, id: HirId) -> Option<Rc<Vec<TraitCandidate>>> {
pub fn in_scope_traits(self, id: HirId) -> Option<Rc<StableVec<TraitCandidate>>> {
self.in_scope_traits_map(id.owner)
.and_then(|map| map.get(&id.local_id).cloned())
}

View File

@ -42,6 +42,7 @@ use rustc_data_structures::indexed_set::IdxSetBuf;
use rustc_back::PanicStrategy;
use rustc_data_structures::indexed_vec::IndexVec;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::stable_hasher::StableVec;
use std::cell::{RefCell, Cell};
use std::ops::Deref;
@ -259,7 +260,7 @@ define_maps! { <'tcx>
[] fn specializes: specializes_node((DefId, DefId)) -> bool,
[] fn in_scope_traits_map: InScopeTraits(DefIndex)
-> Option<Rc<FxHashMap<ItemLocalId, Rc<Vec<TraitCandidate>>>>>,
-> Option<Rc<FxHashMap<ItemLocalId, Rc<StableVec<TraitCandidate>>>>>,
[] fn module_exports: ModuleExports(DefId) -> Option<Rc<Vec<Export>>>,
[] fn lint_levels: lint_levels_node(CrateNum) -> Rc<lint::LintLevelMap>,

View File

@ -558,3 +558,37 @@ pub fn hash_stable_hashmap<HCX, K, V, R, SK, F, W>(
entries.hash_stable(hcx, hasher);
}
/// A vector container that makes sure that its items are hashed in a stable
/// order, regardless of the order they are stored in.
pub struct StableVec<T>(Vec<T>);

impl<T> StableVec<T> {
    /// Wraps the given vector; the contents are left untouched.
    pub fn new(v: Vec<T>) -> Self {
        StableVec(v)
    }
}

impl<T> ::std::ops::Deref for StableVec<T> {
    type Target = Vec<T>;

    fn deref(&self) -> &Vec<T> {
        &self.0
    }
}

impl<T, HCX> HashStable<HCX> for StableVec<T>
    where T: HashStable<HCX> + ToStableHashKey<HCX>
{
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut HCX,
                                          hasher: &mut StableHasher<W>) {
        // The stored order may be nondeterministic, so derive a stable key
        // for every element and hash the keys in sorted order instead of
        // hashing the elements as stored.
        let mut keys: Vec<_> = self.iter()
                                   .map(|entry| entry.to_stable_hash_key(hcx))
                                   .collect();
        keys.sort_unstable();
        keys.hash_stable(hcx, hasher);
    }
}

View File

@ -30,7 +30,7 @@ use rustc::traits;
use rustc::util::common::{ErrorReported, time};
use rustc_allocator as allocator;
use rustc_borrowck as borrowck;
use rustc_incremental::{self, IncrementalHashesMap};
use rustc_incremental;
use rustc_resolve::{MakeGlobMap, Resolver};
use rustc_metadata::creader::CrateLoader;
use rustc_metadata::cstore::{self, CStore};
@ -175,7 +175,7 @@ pub fn compile_input(sess: &Session,
// Construct the HIR map
let hir_map = time(sess.time_passes(),
"indexing hir",
|| hir_map::map_crate(&mut hir_forest, &defs));
|| hir_map::map_crate(sess, cstore, &mut hir_forest, &defs));
{
let _ignore = hir_map.dep_graph.in_ignore();
@ -218,7 +218,7 @@ pub fn compile_input(sess: &Session,
&arenas,
&crate_name,
&outputs,
|tcx, analysis, incremental_hashes_map, rx, result| {
|tcx, analysis, rx, result| {
{
// Eventually, we will want to track plugins.
let _ignore = tcx.dep_graph.in_ignore();
@ -246,9 +246,7 @@ pub fn compile_input(sess: &Session,
tcx.print_debug_stats();
}
let trans = phase_4_translate_to_llvm(tcx,
incremental_hashes_map,
rx);
let trans = phase_4_translate_to_llvm(tcx, rx);
if log_enabled!(::log::LogLevel::Info) {
println!("Post-trans");
@ -921,7 +919,6 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
-> Result<R, CompileIncomplete>
where F: for<'a> FnOnce(TyCtxt<'a, 'tcx, 'tcx>,
ty::CrateAnalysis,
IncrementalHashesMap,
mpsc::Receiver<Box<Any + Send>>,
CompileResult) -> R
{
@ -1053,22 +1050,16 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
tx,
output_filenames,
|tcx| {
let incremental_hashes_map =
time(time_passes,
"compute_incremental_hashes_map",
|| rustc_incremental::compute_incremental_hashes_map(tcx));
time(time_passes,
"load_dep_graph",
|| rustc_incremental::load_dep_graph(tcx, &incremental_hashes_map));
|| rustc_incremental::load_dep_graph(tcx));
time(time_passes,
"stability checking",
|| stability::check_unstable_api_usage(tcx));
// passes are timed inside typeck
try_with_f!(typeck::check_crate(tcx),
(tcx, analysis, incremental_hashes_map, rx));
try_with_f!(typeck::check_crate(tcx), (tcx, analysis, rx));
time(time_passes,
"const checking",
@ -1112,7 +1103,7 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
// lint warnings and so on -- kindck used to do this abort, but
// kindck is gone now). -nmatsakis
if sess.err_count() > 0 {
return Ok(f(tcx, analysis, incremental_hashes_map, rx, sess.compile_status()));
return Ok(f(tcx, analysis, rx, sess.compile_status()));
}
time(time_passes, "death checking", || middle::dead::check_crate(tcx));
@ -1123,14 +1114,13 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
time(time_passes, "lint checking", || lint::check_crate(tcx));
return Ok(f(tcx, analysis, incremental_hashes_map, rx, tcx.sess.compile_status()));
return Ok(f(tcx, analysis, rx, tcx.sess.compile_status()));
})
}
/// Run the translation phase to LLVM, after which the AST and analysis can
/// be discarded.
pub fn phase_4_translate_to_llvm<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
incremental_hashes_map: IncrementalHashesMap,
rx: mpsc::Receiver<Box<Any + Send>>)
-> write::OngoingCrateTranslation {
let time_passes = tcx.sess.time_passes();
@ -1141,7 +1131,7 @@ pub fn phase_4_translate_to_llvm<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
let translation =
time(time_passes, "translation", move || {
trans::trans_crate(tcx, incremental_hashes_map, rx)
trans::trans_crate(tcx, rx)
});
if tcx.sess.profile_queries() {

View File

@ -237,7 +237,7 @@ impl PpSourceMode {
arenas,
id,
output_filenames,
|tcx, _, _, _, _| {
|tcx, _, _, _| {
let empty_tables = ty::TypeckTables::empty(None);
let annotation = TypedAnnotation {
tcx,
@ -1036,7 +1036,7 @@ fn print_with_analysis<'tcx, 'a: 'tcx>(sess: &'a Session,
arenas,
crate_name,
output_filenames,
|tcx, _, _, _, _| {
|tcx, _, _, _| {
match ppm {
PpmMir | PpmMirCFG => {
if let Some(nodeid) = nodeid {

View File

@ -133,7 +133,7 @@ fn test_env<F>(source_string: &str,
let arena = DroplessArena::new();
let arenas = ty::GlobalArenas::new();
let hir_map = hir_map::map_crate(&mut hir_forest, &defs);
let hir_map = hir_map::map_crate(&sess, &*cstore, &mut hir_forest, &defs);
// run just enough stuff to build a tcx:
let named_region_map = resolve_lifetime::krate(&sess, &*cstore, &hir_map);

View File

@ -1,331 +0,0 @@
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Calculation of the (misnamed) "strict version hash" for crates and
//! items. This hash is used to tell when the HIR changed in such a
//! way that results from previous compilations may no longer be
//! applicable and hence must be recomputed. It should probably be
//! renamed to the ICH (incremental compilation hash).
//!
//! The hashes for all items are computed once at the beginning of
//! compilation and stored into a map. In addition, a hash is computed
//! of the **entire crate**.
//!
//! Storing the hashes in a map avoids the need to compute them twice
//! (once when loading prior incremental results and once when
//! saving), but it is also important for correctness: at least as of
//! the time of this writing, the typeck pass rewrites entries in
//! the dep-map in-place to accommodate UFCS resolutions. Since name
//! resolution is part of the hash, the result is that hashes computed
//! at the end of compilation would be different from those computed
//! at the beginning.
use std::cell::RefCell;
use std::hash::Hash;
use rustc::dep_graph::{DepNode, DepKind};
use rustc::hir;
use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId, DefIndex};
use rustc::hir::map::DefPathHash;
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc::ich::{Fingerprint, StableHashingContext};
use rustc::ty::TyCtxt;
use rustc::util::common::record_time;
use rustc_data_structures::stable_hasher::{StableHasher, HashStable};
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::accumulate_vec::AccumulateVec;
/// Hasher used for all incremental-compilation (ICH) hashes; its output
/// is a `Fingerprint`.
pub type IchHasher = StableHasher<Fingerprint>;
/// Map from dep-nodes (`Hir`, `HirBody`, `InScopeTraits`, etc.) to the
/// ICH fingerprints computed for them in the current session.
pub struct IncrementalHashesMap {
// Fingerprint computed for each dep-node; filled in by
// `ComputeItemHashesVisitor` and queried via `Index`.
hashes: FxHashMap<DepNode, Fingerprint>,
// These are the metadata hashes for the current crate as they were stored
// during the last compilation session. They are only loaded if
// -Z query-dep-graph was specified and are needed for auto-tests using
// the #[rustc_metadata_dirty] and #[rustc_metadata_clean] attributes to
// check whether some metadata hash has changed in between two revisions.
pub prev_metadata_hashes: RefCell<FxHashMap<DefId, Fingerprint>>,
}
impl IncrementalHashesMap {
    /// Creates an empty map with no recorded fingerprints and no
    /// previous-session metadata hashes.
    pub fn new() -> IncrementalHashesMap {
        IncrementalHashesMap {
            hashes: FxHashMap(),
            prev_metadata_hashes: RefCell::new(FxHashMap()),
        }
    }

    /// Records the fingerprint `v` for dep-node `k`.
    ///
    /// Each dep-node may be inserted at most once; inserting the same
    /// node twice indicates a bug in the hashing pass.
    pub fn insert(&mut self, k: DepNode, v: Fingerprint) {
        assert!(self.hashes.insert(k, v).is_none());
    }

    /// Iterates over all recorded `(DepNode, Fingerprint)` pairs in
    /// arbitrary (hash-map) order.
    // Lifetime elision covers the `&self -> Iter<'_>` relation; the
    // explicit `<'a>` the original carried was redundant.
    pub fn iter(&self)
                -> ::std::collections::hash_map::Iter<DepNode, Fingerprint> {
        self.hashes.iter()
    }

    /// Number of fingerprints recorded so far.
    pub fn len(&self) -> usize {
        self.hashes.len()
    }

    /// True if no fingerprints have been recorded yet. Conventional
    /// companion to `len()`.
    pub fn is_empty(&self) -> bool {
        self.hashes.is_empty()
    }
}
impl<'a> ::std::ops::Index<&'a DepNode> for IncrementalHashesMap {
    type Output = Fingerprint;

    /// Returns the fingerprint recorded for `index`; it is a compiler
    /// bug (ICE) to index a dep-node whose hash was never computed.
    fn index(&self, index: &'a DepNode) -> &Fingerprint {
        self.hashes
            .get(index)
            .unwrap_or_else(|| bug!("Could not find ICH for {:?}", index))
    }
}
/// Visitor that walks every item-like in the crate, computes its ICH,
/// and accumulates the results into an `IncrementalHashesMap`.
struct ComputeItemHashesVisitor<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
// Stable hashing context shared by all per-item hash computations.
hcx: StableHashingContext<'tcx>,
// Accumulated per-dep-node fingerprints; returned to the caller at the end.
hashes: IncrementalHashesMap,
}
impl<'a, 'tcx: 'a> ComputeItemHashesVisitor<'a, 'tcx> {
    /// Computes the ICH of a single item-like and stores it under the
    /// `Hir` (without bodies) or `HirBody` (with bodies) dep-node derived
    /// from the item's `DefPath` hash. When hashing with bodies, also
    /// records an `InScopeTraits` fingerprint for the item.
    fn compute_and_store_ich_for_item_like<T>(&mut self,
                                              def_index: DefIndex,
                                              hash_bodies: bool,
                                              item_like: T)
        where T: HashStable<StableHashingContext<'tcx>>
    {
        if !hash_bodies && !self.tcx.sess.opts.build_dep_graph() {
            // If we just need the hashes in order to compute the SVH, we don't
            // need have two hashes per item. Just the one containing also the
            // item's body is sufficient.
            return
        }

        let def_path_hash = self.hcx.local_def_path_hash(def_index);

        let mut hasher = IchHasher::new();
        self.hcx.while_hashing_hir_bodies(hash_bodies, |hcx| {
            item_like.hash_stable(hcx, &mut hasher);
        });
        let bytes_hashed = hasher.bytes_hashed();
        let item_hash = hasher.finish();
        let dep_node = if hash_bodies {
            def_path_hash.to_dep_node(DepKind::HirBody)
        } else {
            def_path_hash.to_dep_node(DepKind::Hir)
        };
        debug!("calculate_def_hash: dep_node={:?} hash={:?}", dep_node, item_hash);
        self.hashes.insert(dep_node, item_hash);

        // Keep the session's running total of hashed bytes up to date
        // (used for -Z perf-stats reporting).
        let bytes_hashed =
            self.tcx.sess.perf_stats.incr_comp_bytes_hashed.get() + bytes_hashed;
        self.tcx.sess.perf_stats.incr_comp_bytes_hashed.set(bytes_hashed);

        if hash_bodies {
            let in_scope_traits_map = self.tcx.in_scope_traits_map(def_index);
            let mut hasher = IchHasher::new();
            in_scope_traits_map.hash_stable(&mut self.hcx, &mut hasher);
            let dep_node = def_path_hash.to_dep_node(DepKind::InScopeTraits);
            self.hashes.insert(dep_node, hasher.finish());
        }
    }

    /// Computes the overall crate hash (the SVH, stored under
    /// `DepKind::Krate`) from the crate disambiguator, all previously
    /// computed item hashes, and the crate attributes. Must run after
    /// all item-likes have been visited.
    fn compute_crate_hash(&mut self) {
        let krate = self.tcx.hir.krate();

        let mut crate_state = IchHasher::new();

        let crate_disambiguator = self.tcx.sess.local_crate_disambiguator();
        "crate_disambiguator".hash(&mut crate_state);
        crate_disambiguator.as_str().len().hash(&mut crate_state);
        crate_disambiguator.as_str().hash(&mut crate_state);

        // add each item (in some deterministic order) to the overall
        // crate hash.
        {
            let mut item_hashes: Vec<_> =
                self.hashes.iter()
                           .filter_map(|(&item_dep_node, &item_hash)| {
                               // This `match` determines what kinds of nodes
                               // go into the SVH:
                               match item_dep_node.kind {
                                   DepKind::InScopeTraits |
                                   DepKind::Hir |
                                   DepKind::HirBody => {
                                       // We want to incorporate these into the
                                       // SVH.
                                   }
                                   DepKind::AllLocalTraitImpls => {
                                       // These are already covered by hashing
                                       // the HIR.
                                       return None
                                   }
                                   ref other => {
                                       bug!("Found unexpected DepKind during \
                                             SVH computation: {:?}",
                                            other)
                                   }
                               }

                               Some((item_dep_node, item_hash))
                           })
                           .collect();
            item_hashes.sort_unstable(); // avoid artificial dependencies on item ordering
            item_hashes.hash(&mut crate_state);
        }

        krate.attrs.hash_stable(&mut self.hcx, &mut crate_state);

        let crate_hash = crate_state.finish();
        self.hashes.insert(DepNode::new_no_params(DepKind::Krate), crate_hash);
        debug!("calculate_crate_hash: crate_hash={:?}", crate_hash);
    }

    /// Hashes the crate root module (which is not an item-like), pairing
    /// it with the crate attributes and span, which are not part of the
    /// root `Mod` itself.
    fn hash_crate_root_module(&mut self, krate: &'tcx hir::Crate) {
        let hir::Crate {
            ref module,
            // Crate attributes are not copied over to the root `Mod`, so hash
            // them explicitly here.
            ref attrs,
            span,

            // These fields are handled separately:
            exported_macros: _,
            items: _,
            trait_items: _,
            impl_items: _,
            bodies: _,
            trait_impls: _,
            trait_default_impl: _,
            body_ids: _,
        } = *krate;

        self.compute_and_store_ich_for_item_like(CRATE_DEF_INDEX,
                                                 false,
                                                 (module, (span, attrs)));
        self.compute_and_store_ich_for_item_like(CRATE_DEF_INDEX,
                                                 true,
                                                 (module, (span, attrs)));
    }

    /// Computes the `AllLocalTraitImpls` fingerprint from all local trait
    /// impls and default trait impls. Everything is sorted by `DefPath`
    /// hash first so the result is independent of iteration order.
    fn compute_and_store_ich_for_trait_impls(&mut self, krate: &'tcx hir::Crate)
    {
        let tcx = self.tcx;

        let mut impls: Vec<(DefPathHash, Fingerprint)> = krate
            .trait_impls
            .iter()
            .map(|(&trait_id, impls)| {
                let trait_id = tcx.def_path_hash(trait_id);
                let mut impls: AccumulateVec<[_; 32]> = impls
                    .iter()
                    .map(|&node_id| {
                        let def_id = tcx.hir.local_def_id(node_id);
                        tcx.def_path_hash(def_id)
                    })
                    .collect();

                impls.sort_unstable();
                let mut hasher = StableHasher::new();
                impls.hash_stable(&mut self.hcx, &mut hasher);
                (trait_id, hasher.finish())
            })
            .collect();

        impls.sort_unstable();

        let mut default_impls: AccumulateVec<[_; 32]> = krate
            .trait_default_impl
            .iter()
            .map(|(&trait_def_id, &impl_node_id)| {
                let impl_def_id = tcx.hir.local_def_id(impl_node_id);
                (tcx.def_path_hash(trait_def_id), tcx.def_path_hash(impl_def_id))
            })
            .collect();

        default_impls.sort_unstable();

        let mut hasher = StableHasher::new();
        impls.hash_stable(&mut self.hcx, &mut hasher);
        // FIX: `default_impls` was previously computed and sorted but never
        // folded into the hash, so a change to a default trait impl could
        // not invalidate the `AllLocalTraitImpls` node. Include it now.
        default_impls.hash_stable(&mut self.hcx, &mut hasher);

        self.hashes.insert(DepNode::new_no_params(DepKind::AllLocalTraitImpls),
                           hasher.finish());
    }
}
impl<'a, 'tcx: 'a> ItemLikeVisitor<'tcx> for ComputeItemHashesVisitor<'a, 'tcx> {
    // Every item-like is hashed twice: first without its bodies (the
    // `Hir` node), then with them (the `HirBody` node).

    fn visit_item(&mut self, item: &'tcx hir::Item) {
        let def_index = self.tcx.hir.local_def_id(item.id).index;
        for &hash_bodies in &[false, true] {
            self.compute_and_store_ich_for_item_like(def_index, hash_bodies, item);
        }
    }

    fn visit_trait_item(&mut self, item: &'tcx hir::TraitItem) {
        let def_index = self.tcx.hir.local_def_id(item.id).index;
        for &hash_bodies in &[false, true] {
            self.compute_and_store_ich_for_item_like(def_index, hash_bodies, item);
        }
    }

    fn visit_impl_item(&mut self, item: &'tcx hir::ImplItem) {
        let def_index = self.tcx.hir.local_def_id(item.id).index;
        for &hash_bodies in &[false, true] {
            self.compute_and_store_ich_for_item_like(def_index, hash_bodies, item);
        }
    }
}
/// Computes the ICH for every item-like in the crate, the trait-impl
/// fingerprint, and finally the overall crate hash, returning them all
/// in an `IncrementalHashesMap`.
pub fn compute_incremental_hashes_map<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
                                                    -> IncrementalHashesMap {
    // Hash computation itself must not register any dep-graph reads.
    let _ignore = tcx.dep_graph.in_ignore();
    let krate = tcx.hir.krate();

    let mut visitor = ComputeItemHashesVisitor {
        tcx,
        hcx: tcx.create_stable_hashing_context(),
        hashes: IncrementalHashesMap::new(),
    };

    record_time(&tcx.sess.perf_stats.incr_comp_hashes_time, || {
        // The crate root module and exported macros are handled
        // explicitly; everything else goes through the item-like visitor.
        visitor.hash_crate_root_module(krate);
        krate.visit_all_item_likes(&mut visitor);

        for macro_def in krate.exported_macros.iter() {
            let def_index = tcx.hir.local_def_id(macro_def.id).index;
            for &hash_bodies in &[false, true] {
                visitor.compute_and_store_ich_for_item_like(def_index,
                                                            hash_bodies,
                                                            macro_def);
            }
        }

        visitor.compute_and_store_ich_for_trait_impls(krate);
    });

    tcx.sess.perf_stats.incr_comp_hashes_count.set(visitor.hashes.len() as u64);

    // The crate hash is derived from the item hashes, so compute it last.
    record_time(&tcx.sess.perf_stats.svh_time, || visitor.compute_crate_hash());

    visitor.hashes
}

View File

@ -28,13 +28,9 @@ extern crate syntax;
extern crate syntax_pos;
mod assert_dep_graph;
mod calculate_svh;
mod persist;
pub use assert_dep_graph::assert_dep_graph;
pub use calculate_svh::compute_incremental_hashes_map;
pub use calculate_svh::IncrementalHashesMap;
pub use calculate_svh::IchHasher;
pub use persist::load_dep_graph;
pub use persist::save_dep_graph;
pub use persist::save_trans_partition;

View File

@ -18,7 +18,6 @@ use rustc_data_structures::flock;
use rustc_serialize::Decodable;
use rustc_serialize::opaque::Decoder;
use IncrementalHashesMap;
use super::data::*;
use super::fs::*;
use super::file_format;
@ -28,49 +27,27 @@ use std::fmt::Debug;
pub struct HashContext<'a, 'tcx: 'a> {
pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
incremental_hashes_map: &'a IncrementalHashesMap,
metadata_hashes: FxHashMap<DefId, Fingerprint>,
crate_hashes: FxHashMap<CrateNum, Svh>,
}
impl<'a, 'tcx> HashContext<'a, 'tcx> {
pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>,
incremental_hashes_map: &'a IncrementalHashesMap)
-> Self {
pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Self {
HashContext {
tcx,
incremental_hashes_map,
metadata_hashes: FxHashMap(),
crate_hashes: FxHashMap(),
}
}
pub fn is_hashable(tcx: TyCtxt, dep_node: &DepNode) -> bool {
match dep_node.kind {
DepKind::Krate |
DepKind::Hir |
DepKind::InScopeTraits |
DepKind::HirBody =>
true,
DepKind::MetaData => {
let def_id = dep_node.extract_def_id(tcx).unwrap();
!def_id.is_local()
}
_ => false,
}
}
pub fn hash(&mut self, dep_node: &DepNode) -> Option<Fingerprint> {
match dep_node.kind {
DepKind::Krate => {
Some(self.incremental_hashes_map[dep_node])
}
// HIR nodes (which always come from our crate) are an input:
DepKind::Krate |
DepKind::InScopeTraits |
DepKind::Hir |
DepKind::HirBody => {
Some(self.incremental_hashes_map[dep_node])
Some(self.tcx.dep_graph.fingerprint_of(dep_node).unwrap())
}
// MetaData from other crates is an *input* to us.
@ -79,13 +56,11 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> {
// save it for others to use.
DepKind::MetaData => {
let def_id = dep_node.extract_def_id(self.tcx).unwrap();
if !def_id.is_local() {
Some(self.metadata_hash(def_id,
assert!(!def_id.is_local());
Some(self.metadata_hash(def_id,
def_id.krate,
|this| &mut this.metadata_hashes))
} else {
None
}
}
_ => {

View File

@ -11,18 +11,17 @@
//! Code to save/load the dep-graph from files.
use rustc::dep_graph::{DepNode, WorkProductId, DepKind};
use rustc::hir::def_id::DefId;
use rustc::hir::svh::Svh;
use rustc::ich::Fingerprint;
use rustc::session::Session;
use rustc::ty::TyCtxt;
use rustc::util::nodemap::DefIdMap;
use rustc_data_structures::fx::{FxHashSet, FxHashMap};
use rustc_data_structures::indexed_vec::IndexVec;
use rustc_serialize::Decodable as RustcDecodable;
use rustc_serialize::opaque::Decoder;
use std::path::{Path};
use IncrementalHashesMap;
use super::data::*;
use super::dirty_clean;
use super::hash::*;
@ -40,16 +39,15 @@ pub type DirtyNodes = FxHashMap<DepNodeIndex, DepNodeIndex>;
/// early in compilation, before we've really done any work, but
/// actually it doesn't matter all that much.) See `README.md` for
/// more general overview.
pub fn load_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
incremental_hashes_map: &IncrementalHashesMap) {
pub fn load_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
tcx.precompute_in_scope_traits_hashes();
if tcx.sess.incr_session_load_dep_graph() {
let _ignore = tcx.dep_graph.in_ignore();
load_dep_graph_if_exists(tcx, incremental_hashes_map);
load_dep_graph_if_exists(tcx);
}
}
fn load_dep_graph_if_exists<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
incremental_hashes_map: &IncrementalHashesMap) {
fn load_dep_graph_if_exists<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
let dep_graph_path = dep_graph_path(tcx.sess);
let dep_graph_data = match load_data(tcx.sess, &dep_graph_path) {
Some(p) => p,
@ -62,7 +60,7 @@ fn load_dep_graph_if_exists<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
None => return // no file
};
match decode_dep_graph(tcx, incremental_hashes_map, &dep_graph_data, &work_products_data) {
match decode_dep_graph(tcx, &dep_graph_data, &work_products_data) {
Ok(dirty_nodes) => dirty_nodes,
Err(err) => {
tcx.sess.warn(
@ -117,7 +115,6 @@ fn does_still_exist(tcx: TyCtxt, dep_node: &DepNode) -> bool {
/// Decode the dep graph and load the edges/nodes that are still clean
/// into `tcx.dep_graph`.
pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
incremental_hashes_map: &IncrementalHashesMap,
dep_graph_data: &[u8],
work_products_data: &[u8])
-> Result<(), String>
@ -150,7 +147,6 @@ pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
// Compute the set of nodes from the old graph where some input
// has changed or been removed.
let dirty_raw_nodes = initial_dirty_nodes(tcx,
incremental_hashes_map,
&serialized_dep_graph.nodes,
&serialized_dep_graph.hashes);
let dirty_raw_nodes = transitive_dirty_nodes(&serialized_dep_graph,
@ -193,20 +189,16 @@ pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
dirty_clean::check_dirty_clean_annotations(tcx,
&serialized_dep_graph.nodes,
&dirty_raw_nodes);
load_prev_metadata_hashes(tcx,
&mut *incremental_hashes_map.prev_metadata_hashes.borrow_mut());
Ok(())
}
/// Computes which of the original set of def-ids are dirty. Stored in
/// a bit vector where the index is the DefPathIndex.
fn initial_dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
incremental_hashes_map: &IncrementalHashesMap,
nodes: &IndexVec<DepNodeIndex, DepNode>,
serialized_hashes: &[(DepNodeIndex, Fingerprint)])
-> DirtyNodes {
let mut hcx = HashContext::new(tcx, incremental_hashes_map);
let mut hcx = HashContext::new(tcx);
let mut dirty_nodes = FxHashMap();
for &(dep_node_index, prev_hash) in serialized_hashes {
@ -310,11 +302,12 @@ fn delete_dirty_work_product(tcx: TyCtxt,
work_product::delete_workproduct_files(tcx.sess, &swp.work_product);
}
fn load_prev_metadata_hashes(tcx: TyCtxt,
output: &mut FxHashMap<DefId, Fingerprint>) {
pub fn load_prev_metadata_hashes(tcx: TyCtxt) -> DefIdMap<Fingerprint> {
let mut output = DefIdMap();
if !tcx.sess.opts.debugging_opts.query_dep_graph {
// Previous metadata hashes are only needed for testing.
return
return output
}
debug!("load_prev_metadata_hashes() - Loading previous metadata hashes");
@ -324,7 +317,7 @@ fn load_prev_metadata_hashes(tcx: TyCtxt,
if !file_path.exists() {
debug!("load_prev_metadata_hashes() - Couldn't find file containing \
hashes at `{}`", file_path.display());
return
return output
}
debug!("load_prev_metadata_hashes() - File: {}", file_path.display());
@ -334,12 +327,12 @@ fn load_prev_metadata_hashes(tcx: TyCtxt,
Ok(None) => {
debug!("load_prev_metadata_hashes() - File produced by incompatible \
compiler version: {}", file_path.display());
return
return output
}
Err(err) => {
debug!("load_prev_metadata_hashes() - Error reading file `{}`: {}",
file_path.display(), err);
return
return output
}
};
@ -363,6 +356,8 @@ fn load_prev_metadata_hashes(tcx: TyCtxt,
debug!("load_prev_metadata_hashes() - successfully loaded {} hashes",
serialized_hashes.index_map.len());
output
}
fn process_edge<'a, 'tcx, 'edges>(

View File

@ -66,7 +66,7 @@ impl<'q> Predecessors<'q> {
// Reduce the graph to the most important nodes.
let compress::Reduction { graph, input_nodes } =
compress::reduce_graph(&query.graph,
|n| HashContext::is_hashable(tcx, n),
|n| n.kind.is_input(),
|n| is_output(n));
let mut hashes = FxHashMap();

View File

@ -15,6 +15,7 @@ use rustc::ich::Fingerprint;
use rustc::middle::cstore::EncodedMetadataHashes;
use rustc::session::Session;
use rustc::ty::TyCtxt;
use rustc::util::nodemap::DefIdMap;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::graph;
use rustc_data_structures::indexed_vec::IndexVec;
@ -24,7 +25,6 @@ use std::io::{self, Cursor, Write};
use std::fs::{self, File};
use std::path::PathBuf;
use IncrementalHashesMap;
use super::data::*;
use super::hash::*;
use super::preds::*;
@ -33,8 +33,9 @@ use super::dirty_clean;
use super::file_format;
use super::work_product;
use super::load::load_prev_metadata_hashes;
pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
incremental_hashes_map: IncrementalHashesMap,
metadata_hashes: &EncodedMetadataHashes,
svh: Svh) {
debug!("save_dep_graph()");
@ -51,7 +52,15 @@ pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
eprintln!("incremental: {} edges in dep-graph", query.graph.len_edges());
}
let mut hcx = HashContext::new(tcx, &incremental_hashes_map);
// We load the previous metadata hashes now before overwriting the file
// (if we need them for testing).
let prev_metadata_hashes = if tcx.sess.opts.debugging_opts.query_dep_graph {
load_prev_metadata_hashes(tcx)
} else {
DefIdMap()
};
let mut hcx = HashContext::new(tcx);
let preds = Predecessors::new(&query, &mut hcx);
let mut current_metadata_hashes = FxHashMap();
@ -73,9 +82,8 @@ pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
dep_graph_path(sess),
|e| encode_dep_graph(tcx, &preds, e));
let prev_metadata_hashes = incremental_hashes_map.prev_metadata_hashes.borrow();
dirty_clean::check_dirty_clean_metadata(tcx,
&*prev_metadata_hashes,
&prev_metadata_hashes,
&current_metadata_hashes);
}

View File

@ -20,17 +20,16 @@ use rustc::session::config::{self, NoDebugInfo, OutputFilenames, OutputType, Pri
use rustc::session::filesearch;
use rustc::session::search_paths::PathKind;
use rustc::session::Session;
use rustc::ich::Fingerprint;
use rustc::middle::cstore::{LinkMeta, NativeLibrary, LibSource, NativeLibraryKind};
use rustc::middle::dependency_format::Linkage;
use {CrateTranslation, CrateInfo};
use rustc::util::common::time;
use rustc::util::fs::fix_windows_verbatim_for_gcc;
use rustc::dep_graph::{DepKind, DepNode};
use rustc::hir::def_id::CrateNum;
use rustc::hir::svh::Svh;
use rustc_back::tempdir::TempDir;
use rustc_back::{PanicStrategy, RelroLevel};
use rustc_incremental::IncrementalHashesMap;
use context::get_reloc_model;
use llvm;
@ -92,10 +91,9 @@ pub const RLIB_BYTECODE_OBJECT_V1_DATA_OFFSET: usize =
pub use self::rustc_trans_utils::link::{find_crate_name, filename_for_input,
default_output_for_target, invalid_output_for_target};
pub fn build_link_meta(incremental_hashes_map: &IncrementalHashesMap) -> LinkMeta {
let krate_dep_node = &DepNode::new_no_params(DepKind::Krate);
pub fn build_link_meta(crate_hash: Fingerprint) -> LinkMeta {
let r = LinkMeta {
crate_hash: Svh::new(incremental_hashes_map[krate_dep_node].to_smaller_hash()),
crate_hash: Svh::new(crate_hash.to_smaller_hash()),
};
info!("{:?}", r);
return r;

View File

@ -41,12 +41,13 @@ use rustc::middle::trans::{Linkage, Visibility, Stats};
use rustc::middle::cstore::{EncodedMetadata, EncodedMetadataHashes};
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::maps::Providers;
use rustc::dep_graph::{DepNode, DepKind};
use rustc::middle::cstore::{self, LinkMeta, LinkagePreference};
use rustc::hir::map as hir_map;
use rustc::util::common::{time, print_time_passes_entry};
use rustc::session::config::{self, NoDebugInfo};
use rustc::session::Session;
use rustc_incremental::{self, IncrementalHashesMap};
use rustc_incremental;
use abi;
use allocator;
use mir::lvalue::LvalueRef;
@ -935,12 +936,15 @@ pub fn find_exported_symbols(tcx: TyCtxt) -> NodeSet {
}
pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
incremental_hashes_map: IncrementalHashesMap,
rx: mpsc::Receiver<Box<Any + Send>>)
-> OngoingCrateTranslation {
check_for_rustc_errors_attr(tcx);
let link_meta = link::build_link_meta(&incremental_hashes_map);
let crate_hash = tcx.dep_graph
.fingerprint_of(&DepNode::new_no_params(DepKind::Krate))
.unwrap();
let link_meta = link::build_link_meta(crate_hash);
let exported_symbol_node_ids = find_exported_symbols(tcx);
let shared_ccx = SharedCrateContext::new(tcx);
@ -980,7 +984,6 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
ongoing_translation.translation_finished(tcx);
assert_and_save_dep_graph(tcx,
incremental_hashes_map,
metadata_incr_hashes,
link_meta);
@ -1113,7 +1116,6 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
ongoing_translation.check_for_errors(tcx.sess);
assert_and_save_dep_graph(tcx,
incremental_hashes_map,
metadata_incr_hashes,
link_meta);
ongoing_translation
@ -1124,7 +1126,6 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
thread_local!(static DISPOSITIONS: RefCell<Vec<(String, Disposition)>> = Default::default());
fn assert_and_save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
incremental_hashes_map: IncrementalHashesMap,
metadata_incr_hashes: EncodedMetadataHashes,
link_meta: LinkMeta) {
time(tcx.sess.time_passes(),
@ -1134,7 +1135,6 @@ fn assert_and_save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
time(tcx.sess.time_passes(),
"serialize dep graph",
|| rustc_incremental::save_dep_graph(tcx,
incremental_hashes_map,
&metadata_incr_hashes,
link_meta.crate_hash));
}

View File

@ -108,11 +108,12 @@ use rustc::dep_graph::{DepNode, WorkProductId};
use rustc::hir::def_id::DefId;
use rustc::hir::map::DefPathData;
use rustc::middle::trans::{Linkage, Visibility};
use rustc::ich::Fingerprint;
use rustc::session::config::NUMBERED_CODEGEN_UNIT_MARKER;
use rustc::ty::{self, TyCtxt, InstanceDef};
use rustc::ty::item_path::characteristic_def_id_of_type;
use rustc::util::nodemap::{FxHashMap, FxHashSet};
use rustc_incremental::IchHasher;
use rustc_data_structures::stable_hasher::StableHasher;
use std::collections::hash_map::Entry;
use std::hash::Hash;
use syntax::ast::NodeId;
@ -155,7 +156,7 @@ pub trait CodegenUnitExt<'tcx> {
}
fn compute_symbol_name_hash<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> u64 {
let mut state = IchHasher::new();
let mut state: StableHasher<Fingerprint> = StableHasher::new();
let all_items = self.items_in_deterministic_order(tcx);
for (item, (linkage, visibility)) in all_items {
let symbol_name = item.symbol_name(tcx);

View File

@ -175,7 +175,7 @@ pub fn run_core(search_paths: SearchPaths,
let arena = DroplessArena::new();
let arenas = GlobalArenas::new();
let hir_map = hir_map::map_crate(&mut hir_forest, &defs);
let hir_map = hir_map::map_crate(&sess, &*cstore, &mut hir_forest, &defs);
let output_filenames = driver::build_output_filenames(&input,
&None,
&None,
@ -191,7 +191,7 @@ pub fn run_core(search_paths: SearchPaths,
&arenas,
&name,
&output_filenames,
|tcx, analysis, _, _, result| {
|tcx, analysis, _, result| {
if let Err(_) = result {
sess.fatal("Compilation failed, aborting rustdoc");
}

View File

@ -124,7 +124,7 @@ pub fn run(input: &str,
render_type);
{
let map = hir::map::map_crate(&mut hir_forest, &defs);
let map = hir::map::map_crate(&sess, &*cstore, &mut hir_forest, &defs);
let krate = map.krate();
let mut hir_collector = HirCollector {
sess: &sess,