incr.comp.: Remove IncrementalHashesMap and calculate_svh module.
commit 47d14ccd51
parent c4d1651266
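In short: the eagerly computed IncrementalHashesMap is gone. Per-item hashes now live in the dep-graph itself, and the one consumer that still needed a crate-wide hash (link metadata) reads the Krate fingerprint out of the dep-graph and truncates it to an Svh. Below is a self-contained sketch of that shape with stand-in types — DepNode, Fingerprint, DepGraph, and Svh are all simplified stand-ins, not the real rustc definitions; only the call pattern mirrors the diff that follows:

    use std::collections::HashMap;

    // Stand-in for rustc's dep-graph node; only the crate-level node matters here.
    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    enum DepNode {
        Krate,
    }

    // Stand-in for rustc's 128-bit Fingerprint.
    #[derive(Clone, Copy, Debug)]
    struct Fingerprint(u64, u64);

    impl Fingerprint {
        // Stand-in for Fingerprint::to_smaller_hash: collapse 128 bits to 64
        // (the real implementation differs).
        fn to_smaller_hash(self) -> u64 {
            self.0 ^ self.1
        }
    }

    // Stand-in for the dep-graph, which now owns the per-node fingerprints.
    struct DepGraph {
        fingerprints: HashMap<DepNode, Fingerprint>,
    }

    impl DepGraph {
        fn fingerprint_of(&self, node: &DepNode) -> Option<Fingerprint> {
            self.fingerprints.get(node).copied()
        }
    }

    // Stand-in for Svh, the 64-bit "strict version hash" stored in crate metadata.
    #[derive(Debug)]
    struct Svh(u64);

    fn main() {
        let mut fingerprints = HashMap::new();
        fingerprints.insert(DepNode::Krate, Fingerprint(0xdead_beef, 0x1234));
        let dep_graph = DepGraph { fingerprints };

        // Old: Svh::new(incremental_hashes_map[&krate_dep_node].to_smaller_hash())
        // New: read the crate fingerprint straight out of the dep-graph.
        let crate_hash = dep_graph.fingerprint_of(&DepNode::Krate).unwrap();
        let svh = Svh(crate_hash.to_smaller_hash());
        println!("{:?}", svh);
    }

The design win is that the hashes are computed once, where the dep-graph needs them anyway, instead of being duplicated in a side map that had to be threaded through compile_input, phase_3, phase_4, and trans_crate.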
@@ -30,7 +30,7 @@ use rustc::traits;
 use rustc::util::common::{ErrorReported, time};
 use rustc_allocator as allocator;
 use rustc_borrowck as borrowck;
-use rustc_incremental::{self, IncrementalHashesMap};
+use rustc_incremental;
 use rustc_resolve::{MakeGlobMap, Resolver};
 use rustc_metadata::creader::CrateLoader;
 use rustc_metadata::cstore::{self, CStore};
@@ -218,7 +218,7 @@ pub fn compile_input(sess: &Session,
                                        &arenas,
                                        &crate_name,
                                        &outputs,
-                                       |tcx, analysis, incremental_hashes_map, rx, result| {
+                                       |tcx, analysis, rx, result| {
         {
             // Eventually, we will want to track plugins.
             let _ignore = tcx.dep_graph.in_ignore();
@@ -246,9 +246,7 @@ pub fn compile_input(sess: &Session,
             tcx.print_debug_stats();
         }

-        let trans = phase_4_translate_to_llvm(tcx,
-                                              incremental_hashes_map,
-                                              rx);
+        let trans = phase_4_translate_to_llvm(tcx, rx);

         if log_enabled!(::log::LogLevel::Info) {
             println!("Post-trans");
@@ -921,7 +919,6 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
                                                -> Result<R, CompileIncomplete>
     where F: for<'a> FnOnce(TyCtxt<'a, 'tcx, 'tcx>,
                             ty::CrateAnalysis,
-                            IncrementalHashesMap,
                             mpsc::Receiver<Box<Any + Send>>,
                             CompileResult) -> R
 {
@@ -1053,22 +1050,16 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
                                        tx,
                                        output_filenames,
                                        |tcx| {
-        let incremental_hashes_map =
-            time(time_passes,
-                 "compute_incremental_hashes_map",
-                 || rustc_incremental::compute_incremental_hashes_map(tcx));
-
         time(time_passes,
              "load_dep_graph",
-             || rustc_incremental::load_dep_graph(tcx, &incremental_hashes_map));
+             || rustc_incremental::load_dep_graph(tcx));

         time(time_passes,
              "stability checking",
              || stability::check_unstable_api_usage(tcx));

         // passes are timed inside typeck
-        try_with_f!(typeck::check_crate(tcx),
-                    (tcx, analysis, incremental_hashes_map, rx));
+        try_with_f!(typeck::check_crate(tcx), (tcx, analysis, rx));

         time(time_passes,
              "const checking",
@@ -1112,7 +1103,7 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
         // lint warnings and so on -- kindck used to do this abort, but
         // kindck is gone now). -nmatsakis
         if sess.err_count() > 0 {
-            return Ok(f(tcx, analysis, incremental_hashes_map, rx, sess.compile_status()));
+            return Ok(f(tcx, analysis, rx, sess.compile_status()));
         }

         time(time_passes, "death checking", || middle::dead::check_crate(tcx));
@@ -1123,14 +1114,13 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,

         time(time_passes, "lint checking", || lint::check_crate(tcx));

-        return Ok(f(tcx, analysis, incremental_hashes_map, rx, tcx.sess.compile_status()));
+        return Ok(f(tcx, analysis, rx, tcx.sess.compile_status()));
     })
 }

 /// Run the translation phase to LLVM, after which the AST and analysis can
 /// be discarded.
 pub fn phase_4_translate_to_llvm<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                           incremental_hashes_map: IncrementalHashesMap,
                                            rx: mpsc::Receiver<Box<Any + Send>>)
                                            -> write::OngoingCrateTranslation {
     let time_passes = tcx.sess.time_passes();
@@ -1141,7 +1131,7 @@ pub fn phase_4_translate_to_llvm<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,

     let translation =
         time(time_passes, "translation", move || {
-            trans::trans_crate(tcx, incremental_hashes_map, rx)
+            trans::trans_crate(tcx, rx)
         });

     if tcx.sess.profile_queries() {
@@ -237,7 +237,7 @@ impl PpSourceMode {
                                            arenas,
                                            id,
                                            output_filenames,
-                                           |tcx, _, _, _, _| {
+                                           |tcx, _, _, _| {
             let empty_tables = ty::TypeckTables::empty(None);
             let annotation = TypedAnnotation {
                 tcx,
@@ -1036,7 +1036,7 @@ fn print_with_analysis<'tcx, 'a: 'tcx>(sess: &'a Session,
                                            arenas,
                                            crate_name,
                                            output_filenames,
-                                           |tcx, _, _, _, _| {
+                                           |tcx, _, _, _| {
         match ppm {
             PpmMir | PpmMirCFG => {
                 if let Some(nodeid) = nodeid {
@@ -1,331 +0,0 @@
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Calculation of the (misnamed) "strict version hash" for crates and
-//! items. This hash is used to tell when the HIR changed in such a
-//! way that results from previous compilations may no longer be
-//! applicable and hence must be recomputed. It should probably be
-//! renamed to the ICH (incremental compilation hash).
-//!
-//! The hashes for all items are computed once at the beginning of
-//! compilation and stored into a map. In addition, a hash is computed
-//! of the **entire crate**.
-//!
-//! Storing the hashes in a map avoids the need to compute them twice
-//! (once when loading prior incremental results and once when
-//! saving), but it is also important for correctness: at least as of
-//! the time of this writing, the typeck passes rewrites entries in
-//! the dep-map in-place to accommodate UFCS resolutions. Since name
-//! resolution is part of the hash, the result is that hashes computed
-//! at the end of compilation would be different from those computed
-//! at the beginning.
-
-use std::cell::RefCell;
-use std::hash::Hash;
-use rustc::dep_graph::{DepNode, DepKind};
-use rustc::hir;
-use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId, DefIndex};
-use rustc::hir::map::DefPathHash;
-use rustc::hir::itemlikevisit::ItemLikeVisitor;
-use rustc::ich::{Fingerprint, StableHashingContext};
-use rustc::ty::TyCtxt;
-use rustc::util::common::record_time;
-use rustc_data_structures::stable_hasher::{StableHasher, HashStable};
-use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::accumulate_vec::AccumulateVec;
-
-pub type IchHasher = StableHasher<Fingerprint>;
-
-pub struct IncrementalHashesMap {
-    hashes: FxHashMap<DepNode, Fingerprint>,
-
-    // These are the metadata hashes for the current crate as they were stored
-    // during the last compilation session. They are only loaded if
-    // -Z query-dep-graph was specified and are needed for auto-tests using
-    // the #[rustc_metadata_dirty] and #[rustc_metadata_clean] attributes to
-    // check whether some metadata hash has changed in between two revisions.
-    pub prev_metadata_hashes: RefCell<FxHashMap<DefId, Fingerprint>>,
-}
-
-impl IncrementalHashesMap {
-    pub fn new() -> IncrementalHashesMap {
-        IncrementalHashesMap {
-            hashes: FxHashMap(),
-            prev_metadata_hashes: RefCell::new(FxHashMap()),
-        }
-    }
-
-    pub fn insert(&mut self, k: DepNode, v: Fingerprint) {
-        assert!(self.hashes.insert(k, v).is_none());
-    }
-
-    pub fn iter<'a>(&'a self)
-                    -> ::std::collections::hash_map::Iter<'a, DepNode, Fingerprint> {
-        self.hashes.iter()
-    }
-
-    pub fn len(&self) -> usize {
-        self.hashes.len()
-    }
-}
-
-impl<'a> ::std::ops::Index<&'a DepNode> for IncrementalHashesMap {
-    type Output = Fingerprint;
-
-    fn index(&self, index: &'a DepNode) -> &Fingerprint {
-        match self.hashes.get(index) {
-            Some(fingerprint) => fingerprint,
-            None => {
-                bug!("Could not find ICH for {:?}", index);
-            }
-        }
-    }
-}
-
-struct ComputeItemHashesVisitor<'a, 'tcx: 'a> {
-    tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    hcx: StableHashingContext<'tcx>,
-    hashes: IncrementalHashesMap,
-}
-
-impl<'a, 'tcx: 'a> ComputeItemHashesVisitor<'a, 'tcx> {
-    fn compute_and_store_ich_for_item_like<T>(&mut self,
-                                              def_index: DefIndex,
-                                              hash_bodies: bool,
-                                              item_like: T)
-        where T: HashStable<StableHashingContext<'tcx>>
-    {
-        if !hash_bodies && !self.tcx.sess.opts.build_dep_graph() {
-            // If we just need the hashes in order to compute the SVH, we don't
-            // need have two hashes per item. Just the one containing also the
-            // item's body is sufficient.
-            return
-        }
-
-        let def_path_hash = self.hcx.local_def_path_hash(def_index);
-
-        let mut hasher = IchHasher::new();
-        self.hcx.while_hashing_hir_bodies(hash_bodies, |hcx| {
-            item_like.hash_stable(hcx, &mut hasher);
-        });
-
-        let bytes_hashed = hasher.bytes_hashed();
-        let item_hash = hasher.finish();
-        let dep_node = if hash_bodies {
-            def_path_hash.to_dep_node(DepKind::HirBody)
-        } else {
-            def_path_hash.to_dep_node(DepKind::Hir)
-        };
-        debug!("calculate_def_hash: dep_node={:?} hash={:?}", dep_node, item_hash);
-        self.hashes.insert(dep_node, item_hash);
-
-        let bytes_hashed =
-            self.tcx.sess.perf_stats.incr_comp_bytes_hashed.get() + bytes_hashed;
-        self.tcx.sess.perf_stats.incr_comp_bytes_hashed.set(bytes_hashed);
-
-        if hash_bodies {
-            let in_scope_traits_map = self.tcx.in_scope_traits_map(def_index);
-            let mut hasher = IchHasher::new();
-            in_scope_traits_map.hash_stable(&mut self.hcx, &mut hasher);
-            let dep_node = def_path_hash.to_dep_node(DepKind::InScopeTraits);
-            self.hashes.insert(dep_node, hasher.finish());
-        }
-    }
-
-    fn compute_crate_hash(&mut self) {
-        let krate = self.tcx.hir.krate();
-
-        let mut crate_state = IchHasher::new();
-
-        let crate_disambiguator = self.tcx.sess.local_crate_disambiguator();
-        "crate_disambiguator".hash(&mut crate_state);
-        crate_disambiguator.as_str().len().hash(&mut crate_state);
-        crate_disambiguator.as_str().hash(&mut crate_state);
-
-        // add each item (in some deterministic order) to the overall
-        // crate hash.
-        {
-            let mut item_hashes: Vec<_> =
-                self.hashes.iter()
-                           .filter_map(|(&item_dep_node, &item_hash)| {
-                                // This `match` determines what kinds of nodes
-                                // go into the SVH:
-                                match item_dep_node.kind {
-                                    DepKind::InScopeTraits |
-                                    DepKind::Hir |
-                                    DepKind::HirBody => {
-                                        // We want to incoporate these into the
-                                        // SVH.
-                                    }
-                                    DepKind::AllLocalTraitImpls => {
-                                        // These are already covered by hashing
-                                        // the HIR.
-                                        return None
-                                    }
-                                    ref other => {
-                                        bug!("Found unexpected DepKind during \
-                                              SVH computation: {:?}",
-                                             other)
-                                    }
-                                }
-
-                                Some((item_dep_node, item_hash))
-                           })
-                           .collect();
-            item_hashes.sort_unstable(); // avoid artificial dependencies on item ordering
-            item_hashes.hash(&mut crate_state);
-        }
-
-        krate.attrs.hash_stable(&mut self.hcx, &mut crate_state);
-
-        let crate_hash = crate_state.finish();
-        self.hashes.insert(DepNode::new_no_params(DepKind::Krate), crate_hash);
-        debug!("calculate_crate_hash: crate_hash={:?}", crate_hash);
-    }
-
-    fn hash_crate_root_module(&mut self, krate: &'tcx hir::Crate) {
-        let hir::Crate {
-            ref module,
-            // Crate attributes are not copied over to the root `Mod`, so hash
-            // them explicitly here.
-            ref attrs,
-            span,
-
-            // These fields are handled separately:
-            exported_macros: _,
-            items: _,
-            trait_items: _,
-            impl_items: _,
-            bodies: _,
-            trait_impls: _,
-            trait_default_impl: _,
-            body_ids: _,
-        } = *krate;
-
-        self.compute_and_store_ich_for_item_like(CRATE_DEF_INDEX,
-                                                 false,
-                                                 (module, (span, attrs)));
-        self.compute_and_store_ich_for_item_like(CRATE_DEF_INDEX,
-                                                 true,
-                                                 (module, (span, attrs)));
-    }
-
-    fn compute_and_store_ich_for_trait_impls(&mut self, krate: &'tcx hir::Crate)
-    {
-        let tcx = self.tcx;
-
-        let mut impls: Vec<(DefPathHash, Fingerprint)> = krate
-            .trait_impls
-            .iter()
-            .map(|(&trait_id, impls)| {
-                let trait_id = tcx.def_path_hash(trait_id);
-                let mut impls: AccumulateVec<[_; 32]> = impls
-                    .iter()
-                    .map(|&node_id| {
-                        let def_id = tcx.hir.local_def_id(node_id);
-                        tcx.def_path_hash(def_id)
-                    })
-                    .collect();
-
-                impls.sort_unstable();
-                let mut hasher = StableHasher::new();
-                impls.hash_stable(&mut self.hcx, &mut hasher);
-                (trait_id, hasher.finish())
-            })
-            .collect();
-
-        impls.sort_unstable();
-
-        let mut default_impls: AccumulateVec<[_; 32]> = krate
-            .trait_default_impl
-            .iter()
-            .map(|(&trait_def_id, &impl_node_id)| {
-                let impl_def_id = tcx.hir.local_def_id(impl_node_id);
-                (tcx.def_path_hash(trait_def_id), tcx.def_path_hash(impl_def_id))
-            })
-            .collect();
-
-        default_impls.sort_unstable();
-
-        let mut hasher = StableHasher::new();
-        impls.hash_stable(&mut self.hcx, &mut hasher);
-
-        self.hashes.insert(DepNode::new_no_params(DepKind::AllLocalTraitImpls),
-                           hasher.finish());
-    }
-}
-
-impl<'a, 'tcx: 'a> ItemLikeVisitor<'tcx> for ComputeItemHashesVisitor<'a, 'tcx> {
-    fn visit_item(&mut self, item: &'tcx hir::Item) {
-        let def_index = self.tcx.hir.local_def_id(item.id).index;
-        self.compute_and_store_ich_for_item_like(def_index,
-                                                 false,
-                                                 item);
-        self.compute_and_store_ich_for_item_like(def_index,
-                                                 true,
-                                                 item);
-    }
-
-    fn visit_trait_item(&mut self, item: &'tcx hir::TraitItem) {
-        let def_index = self.tcx.hir.local_def_id(item.id).index;
-        self.compute_and_store_ich_for_item_like(def_index,
-                                                 false,
-                                                 item);
-        self.compute_and_store_ich_for_item_like(def_index,
-                                                 true,
-                                                 item);
-    }
-
-    fn visit_impl_item(&mut self, item: &'tcx hir::ImplItem) {
-        let def_index = self.tcx.hir.local_def_id(item.id).index;
-        self.compute_and_store_ich_for_item_like(def_index,
-                                                 false,
-                                                 item);
-        self.compute_and_store_ich_for_item_like(def_index,
-                                                 true,
-                                                 item);
-    }
-}
-
-
-pub fn compute_incremental_hashes_map<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
-                                                    -> IncrementalHashesMap {
-    let _ignore = tcx.dep_graph.in_ignore();
-    let krate = tcx.hir.krate();
-
-    let mut visitor = ComputeItemHashesVisitor {
-        tcx,
-        hcx: tcx.create_stable_hashing_context(),
-        hashes: IncrementalHashesMap::new(),
-    };
-
-    record_time(&tcx.sess.perf_stats.incr_comp_hashes_time, || {
-        visitor.hash_crate_root_module(krate);
-        krate.visit_all_item_likes(&mut visitor);
-
-        for macro_def in krate.exported_macros.iter() {
-            let def_index = tcx.hir.local_def_id(macro_def.id).index;
-            visitor.compute_and_store_ich_for_item_like(def_index,
-                                                        false,
-                                                        macro_def);
-            visitor.compute_and_store_ich_for_item_like(def_index,
-                                                        true,
-                                                        macro_def);
-        }
-
-        visitor.compute_and_store_ich_for_trait_impls(krate);
-    });
-
-    tcx.sess.perf_stats.incr_comp_hashes_count.set(visitor.hashes.len() as u64);
-
-    record_time(&tcx.sess.perf_stats.svh_time, || visitor.compute_crate_hash());
-    visitor.hashes
-}
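The doc comment of the deleted module describes the core SVH scheme: hash each item, then combine the per-item hashes in a deterministic order into one crate-level hash. A minimal standalone sketch of that combining step follows, using std's DefaultHasher in place of rustc's 128-bit StableHasher, with illustrative names (item_hash, crate_hash are not rustc functions):

    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    // Hash one item; stands in for hashing an item's HIR with IchHasher.
    fn item_hash(item: &str) -> u64 {
        let mut h = DefaultHasher::new();
        item.hash(&mut h);
        h.finish()
    }

    // Combine per-item hashes into a crate-level hash. Sorting first removes
    // any dependence on the order items were visited, mirroring the
    // `item_hashes.sort_unstable()` in the deleted `compute_crate_hash`.
    fn crate_hash(items: &[&str]) -> u64 {
        let mut item_hashes: Vec<u64> = items.iter().map(|i| item_hash(i)).collect();
        item_hashes.sort_unstable(); // avoid artificial dependencies on item ordering
        let mut state = DefaultHasher::new();
        item_hashes.hash(&mut state);
        state.finish()
    }

    fn main() {
        // The same set of items in a different order yields the same crate hash.
        assert_eq!(crate_hash(&["fn a", "fn b"]), crate_hash(&["fn b", "fn a"]));
        println!("crate hash: {:x}", crate_hash(&["fn a", "fn b"]));
    }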
@@ -28,13 +28,9 @@ extern crate syntax;
 extern crate syntax_pos;

 mod assert_dep_graph;
-mod calculate_svh;
 mod persist;

 pub use assert_dep_graph::assert_dep_graph;
-pub use calculate_svh::compute_incremental_hashes_map;
-pub use calculate_svh::IncrementalHashesMap;
-pub use calculate_svh::IchHasher;
 pub use persist::load_dep_graph;
 pub use persist::save_dep_graph;
 pub use persist::save_trans_partition;
@@ -11,18 +11,17 @@

 //! Code to save/load the dep-graph from files.

 use rustc::dep_graph::{DepNode, WorkProductId, DepKind};
 use rustc::hir::def_id::DefId;
 use rustc::hir::svh::Svh;
 use rustc::ich::Fingerprint;
 use rustc::session::Session;
 use rustc::ty::TyCtxt;
+use rustc::util::nodemap::DefIdMap;
 use rustc_data_structures::fx::{FxHashSet, FxHashMap};
 use rustc_data_structures::indexed_vec::IndexVec;
 use rustc_serialize::Decodable as RustcDecodable;
 use rustc_serialize::opaque::Decoder;
 use std::path::{Path};

-use IncrementalHashesMap;
 use super::data::*;
 use super::dirty_clean;
 use super::hash::*;
@@ -40,17 +39,15 @@ pub type DirtyNodes = FxHashMap<DepNodeIndex, DepNodeIndex>;
 /// early in compilation, before we've really done any work, but
 /// actually it doesn't matter all that much.) See `README.md` for
 /// more general overview.
-pub fn load_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                incremental_hashes_map: &IncrementalHashesMap) {
+pub fn load_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
     tcx.precompute_in_scope_traits_hashes();
     if tcx.sess.incr_session_load_dep_graph() {
         let _ignore = tcx.dep_graph.in_ignore();
-        load_dep_graph_if_exists(tcx, incremental_hashes_map);
+        load_dep_graph_if_exists(tcx);
     }
 }

-fn load_dep_graph_if_exists<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                      incremental_hashes_map: &IncrementalHashesMap) {
+fn load_dep_graph_if_exists<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
     let dep_graph_path = dep_graph_path(tcx.sess);
     let dep_graph_data = match load_data(tcx.sess, &dep_graph_path) {
         Some(p) => p,
@@ -63,7 +60,7 @@ fn load_dep_graph_if_exists<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         None => return // no file
     };

-    match decode_dep_graph(tcx, incremental_hashes_map, &dep_graph_data, &work_products_data) {
+    match decode_dep_graph(tcx, &dep_graph_data, &work_products_data) {
         Ok(dirty_nodes) => dirty_nodes,
         Err(err) => {
             tcx.sess.warn(
@@ -118,7 +115,6 @@ fn does_still_exist(tcx: TyCtxt, dep_node: &DepNode) -> bool {
 /// Decode the dep graph and load the edges/nodes that are still clean
 /// into `tcx.dep_graph`.
 pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                  incremental_hashes_map: &IncrementalHashesMap,
                                   dep_graph_data: &[u8],
                                   work_products_data: &[u8])
                                   -> Result<(), String>
@@ -193,9 +189,6 @@ pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     dirty_clean::check_dirty_clean_annotations(tcx,
                                                &serialized_dep_graph.nodes,
                                                &dirty_raw_nodes);

-    load_prev_metadata_hashes(tcx,
-                              &mut *incremental_hashes_map.prev_metadata_hashes.borrow_mut());
     Ok(())
 }
@@ -309,11 +302,12 @@ fn delete_dirty_work_product(tcx: TyCtxt,
     work_product::delete_workproduct_files(tcx.sess, &swp.work_product);
 }

-fn load_prev_metadata_hashes(tcx: TyCtxt,
-                             output: &mut FxHashMap<DefId, Fingerprint>) {
+pub fn load_prev_metadata_hashes(tcx: TyCtxt) -> DefIdMap<Fingerprint> {
+    let mut output = DefIdMap();
+
     if !tcx.sess.opts.debugging_opts.query_dep_graph {
         // Previous metadata hashes are only needed for testing.
-        return
+        return output
     }

     debug!("load_prev_metadata_hashes() - Loading previous metadata hashes");
@@ -323,7 +317,7 @@ fn load_prev_metadata_hashes(tcx: TyCtxt,
     if !file_path.exists() {
         debug!("load_prev_metadata_hashes() - Couldn't find file containing \
                 hashes at `{}`", file_path.display());
-        return
+        return output
     }

     debug!("load_prev_metadata_hashes() - File: {}", file_path.display());
@@ -333,12 +327,12 @@ fn load_prev_metadata_hashes(tcx: TyCtxt,
         Ok(None) => {
             debug!("load_prev_metadata_hashes() - File produced by incompatible \
                     compiler version: {}", file_path.display());
-            return
+            return output
         }
         Err(err) => {
            debug!("load_prev_metadata_hashes() - Error reading file `{}`: {}",
                   file_path.display(), err);
-            return
+            return output
         }
     };
@@ -362,6 +356,8 @@ fn load_prev_metadata_hashes(tcx: TyCtxt,

     debug!("load_prev_metadata_hashes() - successfully loaded {} hashes",
            serialized_hashes.index_map.len());
+
+    output
 }

 fn process_edge<'a, 'tcx, 'edges>(
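The hunks above refactor load_prev_metadata_hashes from writing into a caller-supplied out-parameter (the map that used to live inside IncrementalHashesMap) to building and returning its own map, so every early exit becomes `return output`. A standalone sketch of that pattern, with illustrative names and simplified types (not the rustc signatures):

    use std::collections::HashMap;

    // Before: fn load(..., output: &mut HashMap<u32, u64>) with bare `return`.
    // After: own the accumulator and return it (possibly empty) at every exit.
    fn load_prev_hashes(enabled: bool, file_exists: bool) -> HashMap<u32, u64> {
        let mut output = HashMap::new();

        if !enabled {
            // Previous hashes are only needed for testing.
            return output;
        }
        if !file_exists {
            return output;
        }

        output.insert(1, 0xabcd); // pretend we decoded one hash from disk
        output
    }

    fn main() {
        assert!(load_prev_hashes(false, true).is_empty());
        assert_eq!(load_prev_hashes(true, true).len(), 1);
    }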
@@ -15,6 +15,7 @@ use rustc::ich::Fingerprint;
 use rustc::middle::cstore::EncodedMetadataHashes;
 use rustc::session::Session;
 use rustc::ty::TyCtxt;
+use rustc::util::nodemap::DefIdMap;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::graph;
 use rustc_data_structures::indexed_vec::IndexVec;
@@ -24,7 +25,6 @@ use std::io::{self, Cursor, Write};
 use std::fs::{self, File};
 use std::path::PathBuf;

-use IncrementalHashesMap;
 use super::data::*;
 use super::hash::*;
 use super::preds::*;
@@ -33,8 +33,9 @@ use super::dirty_clean;
 use super::file_format;
 use super::work_product;

+use super::load::load_prev_metadata_hashes;
+
 pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                incremental_hashes_map: IncrementalHashesMap,
                                 metadata_hashes: &EncodedMetadataHashes,
                                 svh: Svh) {
     debug!("save_dep_graph()");
@@ -51,6 +52,14 @@ pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         eprintln!("incremental: {} edges in dep-graph", query.graph.len_edges());
     }

+    // We load the previous metadata hashes now before overwriting the file
+    // (if we need them for testing).
+    let prev_metadata_hashes = if tcx.sess.opts.debugging_opts.query_dep_graph {
+        load_prev_metadata_hashes(tcx)
+    } else {
+        DefIdMap()
+    };
+
     let mut hcx = HashContext::new(tcx);
     let preds = Predecessors::new(&query, &mut hcx);
     let mut current_metadata_hashes = FxHashMap();
@@ -73,9 +82,8 @@ pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                      dep_graph_path(sess),
                      |e| encode_dep_graph(tcx, &preds, e));

-    let prev_metadata_hashes = incremental_hashes_map.prev_metadata_hashes.borrow();
     dirty_clean::check_dirty_clean_metadata(tcx,
-                                            &*prev_metadata_hashes,
+                                            &prev_metadata_hashes,
                                             &current_metadata_hashes);
 }
@@ -20,17 +20,16 @@ use rustc::session::config::{self, NoDebugInfo, OutputFilenames, OutputType, Pri
 use rustc::session::filesearch;
 use rustc::session::search_paths::PathKind;
 use rustc::session::Session;
+use rustc::ich::Fingerprint;
 use rustc::middle::cstore::{LinkMeta, NativeLibrary, LibSource, NativeLibraryKind};
 use rustc::middle::dependency_format::Linkage;
 use {CrateTranslation, CrateInfo};
 use rustc::util::common::time;
 use rustc::util::fs::fix_windows_verbatim_for_gcc;
-use rustc::dep_graph::{DepKind, DepNode};
 use rustc::hir::def_id::CrateNum;
 use rustc::hir::svh::Svh;
 use rustc_back::tempdir::TempDir;
 use rustc_back::{PanicStrategy, RelroLevel};
-use rustc_incremental::IncrementalHashesMap;
 use context::get_reloc_model;
 use llvm;

@@ -92,10 +91,9 @@ pub const RLIB_BYTECODE_OBJECT_V1_DATA_OFFSET: usize =
 pub use self::rustc_trans_utils::link::{find_crate_name, filename_for_input,
                                         default_output_for_target, invalid_output_for_target};

-pub fn build_link_meta(incremental_hashes_map: &IncrementalHashesMap) -> LinkMeta {
-    let krate_dep_node = &DepNode::new_no_params(DepKind::Krate);
+pub fn build_link_meta(crate_hash: Fingerprint) -> LinkMeta {
     let r = LinkMeta {
-        crate_hash: Svh::new(incremental_hashes_map[krate_dep_node].to_smaller_hash()),
+        crate_hash: Svh::new(crate_hash.to_smaller_hash()),
     };
     info!("{:?}", r);
     return r;
@@ -41,12 +41,13 @@ use rustc::middle::trans::{Linkage, Visibility, Stats};
 use rustc::middle::cstore::{EncodedMetadata, EncodedMetadataHashes};
 use rustc::ty::{self, Ty, TyCtxt};
 use rustc::ty::maps::Providers;
+use rustc::dep_graph::{DepNode, DepKind};
 use rustc::middle::cstore::{self, LinkMeta, LinkagePreference};
 use rustc::hir::map as hir_map;
 use rustc::util::common::{time, print_time_passes_entry};
 use rustc::session::config::{self, NoDebugInfo};
 use rustc::session::Session;
-use rustc_incremental::{self, IncrementalHashesMap};
+use rustc_incremental;
 use abi;
 use allocator;
 use mir::lvalue::LvalueRef;
@@ -935,12 +936,15 @@ pub fn find_exported_symbols(tcx: TyCtxt) -> NodeSet {
 }

 pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                             incremental_hashes_map: IncrementalHashesMap,
                              rx: mpsc::Receiver<Box<Any + Send>>)
                              -> OngoingCrateTranslation {
     check_for_rustc_errors_attr(tcx);

-    let link_meta = link::build_link_meta(&incremental_hashes_map);

+    let crate_hash = tcx.dep_graph
+                        .fingerprint_of(&DepNode::new_no_params(DepKind::Krate))
+                        .unwrap();
+    let link_meta = link::build_link_meta(crate_hash);
     let exported_symbol_node_ids = find_exported_symbols(tcx);

     let shared_ccx = SharedCrateContext::new(tcx);
@@ -980,7 +984,6 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         ongoing_translation.translation_finished(tcx);

         assert_and_save_dep_graph(tcx,
-                                  incremental_hashes_map,
                                   metadata_incr_hashes,
                                   link_meta);

@@ -1113,7 +1116,6 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     ongoing_translation.check_for_errors(tcx.sess);

     assert_and_save_dep_graph(tcx,
-                              incremental_hashes_map,
                               metadata_incr_hashes,
                               link_meta);
     ongoing_translation
@@ -1124,7 +1126,6 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 thread_local!(static DISPOSITIONS: RefCell<Vec<(String, Disposition)>> = Default::default());

 fn assert_and_save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                       incremental_hashes_map: IncrementalHashesMap,
                                        metadata_incr_hashes: EncodedMetadataHashes,
                                        link_meta: LinkMeta) {
     time(tcx.sess.time_passes(),
@@ -1134,7 +1135,6 @@ fn assert_and_save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     time(tcx.sess.time_passes(),
          "serialize dep graph",
          || rustc_incremental::save_dep_graph(tcx,
-                                              incremental_hashes_map,
                                               &metadata_incr_hashes,
                                               link_meta.crate_hash));
 }
@@ -108,11 +108,12 @@ use rustc::dep_graph::{DepNode, WorkProductId};
 use rustc::hir::def_id::DefId;
 use rustc::hir::map::DefPathData;
 use rustc::middle::trans::{Linkage, Visibility};
+use rustc::ich::Fingerprint;
 use rustc::session::config::NUMBERED_CODEGEN_UNIT_MARKER;
 use rustc::ty::{self, TyCtxt, InstanceDef};
 use rustc::ty::item_path::characteristic_def_id_of_type;
 use rustc::util::nodemap::{FxHashMap, FxHashSet};
-use rustc_incremental::IchHasher;
+use rustc_data_structures::stable_hasher::StableHasher;
 use std::collections::hash_map::Entry;
 use std::hash::Hash;
 use syntax::ast::NodeId;
@@ -155,7 +156,7 @@ pub trait CodegenUnitExt<'tcx> {
     }

     fn compute_symbol_name_hash<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> u64 {
-        let mut state = IchHasher::new();
+        let mut state: StableHasher<Fingerprint> = StableHasher::new();
         let all_items = self.items_in_deterministic_order(tcx);
         for (item, (linkage, visibility)) in all_items {
             let symbol_name = item.symbol_name(tcx);
@@ -175,7 +175,7 @@ pub fn run_core(search_paths: SearchPaths,

     let arena = DroplessArena::new();
     let arenas = GlobalArenas::new();
-    let hir_map = hir_map::map_crate(&mut hir_forest, &defs);
+    let hir_map = hir_map::map_crate(&sess, &*cstore, &mut hir_forest, &defs);
     let output_filenames = driver::build_output_filenames(&input,
                                                           &None,
                                                           &None,
@@ -191,7 +191,7 @@ pub fn run_core(search_paths: SearchPaths,
                                                          &arenas,
                                                          &name,
                                                          &output_filenames,
-                                                         |tcx, analysis, _, _, result| {
+                                                         |tcx, analysis, _, result| {
         if let Err(_) = result {
             sess.fatal("Compilation failed, aborting rustdoc");
         }
@@ -124,7 +124,7 @@ pub fn run(input: &str,
                                                       render_type);

     {
-        let map = hir::map::map_crate(&mut hir_forest, &defs);
+        let map = hir::map::map_crate(&sess, &*cstore, &mut hir_forest, &defs);
         let krate = map.krate();
         let mut hir_collector = HirCollector {
             sess: &sess,