2021-06-28 19:12:01 +00:00
|
|
|
use crate::QueryCtxt;
|
2020-06-11 14:49:57 +00:00
|
|
|
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet};
|
2021-03-16 20:39:03 +00:00
|
|
|
use rustc_data_structures::memmap::Mmap;
|
2021-03-17 21:49:16 +00:00
|
|
|
use rustc_data_structures::sync::{HashMapExt, Lock, Lrc, OnceCell, RwLock};
|
2021-01-02 04:51:07 +00:00
|
|
|
use rustc_data_structures::unhash::UnhashMap;
|
2021-05-30 15:54:49 +00:00
|
|
|
use rustc_hir::def_id::{CrateNum, DefId, DefIndex, LocalDefId, StableCrateId, LOCAL_CRATE};
|
2021-06-08 20:18:53 +00:00
|
|
|
use rustc_hir::definitions::DefPathHash;
|
2019-12-22 22:42:04 +00:00
|
|
|
use rustc_index::vec::{Idx, IndexVec};
|
2021-06-28 19:12:01 +00:00
|
|
|
use rustc_middle::dep_graph::{DepNode, DepNodeIndex, SerializedDepNodeIndex};
|
|
|
|
use rustc_middle::mir::interpret::{AllocDecodingSession, AllocDecodingState};
|
|
|
|
use rustc_middle::mir::{self, interpret};
|
|
|
|
use rustc_middle::ty::codec::{RefDecodable, TyDecoder, TyEncoder};
|
|
|
|
use rustc_middle::ty::{self, Ty, TyCtxt};
|
2021-01-19 19:04:40 +00:00
|
|
|
use rustc_query_system::dep_graph::DepContext;
|
2021-07-23 21:40:26 +00:00
|
|
|
use rustc_query_system::query::{QueryContext, QuerySideEffects};
|
2020-12-07 01:30:55 +00:00
|
|
|
use rustc_serialize::{
|
2021-03-04 18:24:11 +00:00
|
|
|
opaque::{self, FileEncodeResult, FileEncoder, IntEncodedWithFixedSize},
|
2020-12-07 01:30:55 +00:00
|
|
|
Decodable, Decoder, Encodable, Encoder,
|
|
|
|
};
|
2021-05-30 15:54:49 +00:00
|
|
|
use rustc_session::Session;
|
2020-03-17 15:45:02 +00:00
|
|
|
use rustc_span::hygiene::{
|
2021-06-24 19:02:09 +00:00
|
|
|
ExpnId, HygieneDecodeContext, HygieneEncodeContext, SyntaxContext, SyntaxContextData,
|
2020-03-17 15:45:02 +00:00
|
|
|
};
|
2020-01-01 18:25:28 +00:00
|
|
|
use rustc_span::source_map::{SourceMap, StableSourceFileId};
|
2020-03-24 08:09:42 +00:00
|
|
|
use rustc_span::CachingSourceMapView;
|
2021-04-18 12:27:28 +00:00
|
|
|
use rustc_span::{BytePos, ExpnData, ExpnHash, Pos, SourceFile, Span};
|
2020-07-29 16:26:15 +00:00
|
|
|
use std::collections::hash_map::Entry;
|
2017-10-19 12:32:39 +00:00
|
|
|
use std::mem;
|
|
|
|
|
2017-11-28 15:58:02 +00:00
|
|
|
// Tag written immediately before the `Footer` so that `decode_tagged` can
// verify it is decoding the right structure (see `OnDiskCache::new` and
// `serialize`).
const TAG_FILE_FOOTER: u128 = 0xC0FFEE_C0FFEE_C0FFEE_C0FFEE_C0FFEE;

// A normal span encoded with both location information and a `SyntaxContext`
const TAG_FULL_SPAN: u8 = 0;
// A partial span with no location information, encoded only with a `SyntaxContext`
const TAG_PARTIAL_SPAN: u8 = 1;
// A span stored in relative form; the encoding site is not visible in this
// file chunk — see the span codec for the exact payload.
const TAG_RELATIVE_SPAN: u8 = 2;

// Tags distinguishing the two kinds of hygiene data written by `serialize`
// (see the two closures passed to `hygiene_encode_context.encode`).
const TAG_SYNTAX_CONTEXT: u8 = 0;
const TAG_EXPN_DATA: u8 = 1;
|
|
|
|
|
2019-09-06 02:57:44 +00:00
|
|
|
/// Provides an interface to incremental compilation data cached from the
/// previous compilation session. This data will eventually include the results
/// of a few selected queries (like `typeck` and `mir_optimized`) and
/// any side effects that have been emitted during a query.
pub struct OnDiskCache<'sess> {
    // The complete cache data in serialized form. `None` either for an empty
    // cache or after `drop_serialized_data` has released the mmap.
    serialized_data: RwLock<Option<Mmap>>,

    // Collects all `QuerySideEffects` created during the current compilation
    // session.
    current_side_effects: Lock<FxHashMap<DepNodeIndex, QuerySideEffects>>,

    // Lazily-initialized mapping from the previous session's stable crate ids
    // to the current session's `CrateNum`s (built via `compute_cnum_map`).
    cnum_map: OnceCell<UnhashMap<StableCrateId, CrateNum>>,

    source_map: &'sess SourceMap,
    file_index_to_stable_id: FxHashMap<SourceFileIndex, EncodedSourceFileId>,

    // Caches that are populated lazily during decoding.
    file_index_to_file: Lock<FxHashMap<SourceFileIndex, Lrc<SourceFile>>>,

    // A map from dep-node to the position of the cached query result in
    // `serialized_data`.
    query_result_index: FxHashMap<SerializedDepNodeIndex, AbsoluteBytePos>,

    // A map from dep-node to the position of any associated `QuerySideEffects` in
    // `serialized_data`.
    prev_side_effects_index: FxHashMap<SerializedDepNodeIndex, AbsoluteBytePos>,

    alloc_decoding_state: AllocDecodingState,

    // A map from syntax context ids to the position of their associated
    // `SyntaxContextData`. We use a `u32` instead of a `SyntaxContext`
    // to represent the fact that we are storing *encoded* ids. When we decode
    // a `SyntaxContext`, a new id will be allocated from the global `HygieneData`,
    // which will almost certainly be different than the serialized id.
    syntax_contexts: FxHashMap<u32, AbsoluteBytePos>,
    // A map from the `DefPathHash` of an `ExpnId` to the position
    // of their associated `ExpnData`. Ideally, we would store a `DefId`,
    // but we need to decode this before we've constructed a `TyCtxt` (which
    // makes it difficult to decode a `DefId`).

    // Note that these `DefPathHashes` correspond to both local and foreign
    // `ExpnData` (e.g `ExpnData.krate` may not be `LOCAL_CRATE`). Alternatively,
    // we could look up the `ExpnData` from the metadata of foreign crates,
    // but it seemed easier to have `OnDiskCache` be independent of the `CStore`.
    expn_data: UnhashMap<ExpnHash, AbsoluteBytePos>,
    // Additional information used when decoding hygiene data.
    hygiene_context: HygieneDecodeContext,
    // Maps `DefPathHash`es to their `RawDefId`s from the *previous*
    // compilation session. This is used as an initial 'guess' when
    // we try to map a `DefPathHash` to its `DefId` in the current compilation
    // session.
    foreign_def_path_hashes: UnhashMap<DefPathHash, RawDefId>,
    // Likewise for ExpnId.
    foreign_expn_data: UnhashMap<ExpnHash, u32>,

    // The *next* compilation session's `foreign_def_path_hashes` - at
    // the end of our current compilation session, this will get written
    // out to the `foreign_def_path_hashes` field of the `Footer`, which
    // will become `foreign_def_path_hashes` of the next compilation session.
    // This stores any `DefPathHash` that we may need to map to a `DefId`
    // during the next compilation session.
    latest_foreign_def_path_hashes: Lock<UnhashMap<DefPathHash, RawDefId>>,

    // Caches all lookups of `DefPathHashes`, both for local and foreign
    // definitions. A definition from the previous compilation session
    // may no longer exist in the current compilation session, so
    // we use `Option<DefId>` so that we can cache a lookup failure.
    def_path_hash_to_def_id_cache: Lock<UnhashMap<DefPathHash, Option<DefId>>>,
}
|
|
|
|
|
2020-03-17 15:45:02 +00:00
|
|
|
// This type is used only for serialization and deserialization.
#[derive(Encodable, Decodable)]
struct Footer {
    // Session-independent ids for every `SourceFileIndex` referenced by the
    // cached data (see `OnDiskCache.file_index_to_stable_id`).
    file_index_to_stable_id: FxHashMap<SourceFileIndex, EncodedSourceFileId>,
    // Byte offsets of the cached query results.
    query_result_index: EncodedDepNodeIndex,
    // Byte offsets of the cached `QuerySideEffects`.
    side_effects_index: EncodedDepNodeIndex,
    // The location of all allocations.
    interpret_alloc_index: Vec<u32>,
    // See `OnDiskCache.syntax_contexts`
    syntax_contexts: FxHashMap<u32, AbsoluteBytePos>,
    // See `OnDiskCache.expn_data`
    expn_data: UnhashMap<ExpnHash, AbsoluteBytePos>,
    // See `OnDiskCache.foreign_def_path_hashes`
    foreign_def_path_hashes: UnhashMap<DefPathHash, RawDefId>,
    // See `OnDiskCache.foreign_expn_data`
    foreign_expn_data: UnhashMap<ExpnHash, u32>,
}
|
|
|
|
|
2021-07-23 21:40:26 +00:00
|
|
|
/// Pairs each serialized dep-node with the absolute byte offset of its cached
/// payload inside the on-disk cache data stream.
pub type EncodedDepNodeIndex = Vec<(SerializedDepNodeIndex, AbsoluteBytePos)>;
|
2017-10-19 12:32:39 +00:00
|
|
|
|
2020-06-11 14:49:57 +00:00
|
|
|
/// Compact index identifying a `SourceFile` within one serialization session;
/// translated back through `file_index_to_stable_id` when decoding.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Encodable, Decodable)]
struct SourceFileIndex(u32);
|
2017-11-24 13:00:33 +00:00
|
|
|
|
2020-06-11 14:49:57 +00:00
|
|
|
/// An absolute byte offset into the serialized cache data, stored as `u32`
/// to keep the index tables small (`AbsoluteBytePos::new` debug-asserts the
/// offset fits).
#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq, Encodable, Decodable)]
pub struct AbsoluteBytePos(u32);
|
2017-11-28 13:19:44 +00:00
|
|
|
|
|
|
|
impl AbsoluteBytePos {
|
|
|
|
fn new(pos: usize) -> AbsoluteBytePos {
|
2020-03-04 12:18:08 +00:00
|
|
|
debug_assert!(pos <= u32::MAX as usize);
|
2017-11-28 13:19:44 +00:00
|
|
|
AbsoluteBytePos(pos as u32)
|
|
|
|
}
|
|
|
|
|
|
|
|
fn to_usize(self) -> usize {
|
|
|
|
self.0 as usize
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-07-29 16:26:15 +00:00
|
|
|
/// Represents a potentially invalid `DefId`. This is used during incremental
/// compilation to represent a `DefId` from the *previous* compilation session,
/// which may no longer be valid. This is used to help map a `DefPathHash`
/// to a `DefId` in the current compilation session.
#[derive(Encodable, Decodable, Copy, Clone, Debug)]
crate struct RawDefId {
    // We deliberately do not use `CrateNum` and `DefIndex`
    // here, since a crate/index from the previous compilation
    // session may no longer exist.
    pub krate: u32,
    pub index: u32,
}
|
|
|
|
|
2021-06-21 13:30:16 +00:00
|
|
|
/// An `EncodedSourceFileId` is the same as a `StableSourceFileId` except that
/// the source crate is represented as a [StableCrateId] instead of as a
/// `CrateNum`. This way `EncodedSourceFileId` can be encoded and decoded
/// without any additional context, i.e. with a simple `opaque::Decoder` (which
/// is the only thing available when decoding the cache's [Footer]).
#[derive(Encodable, Decodable, Clone, Debug)]
struct EncodedSourceFileId {
    // Hash of the file name, copied from `StableSourceFileId`.
    file_name_hash: u64,
    // Session-independent id of the crate the file belongs to.
    stable_crate_id: StableCrateId,
}
|
|
|
|
|
|
|
|
impl EncodedSourceFileId {
|
|
|
|
fn translate(&self, cnum_map: &UnhashMap<StableCrateId, CrateNum>) -> StableSourceFileId {
|
|
|
|
let cnum = cnum_map[&self.stable_crate_id];
|
|
|
|
StableSourceFileId { file_name_hash: self.file_name_hash, cnum }
|
|
|
|
}
|
|
|
|
|
|
|
|
fn new(tcx: TyCtxt<'_>, file: &SourceFile) -> EncodedSourceFileId {
|
|
|
|
let source_file_id = StableSourceFileId::new(file);
|
|
|
|
EncodedSourceFileId {
|
|
|
|
file_name_hash: source_file_id.file_name_hash,
|
|
|
|
stable_crate_id: tcx.stable_crate_id(source_file_id.cnum),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-06-28 19:12:01 +00:00
|
|
|
impl<'sess> rustc_middle::ty::OnDiskCache<'sess> for OnDiskCache<'sess> {
    /// Creates a new `OnDiskCache` instance from the serialized data in `data`.
    ///
    /// Decodes only the `Footer` eagerly (located via the fixed-size offset in
    /// the last 8 bytes of the file); everything else is decoded lazily from
    /// `serialized_data` on demand.
    fn new(sess: &'sess Session, data: Mmap, start_pos: usize) -> Self {
        debug_assert!(sess.opts.incremental.is_some());

        // Wrap in a scope so we can borrow `data`.
        let footer: Footer = {
            let mut decoder = opaque::Decoder::new(&data[..], start_pos);

            // Decode the *position* of the footer, which can be found in the
            // last 8 bytes of the file.
            decoder.set_position(data.len() - IntEncodedWithFixedSize::ENCODED_SIZE);
            let footer_pos = IntEncodedWithFixedSize::decode(&mut decoder)
                .expect("error while trying to decode footer position")
                .0 as usize;

            // Decode the file footer, which contains all the lookup tables, etc.
            decoder.set_position(footer_pos);

            decode_tagged(&mut decoder, TAG_FILE_FOOTER)
                .expect("error while trying to decode footer position")
        };

        Self {
            serialized_data: RwLock::new(Some(data)),
            file_index_to_stable_id: footer.file_index_to_stable_id,
            file_index_to_file: Default::default(),
            cnum_map: OnceCell::new(),
            source_map: sess.source_map(),
            current_side_effects: Default::default(),
            query_result_index: footer.query_result_index.into_iter().collect(),
            prev_side_effects_index: footer.side_effects_index.into_iter().collect(),
            alloc_decoding_state: AllocDecodingState::new(footer.interpret_alloc_index),
            syntax_contexts: footer.syntax_contexts,
            expn_data: footer.expn_data,
            foreign_expn_data: footer.foreign_expn_data,
            hygiene_context: Default::default(),
            foreign_def_path_hashes: footer.foreign_def_path_hashes,
            latest_foreign_def_path_hashes: Default::default(),
            def_path_hash_to_def_id_cache: Default::default(),
        }
    }

    /// Creates an `OnDiskCache` with no serialized backing data, used when no
    /// previous-session cache exists. All lookups into the previous session
    /// will come up empty.
    fn new_empty(source_map: &'sess SourceMap) -> Self {
        Self {
            serialized_data: RwLock::new(None),
            file_index_to_stable_id: Default::default(),
            file_index_to_file: Default::default(),
            cnum_map: OnceCell::new(),
            source_map,
            current_side_effects: Default::default(),
            query_result_index: Default::default(),
            prev_side_effects_index: Default::default(),
            alloc_decoding_state: AllocDecodingState::new(Vec::new()),
            syntax_contexts: FxHashMap::default(),
            expn_data: UnhashMap::default(),
            foreign_expn_data: UnhashMap::default(),
            hygiene_context: Default::default(),
            foreign_def_path_hashes: Default::default(),
            latest_foreign_def_path_hashes: Default::default(),
            def_path_hash_to_def_id_cache: Default::default(),
        }
    }

    /// Execute all cache promotions and release the serialized backing Mmap.
    ///
    /// Cache promotions require invoking queries, which needs to read the serialized data.
    /// In order to serialize the new on-disk cache, the former on-disk cache file needs to be
    /// deleted, hence we won't be able to refer to its memmapped data.
    fn drop_serialized_data(&self, tcx: TyCtxt<'tcx>) {
        // Register any dep nodes that we reused from the previous session,
        // but didn't `DepNode::construct` in this session. This ensures
        // that their `DefPathHash` to `RawDefId` mappings are registered
        // in 'latest_foreign_def_path_hashes' if necessary, since that
        // normally happens in `DepNode::construct`.
        tcx.dep_graph.register_reused_dep_nodes(tcx);

        // Load everything into memory so we can write it out to the on-disk
        // cache. The vast majority of cacheable query results should already
        // be in memory, so this should be a cheap operation.
        // Do this *before* we clone 'latest_foreign_def_path_hashes', since
        // loading existing queries may cause us to create new DepNodes, which
        // may in turn end up invoking `store_foreign_def_id_hash`
        tcx.dep_graph.exec_cache_promotions(QueryCtxt::from_tcx(tcx));

        // After this point any attempt to decode from the previous session's
        // data will find `None`.
        *self.serialized_data.write() = None;
    }

    /// Writes the complete cache — query results, side effects, allocations,
    /// hygiene data, and the `Footer` — to `encoder`.
    ///
    /// The footer's byte position is appended as the final fixed-size 8 bytes
    /// so that `new` can locate it when reading the file back.
    fn serialize<'tcx>(&self, tcx: TyCtxt<'tcx>, encoder: &mut FileEncoder) -> FileEncodeResult {
        // Serializing the `DepGraph` should not modify it.
        tcx.dep_graph.with_ignore(|| {
            // Allocate `SourceFileIndex`es.
            let (file_to_file_index, file_index_to_stable_id) = {
                let files = tcx.sess.source_map().files();
                let mut file_to_file_index =
                    FxHashMap::with_capacity_and_hasher(files.len(), Default::default());
                let mut file_index_to_stable_id =
                    FxHashMap::with_capacity_and_hasher(files.len(), Default::default());

                for (index, file) in files.iter().enumerate() {
                    let index = SourceFileIndex(index as u32);
                    // Keyed by pointer identity of the `SourceFile`.
                    let file_ptr: *const SourceFile = &**file as *const _;
                    file_to_file_index.insert(file_ptr, index);
                    let source_file_id = EncodedSourceFileId::new(tcx, &file);
                    file_index_to_stable_id.insert(index, source_file_id);
                }

                (file_to_file_index, file_index_to_stable_id)
            };

            let latest_foreign_def_path_hashes = self.latest_foreign_def_path_hashes.lock().clone();
            let hygiene_encode_context = HygieneEncodeContext::default();

            let mut encoder = CacheEncoder {
                tcx,
                encoder,
                type_shorthands: Default::default(),
                predicate_shorthands: Default::default(),
                interpret_allocs: Default::default(),
                source_map: CachingSourceMapView::new(tcx.sess.source_map()),
                file_to_file_index,
                hygiene_context: &hygiene_encode_context,
                latest_foreign_def_path_hashes,
            };

            // Encode query results.
            let mut query_result_index = EncodedDepNodeIndex::new();

            tcx.sess.time("encode_query_results", || -> FileEncodeResult {
                let enc = &mut encoder;
                let qri = &mut query_result_index;
                QueryCtxt::from_tcx(tcx).encode_query_results(enc, qri)
            })?;

            // Encode side effects.
            let side_effects_index: EncodedDepNodeIndex = self
                .current_side_effects
                .borrow()
                .iter()
                .map(
                    |(dep_node_index, side_effects)| -> Result<_, <FileEncoder as Encoder>::Error> {
                        // Record where this entry starts before writing it.
                        let pos = AbsoluteBytePos::new(encoder.position());
                        let dep_node_index = SerializedDepNodeIndex::new(dep_node_index.index());
                        encoder.encode_tagged(dep_node_index, side_effects)?;

                        Ok((dep_node_index, pos))
                    },
                )
                .collect::<Result<_, _>>()?;

            // Encode interned allocations. Encoding an allocation may itself
            // intern further allocation ids, so loop until no new ids appear.
            let interpret_alloc_index = {
                let mut interpret_alloc_index = Vec::new();
                let mut n = 0;
                loop {
                    let new_n = encoder.interpret_allocs.len();
                    // If we have found new IDs, serialize those too.
                    if n == new_n {
                        // Otherwise, abort.
                        break;
                    }
                    interpret_alloc_index.reserve(new_n - n);
                    for idx in n..new_n {
                        let id = encoder.interpret_allocs[idx];
                        let pos = encoder.position() as u32;
                        interpret_alloc_index.push(pos);
                        interpret::specialized_encode_alloc_id(&mut encoder, tcx, id)?;
                    }
                    n = new_n;
                }
                interpret_alloc_index
            };

            let mut syntax_contexts = FxHashMap::default();
            let mut expn_data = UnhashMap::default();
            let mut foreign_expn_data = UnhashMap::default();

            // Encode all hygiene data (`SyntaxContextData` and `ExpnData`) from the current
            // session.

            hygiene_encode_context.encode(
                &mut encoder,
                |encoder, index, ctxt_data| -> FileEncodeResult {
                    let pos = AbsoluteBytePos::new(encoder.position());
                    encoder.encode_tagged(TAG_SYNTAX_CONTEXT, ctxt_data)?;
                    syntax_contexts.insert(index, pos);
                    Ok(())
                },
                |encoder, expn_id, data, hash| -> FileEncodeResult {
                    if expn_id.krate == LOCAL_CRATE {
                        // Local expansions: write the `ExpnData` payload itself.
                        let pos = AbsoluteBytePos::new(encoder.position());
                        encoder.encode_tagged(TAG_EXPN_DATA, data)?;
                        expn_data.insert(hash, pos);
                    } else {
                        // Foreign expansions: only record the id; the data
                        // lives in the foreign crate's metadata.
                        foreign_expn_data.insert(hash, expn_id.local_id.as_u32());
                    }
                    Ok(())
                },
            )?;

            let foreign_def_path_hashes =
                std::mem::take(&mut encoder.latest_foreign_def_path_hashes);

            // Encode the file footer.
            let footer_pos = encoder.position() as u64;
            encoder.encode_tagged(
                TAG_FILE_FOOTER,
                &Footer {
                    file_index_to_stable_id,
                    query_result_index,
                    side_effects_index,
                    interpret_alloc_index,
                    syntax_contexts,
                    expn_data,
                    foreign_expn_data,
                    foreign_def_path_hashes,
                },
            )?;

            // Encode the position of the footer as the last 8 bytes of the
            // file so we know where to look for it.
            IntEncodedWithFixedSize(footer_pos).encode(encoder.encoder)?;

            // DO NOT WRITE ANYTHING TO THE ENCODER AFTER THIS POINT! The address
            // of the footer must be the last thing in the data stream.

            Ok(())
        })
    }

    /// Maps a `DefPathHash` (from this or a previous session) to a `DefId` in
    /// the current session, caching both successes and failures.
    fn def_path_hash_to_def_id(&self, tcx: TyCtxt<'tcx>, hash: DefPathHash) -> Option<DefId> {
        let mut cache = self.def_path_hash_to_def_id_cache.lock();
        match cache.entry(hash) {
            Entry::Occupied(e) => *e.get(),
            Entry::Vacant(e) => {
                debug!("def_path_hash_to_def_id({:?})", hash);
                // Check if the `DefPathHash` corresponds to a definition in the current
                // crate
                if let Some(def_id) =
                    tcx.definitions_untracked().local_def_path_hash_to_def_id(hash)
                {
                    let def_id = def_id.to_def_id();
                    e.insert(Some(def_id));
                    return Some(def_id);
                }
                // This `raw_def_id` represents the `DefId` of this `DefPathHash` in
                // the *previous* compilation session. The `DefPathHash` includes the
                // owning crate, so if the corresponding definition still exists in the
                // current compilation session, the crate is guaranteed to be the same
                // (otherwise, we would compute a different `DefPathHash`).
                let raw_def_id = self.get_raw_def_id(&hash)?;
                debug!("def_path_hash_to_def_id({:?}): raw_def_id = {:?}", hash, raw_def_id);
                // If the owning crate no longer exists, the corresponding definition definitely
                // no longer exists.
                let krate = self.try_remap_cnum(tcx, hash.stable_crate_id())?;
                debug!("def_path_hash_to_def_id({:?}): krate = {:?}", hash, krate);
                // If our `DefPathHash` corresponded to a definition in the local crate,
                // we should have either found it in `local_def_path_hash_to_def_id`, or
                // never attempted to load it in the first place. Any query result or `DepNode`
                // that references a local `DefId` should depend on some HIR-related `DepNode`.
                // If a local definition is removed/modified such that its old `DefPathHash`
                // no longer has a corresponding definition, that HIR-related `DepNode` should
                // end up red. This should prevent us from ever calling
                // `tcx.def_path_hash_to_def_id`, since we'll end up recomputing any
                // queries involved.
                debug_assert_ne!(krate, LOCAL_CRATE);
                // Try to find a definition in the current session, using the previous `DefIndex`
                // as an initial guess.
                let opt_def_id =
                    tcx.cstore_untracked().def_path_hash_to_def_id(krate, raw_def_id.index, hash);
                debug!("def_path_to_def_id({:?}): opt_def_id = {:?}", hash, opt_def_id);
                e.insert(opt_def_id);
                opt_def_id
            }
        }
    }

    /// Ensures a reused dep node's `DefPathHash` → `RawDefId` mapping is
    /// carried forward into the next session's footer when needed.
    fn register_reused_dep_node(&self, tcx: TyCtxt<'sess>, dep_node: &DepNode) {
        // For reused dep nodes, we only need to store the mapping if the node
        // is one whose query key we can reconstruct from the hash. We use the
        // mapping to aid that reconstruction in the next session. While we also
        // use it to decode `DefId`s we encoded in the cache as `DefPathHashes`,
        // they're already registered during `DefId` encoding.
        if dep_node.kind.can_reconstruct_query_key() {
            let hash = DefPathHash(dep_node.hash.into());

            // We can't simply copy the `RawDefId` from `foreign_def_path_hashes` to
            // `latest_foreign_def_path_hashes`, since the `RawDefId` might have
            // changed in the current compilation session (e.g. we've added/removed crates,
            // or added/removed definitions before/after the target definition).
            if let Some(def_id) = self.def_path_hash_to_def_id(tcx, hash) {
                if !def_id.is_local() {
                    self.store_foreign_def_id_hash(def_id, hash);
                }
            }
        }
    }

    /// Records the current-session `DefId` for `hash` so the next session can
    /// use it as an initial guess (see `latest_foreign_def_path_hashes`).
    fn store_foreign_def_id_hash(&self, def_id: DefId, hash: DefPathHash) {
        // We may overwrite an existing entry, but it will have the same value,
        // so it's fine
        self.latest_foreign_def_path_hashes
            .lock()
            .insert(hash, RawDefId { krate: def_id.krate.as_u32(), index: def_id.index.as_u32() });
    }
}
|
|
|
|
|
|
|
|
impl<'sess> OnDiskCache<'sess> {
|
|
|
|
pub fn as_dyn(&self) -> &dyn rustc_middle::ty::OnDiskCache<'sess> {
|
|
|
|
self as _
|
|
|
|
}
|
|
|
|
|
2021-07-23 21:40:26 +00:00
|
|
|
/// Loads a `QuerySideEffects` created during the previous compilation session.
|
|
|
|
pub fn load_side_effects(
|
2019-06-11 21:11:55 +00:00
|
|
|
&self,
|
2019-06-21 21:49:03 +00:00
|
|
|
tcx: TyCtxt<'_>,
|
2019-06-11 21:11:55 +00:00
|
|
|
dep_node_index: SerializedDepNodeIndex,
|
2021-07-23 21:40:26 +00:00
|
|
|
) -> QuerySideEffects {
|
|
|
|
let side_effects: Option<QuerySideEffects> =
|
|
|
|
self.load_indexed(tcx, dep_node_index, &self.prev_side_effects_index, "side_effects");
|
2017-11-28 15:58:02 +00:00
|
|
|
|
2021-07-23 21:40:26 +00:00
|
|
|
side_effects.unwrap_or_default()
|
2017-10-19 12:32:39 +00:00
|
|
|
}
|
|
|
|
|
2021-07-23 21:40:26 +00:00
|
|
|
/// Stores a `QuerySideEffects` emitted during the current compilation session.
|
|
|
|
/// Anything stored like this will be available via `load_side_effects` in
|
2017-10-24 12:51:26 +00:00
|
|
|
/// the next compilation session.
|
2018-12-07 02:04:23 +00:00
|
|
|
#[inline(never)]
|
|
|
|
#[cold]
|
2021-07-23 21:40:26 +00:00
|
|
|
pub fn store_side_effects(&self, dep_node_index: DepNodeIndex, side_effects: QuerySideEffects) {
|
|
|
|
let mut current_side_effects = self.current_side_effects.borrow_mut();
|
|
|
|
let prev = current_side_effects.insert(dep_node_index, side_effects);
|
2017-10-19 12:32:39 +00:00
|
|
|
debug_assert!(prev.is_none());
|
|
|
|
}
|
|
|
|
|
2020-07-29 16:26:15 +00:00
|
|
|
fn get_raw_def_id(&self, hash: &DefPathHash) -> Option<RawDefId> {
|
|
|
|
self.foreign_def_path_hashes.get(hash).copied()
|
|
|
|
}
|
|
|
|
|
2021-05-30 15:54:49 +00:00
|
|
|
    /// Maps a `StableCrateId` from the previous session to the current
    /// session's `CrateNum`, or `None` if the crate no longer exists.
    /// Builds the crate map lazily on first use.
    fn try_remap_cnum(&self, tcx: TyCtxt<'_>, stable_crate_id: StableCrateId) -> Option<CrateNum> {
        let cnum_map = self.cnum_map.get_or_init(|| Self::compute_cnum_map(tcx));
        debug!("try_remap_cnum({:?}): cnum_map={:?}", stable_crate_id, cnum_map);

        cnum_map.get(&stable_crate_id).copied()
    }
|
|
|
|
|
2017-11-28 16:32:28 +00:00
|
|
|
    /// Returns the cached query result if there is something in the cache for
    /// the given `SerializedDepNodeIndex`; otherwise returns `None`.
    ///
    /// Decoding happens lazily out of `serialized_data` via `load_indexed`;
    /// `T` is the query's value type, decodable with a `CacheDecoder`.
    pub fn try_load_query_result<'tcx, T>(
        &self,
        tcx: TyCtxt<'tcx>,
        dep_node_index: SerializedDepNodeIndex,
    ) -> Option<T>
    where
        T: for<'a> Decodable<CacheDecoder<'a, 'tcx>>,
    {
        self.load_indexed(tcx, dep_node_index, &self.query_result_index, "query result")
    }
|
|
|
|
|
2021-07-23 21:40:26 +00:00
|
|
|
    /// Stores side effect emitted during computation of an anonymous query.
    /// Since many anonymous queries can share the same `DepNode`, we aggregate
    /// them -- as opposed to regular queries where we assume that there is a
    /// 1:1 relationship between query-key and `DepNode`.
    #[inline(never)]
    #[cold]
    pub fn store_side_effects_for_anon_node(
        &self,
        dep_node_index: DepNodeIndex,
        side_effects: QuerySideEffects,
    ) {
        let mut current_side_effects = self.current_side_effects.borrow_mut();

        // Append to (rather than replace) whatever has already been recorded
        // for this `DepNodeIndex`.
        let x = current_side_effects.entry(dep_node_index).or_default();
        x.append(side_effects);
    }
|
|
|
|
|
2019-06-11 21:11:55 +00:00
|
|
|
    /// Decodes a cached value of type `T` whose position is recorded in
    /// `index` under `dep_node_index`. Returns `None` if the index has no
    /// entry; panics (via `bug!`) if the entry exists but fails to decode,
    /// since that indicates a corrupt or inconsistent cache.
    fn load_indexed<'tcx, T>(
        &self,
        tcx: TyCtxt<'tcx>,
        dep_node_index: SerializedDepNodeIndex,
        index: &FxHashMap<SerializedDepNodeIndex, AbsoluteBytePos>,
        debug_tag: &'static str,
    ) -> Option<T>
    where
        T: for<'a> Decodable<CacheDecoder<'a, 'tcx>>,
    {
        let pos = index.get(&dep_node_index).cloned()?;

        // The value was written with `encode_tagged`, using the dep-node
        // index itself as the tag; `decode_tagged` re-checks that tag.
        self.with_decoder(tcx, pos, |decoder| match decode_tagged(decoder, dep_node_index) {
            Ok(v) => Some(v),
            Err(e) => bug!("could not decode cached {}: {}", debug_tag, e),
        })
    }
|
|
|
|
|
2021-03-17 21:49:16 +00:00
|
|
|
    /// Constructs a `CacheDecoder` positioned at `pos` inside the serialized
    /// cache data and runs `f` with it. All of the decoder's lookup tables
    /// are borrowed from `self` (note: `'sess` is declared on the enclosing
    /// impl, outside this excerpt).
    fn with_decoder<'a, 'tcx, T, F: for<'s> FnOnce(&mut CacheDecoder<'s, 'tcx>) -> T>(
        &'sess self,
        tcx: TyCtxt<'tcx>,
        pos: AbsoluteBytePos,
        f: F,
    ) -> T
    where
        T: Decodable<CacheDecoder<'a, 'tcx>>,
    {
        // Ensure the StableCrateId -> CrateNum map exists before decoding.
        let cnum_map = self.cnum_map.get_or_init(|| Self::compute_cnum_map(tcx));

        // Hold the read lock on the mmap'd data for the duration of `f`.
        // If the data has already been dropped, decode from an empty slice.
        let serialized_data = self.serialized_data.read();
        let mut decoder = CacheDecoder {
            tcx,
            opaque: opaque::Decoder::new(serialized_data.as_deref().unwrap_or(&[]), pos.to_usize()),
            source_map: self.source_map,
            cnum_map,
            file_index_to_file: &self.file_index_to_file,
            file_index_to_stable_id: &self.file_index_to_stable_id,
            alloc_decoding_session: self.alloc_decoding_state.new_decoding_session(),
            syntax_contexts: &self.syntax_contexts,
            expn_data: &self.expn_data,
            foreign_expn_data: &self.foreign_expn_data,
            hygiene_context: &self.hygiene_context,
        };
        f(&mut decoder)
    }
|
|
|
|
|
2019-09-06 02:57:44 +00:00
|
|
|
    // This function builds a mapping from previous-session `StableCrateId`s to
    // current-session `CrateNum`s. `CrateNum`s from the previous `Session`
    // that don't occur in the current one simply have no entry, so lookups
    // for them return `None`.
    fn compute_cnum_map(tcx: TyCtxt<'_>) -> UnhashMap<StableCrateId, CrateNum> {
        // Reading crate metadata must not create dep-graph edges here.
        tcx.dep_graph.with_ignore(|| {
            tcx.crates(())
                .iter()
                // `crates(())` yields only foreign crates; include the local one.
                .chain(std::iter::once(&LOCAL_CRATE))
                .map(|&cnum| {
                    let hash = tcx.def_path_hash(cnum.as_def_id()).stable_crate_id();
                    (hash, cnum)
                })
                .collect()
        })
    }
|
2017-10-19 12:32:39 +00:00
|
|
|
}
|
|
|
|
|
2017-11-13 15:35:51 +00:00
|
|
|
//- DECODING -------------------------------------------------------------------
|
|
|
|
|
2020-12-19 22:25:24 +00:00
|
|
|
/// A decoder that can read from the incremental compilation cache. It is similar to the one
/// we use for crate metadata decoding in that it can rebase spans and eventually
/// will also handle things that contain `Ty` instances.
pub struct CacheDecoder<'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    // Underlying byte decoder, positioned somewhere in the serialized cache data.
    opaque: opaque::Decoder<'a>,
    // Used to resolve stable file ids back to `SourceFile`s of this session.
    source_map: &'a SourceMap,
    // Previous-session `StableCrateId` -> current-session `CrateNum`.
    cnum_map: &'a UnhashMap<StableCrateId, CrateNum>,
    // Memoization cache for `file_index_to_file` lookups.
    file_index_to_file: &'a Lock<FxHashMap<SourceFileIndex, Lrc<SourceFile>>>,
    file_index_to_stable_id: &'a FxHashMap<SourceFileIndex, EncodedSourceFileId>,
    alloc_decoding_session: AllocDecodingSession<'a>,
    // Byte positions of serialized `SyntaxContextData`, keyed by raw ctxt id.
    syntax_contexts: &'a FxHashMap<u32, AbsoluteBytePos>,
    // Byte positions of serialized local `ExpnData`, keyed by `ExpnHash`.
    expn_data: &'a UnhashMap<ExpnHash, AbsoluteBytePos>,
    // Index guesses for foreign expansions, keyed by `ExpnHash`.
    foreign_expn_data: &'a UnhashMap<ExpnHash, u32>,
    hygiene_context: &'a HygieneDecodeContext,
}
|
|
|
|
|
2019-06-11 19:03:44 +00:00
|
|
|
impl<'a, 'tcx> CacheDecoder<'a, 'tcx> {
|
2018-08-18 10:13:52 +00:00
|
|
|
fn file_index_to_file(&self, index: SourceFileIndex) -> Lrc<SourceFile> {
|
2017-11-24 13:00:33 +00:00
|
|
|
let CacheDecoder {
|
2018-02-13 14:50:37 +00:00
|
|
|
ref file_index_to_file,
|
2017-11-24 13:00:33 +00:00
|
|
|
ref file_index_to_stable_id,
|
2018-08-18 10:14:14 +00:00
|
|
|
ref source_map,
|
2021-06-21 13:30:16 +00:00
|
|
|
ref cnum_map,
|
2017-11-24 13:00:33 +00:00
|
|
|
..
|
|
|
|
} = *self;
|
|
|
|
|
2019-12-22 22:42:04 +00:00
|
|
|
file_index_to_file
|
|
|
|
.borrow_mut()
|
|
|
|
.entry(index)
|
|
|
|
.or_insert_with(|| {
|
2021-06-21 13:30:16 +00:00
|
|
|
let stable_id = file_index_to_stable_id[&index].translate(cnum_map);
|
2019-12-22 22:42:04 +00:00
|
|
|
source_map
|
|
|
|
.source_file_by_stable_id(stable_id)
|
|
|
|
.expect("failed to lookup `SourceFile` in new context")
|
|
|
|
})
|
|
|
|
.clone()
|
2017-10-19 12:32:39 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-09-06 20:05:37 +00:00
|
|
|
// A `Decoder` that also exposes its current byte position; needed by
// `decode_tagged` to verify how many bytes a value occupied.
trait DecoderWithPosition: Decoder {
    fn position(&self) -> usize;
}

impl<'a> DecoderWithPosition for opaque::Decoder<'a> {
    fn position(&self) -> usize {
        self.position()
    }
}

impl<'a, 'tcx> DecoderWithPosition for CacheDecoder<'a, 'tcx> {
    fn position(&self) -> usize {
        // Delegate to the underlying opaque decoder.
        self.opaque.position()
    }
}
|
|
|
|
|
2019-09-06 02:57:44 +00:00
|
|
|
// Decodes something that was encoded with `encode_tagged()` and verify that the
// tag matches and the correct amount of bytes was read.
fn decode_tagged<D, T, V>(decoder: &mut D, expected_tag: T) -> Result<V, D::Error>
where
    T: Decodable<D> + Eq + std::fmt::Debug,
    V: Decodable<D>,
    D: DecoderWithPosition,
{
    let start_pos = decoder.position();

    // Layout written by `encode_tagged`: tag, value, then the byte length
    // of (tag + value) as a u64.
    let actual_tag = T::decode(decoder)?;
    assert_eq!(actual_tag, expected_tag);
    let value = V::decode(decoder)?;
    let end_pos = decoder.position();

    // The length itself is not included in `expected_len`, so it is read
    // after `end_pos` was taken.
    let expected_len: u64 = Decodable::decode(decoder)?;
    assert_eq!((end_pos - start_pos) as u64, expected_len);

    Ok(value)
}
|
|
|
|
|
2019-09-06 02:57:44 +00:00
|
|
|
impl<'a, 'tcx> TyDecoder<'tcx> for CacheDecoder<'a, 'tcx> {
    // Cache data is same-session, so cross-crate clearing does not apply.
    const CLEAR_CROSS_CRATE: bool = false;

    #[inline]
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    #[inline]
    fn position(&self) -> usize {
        self.opaque.position()
    }

    #[inline]
    fn peek_byte(&self) -> u8 {
        // Reads without advancing; panics if the decoder is at end-of-data.
        self.opaque.data[self.opaque.position()]
    }

    /// Returns the `Ty` cached for the given shorthand position, decoding and
    /// caching it via `or_insert_with` on a miss.
    fn cached_ty_for_shorthand<F>(
        &mut self,
        shorthand: usize,
        or_insert_with: F,
    ) -> Result<Ty<'tcx>, Self::Error>
    where
        F: FnOnce(&mut Self) -> Result<Ty<'tcx>, Self::Error>,
    {
        let tcx = self.tcx();

        // `cnum: None` marks entries belonging to the incr-comp cache rather
        // than to a particular crate's metadata.
        let cache_key = ty::CReaderCacheKey { cnum: None, pos: shorthand };

        if let Some(&ty) = tcx.ty_rcache.borrow().get(&cache_key) {
            return Ok(ty);
        }

        let ty = or_insert_with(self)?;
        // This may overwrite the entry, but it should overwrite with the same value.
        tcx.ty_rcache.borrow_mut().insert_same(cache_key, ty);
        Ok(ty)
    }

    /// Temporarily repositions the decoder at `pos`, runs `f`, then restores
    /// the previous position.
    fn with_position<F, R>(&mut self, pos: usize, f: F) -> R
    where
        F: FnOnce(&mut Self) -> R,
    {
        debug_assert!(pos < self.opaque.data.len());

        let new_opaque = opaque::Decoder::new(self.opaque.data, pos);
        let old_opaque = mem::replace(&mut self.opaque, new_opaque);
        let r = f(self);
        self.opaque = old_opaque;
        r
    }

    fn decode_alloc_id(&mut self) -> Result<interpret::AllocId, Self::Error> {
        // Copy the session handle out first so `self` can be re-borrowed
        // mutably for the actual decode.
        let alloc_decoding_session = self.alloc_decoding_session;
        alloc_decoding_session.decode_alloc_id(self)
    }
}
|
|
|
|
|
2021-06-28 19:12:01 +00:00
|
|
|
// Generates the remaining decoder plumbing for `CacheDecoder` via the shared
// rustc_middle macro (presumably the standard `Decoder` methods forwarding to
// `opaque` — see `rustc_middle::ty::codec` for the expansion).
rustc_middle::implement_ty_decoder!(CacheDecoder<'a, 'tcx>);
|
2017-11-16 14:26:00 +00:00
|
|
|
|
2020-12-17 05:03:45 +00:00
|
|
|
// This ensures that the `Decodable<opaque::Decoder>::decode` specialization for `Vec<u8>` is used
// when a `CacheDecoder` is passed to `Decodable::decode`. Unfortunately, we have to manually opt
// into specializations this way, given how `CacheDecoder` and the decoding traits currently work.
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for Vec<u8> {
    fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> {
        // Bypass `CacheDecoder` and read directly from the opaque decoder,
        // which has a fast byte-slice path.
        Decodable::decode(&mut d.opaque)
    }
}
|
|
|
|
|
2020-06-11 14:49:57 +00:00
|
|
|
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for SyntaxContext {
    fn decode(decoder: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> {
        // Borrow the position table separately so the closure below can use
        // it while `decoder` is mutably borrowed.
        let syntax_contexts = decoder.syntax_contexts;
        rustc_span::hygiene::decode_syntax_context(decoder, decoder.hygiene_context, |this, id| {
            // This closure is invoked if we haven't already decoded the data for the `SyntaxContext` we are deserializing.
            // We look up the position of the associated `SyntaxData` and decode it.
            let pos = syntax_contexts.get(&id).unwrap();
            this.with_position(pos.to_usize(), |decoder| {
                let data: SyntaxContextData = decode_tagged(decoder, TAG_SYNTAX_CONTEXT)?;
                Ok(data)
            })
        })
    }
}
|
|
|
|
|
2020-06-11 14:49:57 +00:00
|
|
|
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for ExpnId {
    fn decode(decoder: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> {
        // Expansions are serialized by their session-stable `ExpnHash`.
        let hash = ExpnHash::decode(decoder)?;
        if hash.is_root() {
            return Ok(ExpnId::root());
        }

        // Fast path: the expansion was already registered in this session.
        if let Some(expn_id) = ExpnId::from_hash(hash) {
            return Ok(expn_id);
        }

        let krate = decoder.cnum_map[&hash.stable_crate_id()];

        let expn_id = if krate == LOCAL_CRATE {
            // We look up the position of the associated `ExpnData` and decode it.
            let pos = decoder
                .expn_data
                .get(&hash)
                .unwrap_or_else(|| panic!("Bad hash {:?} (map {:?})", hash, decoder.expn_data));

            let data: ExpnData = decoder
                .with_position(pos.to_usize(), |decoder| decode_tagged(decoder, TAG_EXPN_DATA))?;
            rustc_span::hygiene::register_local_expn_id(data, hash)
        } else {
            // Foreign expansions live in crate metadata; the recorded index
            // is only a guess and is verified against the hash.
            let index_guess = decoder.foreign_expn_data[&hash];
            decoder.tcx.cstore_untracked().expn_hash_to_expn_id(krate, index_guess, hash)
        };

        // Debug-only consistency check: re-hash the resolved `ExpnData` and
        // compare with the hash we decoded.
        #[cfg(debug_assertions)]
        {
            use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
            let mut hcx = decoder.tcx.create_stable_hashing_context();
            let mut hasher = StableHasher::new();
            hcx.while_hashing_spans(true, |hcx| expn_id.expn_data().hash_stable(hcx, &mut hasher));
            let local_hash: u64 = hasher.finish();
            debug_assert_eq!(hash.local_hash(), local_hash);
        }

        Ok(expn_id)
    }
}
|
|
|
|
|
2020-06-11 14:49:57 +00:00
|
|
|
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for Span {
    fn decode(decoder: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> {
        // Wire format (must mirror the `Encodable` impl): ctxt, optional
        // parent, one-byte tag, then tag-specific payload.
        let ctxt = SyntaxContext::decode(decoder)?;
        let parent = Option::<LocalDefId>::decode(decoder)?;
        let tag: u8 = Decodable::decode(decoder)?;

        if tag == TAG_PARTIAL_SPAN {
            // Position information was not (or could not be) recorded; only
            // ctxt and parent survive.
            return Ok(Span::new(BytePos(0), BytePos(0), ctxt, parent));
        } else if tag == TAG_RELATIVE_SPAN {
            // Offsets relative to the enclosing definition's span.
            let dlo = u32::decode(decoder)?;
            let dto = u32::decode(decoder)?;

            let enclosing = decoder.tcx.definitions_untracked().def_span(parent.unwrap()).decode();
            let span = Span::new(
                enclosing.lo + BytePos::from_u32(dlo),
                enclosing.lo + BytePos::from_u32(dto),
                ctxt,
                parent,
            );

            return Ok(span);
        } else {
            debug_assert_eq!(tag, TAG_FULL_SPAN);
        }

        // TAG_FULL_SPAN: file + line/column of the low end, plus the length.
        let file_lo_index = SourceFileIndex::decode(decoder)?;
        let line_lo = usize::decode(decoder)?;
        let col_lo = BytePos::decode(decoder)?;
        let len = BytePos::decode(decoder)?;

        let file_lo = decoder.file_index_to_file(file_lo_index);
        // `lines` stores the start position of each line; line numbers are 1-based.
        let lo = file_lo.lines[line_lo - 1] + col_lo;
        let hi = lo + len;

        Ok(Span::new(lo, hi, ctxt, parent))
    }
}
|
2017-11-13 15:35:51 +00:00
|
|
|
|
2020-06-11 14:49:57 +00:00
|
|
|
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for CrateNum {
|
|
|
|
fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> {
|
2021-05-30 15:54:49 +00:00
|
|
|
let stable_id = StableCrateId::decode(d)?;
|
|
|
|
let cnum = d.cnum_map[&stable_id];
|
|
|
|
Ok(cnum)
|
2019-08-24 15:25:55 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-11-14 11:03:57 +00:00
|
|
|
// This impl makes sure that we get a runtime error when we try decode a
// `DefIndex` that is not contained in a `DefId`. Such a case would be problematic
// because we would not know how to transform the `DefIndex` to the current
// context.
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for DefIndex {
    fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<DefIndex, String> {
        // Always an error: bare `DefIndex`es cannot be remapped across sessions.
        Err(d.error("trying to decode `DefIndex` outside the context of a `DefId`"))
    }
}
|
|
|
|
|
2019-09-06 02:57:44 +00:00
|
|
|
// Both the `CrateNum` and the `DefIndex` of a `DefId` can change in between two
// compilation sessions. We use the `DefPathHash`, which is stable across
// sessions, to map the old `DefId` to the new one.
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for DefId {
    fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> {
        // Load the `DefPathHash`, which is what we encoded the `DefId` as.
        let def_path_hash = DefPathHash::decode(d)?;

        // Using the `DefPathHash`, we can lookup the new `DefId`.
        // Subtle: We only encode a `DefId` as part of a query result.
        // If we get to this point, then all of the query inputs were green,
        // which means that the definition with this hash is guaranteed to
        // still exist in the current compilation session.
        Ok(d.tcx()
            .on_disk_cache
            .as_ref()
            .unwrap()
            .def_path_hash_to_def_id(d.tcx(), def_path_hash)
            .unwrap())
    }
}
|
|
|
|
|
2020-06-11 14:49:57 +00:00
|
|
|
// The following impls forward `Decodable` to `RefDecodable` for arena- or
// tcx-allocated reference types, so that `&'tcx ...` values stored in query
// results can be decoded through a `CacheDecoder`.
impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx FxHashSet<LocalDefId> {
    fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> {
        RefDecodable::decode(d)
    }
}

impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>>
    for &'tcx IndexVec<mir::Promoted, mir::Body<'tcx>>
{
    fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> {
        RefDecodable::decode(d)
    }
}

impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [mir::abstract_const::Node<'tcx>] {
    fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> {
        RefDecodable::decode(d)
    }
}

impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [(ty::Predicate<'tcx>, Span)] {
    fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> {
        RefDecodable::decode(d)
    }
}

impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [rustc_ast::InlineAsmTemplatePiece] {
    fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> {
        RefDecodable::decode(d)
    }
}

impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for &'tcx [Span] {
    fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Result<Self, String> {
        RefDecodable::decode(d)
    }
}
|
|
|
|
|
2017-11-13 15:35:51 +00:00
|
|
|
//- ENCODING -------------------------------------------------------------------
|
|
|
|
|
2020-10-11 08:34:13 +00:00
|
|
|
// An `Encoder` that also exposes its current byte position, used by
// `CacheEncoder` to record absolute positions of cache entries.
pub trait OpaqueEncoder: Encoder {
    fn position(&self) -> usize;
}

impl OpaqueEncoder for FileEncoder {
    #[inline]
    fn position(&self) -> usize {
        FileEncoder::position(self)
    }
}
|
|
|
|
|
2020-12-19 22:25:24 +00:00
|
|
|
/// An encoder that can write to the incremental compilation cache.
pub struct CacheEncoder<'a, 'tcx, E: OpaqueEncoder> {
    tcx: TyCtxt<'tcx>,
    // Underlying byte sink (e.g. a `FileEncoder`).
    encoder: &'a mut E,
    // Shorthand tables used by the `TyEncoder` impl to deduplicate types
    // and predicates in the output.
    type_shorthands: FxHashMap<Ty<'tcx>, usize>,
    predicate_shorthands: FxHashMap<ty::PredicateKind<'tcx>, usize>,
    // `AllocId`s interned during encoding; only their index is written inline.
    interpret_allocs: FxIndexSet<interpret::AllocId>,
    // Caching view used to map byte positions to (file, line, col) for spans.
    source_map: CachingSourceMapView<'tcx>,
    // Maps `SourceFile` identity (by pointer) to its serialized index.
    file_to_file_index: FxHashMap<*const SourceFile, SourceFileIndex>,
    hygiene_context: &'a HygieneEncodeContext,
    // Foreign `DefPathHash` -> `RawDefId` records collected while encoding,
    // so the next session can map hashes back to `DefId`s.
    latest_foreign_def_path_hashes: UnhashMap<DefPathHash, RawDefId>,
}
|
|
|
|
|
2019-06-11 19:03:44 +00:00
|
|
|
impl<'a, 'tcx, E> CacheEncoder<'a, 'tcx, E>
where
    E: 'a + OpaqueEncoder,
{
    /// Looks up the serialized index for a `SourceFile` by pointer identity.
    /// Panics if the file was not registered in `file_to_file_index`.
    fn source_file_index(&mut self, source_file: Lrc<SourceFile>) -> SourceFileIndex {
        self.file_to_file_index[&(&*source_file as *const SourceFile)]
    }

    /// Encode something with additional information that allows to do some
    /// sanity checks when decoding the data again. This method will first
    /// encode the specified tag, then the given value, then the number of
    /// bytes taken up by tag and value. On decoding, we can then verify that
    /// we get the expected tag and read the expected number of bytes.
    fn encode_tagged<T: Encodable<Self>, V: Encodable<Self>>(
        &mut self,
        tag: T,
        value: &V,
    ) -> Result<(), E::Error> {
        let start_pos = self.position();

        tag.encode(self)?;
        value.encode(self)?;

        // Trailing length covers tag + value, matching `decode_tagged`.
        let end_pos = self.position();
        ((end_pos - start_pos) as u64).encode(self)
    }
}
|
|
|
|
|
2020-06-11 14:49:57 +00:00
|
|
|
impl<'a, 'tcx, E> Encodable<CacheEncoder<'a, 'tcx, E>> for SyntaxContext
where
    E: 'a + OpaqueEncoder,
{
    fn encode(&self, s: &mut CacheEncoder<'a, 'tcx, E>) -> Result<(), E::Error> {
        // Hygiene data itself is written out later; here only the raw ctxt id
        // is emitted and the data is scheduled via the encode context.
        rustc_span::hygiene::raw_encode_syntax_context(*self, s.hygiene_context, s)
    }
}
|
|
|
|
|
2020-06-11 14:49:57 +00:00
|
|
|
impl<'a, 'tcx, E> Encodable<CacheEncoder<'a, 'tcx, E>> for ExpnId
where
    E: 'a + OpaqueEncoder,
{
    fn encode(&self, s: &mut CacheEncoder<'a, 'tcx, E>) -> Result<(), E::Error> {
        // Queue the `ExpnData` for serialization; only the session-stable
        // hash is written inline (mirrors the `Decodable` impl).
        s.hygiene_context.schedule_expn_data_for_encoding(*self);
        self.expn_hash().encode(s)
    }
}
|
|
|
|
|
2020-06-11 14:49:57 +00:00
|
|
|
impl<'a, 'tcx, E> Encodable<CacheEncoder<'a, 'tcx, E>> for Span
where
    E: 'a + OpaqueEncoder,
{
    fn encode(&self, s: &mut CacheEncoder<'a, 'tcx, E>) -> Result<(), E::Error> {
        // Wire format (must mirror the `Decodable` impl): ctxt, parent, tag,
        // then tag-specific payload.
        let span_data = self.decode();
        span_data.ctxt.encode(s)?;
        span_data.parent.encode(s)?;

        if span_data.is_dummy() {
            return TAG_PARTIAL_SPAN.encode(s);
        }

        // If the span lies within its parent definition's span, store it as
        // offsets relative to that span — robust against file-wide shifts.
        if let Some(parent) = span_data.parent {
            let enclosing = s.tcx.definitions_untracked().def_span(parent).decode();
            if enclosing.contains(span_data) {
                TAG_RELATIVE_SPAN.encode(s)?;
                (span_data.lo - enclosing.lo).to_u32().encode(s)?;
                (span_data.hi - enclosing.lo).to_u32().encode(s)?;
                return Ok(());
            }
        }

        // A span crossing a file boundary (or with no resolvable position)
        // cannot be stored in full; degrade it to a partial span.
        let pos = s.source_map.byte_pos_to_line_and_col(span_data.lo);
        let partial_span = match &pos {
            Some((file_lo, _, _)) => !file_lo.contains(span_data.hi),
            None => true,
        };

        if partial_span {
            return TAG_PARTIAL_SPAN.encode(s);
        }

        let (file_lo, line_lo, col_lo) = pos.unwrap();

        let len = span_data.hi - span_data.lo;

        let source_file_index = s.source_file_index(file_lo);

        TAG_FULL_SPAN.encode(s)?;
        source_file_index.encode(s)?;
        line_lo.encode(s)?;
        col_lo.encode(s)?;
        len.encode(s)
    }
}
|
|
|
|
|
2020-06-11 14:49:57 +00:00
|
|
|
impl<'a, 'tcx, E> TyEncoder<'tcx> for CacheEncoder<'a, 'tcx, E>
where
    E: 'a + OpaqueEncoder,
{
    // Same-session data: no cross-crate clearing needed (matches the decoder).
    const CLEAR_CROSS_CRATE: bool = false;

    fn position(&self) -> usize {
        self.encoder.position()
    }
    fn type_shorthands(&mut self) -> &mut FxHashMap<Ty<'tcx>, usize> {
        &mut self.type_shorthands
    }
    fn predicate_shorthands(&mut self) -> &mut FxHashMap<ty::PredicateKind<'tcx>, usize> {
        &mut self.predicate_shorthands
    }
    fn encode_alloc_id(&mut self, alloc_id: &interpret::AllocId) -> Result<(), Self::Error> {
        // Intern the alloc id and write only its index; the allocation data
        // itself is serialized elsewhere from `interpret_allocs`.
        let (index, _) = self.interpret_allocs.insert_full(*alloc_id);

        index.encode(self)
    }
}
|
|
|
|
|
2021-05-30 15:54:49 +00:00
|
|
|
impl<'a, 'tcx, E> Encodable<CacheEncoder<'a, 'tcx, E>> for CrateNum
where
    E: 'a + OpaqueEncoder,
{
    fn encode(&self, s: &mut CacheEncoder<'a, 'tcx, E>) -> Result<(), E::Error> {
        // `CrateNum`s are not stable across sessions; serialize the stable
        // crate id instead (remapped back on decode via `cnum_map`).
        s.tcx.stable_crate_id(*self).encode(s)
    }
}
|
|
|
|
|
2020-06-11 14:49:57 +00:00
|
|
|
impl<'a, 'tcx, E> Encodable<CacheEncoder<'a, 'tcx, E>> for DefId
where
    E: 'a + OpaqueEncoder,
{
    fn encode(&self, s: &mut CacheEncoder<'a, 'tcx, E>) -> Result<(), E::Error> {
        // `DefId`s are serialized as their session-stable `DefPathHash`.
        let def_path_hash = s.tcx.def_path_hash(*self);
        // Store additional information when we encode a foreign `DefId`,
        // so that we can map its `DefPathHash` back to a `DefId` in the next
        // compilation session.
        if !self.is_local() {
            s.latest_foreign_def_path_hashes.insert(
                def_path_hash,
                RawDefId { krate: self.krate.as_u32(), index: self.index.as_u32() },
            );
        }
        def_path_hash.encode(s)
    }
}
|
|
|
|
|
2020-06-11 14:49:57 +00:00
|
|
|
// Mirrors the decoding side: a bare `DefIndex` must never be serialized on
// its own, only as part of a `DefId` (which is encoded as a `DefPathHash`).
impl<'a, 'tcx, E> Encodable<CacheEncoder<'a, 'tcx, E>> for DefIndex
where
    E: 'a + OpaqueEncoder,
{
    fn encode(&self, _: &mut CacheEncoder<'a, 'tcx, E>) -> Result<(), E::Error> {
        bug!("encoding `DefIndex` without context");
    }
}
|
|
|
|
|
2017-11-13 15:35:51 +00:00
|
|
|
// Generates forwarding `Encoder` methods (`emit_usize(usize)`, ...) that
// delegate to `self.encoder`. Must be invoked inside an impl of a trait with
// an associated `Error` type and with an `encoder` field in scope.
//
// Fix: the `#[inline]` attribute used to sit *before* the `$(...)*`
// repetition, so after expansion it attached only to the first generated
// method. Moving it inside the repetition marks every forwarder `#[inline]`,
// which is the evident intent.
macro_rules! encoder_methods {
    ($($name:ident($ty:ty);)*) => {
        $(
            #[inline]
            fn $name(&mut self, value: $ty) -> Result<(), Self::Error> {
                self.encoder.$name(value)
            }
        )*
    }
}
|
|
|
|
|
2019-06-11 19:03:44 +00:00
|
|
|
// The base `Encoder` impl: every primitive emit simply forwards to the
// underlying `OpaqueEncoder` via `encoder_methods!`.
impl<'a, 'tcx, E> Encoder for CacheEncoder<'a, 'tcx, E>
where
    E: 'a + OpaqueEncoder,
{
    type Error = E::Error;

    #[inline]
    fn emit_unit(&mut self) -> Result<(), Self::Error> {
        // Unit carries no data; nothing is written.
        Ok(())
    }

    encoder_methods! {
        emit_usize(usize);
        emit_u128(u128);
        emit_u64(u64);
        emit_u32(u32);
        emit_u16(u16);
        emit_u8(u8);

        emit_isize(isize);
        emit_i128(i128);
        emit_i64(i64);
        emit_i32(i32);
        emit_i16(i16);
        emit_i8(i8);

        emit_bool(bool);
        emit_f64(f64);
        emit_f32(f32);
        emit_char(char);
        emit_str(&str);
        emit_raw_bytes(&[u8]);
    }
}
|
2017-11-14 13:50:03 +00:00
|
|
|
|
2020-12-07 01:30:55 +00:00
|
|
|
// This ensures that the `Encodable<opaque::FileEncoder>::encode` specialization for byte slices
// is used when a `CacheEncoder` having an `opaque::FileEncoder` is passed to `Encodable::encode`.
// Unfortunately, we have to manually opt into specializations this way, given how `CacheEncoder`
// and the encoding traits currently work.
impl<'a, 'tcx> Encodable<CacheEncoder<'a, 'tcx, FileEncoder>> for [u8] {
    fn encode(&self, e: &mut CacheEncoder<'a, 'tcx, FileEncoder>) -> FileEncodeResult {
        // Write directly through the underlying `FileEncoder`.
        self.encode(e.encoder)
    }
}
|
|
|
|
|
2021-01-19 19:04:40 +00:00
|
|
|
/// Serializes all cached results of query `Q` into `encoder`, recording the
/// byte position of each entry in `query_result_index` so it can be looked
/// up by `SerializedDepNodeIndex` in the next session. Returns the first
/// encoding error, if any.
pub fn encode_query_results<'a, 'tcx, CTX, Q>(
    tcx: CTX,
    encoder: &mut CacheEncoder<'a, 'tcx, FileEncoder>,
    query_result_index: &mut EncodedDepNodeIndex,
) -> FileEncodeResult
where
    CTX: QueryContext + 'tcx,
    Q: super::QueryDescription<CTX> + super::QueryAccessors<CTX>,
    Q::Value: Encodable<CacheEncoder<'a, 'tcx, FileEncoder>>,
{
    let _timer = tcx
        .dep_context()
        .profiler()
        .extra_verbose_generic_activity("encode_query_results_for", std::any::type_name::<Q>());

    // All executions of `Q` must have completed before serialization starts.
    assert!(Q::query_state(tcx).all_inactive());
    let cache = Q::query_cache(tcx);
    // `iter_results` takes a plain callback, so failure is threaded through
    // a captured `res`; after the first error the remaining entries are skipped.
    let mut res = Ok(());
    cache.iter_results(&mut |key, value, dep_node| {
        if res.is_err() {
            return;
        }
        if Q::cache_on_disk(tcx, &key, Some(value)) {
            let dep_node = SerializedDepNodeIndex::new(dep_node.index());

            // Record position of the cache entry.
            query_result_index.push((dep_node, AbsoluteBytePos::new(encoder.encoder.position())));

            // Encode the type check tables with the `SerializedDepNodeIndex`
            // as tag.
            match encoder.encode_tagged(dep_node, value) {
                Ok(()) => {}
                Err(e) => {
                    res = Err(e);
                }
            }
        }
    });

    res
}
|