diff --git a/src/librustc/middle/cstore.rs b/src/librustc/middle/cstore.rs index dbbd5eca483..658825d417e 100644 --- a/src/librustc/middle/cstore.rs +++ b/src/librustc/middle/cstore.rs @@ -138,11 +138,11 @@ pub trait CrateStore<'tcx> { fn item_super_predicates<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> ty::GenericPredicates<'tcx>; fn item_generics<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) - -> &'tcx ty::Generics<'tcx>; + -> ty::Generics<'tcx>; fn item_attrs(&self, def_id: DefId) -> Vec; fn trait_def<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)-> ty::TraitDef<'tcx>; fn adt_def<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> ty::AdtDefMaster<'tcx>; - fn fn_arg_names(&self, did: DefId) -> Vec; + fn fn_arg_names(&self, did: DefId) -> Vec; fn inherent_implementations_for_type(&self, def_id: DefId) -> Vec; // trait info @@ -299,13 +299,13 @@ impl<'tcx> CrateStore<'tcx> for DummyCrateStore { fn item_super_predicates<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> ty::GenericPredicates<'tcx> { bug!("item_super_predicates") } fn item_generics<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) - -> &'tcx ty::Generics<'tcx> { bug!("item_generics") } + -> ty::Generics<'tcx> { bug!("item_generics") } fn item_attrs(&self, def_id: DefId) -> Vec { bug!("item_attrs") } fn trait_def<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)-> ty::TraitDef<'tcx> { bug!("trait_def") } fn adt_def<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> ty::AdtDefMaster<'tcx> { bug!("adt_def") } - fn fn_arg_names(&self, did: DefId) -> Vec { bug!("fn_arg_names") } + fn fn_arg_names(&self, did: DefId) -> Vec { bug!("fn_arg_names") } fn inherent_implementations_for_type(&self, def_id: DefId) -> Vec { vec![] } // trait info diff --git a/src/librustc/ty/mod.rs b/src/librustc/ty/mod.rs index 8171a99beb9..8a9b2846ac6 100644 --- a/src/librustc/ty/mod.rs +++ b/src/librustc/ty/mod.rs @@ -737,6 +737,9 @@ pub struct GenericPredicates<'tcx> { pub predicates: Vec>, } +impl<'tcx> serialize::UseSpecializedEncodable for GenericPredicates<'tcx> {} +impl<'tcx> serialize::UseSpecializedDecodable for GenericPredicates<'tcx> {} + impl<'a, 'gcx, 'tcx> GenericPredicates<'tcx> { pub fn instantiate(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, substs: &Substs<'tcx>) -> InstantiatedPredicates<'tcx> { @@ -2457,7 +2460,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { pub fn lookup_generics(self, did: DefId) -> &'gcx Generics<'gcx> { lookup_locally_or_in_crate_store( "generics", did, &self.generics, - || self.sess.cstore.item_generics(self.global_tcx(), did)) + || self.alloc_generics(self.sess.cstore.item_generics(self.global_tcx(), did))) } /// Given the did of an item, returns its full set of predicates. 
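Note on the hunk above: `CrateStore::item_generics` now returns an owned `ty::Generics<'tcx>` instead of an arena reference, and `lookup_generics` wraps the crate-store call in `self.alloc_generics(...)`, so decoding no longer needs access to the arena; the caller interns the decoded value on the local `TyCtxt`, and `lookup_locally_or_in_crate_store` memoizes it per `DefId`. The sketch below is a minimal, self-contained illustration of that "decode owned, then cache/intern locally" pattern — `MetadataStore`, `Context`, `Generics`, and `lookup_generics` are hypothetical stand-ins for this example only, not the real compiler types (which use a true arena rather than `Rc`).

use std::cell::RefCell;
use std::collections::HashMap;
use std::rc::Rc;

type DefId = u32;

#[derive(Debug, Clone)]
struct Generics {
    parent: Option<DefId>,
    type_params: Vec<String>,
}

// Stands in for the external-crate metadata: decoding always produces an
// owned value, never a reference into someone else's arena.
struct MetadataStore;

impl MetadataStore {
    fn item_generics(&self, def: DefId) -> Generics {
        // Pretend this was deserialized from the crate's metadata blob.
        Generics { parent: None, type_params: vec![format!("T{}", def)] }
    }
}

// Stands in for the local context (TyCtxt in rustc): owns a per-item cache
// so each item's generics are decoded at most once.
struct Context {
    cstore: MetadataStore,
    generics_cache: RefCell<HashMap<DefId, Rc<Generics>>>,
}

impl Context {
    fn lookup_generics(&self, def: DefId) -> Rc<Generics> {
        if let Some(g) = self.generics_cache.borrow().get(&def) {
            return g.clone();
        }
        // Cache miss: decode an owned value and "intern" it locally.
        let decoded = Rc::new(self.cstore.item_generics(def));
        self.generics_cache.borrow_mut().insert(def, decoded.clone());
        decoded
    }
}

fn main() {
    let cx = Context {
        cstore: MetadataStore,
        generics_cache: RefCell::new(HashMap::new()),
    };
    let g1 = cx.lookup_generics(7);
    let g2 = cx.lookup_generics(7);
    // Repeated lookups share the one decoded value.
    assert!(Rc::ptr_eq(&g1, &g2));
    println!("{:?}", g1);
}

The same idea explains the new `UseSpecializedEncodable`/`UseSpecializedDecodable` impls for `GenericPredicates<'tcx>` in this hunk: by opting out of the derived encoding, the metadata encoder and decoder can intercept these values and apply their own representation (e.g. the shorthand scheme used later in decoder.rs) while callers still receive plain owned values.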
diff --git a/src/librustc_metadata/Cargo.toml b/src/librustc_metadata/Cargo.toml index fede6f66341..680d55955bb 100644 --- a/src/librustc_metadata/Cargo.toml +++ b/src/librustc_metadata/Cargo.toml @@ -13,7 +13,6 @@ flate = { path = "../libflate" } log = { path = "../liblog" } rustc = { path = "../librustc" } rustc_back = { path = "../librustc_back" } -rustc_bitflags = { path = "../librustc_bitflags" } rustc_const_math = { path = "../librustc_const_math" } rustc_data_structures = { path = "../librustc_data_structures" } rustc_errors = { path = "../librustc_errors" } diff --git a/src/librustc_metadata/astencode.rs b/src/librustc_metadata/astencode.rs index f48c31fc2f9..c9dbedacbc1 100644 --- a/src/librustc_metadata/astencode.rs +++ b/src/librustc_metadata/astencode.rs @@ -13,43 +13,92 @@ use rustc::hir::map as ast_map; use rustc::hir::intravisit::{Visitor, IdRangeComputingVisitor, IdRange}; use cstore::CrateMetadata; -use decoder::DecodeContext; use encoder::EncodeContext; +use schema::*; use rustc::middle::cstore::{InlinedItem, InlinedItemRef}; -use rustc::hir::def; +use rustc::middle::const_qualif::ConstQualif; +use rustc::hir::def::{self, Def}; use rustc::hir::def_id::DefId; -use rustc::ty::TyCtxt; +use rustc::ty::{self, TyCtxt, Ty}; use syntax::ast; -use rbml; -use rustc_serialize::{Decodable, Encodable}; +use rustc_serialize::Encodable; -// ______________________________________________________________________ -// Top-level methods. +#[derive(RustcEncodable, RustcDecodable)] +pub struct Ast<'tcx> { + id_range: IdRange, + item: Lazy, + side_tables: LazySeq<(ast::NodeId, TableEntry<'tcx>)> +} -pub fn encode_inlined_item(ecx: &mut EncodeContext, ii: InlinedItemRef) { - ecx.tag(::common::item_tag::ast, |ecx| { - let mut visitor = IdRangeComputingVisitor::new(); +#[derive(RustcEncodable, RustcDecodable)] +enum TableEntry<'tcx> { + Def(Def), + NodeType(Ty<'tcx>), + ItemSubsts(ty::ItemSubsts<'tcx>), + Adjustment(ty::adjustment::AutoAdjustment<'tcx>), + ConstQualif(ConstQualif) +} + +impl<'a, 'tcx> EncodeContext<'a, 'tcx> { + pub fn encode_inlined_item(&mut self, ii: InlinedItemRef) -> Lazy> { + let mut id_visitor = IdRangeComputingVisitor::new(); match ii { - InlinedItemRef::Item(_, i) => visitor.visit_item(i), - InlinedItemRef::TraitItem(_, ti) => visitor.visit_trait_item(ti), - InlinedItemRef::ImplItem(_, ii) => visitor.visit_impl_item(ii) + InlinedItemRef::Item(_, i) => id_visitor.visit_item(i), + InlinedItemRef::TraitItem(_, ti) => id_visitor.visit_trait_item(ti), + InlinedItemRef::ImplItem(_, ii) => id_visitor.visit_impl_item(ii) } - visitor.result().encode(ecx).unwrap(); - ii.encode(ecx).unwrap(); + let ii_pos = self.position(); + ii.encode(self).unwrap(); - let mut visitor = SideTableEncodingIdVisitor { - ecx: ecx + let tables_pos = self.position(); + let tables_count = { + let mut visitor = SideTableEncodingIdVisitor { + ecx: self, + count: 0 + }; + match ii { + InlinedItemRef::Item(_, i) => visitor.visit_item(i), + InlinedItemRef::TraitItem(_, ti) => visitor.visit_trait_item(ti), + InlinedItemRef::ImplItem(_, ii) => visitor.visit_impl_item(ii) + } + visitor.count }; - match ii { - InlinedItemRef::Item(_, i) => visitor.visit_item(i), - InlinedItemRef::TraitItem(_, ti) => visitor.visit_trait_item(ti), - InlinedItemRef::ImplItem(_, ii) => visitor.visit_impl_item(ii) - } - }); + + self.lazy(&Ast { + id_range: id_visitor.result(), + item: Lazy::with_position(ii_pos), + side_tables: LazySeq::with_position_and_length(tables_pos, tables_count) + }) + } +} + +struct 
SideTableEncodingIdVisitor<'a, 'b:'a, 'tcx:'b> { + ecx: &'a mut EncodeContext<'b, 'tcx>, + count: usize +} + +impl<'a, 'b, 'tcx, 'v> Visitor<'v> for SideTableEncodingIdVisitor<'a, 'b, 'tcx> { + fn visit_id(&mut self, id: ast::NodeId) { + debug!("Encoding side tables for id {}", id); + + let tcx = self.ecx.tcx; + let mut encode = |entry: Option| { + if let Some(entry) = entry { + (id, entry).encode(self.ecx).unwrap(); + self.count += 1; + } + }; + + encode(tcx.expect_def_or_none(id).map(TableEntry::Def)); + encode(tcx.node_types().get(&id).cloned().map(TableEntry::NodeType)); + encode(tcx.tables.borrow().item_substs.get(&id).cloned().map(TableEntry::ItemSubsts)); + encode(tcx.tables.borrow().adjustments.get(&id).cloned().map(TableEntry::Adjustment)); + encode(tcx.const_qualif_map.borrow().get(&id).cloned().map(TableEntry::ConstQualif)); + } } /// Decodes an item from its AST in the cdata's metadata and adds it to the @@ -58,17 +107,19 @@ pub fn decode_inlined_item<'a, 'tcx>(cdata: &CrateMetadata, tcx: TyCtxt<'a, 'tcx, 'tcx>, parent_def_path: ast_map::DefPath, parent_did: DefId, - ast_doc: rbml::Doc, + ast: Ast<'tcx>, orig_did: DefId) -> &'tcx InlinedItem { debug!("> Decoding inlined fn: {:?}", tcx.item_path_str(orig_did)); - let dcx = &mut DecodeContext::new(ast_doc, Some(cdata)).typed(tcx); - dcx.from_id_range = IdRange::decode(dcx).unwrap(); - let cnt = dcx.from_id_range.max.as_usize() - dcx.from_id_range.min.as_usize(); - dcx.to_id_range.min = tcx.sess.reserve_node_ids(cnt); - dcx.to_id_range.max = ast::NodeId::new(dcx.to_id_range.min.as_usize() + cnt); - let ii = InlinedItem::decode(dcx).unwrap(); + let cnt = ast.id_range.max.as_usize() - ast.id_range.min.as_usize(); + let start = tcx.sess.reserve_node_ids(cnt); + let id_ranges = [ast.id_range, IdRange { + min: start, + max: ast::NodeId::new(start.as_usize() + cnt) + }]; + + let ii = ast.item.decode((cdata, tcx, id_ranges)); let ii = ast_map::map_decoded_item(&tcx.map, parent_def_path, parent_did, @@ -83,107 +134,25 @@ pub fn decode_inlined_item<'a, 'tcx>(cdata: &CrateMetadata, let inlined_did = tcx.map.local_def_id(item_node_id); tcx.register_item_type(inlined_did, tcx.lookup_item_type(orig_did)); - decode_side_tables(dcx, ast_doc); - - ii -} - -// ______________________________________________________________________ -// Encoding and decoding the side tables - -impl<'a, 'tcx> EncodeContext<'a, 'tcx> { - fn tag(&mut self, - tag_id: usize, - f: F) where - F: FnOnce(&mut Self), - { - self.start_tag(tag_id).unwrap(); - f(self); - self.end_tag().unwrap(); - } - - fn entry(&mut self, table: Table, id: ast::NodeId) { - table.encode(self).unwrap(); - id.encode(self).unwrap(); - } -} - -struct SideTableEncodingIdVisitor<'a, 'b:'a, 'tcx:'b> { - ecx: &'a mut EncodeContext<'b, 'tcx>, -} - -impl<'a, 'b, 'tcx, 'v> Visitor<'v> for SideTableEncodingIdVisitor<'a, 'b, 'tcx> { - fn visit_id(&mut self, id: ast::NodeId) { - encode_side_tables_for_id(self.ecx, id) - } -} - -#[derive(RustcEncodable, RustcDecodable, Debug)] -enum Table { - Def, - NodeType, - ItemSubsts, - Adjustment, - ConstQualif -} - -fn encode_side_tables_for_id(ecx: &mut EncodeContext, id: ast::NodeId) { - let tcx = ecx.tcx; - - debug!("Encoding side tables for id {}", id); - - if let Some(def) = tcx.expect_def_or_none(id) { - ecx.entry(Table::Def, id); - def.encode(ecx).unwrap(); - } - - if let Some(ty) = tcx.node_types().get(&id) { - ecx.entry(Table::NodeType, id); - ty.encode(ecx).unwrap(); - } - - if let Some(item_substs) = tcx.tables.borrow().item_substs.get(&id) { - 
ecx.entry(Table::ItemSubsts, id); - item_substs.substs.encode(ecx).unwrap(); - } - - if let Some(adjustment) = tcx.tables.borrow().adjustments.get(&id) { - ecx.entry(Table::Adjustment, id); - adjustment.encode(ecx).unwrap(); - } - - if let Some(qualif) = tcx.const_qualif_map.borrow().get(&id) { - ecx.entry(Table::ConstQualif, id); - qualif.encode(ecx).unwrap(); - } -} - -fn decode_side_tables(dcx: &mut DecodeContext, ast_doc: rbml::Doc) { - while dcx.opaque.position() < ast_doc.end { - let table = Decodable::decode(dcx).unwrap(); - let id = Decodable::decode(dcx).unwrap(); - debug!("decode_side_tables: entry for id={}, table={:?}", id, table); - match table { - Table::Def => { - let def = Decodable::decode(dcx).unwrap(); - dcx.tcx().def_map.borrow_mut().insert(id, def::PathResolution::new(def)); + for (id, entry) in ast.side_tables.decode((cdata, tcx, id_ranges)) { + match entry { + TableEntry::Def(def) => { + tcx.def_map.borrow_mut().insert(id, def::PathResolution::new(def)); } - Table::NodeType => { - let ty = Decodable::decode(dcx).unwrap(); - dcx.tcx().node_type_insert(id, ty); + TableEntry::NodeType(ty) => { + tcx.node_type_insert(id, ty); } - Table::ItemSubsts => { - let item_substs = Decodable::decode(dcx).unwrap(); - dcx.tcx().tables.borrow_mut().item_substs.insert(id, item_substs); + TableEntry::ItemSubsts(item_substs) => { + tcx.tables.borrow_mut().item_substs.insert(id, item_substs); } - Table::Adjustment => { - let adj = Decodable::decode(dcx).unwrap(); - dcx.tcx().tables.borrow_mut().adjustments.insert(id, adj); + TableEntry::Adjustment(adj) => { + tcx.tables.borrow_mut().adjustments.insert(id, adj); } - Table::ConstQualif => { - let qualif = Decodable::decode(dcx).unwrap(); - dcx.tcx().const_qualif_map.borrow_mut().insert(id, qualif); + TableEntry::ConstQualif(qualif) => { + tcx.const_qualif_map.borrow_mut().insert(id, qualif); } } } + + ii } diff --git a/src/librustc_metadata/common.rs b/src/librustc_metadata/common.rs deleted file mode 100644 index f30551cadd9..00000000000 --- a/src/librustc_metadata/common.rs +++ /dev/null @@ -1,206 +0,0 @@ -// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -#![allow(non_camel_case_types, non_upper_case_globals)] - -use rustc::hir; -use rustc::hir::def; -use rustc::hir::def_id::{DefIndex, DefId}; -use rustc::ty; -use rustc::session::config::PanicStrategy; - -#[derive(Clone, Copy, Debug, PartialEq, RustcEncodable, RustcDecodable)] -pub enum Family { - ImmStatic, - MutStatic, - ForeignImmStatic, - ForeignMutStatic, - Fn, - ForeignFn, - Method, - AssociatedType, - Type, - Mod, - ForeignMod, - Enum, - Variant, - Impl, - DefaultImpl, - Trait, - Struct, - Union, - Field, - Const, - AssociatedConst, - Closure -} - -// NB: increment this if you change the format of metadata such that -// rustc_version can't be found. 
-pub const metadata_encoding_version : &'static [u8] = &[b'r', b'u', b's', b't', 0, 0, 0, 2]; - -// GAP 0x7c -// GAP 0x108 -pub fn rustc_version() -> String { - format!( - "rustc {}", - option_env!("CFG_VERSION").unwrap_or("unknown version") - ) -} - -#[derive(RustcEncodable, RustcDecodable)] -pub struct CrateInfo { - pub name: String, - pub triple: String, - pub hash: hir::svh::Svh, - pub disambiguator: String, - pub panic_strategy: PanicStrategy, - pub plugin_registrar_fn: Option, - pub macro_derive_registrar: Option -} - -pub mod root_tag { - pub const rustc_version: usize = 0x10f; - - pub const crate_info: usize = 0x104; - - pub const index: usize = 0x110; - pub const crate_deps: usize = 0x102; - pub const dylib_dependency_formats: usize = 0x106; - pub const native_libraries: usize = 0x10a; - pub const lang_items: usize = 0x107; - pub const lang_items_missing: usize = 0x76; - pub const impls: usize = 0x109; - pub const reachable_ids: usize = 0x10c; - pub const macro_defs: usize = 0x10e; - pub const codemap: usize = 0xa1; -} - -#[derive(RustcEncodable, RustcDecodable)] -pub struct ModData { - pub reexports: Vec -} - -#[derive(RustcEncodable, RustcDecodable)] -pub struct VariantData { - pub kind: ty::VariantKind, - pub disr: u64, - - /// If this is a struct's only variant, this - /// is the index of the "struct ctor" item. - pub struct_ctor: Option -} - -#[derive(RustcEncodable, RustcDecodable)] -pub struct TraitData { - pub unsafety: hir::Unsafety, - pub paren_sugar: bool, - pub has_default_impl: bool -} - -#[derive(RustcEncodable, RustcDecodable)] -pub struct ImplData { - pub polarity: hir::ImplPolarity, - pub parent_impl: Option, - pub coerce_unsized_kind: Option, -} - -#[derive(RustcEncodable, RustcDecodable)] -pub struct TraitAssociatedData { - pub has_default: bool -} - -#[derive(RustcEncodable, RustcDecodable)] -pub struct ImplAssociatedData { - pub defaultness: hir::Defaultness, - pub constness: hir::Constness -} - -#[derive(RustcEncodable, RustcDecodable)] -pub struct FnData { - pub constness: hir::Constness -} - -#[derive(RustcEncodable, RustcDecodable)] -pub struct ClosureData { - pub kind: ty::ClosureKind -} - -#[derive(RustcEncodable, RustcDecodable)] -pub enum EntryData { - Other, - Mod(ModData), - Variant(VariantData), - Trait(TraitData), - Impl(ImplData), - TraitAssociated(TraitAssociatedData), - ImplAssociated(ImplAssociatedData), - Fn(FnData), - Closure(ClosureData) -} - -#[derive(RustcEncodable, RustcDecodable)] -pub struct TraitTypedData<'tcx> { - pub trait_ref: ty::TraitRef<'tcx> -} - -#[derive(RustcEncodable, RustcDecodable)] -pub struct ImplTypedData<'tcx> { - pub trait_ref: Option> -} - -#[derive(RustcEncodable, RustcDecodable)] -pub struct MethodTypedData<'tcx> { - pub explicit_self: ty::ExplicitSelfCategory<'tcx> -} - -#[derive(RustcEncodable, RustcDecodable)] -pub struct ClosureTypedData<'tcx> { - pub ty: ty::ClosureTy<'tcx> -} - -#[derive(RustcEncodable, RustcDecodable)] -pub enum EntryTypedData<'tcx> { - Other, - Trait(TraitTypedData<'tcx>), - Impl(ImplTypedData<'tcx>), - Method(MethodTypedData<'tcx>), - Closure(ClosureTypedData<'tcx>) -} - -pub mod item_tag { - pub const def_key: usize = 0x2c; - pub const family: usize = 0x24; - pub const attributes: usize = 0x101; - pub const visibility: usize = 0x78; - pub const children: usize = 0x7b; - pub const stability: usize = 0x88; - pub const deprecation: usize = 0xa7; - - pub const ty: usize = 0x25; - pub const inherent_impls: usize = 0x79; - pub const variances: usize = 0x43; - pub const generics: usize = 0x8f; - 
pub const predicates: usize = 0x95; - pub const super_predicates: usize = 0xa3; - - pub const ast: usize = 0x50; - pub const mir: usize = 0x52; - - pub const data: usize = 0x3c; - pub const typed_data: usize = 0x3d; - - pub const fn_arg_names: usize = 0x85; -} - -/// The shorthand encoding uses an enum's variant index `usize` -/// and is offset by this value so it never matches a real variant. -/// This offset is also chosen so that the first byte is never < 0x80. -pub const SHORTHAND_OFFSET: usize = 0x80; diff --git a/src/librustc_metadata/creader.rs b/src/librustc_metadata/creader.rs index 77a583f7379..95be77c24f4 100644 --- a/src/librustc_metadata/creader.rs +++ b/src/librustc_metadata/creader.rs @@ -8,13 +8,11 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![allow(non_camel_case_types)] - //! Validates all used crates and extern libraries and loads their metadata -use common::CrateInfo; use cstore::{self, CStore, CrateSource, MetadataBlob}; use loader::{self, CratePaths}; +use schema::CrateRoot; use rustc::hir::def_id::{CrateNum, DefIndex}; use rustc::hir::svh::Svh; @@ -34,12 +32,11 @@ use std::fs; use syntax::ast; use syntax::abi::Abi; -use syntax::codemap; use syntax::parse; use syntax::attr; use syntax::parse::token::InternedString; use syntax::visit; -use syntax_pos::{self, Span, mk_sp, Pos}; +use syntax_pos::{self, Span, mk_sp}; use log; struct LocalCrateReader<'a> { @@ -148,7 +145,7 @@ impl Deref for PMDSource { fn deref(&self) -> &MetadataBlob { match *self { - PMDSource::Registered(ref cmd) => &cmd.data, + PMDSource::Registered(ref cmd) => &cmd.blob, PMDSource::Owned(ref lib) => &lib.metadata } } @@ -261,28 +258,28 @@ impl<'a> CrateReader<'a> { fn verify_no_symbol_conflicts(&self, span: Span, - info: &CrateInfo) { + root: &CrateRoot) { // Check for (potential) conflicts with the local crate - if self.local_crate_name == info.name && - self.sess.local_crate_disambiguator() == &info.disambiguator[..] { + if self.local_crate_name == root.name && + self.sess.local_crate_disambiguator() == &root.disambiguator[..] { span_fatal!(self.sess, span, E0519, "the current crate is indistinguishable from one of its \ dependencies: it has the same crate-name `{}` and was \ compiled with the same `-C metadata` arguments. This \ will result in symbol conflicts between the two.", - info.name) + root.name) } // Check for conflicts with any crate loaded so far self.cstore.iter_crate_data(|_, other| { - if other.name() == info.name && // same crate-name - other.disambiguator() == info.disambiguator && // same crate-disambiguator - other.hash() != info.hash { // but different SVH + if other.name() == root.name && // same crate-name + other.disambiguator() == root.disambiguator && // same crate-disambiguator + other.hash() != root.hash { // but different SVH span_fatal!(self.sess, span, E0523, "found two different crates with name `{}` that are \ not distinguished by differing `-C metadata`. 
This \ will result in symbol conflicts between the two.", - info.name) + root.name) } }); } @@ -297,8 +294,8 @@ impl<'a> CrateReader<'a> { -> (CrateNum, Rc, cstore::CrateSource) { info!("register crate `extern crate {} as {}`", name, ident); - let crate_info = lib.metadata.get_crate_info(); - self.verify_no_symbol_conflicts(span, &crate_info); + let crate_root = lib.metadata.get_root(); + self.verify_no_symbol_conflicts(span, &crate_root); // Claim this crate number and cache it let cnum = self.next_crate_num; @@ -319,9 +316,9 @@ impl<'a> CrateReader<'a> { let loader::Library { dylib, rlib, metadata } = lib; - let cnum_map = self.resolve_crate_deps(root, &metadata, cnum, span); + let cnum_map = self.resolve_crate_deps(root, &crate_root, &metadata, cnum, span); - if crate_info.macro_derive_registrar.is_some() { + if crate_root.macro_derive_registrar.is_some() { self.sess.span_err(span, "crates of the `rustc-macro` crate type \ cannot be linked at runtime"); } @@ -329,10 +326,9 @@ impl<'a> CrateReader<'a> { let cmeta = Rc::new(cstore::CrateMetadata { name: name.to_string(), extern_crate: Cell::new(None), - info: crate_info, - index: metadata.load_index(), - key_map: metadata.load_key_map(), - data: metadata, + key_map: metadata.load_key_map(crate_root.index), + root: crate_root, + blob: metadata, cnum_map: RefCell::new(cnum_map), cnum: cnum, codemap_import_info: RefCell::new(vec![]), @@ -416,11 +412,11 @@ impl<'a> CrateReader<'a> { // Note that we only do this for target triple crates, though, as we // don't want to match a host crate against an equivalent target one // already loaded. - let crate_info = library.metadata.get_crate_info(); + let root = library.metadata.get_root(); if loader.triple == self.sess.opts.target_triple { let mut result = LoadResult::Loaded(library); self.cstore.iter_crate_data(|cnum, data| { - if data.name() == crate_info.name && crate_info.hash == data.hash() { + if data.name() == root.name && root.hash == data.hash() { assert!(loader.hash.is_none()); info!("load success, going to previous cnum: {}", cnum); result = LoadResult::Previous(cnum); @@ -467,6 +463,7 @@ impl<'a> CrateReader<'a> { // Go through the crate metadata and load any crates that it references fn resolve_crate_deps(&mut self, root: &Option, + crate_root: &CrateRoot, metadata: &MetadataBlob, krate: CrateNum, span: Span) @@ -474,16 +471,17 @@ impl<'a> CrateReader<'a> { debug!("resolving deps of external crate"); // The map from crate numbers in the crate we're resolving to local crate // numbers - let map: FnvHashMap<_, _> = metadata.get_crate_deps().iter().map(|dep| { + let deps = crate_root.crate_deps.decode(metadata); + let map: FnvHashMap<_, _> = deps.enumerate().map(|(crate_num, dep)| { debug!("resolving dep crate {} hash: `{}`", dep.name, dep.hash); let (local_cnum, ..) 
= self.resolve_crate(root, - &dep.name, - &dep.name, + &dep.name.as_str(), + &dep.name.as_str(), Some(&dep.hash), span, PathKind::Dependency, dep.explicitly_linked); - (dep.cnum, local_cnum) + (CrateNum::new(crate_num + 1), local_cnum) }).collect(); let max_cnum = map.values().cloned().max().map(|cnum| cnum.as_u32()).unwrap_or(0); @@ -568,21 +566,21 @@ impl<'a> CrateReader<'a> { let ci = self.extract_crate_info(item).unwrap(); let ekrate = self.read_extension_crate(item.span, &ci); - let crate_info = ekrate.metadata.get_crate_info(); + let root = ekrate.metadata.get_root(); let source_name = format!("<{} macros>", item.ident); let mut ret = Macros { macro_rules: Vec::new(), custom_derive_registrar: None, - svh: crate_info.hash, + svh: root.hash, dylib: None, }; - ekrate.metadata.each_exported_macro(|name, attrs, span, body| { + for def in root.macro_defs.decode(&*ekrate.metadata) { // NB: Don't use parse::parse_tts_from_source_str because it parses with // quote_depth > 0. let mut p = parse::new_parser_from_source_str(&self.sess.parse_sess, self.local_crate_config.clone(), source_name.clone(), - body); + def.body); let lo = p.span.lo; let body = match p.parse_all_token_trees() { Ok(body) => body, @@ -595,13 +593,13 @@ impl<'a> CrateReader<'a> { let local_span = mk_sp(lo, p.last_span.hi); // Mark the attrs as used - for attr in &attrs { + for attr in &def.attrs { attr::mark_used(attr); } ret.macro_rules.push(ast::MacroDef { - ident: ast::Ident::with_empty_ctxt(name), - attrs: attrs, + ident: ast::Ident::with_empty_ctxt(def.name), + attrs: def.attrs, id: ast::DUMMY_NODE_ID, span: local_span, imported_from: Some(item.ident), @@ -613,11 +611,10 @@ impl<'a> CrateReader<'a> { body: body, }); self.sess.imported_macro_spans.borrow_mut() - .insert(local_span, (name.as_str().to_string(), span)); - true - }); + .insert(local_span, (def.name.as_str().to_string(), def.span)); + } - match crate_info.macro_derive_registrar { + match root.macro_derive_registrar { Some(id) => ret.custom_derive_registrar = Some(id), // If this crate is not a rustc-macro crate then we might be able to @@ -671,10 +668,10 @@ impl<'a> CrateReader<'a> { span_fatal!(self.sess, span, E0456, "{}", &message[..]); } - let crate_info = ekrate.metadata.get_crate_info(); - match (ekrate.dylib.as_ref(), crate_info.plugin_registrar_fn) { + let root = ekrate.metadata.get_root(); + match (ekrate.dylib.as_ref(), root.plugin_registrar_fn) { (Some(dylib), Some(reg)) => { - Some((dylib.to_path_buf(), crate_info.hash, reg)) + Some((dylib.to_path_buf(), root.hash, reg)) } (None, Some(_)) => { span_err!(self.sess, span, E0457, @@ -1086,133 +1083,3 @@ pub fn read_local_crates(sess: & Session, dep_graph: &DepGraph) { LocalCrateReader::new(sess, cstore, defs, krate, local_crate_name).read_crates(dep_graph) } - -/// Imports the codemap from an external crate into the codemap of the crate -/// currently being compiled (the "local crate"). -/// -/// The import algorithm works analogous to how AST items are inlined from an -/// external crate's metadata: -/// For every FileMap in the external codemap an 'inline' copy is created in the -/// local codemap. The correspondence relation between external and local -/// FileMaps is recorded in the `ImportedFileMap` objects returned from this -/// function. When an item from an external crate is later inlined into this -/// crate, this correspondence information is used to translate the span -/// information of the inlined item so that it refers the correct positions in -/// the local codemap (see `>`). 
-/// -/// The import algorithm in the function below will reuse FileMaps already -/// existing in the local codemap. For example, even if the FileMap of some -/// source file of libstd gets imported many times, there will only ever be -/// one FileMap object for the corresponding file in the local codemap. -/// -/// Note that imported FileMaps do not actually contain the source code of the -/// file they represent, just information about length, line breaks, and -/// multibyte characters. This information is enough to generate valid debuginfo -/// for items inlined from other crates. -pub fn import_codemap(local_codemap: &codemap::CodeMap, - metadata: &MetadataBlob) - -> Vec { - let external_codemap = metadata.get_imported_filemaps(); - - let imported_filemaps = external_codemap.into_iter().map(|filemap_to_import| { - // Try to find an existing FileMap that can be reused for the filemap to - // be imported. A FileMap is reusable if it is exactly the same, just - // positioned at a different offset within the codemap. - let reusable_filemap = { - local_codemap.files - .borrow() - .iter() - .find(|fm| are_equal_modulo_startpos(&fm, &filemap_to_import)) - .map(|rc| rc.clone()) - }; - - match reusable_filemap { - Some(fm) => { - cstore::ImportedFileMap { - original_start_pos: filemap_to_import.start_pos, - original_end_pos: filemap_to_import.end_pos, - translated_filemap: fm - } - } - None => { - // We can't reuse an existing FileMap, so allocate a new one - // containing the information we need. - let syntax_pos::FileMap { - name, - abs_path, - start_pos, - end_pos, - lines, - multibyte_chars, - .. - } = filemap_to_import; - - let source_length = (end_pos - start_pos).to_usize(); - - // Translate line-start positions and multibyte character - // position into frame of reference local to file. - // `CodeMap::new_imported_filemap()` will then translate those - // coordinates to their new global frame of reference when the - // offset of the FileMap is known. 
- let mut lines = lines.into_inner(); - for pos in &mut lines { - *pos = *pos - start_pos; - } - let mut multibyte_chars = multibyte_chars.into_inner(); - for mbc in &mut multibyte_chars { - mbc.pos = mbc.pos - start_pos; - } - - let local_version = local_codemap.new_imported_filemap(name, - abs_path, - source_length, - lines, - multibyte_chars); - cstore::ImportedFileMap { - original_start_pos: start_pos, - original_end_pos: end_pos, - translated_filemap: local_version - } - } - } - }).collect(); - - return imported_filemaps; - - fn are_equal_modulo_startpos(fm1: &syntax_pos::FileMap, - fm2: &syntax_pos::FileMap) - -> bool { - if fm1.name != fm2.name { - return false; - } - - let lines1 = fm1.lines.borrow(); - let lines2 = fm2.lines.borrow(); - - if lines1.len() != lines2.len() { - return false; - } - - for (&line1, &line2) in lines1.iter().zip(lines2.iter()) { - if (line1 - fm1.start_pos) != (line2 - fm2.start_pos) { - return false; - } - } - - let multibytes1 = fm1.multibyte_chars.borrow(); - let multibytes2 = fm2.multibyte_chars.borrow(); - - if multibytes1.len() != multibytes2.len() { - return false; - } - - for (mb1, mb2) in multibytes1.iter().zip(multibytes2.iter()) { - if (mb1.bytes != mb2.bytes) || - ((mb1.pos - fm1.start_pos) != (mb2.pos - fm2.start_pos)) { - return false; - } - } - - true - } -} diff --git a/src/librustc_metadata/csearch.rs b/src/librustc_metadata/csearch.rs index 8569dbcd507..f508c5dc9cf 100644 --- a/src/librustc_metadata/csearch.rs +++ b/src/librustc_metadata/csearch.rs @@ -9,9 +9,9 @@ // except according to those terms. use cstore; -use common; use encoder; use loader; +use schema; use rustc::middle::cstore::{InlinedItem, CrateStore, CrateSource, ExternCrate}; use rustc::middle::cstore::{NativeLibraryKind, LinkMeta, LinkagePreference}; @@ -97,7 +97,7 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore { } fn item_generics<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) - -> &'tcx ty::Generics<'tcx> + -> ty::Generics<'tcx> { self.dep_graph.read(DepNode::MetaData(def)); self.get_crate_data(def.krate).get_generics(def.index, tcx) @@ -121,7 +121,7 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore { self.get_crate_data(def.krate).get_adt_def(def.index, tcx) } - fn fn_arg_names(&self, did: DefId) -> Vec + fn fn_arg_names(&self, did: DefId) -> Vec { self.dep_graph.read(DepNode::MetaData(did)); self.get_crate_data(did.krate).get_fn_arg_names(did.index) @@ -140,10 +140,7 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore { fn inherent_implementations_for_type(&self, def_id: DefId) -> Vec { self.dep_graph.read(DepNode::MetaData(def_id)); - let mut result = vec![]; - self.get_crate_data(def_id.krate) - .each_inherent_implementation_for_type(def_id.index, |iid| result.push(iid)); - result + self.get_crate_data(def_id.krate).get_inherent_implementations_for_type(def_id.index) } fn implementations_of_trait(&self, filter: Option) -> Vec @@ -153,9 +150,7 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore { } let mut result = vec![]; self.iter_crate_data(|_, cdata| { - cdata.each_implementation_for_trait(filter, &mut |iid| { - result.push(iid) - }) + cdata.get_implementations_for_trait(filter, &mut result) }); result } @@ -308,7 +303,7 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore { fn plugin_registrar_fn(&self, cnum: CrateNum) -> Option { - self.get_crate_data(cnum).info.plugin_registrar_fn.map(|index| DefId { + self.get_crate_data(cnum).root.plugin_registrar_fn.map(|index| DefId { krate: cnum, index: index }) @@ -552,7 +547,7 @@ impl<'tcx> CrateStore<'tcx> for 
cstore::CStore { fn metadata_encoding_version(&self) -> &[u8] { - common::metadata_encoding_version + schema::METADATA_HEADER } /// Returns a map from a sufficiently visible external item (i.e. an external item that is diff --git a/src/librustc_metadata/cstore.rs b/src/librustc_metadata/cstore.rs index 4151f98b3da..0a1ff70a049 100644 --- a/src/librustc_metadata/cstore.rs +++ b/src/librustc_metadata/cstore.rs @@ -11,12 +11,8 @@ // The crate store - a central repo for information collected about external // crates and libraries -pub use self::MetadataBlob::*; - -use common; -use creader; -use index; use loader; +use schema; use rustc::dep_graph::DepGraph; use rustc::hir::def_id::{CRATE_DEF_INDEX, CrateNum, DefIndex, DefId}; @@ -27,13 +23,12 @@ use rustc::session::config::PanicStrategy; use rustc_data_structures::indexed_vec::IndexVec; use rustc::util::nodemap::{FnvHashMap, NodeMap, NodeSet, DefIdMap, FnvHashSet}; -use std::cell::{RefCell, Ref, Cell}; +use std::cell::{RefCell, Cell}; use std::rc::Rc; use std::path::PathBuf; use flate::Bytes; use syntax::ast::{self, Ident}; use syntax::attr; -use syntax::codemap; use syntax_pos; pub use rustc::middle::cstore::{NativeLibraryKind, LinkagePreference}; @@ -47,12 +42,12 @@ pub use rustc::middle::cstore::{CrateSource, LinkMeta}; pub type CrateNumMap = IndexVec; pub enum MetadataBlob { - MetadataVec(Bytes), - MetadataArchive(loader::ArchiveMetadata), + Inflated(Bytes), + Archive(loader::ArchiveMetadata), } /// Holds information about a syntax_pos::FileMap imported from another crate. -/// See creader::import_codemap() for more information. +/// See `imported_filemaps()` for more information. pub struct ImportedFileMap { /// This FileMap's byte-offset within the codemap of its original crate pub original_start_pos: syntax_pos::BytePos, @@ -70,13 +65,12 @@ pub struct CrateMetadata { /// (e.g., by the allocator) pub extern_crate: Cell>, - pub data: MetadataBlob, + pub blob: MetadataBlob, pub cnum_map: RefCell, pub cnum: CrateNum, pub codemap_import_info: RefCell>, - pub info: common::CrateInfo, - pub index: index::Index, + pub root: schema::CrateRoot, /// For each public item in this crate, we encode a key. When the /// crate is loaded, we read all the keys and put them in this @@ -294,23 +288,9 @@ impl CStore { } impl CrateMetadata { - pub fn name(&self) -> &str { &self.info.name } - pub fn hash(&self) -> Svh { self.info.hash } - pub fn disambiguator(&self) -> &str { &self.info.disambiguator } - pub fn imported_filemaps<'a>(&'a self, codemap: &codemap::CodeMap) - -> Ref<'a, Vec> { - let filemaps = self.codemap_import_info.borrow(); - if filemaps.is_empty() { - drop(filemaps); - let filemaps = creader::import_codemap(codemap, &self.data); - - // This shouldn't borrow twice, but there is no way to downgrade RefMut to Ref. 
- *self.codemap_import_info.borrow_mut() = filemaps; - self.codemap_import_info.borrow() - } else { - filemaps - } - } + pub fn name(&self) -> &str { &self.root.name } + pub fn hash(&self) -> Svh { self.root.hash } + pub fn disambiguator(&self) -> &str { &self.root.disambiguator } pub fn is_staged_api(&self) -> bool { self.get_item_attrs(CRATE_DEF_INDEX).iter().any(|attr| { @@ -349,33 +329,6 @@ impl CrateMetadata { } pub fn panic_strategy(&self) -> PanicStrategy { - self.info.panic_strategy.clone() - } -} - -impl MetadataBlob { - pub fn as_slice_raw<'a>(&'a self) -> &'a [u8] { - match *self { - MetadataVec(ref vec) => &vec[..], - MetadataArchive(ref ar) => ar.as_slice(), - } - } - - pub fn as_slice<'a>(&'a self) -> &'a [u8] { - let slice = self.as_slice_raw(); - let len_offset = 4 + common::metadata_encoding_version.len(); - if slice.len() < len_offset+4 { - &[] // corrupt metadata - } else { - let len = (((slice[len_offset+0] as u32) << 24) | - ((slice[len_offset+1] as u32) << 16) | - ((slice[len_offset+2] as u32) << 8) | - ((slice[len_offset+3] as u32) << 0)) as usize; - if len <= slice.len() - 4 - len_offset { - &slice[len_offset + 4..len_offset + len + 4] - } else { - &[] // corrupt or old metadata - } - } + self.root.panic_strategy.clone() } } diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index a34daba7000..7a4d3ed657a 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -11,11 +11,10 @@ // Decoding metadata from a single crate's metadata use astencode::decode_inlined_item; -use cstore::{CrateMetadata, MetadataBlob, NativeLibraryKind}; -use common::*; -use index; +use cstore::{self, CrateMetadata, MetadataBlob, NativeLibraryKind}; +use index::Index; +use schema::*; -use rustc::hir::svh::Svh; use rustc::hir::map as hir_map; use rustc::hir::map::{DefKey, DefPathData}; use rustc::util::nodemap::FnvHashMap; @@ -26,7 +25,6 @@ use rustc::middle::cstore::{InlinedItem, LinkagePreference}; use rustc::hir::def::{self, Def}; use rustc::hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE}; use rustc::middle::lang_items; -use rustc::ty::{ImplContainer, TraitContainer}; use rustc::ty::{self, Ty, TyCtxt}; use rustc::ty::subst::Substs; @@ -34,46 +32,102 @@ use rustc_const_math::ConstInt; use rustc::mir::repr::Mir; +use std::cell::Ref; use std::io; use std::mem; use std::rc::Rc; use std::str; use std::u32; -use rbml; use rustc_serialize::{Decodable, Decoder, SpecializedDecoder, opaque}; use syntax::attr; use syntax::ast::{self, NodeId}; +use syntax::codemap; use syntax::parse::token; -use syntax_pos::{self, Span, BytePos}; +use syntax_pos::{self, Span, BytePos, Pos}; pub struct DecodeContext<'a, 'tcx: 'a> { - pub opaque: opaque::Decoder<'a>, + opaque: opaque::Decoder<'a>, tcx: Option>, cdata: Option<&'a CrateMetadata>, - pub from_id_range: IdRange, - pub to_id_range: IdRange, + from_id_range: IdRange, + to_id_range: IdRange, // Cache the last used filemap for translating spans as an optimization. last_filemap_index: usize, } -impl<'a, 'tcx> DecodeContext<'a, 'tcx> { - pub fn new(doc: rbml::Doc<'a>, cdata: Option<&'a CrateMetadata>) - -> DecodeContext<'a, 'tcx> { +/// Abstract over the various ways one can create metadata decoders. 
+pub trait Metadata<'a, 'tcx>: Copy { + fn raw_bytes(self) -> &'a [u8]; + fn cdata(self) -> Option<&'a CrateMetadata> { None } + fn tcx(self) -> Option> { None } + + fn decoder(self, pos: usize) -> DecodeContext<'a, 'tcx> { let id_range = IdRange { min: NodeId::from_u32(u32::MIN), max: NodeId::from_u32(u32::MAX) }; DecodeContext { - opaque: opaque::Decoder::new(doc.data, doc.start), - cdata: cdata, - tcx: None, + opaque: opaque::Decoder::new(self.raw_bytes(), pos), + cdata: self.cdata(), + tcx: self.tcx(), from_id_range: id_range, to_id_range: id_range, last_filemap_index: 0 } } +} +impl<'a, 'tcx> Metadata<'a, 'tcx> for &'a MetadataBlob { + fn raw_bytes(self) -> &'a [u8] { + match *self { + MetadataBlob::Inflated(ref vec) => &vec[..], + MetadataBlob::Archive(ref ar) => ar.as_slice(), + } + } +} + +impl<'a, 'tcx> Metadata<'a, 'tcx> for &'a CrateMetadata { + fn raw_bytes(self) -> &'a [u8] { self.blob.raw_bytes() } + fn cdata(self) -> Option<&'a CrateMetadata> { Some(self) } +} + +impl<'a, 'tcx> Metadata<'a, 'tcx> for (&'a CrateMetadata, TyCtxt<'a, 'tcx, 'tcx>) { + fn raw_bytes(self) -> &'a [u8] { self.0.raw_bytes() } + fn cdata(self) -> Option<&'a CrateMetadata> { Some(self.0) } + fn tcx(self) -> Option> { Some(self.1) } +} + +// HACK(eddyb) Only used by astencode to customize the from/to IdRange's. +impl<'a, 'tcx> Metadata<'a, 'tcx> for (&'a CrateMetadata, TyCtxt<'a, 'tcx, 'tcx>, [IdRange; 2]) { + fn raw_bytes(self) -> &'a [u8] { self.0.raw_bytes() } + fn cdata(self) -> Option<&'a CrateMetadata> { Some(self.0) } + fn tcx(self) -> Option> { Some(self.1) } + + fn decoder(self, pos: usize) -> DecodeContext<'a, 'tcx> { + let mut dcx = (self.0, self.1).decoder(pos); + dcx.from_id_range = self.2[0]; + dcx.to_id_range = self.2[1]; + dcx + } +} + +impl<'a, 'tcx: 'a, T: Decodable> Lazy { + pub fn decode>(self, meta: M) -> T { + T::decode(&mut meta.decoder(self.position)).unwrap() + } +} + +impl<'a, 'tcx: 'a, T: Decodable> LazySeq { + pub fn decode>(self, meta: M) -> impl Iterator + 'a { + let mut dcx = meta.decoder(self.position); + (0..self.len).map(move |_| { + T::decode(&mut dcx).unwrap() + }) + } +} + +impl<'a, 'tcx> DecodeContext<'a, 'tcx> { pub fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> { self.tcx.expect("missing TyCtxt in DecodeContext") } @@ -82,22 +136,12 @@ impl<'a, 'tcx> DecodeContext<'a, 'tcx> { self.cdata.expect("missing CrateMetadata in DecodeContext") } - pub fn decode(&mut self) -> T { - T::decode(self).unwrap() - } - - pub fn typed(mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Self { - self.tcx = Some(tcx); - self - } - - /// Iterate over the indices of a sequence. - /// This will work solely because of `serialize::opaque`'s - /// simple encoding of `n: usize` followed by `n` elements. 
- pub fn seq(mut self) -> impl Iterator { - (0..self.read_usize().unwrap()).map(move |_| { - self.decode() - }) + fn with_position R, R>(&mut self, pos: usize, f: F) -> R { + let new = opaque::Decoder::new(self.opaque.data, pos); + let old = mem::replace(&mut self.opaque, new); + let r = f(self); + self.opaque = old; + r } } @@ -139,6 +183,19 @@ impl<'doc, 'tcx> Decoder for DecodeContext<'doc, 'tcx> { } } +impl<'a, 'tcx, T> SpecializedDecoder> for DecodeContext<'a, 'tcx> { + fn specialized_decode(&mut self) -> Result, Self::Error> { + Ok(Lazy::with_position(self.read_usize()?)) + } +} + +impl<'a, 'tcx, T> SpecializedDecoder> for DecodeContext<'a, 'tcx> { + fn specialized_decode(&mut self) -> Result, Self::Error> { + let len = self.read_usize()?; + Ok(LazySeq::with_position_and_length(self.read_usize()?, len)) + } +} + impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { fn specialized_decode(&mut self) -> Result { let id = u32::decode(self)?; @@ -252,15 +309,33 @@ impl<'a, 'tcx> SpecializedDecoder> for DecodeContext<'a, 'tcx> { return Ok(ty); } - let new = opaque::Decoder::new(self.opaque.data, key.pos); - let old = mem::replace(&mut self.opaque, new); - let ty = Ty::decode(self)?; - self.opaque = old; + let ty = self.with_position(key.pos, Ty::decode)?; tcx.rcache.borrow_mut().insert(key, ty); - return Ok(ty); + Ok(ty) + } else { + Ok(tcx.mk_ty(ty::TypeVariants::decode(self)?)) } + } +} - Ok(tcx.mk_ty(ty::TypeVariants::decode(self)?)) + +impl<'a, 'tcx> SpecializedDecoder> for DecodeContext<'a, 'tcx> { + fn specialized_decode(&mut self) -> Result, Self::Error> { + Ok(ty::GenericPredicates { + parent: Decodable::decode(self)?, + predicates: (0..self.read_usize()?).map(|_| { + // Handle shorthands first, if we have an usize > 0x80. + if self.opaque.data[self.opaque.position()] & 0x80 != 0 { + let pos = self.read_usize()?; + assert!(pos >= SHORTHAND_OFFSET); + let pos = pos - SHORTHAND_OFFSET; + + self.with_position(pos, ty::Predicate::decode) + } else { + ty::Predicate::decode(self) + } + }).collect()? + }) } } @@ -295,185 +370,110 @@ impl<'a, 'tcx> SpecializedDecoder> for DecodeContext<'a, 'tcx> } } -#[derive(Clone)] -pub struct CrateDep { - pub cnum: CrateNum, - pub name: String, - pub hash: Svh, - pub explicitly_linked: bool, -} - impl<'a, 'tcx> MetadataBlob { - fn root(&self) -> rbml::Doc { - rbml::Doc::new(self.as_slice()) + pub fn is_compatible(&self) -> bool { + self.raw_bytes().starts_with(METADATA_HEADER) } - fn child_at(&'a self, pos: usize, tag: usize) -> DecodeContext<'a, 'tcx> { - DecodeContext::new(rbml::Doc::at(self.as_slice(), pos).child(tag), None) + pub fn get_root(&self) -> CrateRoot { + let slice = self.raw_bytes(); + let offset = METADATA_HEADER.len(); + let pos = (((slice[offset + 0] as u32) << 24) | + ((slice[offset + 1] as u32) << 16) | + ((slice[offset + 2] as u32) << 8) | + ((slice[offset + 3] as u32) << 0)) as usize; + Lazy::with_position(pos).decode(self) } - fn get(&'a self, tag: usize) -> DecodeContext<'a, 'tcx> { - DecodeContext::new(self.root().child(tag), None) - } - - pub fn load_index(&self) -> index::Index { - index::Index::from_rbml(self.root().child(root_tag::index)) - } - - pub fn crate_rustc_version(&self) -> Option { - self.root().maybe_child(root_tag::rustc_version).map(|s| { - str::from_utf8(&s.data[s.start..s.end]).unwrap().to_string() - }) - } - - // Go through each item in the metadata and create a map from that - // item's def-key to the item's DefIndex. 
- pub fn load_key_map(&self) -> FnvHashMap { - self.load_index().iter_enumerated(self.as_slice()).map(|(index, pos)| { - (self.child_at(pos as usize, item_tag::def_key).decode(), index) + /// Go through each item in the metadata and create a map from that + /// item's def-key to the item's DefIndex. + pub fn load_key_map(&self, index: LazySeq) -> FnvHashMap { + index.iter_enumerated(self.raw_bytes()).map(|(index, item)| { + (item.decode(self).def_key.decode(self), index) }).collect() } - pub fn get_crate_deps(&self) -> Vec { - let dcx = self.get(root_tag::crate_deps); - - dcx.seq().enumerate().map(|(crate_num, (name, hash, explicitly_linked))| { - CrateDep { - cnum: CrateNum::new(crate_num + 1), - name: name, - hash: hash, - explicitly_linked: explicitly_linked, - } - }).collect() - } - - pub fn get_crate_info(&self) -> CrateInfo { - self.get(root_tag::crate_info).decode() - } - pub fn list_crate_metadata(&self, out: &mut io::Write) -> io::Result<()> { write!(out, "=External Dependencies=\n")?; - for dep in &self.get_crate_deps() { - write!(out, "{} {}-{}\n", dep.cnum, dep.name, dep.hash)?; + let root = self.get_root(); + for (i, dep) in root.crate_deps.decode(self).enumerate() { + write!(out, "{} {}-{}\n", i + 1, dep.name, dep.hash)?; } write!(out, "\n")?; Ok(()) } - - pub fn get_imported_filemaps(&self) -> Vec { - self.get(root_tag::codemap).decode() - } - - pub fn each_exported_macro(&self, mut f: F) where - F: FnMut(ast::Name, Vec, Span, String) -> bool, - { - for (name, attrs, span, body) in self.get(root_tag::macro_defs).seq() { - if !f(name, attrs, span, body) { - break; - } - } - } } -impl Family { +impl<'tcx> EntryKind<'tcx> { fn to_def(&self, did: DefId) -> Option { Some(match *self { - Family::Const => Def::Const(did), - Family::AssociatedConst => Def::AssociatedConst(did), - Family::ImmStatic | Family::ForeignImmStatic => Def::Static(did, false), - Family::MutStatic | Family::ForeignMutStatic => Def::Static(did, true), - Family::Struct => Def::Struct(did), - Family::Union => Def::Union(did), - Family::Fn | Family::ForeignFn => Def::Fn(did), - Family::Method => Def::Method(did), - Family::Type => Def::TyAlias(did), - Family::AssociatedType => Def::AssociatedTy(did), - Family::Mod => Def::Mod(did), - Family::Variant => Def::Variant(did), - Family::Trait => Def::Trait(did), - Family::Enum => Def::Enum(did), + EntryKind::Const => Def::Const(did), + EntryKind::AssociatedConst(_) => Def::AssociatedConst(did), + EntryKind::ImmStatic | + EntryKind::ForeignImmStatic => Def::Static(did, false), + EntryKind::MutStatic | + EntryKind::ForeignMutStatic => Def::Static(did, true), + EntryKind::Struct(_) => Def::Struct(did), + EntryKind::Union(_) => Def::Union(did), + EntryKind::Fn(_) | + EntryKind::ForeignFn(_) => Def::Fn(did), + EntryKind::Method(_) => Def::Method(did), + EntryKind::Type => Def::TyAlias(did), + EntryKind::AssociatedType(_) => Def::AssociatedTy(did), + EntryKind::Mod(_) => Def::Mod(did), + EntryKind::Variant(_) => Def::Variant(did), + EntryKind::Trait(_) => Def::Trait(did), + EntryKind::Enum => Def::Enum(did), - Family::ForeignMod | - Family::Impl | - Family::DefaultImpl | - Family::Field | - Family::Closure => { + EntryKind::ForeignMod | + EntryKind::Impl(_) | + EntryKind::DefaultImpl(_) | + EntryKind::Field | + EntryKind::Closure (_) => { return None } }) } } -impl<'a, 'tcx> CrateMetadata { - fn maybe_get(&'a self, item: rbml::Doc<'a>, tag: usize) - -> Option> { - item.maybe_child(tag).map(|child| { - DecodeContext::new(child, Some(self)) - }) - } - - fn get(&'a self, item: 
rbml::Doc<'a>, tag: usize) -> DecodeContext<'a, 'tcx> { - match self.maybe_get(item, tag) { - Some(dcx) => dcx, - None => bug!("failed to find child with tag {}", tag) +fn def_key_name(def_key: &hir_map::DefKey) -> Option { + match def_key.disambiguated_data.data { + DefPathData::TypeNs(ref name) | + DefPathData::ValueNs(ref name) | + DefPathData::Module(ref name) | + DefPathData::MacroDef(ref name) | + DefPathData::TypeParam(ref name) | + DefPathData::LifetimeDef(ref name) | + DefPathData::EnumVariant(ref name) | + DefPathData::Field(ref name) | + DefPathData::Binding(ref name) => { + Some(token::intern(name)) } + + DefPathData::InlinedRoot(_) => bug!("unexpected DefPathData"), + + DefPathData::CrateRoot | + DefPathData::Misc | + DefPathData::Impl | + DefPathData::ClosureExpr | + DefPathData::StructCtor | + DefPathData::Initializer | + DefPathData::ImplTrait => None + } +} + +impl<'a, 'tcx> CrateMetadata { + fn maybe_entry(&self, item_id: DefIndex) -> Option>> { + self.root.index.lookup(self.blob.raw_bytes(), item_id) } - fn item_family(&self, item: rbml::Doc) -> Family { - self.get(item, item_tag::family).decode() - } - - fn item_visibility(&self, item: rbml::Doc) -> ty::Visibility { - self.get(item, item_tag::visibility).decode() - } - - fn item_def_key(&self, item: rbml::Doc) -> hir_map::DefKey { - self.get(item, item_tag::def_key).decode() - } - - fn item_name(&self, item: rbml::Doc) -> ast::Name { - self.maybe_item_name(item).expect("no item in item_name") - } - - fn maybe_item_name(&self, item: rbml::Doc) -> Option { - let name = match self.item_def_key(item).disambiguated_data.data { - DefPathData::TypeNs(name) | - DefPathData::ValueNs(name) | - DefPathData::Module(name) | - DefPathData::MacroDef(name) | - DefPathData::TypeParam(name) | - DefPathData::LifetimeDef(name) | - DefPathData::EnumVariant(name) | - DefPathData::Field(name) | - DefPathData::Binding(name) => Some(name), - - DefPathData::InlinedRoot(_) => bug!("unexpected DefPathData"), - - DefPathData::CrateRoot | - DefPathData::Misc | - DefPathData::Impl | - DefPathData::ClosureExpr | - DefPathData::StructCtor | - DefPathData::Initializer | - DefPathData::ImplTrait => None - }; - - name.map(|s| token::intern(&s)) - } - - fn maybe_entry(&self, item_id: DefIndex) -> Option { - self.index.lookup_item(self.data.as_slice(), item_id).map(|pos| { - rbml::Doc::at(self.data.as_slice(), pos as usize) - }) - } - - fn entry(&self, item_id: DefIndex) -> rbml::Doc { + fn entry(&self, item_id: DefIndex) -> Entry<'tcx> { match self.maybe_entry(item_id) { None => bug!("entry: id not found: {:?} in crate {:?} with number {}", item_id, self.name, self.cnum), - Some(d) => d + Some(d) => d.decode(self) } } @@ -484,62 +484,42 @@ impl<'a, 'tcx> CrateMetadata { } } - fn entry_data(&self, doc: rbml::Doc) -> EntryData { - self.get(doc, item_tag::data).decode() - } - - fn entry_typed_data(&self, doc: rbml::Doc, tcx: TyCtxt<'a, 'tcx, 'tcx>) - -> EntryTypedData<'tcx> { - self.get(doc, item_tag::typed_data).typed(tcx).decode() - } - - fn item_parent_item(&self, d: rbml::Doc) -> Option { - self.item_def_key(d).parent.map(|index| self.local_def_id(index)) - } - - fn doc_type(&self, doc: rbml::Doc, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Ty<'tcx> { - self.maybe_doc_type(doc, tcx).expect("missing item_tag::ty") - } - - fn maybe_doc_type(&self, doc: rbml::Doc, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option> { - self.maybe_get(doc, item_tag::ty).map(|dcx| dcx.typed(tcx).decode()) + fn item_name(&self, item: &Entry<'tcx>) -> ast::Name { + 
def_key_name(&item.def_key.decode(self)).expect("no name in item_name") } pub fn get_def(&self, index: DefIndex) -> Option { - self.item_family(self.entry(index)).to_def(self.local_def_id(index)) + self.entry(index).kind.to_def(self.local_def_id(index)) } pub fn get_trait_def(&self, item_id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> ty::TraitDef<'tcx> { - let item_doc = self.entry(item_id); - let generics = self.doc_generics(item_doc, tcx); - - let data = match self.entry_data(item_doc) { - EntryData::Trait(data) => data, - _ => bug!() - }; - let typed_data = match self.entry_typed_data(item_doc, tcx) { - EntryTypedData::Trait(data) => data, + let data = match self.entry(item_id).kind { + EntryKind::Trait(data) => data.decode(self), _ => bug!() }; - ty::TraitDef::new(data.unsafety, data.paren_sugar, generics, typed_data.trait_ref, - self.def_path(item_id).unwrap().deterministic_hash(tcx))) + ty::TraitDef::new(data.unsafety, data.paren_sugar, + tcx.lookup_generics(self.local_def_id(item_id)), + data.trait_ref.decode((self, tcx)), + self.def_path(item_id).unwrap().deterministic_hash(tcx)) } - fn get_variant(&self, item: rbml::Doc, index: DefIndex) - -> (ty::VariantDefData<'tcx, 'tcx>, Option) { - let data = match self.entry_data(item) { - EntryData::Variant(data) => data, + fn get_variant(&self, item: &Entry<'tcx>, index: DefIndex) + -> (ty::VariantDefData<'tcx, 'tcx>, Option) { + let data = match item.kind { + EntryKind::Variant(data) | + EntryKind::Struct(data) | + EntryKind::Union(data) => data.decode(self), _ => bug!() }; - let fields = self.get(item, item_tag::children).seq().map(|index| { + let fields = item.children.decode(self).map(|index| { let f = self.entry(index); ty::FieldDefData::new(self.local_def_id(index), - self.item_name(f), - self.item_visibility(f)) + self.item_name(&f), + f.visibility) }).collect(); (ty::VariantDefData { @@ -553,27 +533,25 @@ impl<'a, 'tcx> CrateMetadata { pub fn get_adt_def(&self, item_id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> ty::AdtDefMaster<'tcx> { - let doc = self.entry(item_id); + let item = self.entry(item_id); let did = self.local_def_id(item_id); let mut ctor_index = None; - let family = self.item_family(doc); - let variants = if family == Family::Enum { - self.get(doc, item_tag::children).seq().map(|index| { - let (variant, struct_ctor) = self.get_variant(self.entry(index), index); + let variants = if let EntryKind::Enum = item.kind { + item.children.decode(self).map(|index| { + let (variant, struct_ctor) = self.get_variant(&self.entry(index), index); assert_eq!(struct_ctor, None); variant }).collect() } else{ - let (variant, struct_ctor) = self.get_variant(doc, item_id); + let (variant, struct_ctor) = self.get_variant(&item, item_id); ctor_index = struct_ctor; vec![variant] }; - let kind = match family { - Family::Enum => ty::AdtKind::Enum, - Family::Struct => ty::AdtKind::Struct, - Family::Union => ty::AdtKind::Union, - _ => bug!("get_adt_def called on a non-ADT {:?} - {:?}", - family, did) + let kind = match item.kind { + EntryKind::Enum => ty::AdtKind::Enum, + EntryKind::Struct(_) => ty::AdtKind::Struct, + EntryKind::Union(_) => ty::AdtKind::Union, + _ => bug!("get_adt_def called on a non-ADT {:?}", did) }; let adt = tcx.intern_adt_def(did, kind, variants); @@ -599,42 +577,43 @@ impl<'a, 'tcx> CrateMetadata { pub fn get_predicates(&self, item_id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> ty::GenericPredicates<'tcx> { - self.doc_predicates(self.entry(item_id), tcx, item_tag::predicates) + 
self.entry(item_id).predicates.unwrap().decode((self, tcx)) } pub fn get_super_predicates(&self, item_id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> ty::GenericPredicates<'tcx> { - self.doc_predicates(self.entry(item_id), tcx, item_tag::super_predicates) + match self.entry(item_id).kind { + EntryKind::Trait(data) => { + data.decode(self).super_predicates.decode((self, tcx)) + } + _ => bug!() + } } pub fn get_generics(&self, item_id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>) - -> &'tcx ty::Generics<'tcx> { - self.doc_generics(self.entry(item_id), tcx) + -> ty::Generics<'tcx> { + self.entry(item_id).generics.unwrap().decode((self, tcx)) } pub fn get_type(&self, id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Ty<'tcx> { - self.doc_type(self.entry(id), tcx) + self.entry(id).ty.unwrap().decode((self, tcx)) } pub fn get_stability(&self, id: DefIndex) -> Option { - self.maybe_get(self.entry(id), item_tag::stability).map(|mut dcx| { - dcx.decode() - }) + self.entry(id).stability.map(|stab| stab.decode(self)) } pub fn get_deprecation(&self, id: DefIndex) -> Option { - self.maybe_get(self.entry(id), item_tag::deprecation).map(|mut dcx| { - dcx.decode() - }) + self.entry(id).deprecation.map(|depr| depr.decode(self)) } pub fn get_visibility(&self, id: DefIndex) -> ty::Visibility { - self.item_visibility(self.entry(id)) + self.entry(id).visibility } - fn get_impl_data(&self, id: DefIndex) -> ImplData { - match self.entry_data(self.entry(id)) { - EntryData::Impl(data) => data, + fn get_impl_data(&self, id: DefIndex) -> ImplData<'tcx> { + match self.entry(id).kind { + EntryKind::Impl(data) => data.decode(self), _ => bug!() } } @@ -656,15 +635,12 @@ impl<'a, 'tcx> CrateMetadata { id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option> { - match self.entry_typed_data(self.entry(id), tcx) { - EntryTypedData::Impl(data) => data.trait_ref, - _ => bug!() - } + self.get_impl_data(id).trait_ref.map(|tr| tr.decode((self, tcx))) } /// Iterates over the language items in the given crate. pub fn get_lang_items(&self) -> Vec<(DefIndex, usize)> { - self.get(self.data.root(), root_tag::lang_items).decode() + self.root.lang_items.decode(self).collect() } /// Iterates over each child of the given item. @@ -672,38 +648,34 @@ impl<'a, 'tcx> CrateMetadata { where F: FnMut(def::Export) { // Find the item. - let item_doc = match self.maybe_entry(id) { + let item = match self.maybe_entry(id) { None => return, - Some(item_doc) => item_doc, - }; - - let dcx = match self.maybe_get(item_doc, item_tag::children) { - Some(dcx) => dcx, - None => return + Some(item) => item.decode(self), }; // Iterate over all children. - for child_index in dcx.seq() { + for child_index in item.children.decode(self) { // Get the item. if let Some(child) = self.maybe_entry(child_index) { + let child = child.decode(self); // Hand off the item to the callback. - match self.item_family(child) { + match child.kind { // FIXME(eddyb) Don't encode these in children. 
- Family::ForeignMod => { - for child_index in self.get(child, item_tag::children).seq() { + EntryKind::ForeignMod => { + for child_index in child.children.decode(self) { callback(def::Export { def_id: self.local_def_id(child_index), - name: self.item_name(self.entry(child_index)) + name: self.item_name(&self.entry(child_index)) }); } continue; } - Family::Impl | Family::DefaultImpl => continue, + EntryKind::Impl(_) | EntryKind::DefaultImpl(_) => continue, _ => {} } - if let Some(name) = self.maybe_item_name(child) { + if let Some(name) = def_key_name(&child.def_key.decode(self)) { callback(def::Export { def_id: self.local_def_id(child_index), name: name @@ -712,17 +684,15 @@ impl<'a, 'tcx> CrateMetadata { } } - let reexports = match self.entry_data(item_doc) { - EntryData::Mod(data) => data.reexports, - _ => return - }; - for exp in reexports { - callback(exp); + if let EntryKind::Mod(data) = item.kind { + for exp in data.decode(self).reexports.decode(self) { + callback(exp); + } } } pub fn maybe_get_item_name(&self, id: DefIndex) -> Option { - self.maybe_item_name(self.entry(id)) + def_key_name(&self.entry(id).def_key.decode(self)) } pub fn maybe_get_item_ast(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, id: DefIndex) @@ -733,76 +703,46 @@ impl<'a, 'tcx> CrateMetadata { let parent_def_id = self.local_def_id(self.def_key(id).parent.unwrap()); let mut parent_def_path = self.def_path(id).unwrap(); parent_def_path.data.pop(); - item_doc.maybe_child(item_tag::ast).map(|ast_doc| { - decode_inlined_item(self, tcx, parent_def_path, parent_def_id, ast_doc, item_did) + item_doc.ast.map(|ast| { + let ast = ast.decode(self); + decode_inlined_item(self, tcx, parent_def_path, parent_def_id, ast, item_did) }) } pub fn is_item_mir_available(&self, id: DefIndex) -> bool { - if let Some(item_doc) = self.maybe_entry(id) { - return item_doc.maybe_child(item_tag::mir).is_some(); - } - - false + self.maybe_entry(id).and_then(|item| item.decode(self).mir).is_some() } pub fn maybe_get_item_mir(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, id: DefIndex) -> Option> { - self.maybe_get(self.entry(id), item_tag::mir).map(|dcx| { - dcx.typed(tcx).decode() - }) + self.entry(id).mir.map(|mir| mir.decode((self, tcx))) } pub fn get_impl_or_trait_item(&self, id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option> { - let item_doc = self.entry(id); - let family = self.item_family(item_doc); - - match family { - Family::AssociatedConst | - Family::Method | - Family::AssociatedType => {} - - _ => return None - } - - let def_id = self.local_def_id(id); - - let container_id = self.item_parent_item(item_doc).unwrap(); - let container = match self.item_family(self.entry(container_id.index)) { - Family::Trait => TraitContainer(container_id), - _ => ImplContainer(container_id), + let item = self.entry(id); + let parent_and_name = || { + let def_key = item.def_key.decode(self); + (self.local_def_id(def_key.parent.unwrap()), + def_key_name(&def_key).unwrap()) }; - let name = self.item_name(item_doc); - let vis = self.item_visibility(item_doc); - - let (defaultness, has_body) = match self.entry_data(item_doc) { - EntryData::TraitAssociated(data) => { - (hir::Defaultness::Default, data.has_default) - } - EntryData::ImplAssociated(data) => { - (data.defaultness, true) - } - _ => bug!() - }; - - Some(match family { - Family::AssociatedConst => { + Some(match item.kind { + EntryKind::AssociatedConst(container) => { + let (parent, name) = parent_and_name(); ty::ConstTraitItem(Rc::new(ty::AssociatedConst { name: name, - ty: self.doc_type(item_doc, tcx), - vis: 
vis, - defaultness: defaultness, - def_id: def_id, - container: container, - has_value: has_body, + ty: item.ty.unwrap().decode((self, tcx)), + vis: item.visibility, + defaultness: container.defaultness(), + def_id: self.local_def_id(id), + container: container.with_def_id(parent), + has_value: container.has_body(), })) } - Family::Method => { - let generics = self.doc_generics(item_doc, tcx); - let predicates = self.doc_predicates(item_doc, tcx, item_tag::predicates); - let ity = tcx.lookup_item_type(def_id).ty; + EntryKind::Method(data) => { + let (parent, name) = parent_and_name(); + let ity = item.ty.unwrap().decode((self, tcx)); let fty = match ity.sty { ty::TyFnDef(.., fty) => fty, _ => bug!( @@ -810,49 +750,46 @@ impl<'a, 'tcx> CrateMetadata { ity, name) }; - let explicit_self = match self.entry_typed_data(item_doc, tcx) { - EntryTypedData::Method(data) => data.explicit_self, - _ => bug!() - }; + let data = data.decode(self); ty::MethodTraitItem(Rc::new(ty::Method { name: name, - generics: generics, - predicates: predicates, + generics: tcx.lookup_generics(self.local_def_id(id)), + predicates: item.predicates.unwrap().decode((self, tcx)), fty: fty, - explicit_self: explicit_self, - vis: vis, - defaultness: defaultness, - has_body: has_body, - def_id: def_id, - container: container, + explicit_self: data.explicit_self.decode((self, tcx)), + vis: item.visibility, + defaultness: data.container.defaultness(), + has_body: data.container.has_body(), + def_id: self.local_def_id(id), + container: data.container.with_def_id(parent), })) } - Family::AssociatedType => { + EntryKind::AssociatedType(container) => { + let (parent, name) = parent_and_name(); ty::TypeTraitItem(Rc::new(ty::AssociatedType { name: name, - ty: self.maybe_doc_type(item_doc, tcx), - vis: vis, - defaultness: defaultness, - def_id: def_id, - container: container, + ty: item.ty.map(|ty| ty.decode((self, tcx))), + vis: item.visibility, + defaultness: container.defaultness(), + def_id: self.local_def_id(id), + container: container.with_def_id(parent), })) } - _ => bug!() + _ => return None }) } pub fn get_item_variances(&self, id: DefIndex) -> Vec { - let item_doc = self.entry(id); - self.get(item_doc, item_tag::variances).decode() + self.entry(id).variances.decode(self).collect() } pub fn get_struct_ctor_def_id(&self, node_id: DefIndex) -> Option { - let data = match self.entry_data(self.entry(node_id)) { - EntryData::Variant(data) => data, - _ => bug!() - }; - - data.struct_ctor.map(|index| self.local_def_id(index)) + match self.entry(node_id).kind { + EntryKind::Struct(data) => { + data.decode(self).struct_ctor.map(|index| self.local_def_id(index)) + } + _ => None + } } pub fn get_item_attrs(&self, node_id: DefIndex) -> Vec { @@ -860,30 +797,25 @@ impl<'a, 'tcx> CrateMetadata { // we assume that someone passing in a tuple struct ctor is actually wanting to // look at the definition let mut item = self.entry(node_id); - let def_key = self.item_def_key(item); + let def_key = item.def_key.decode(self); if def_key.disambiguated_data.data == DefPathData::StructCtor { item = self.entry(def_key.parent.unwrap()); } - self.get_attributes(item) + self.get_attributes(&item) } pub fn get_struct_field_names(&self, id: DefIndex) -> Vec { - self.get(self.entry(id), item_tag::children).seq().map(|index| { - self.item_name(self.entry(index)) + self.entry(id).children.decode(self).map(|index| { + self.item_name(&self.entry(index)) }).collect() } - fn get_attributes(&self, md: rbml::Doc) -> Vec { - self.maybe_get(md, 
item_tag::attributes).map_or(vec![], |mut dcx| { - let mut attrs = dcx.decode::>(); - + fn get_attributes(&self, item: &Entry<'tcx>) -> Vec { + item.attributes.decode(self).map(|mut attr| { // Need new unique IDs: old thread-local IDs won't map to new threads. - for attr in attrs.iter_mut() { - attr.node.id = attr::mk_attr_id(); - } - - attrs - }) + attr.node.id = attr::mk_attr_id(); + attr + }).collect() } // Translate a DefId from the current compilation environment to a DefId @@ -898,106 +830,97 @@ impl<'a, 'tcx> CrateMetadata { None } - pub fn each_inherent_implementation_for_type(&self, id: DefIndex, mut callback: F) - where F: FnMut(DefId), - { - for impl_def_id in self.get(self.entry(id), item_tag::inherent_impls).seq() { - callback(impl_def_id); - } + pub fn get_inherent_implementations_for_type(&self, id: DefIndex) -> Vec { + self.entry(id).inherent_impls.decode(self).map(|index| { + self.local_def_id(index) + }).collect() } - pub fn each_implementation_for_trait(&self, - filter: Option, - mut callback: F) where - F: FnMut(DefId), - { + pub fn get_implementations_for_trait(&self, filter: Option, result: &mut Vec) { // Do a reverse lookup beforehand to avoid touching the crate_num // hash map in the loop below. let filter = match filter.map(|def_id| self.reverse_translate_def_id(def_id)) { - Some(Some(def_id)) => Some(def_id), + Some(Some(def_id)) => Some((def_id.krate.as_u32(), def_id.index)), Some(None) => return, None => None }; // FIXME(eddyb) Make this O(1) instead of O(n). - for trait_doc in self.data.root().children_of(root_tag::impls) { - let mut dcx = DecodeContext::new(trait_doc, Some(self)); - - let (krate, index) = dcx.decode(); - if let Some(local_did) = filter { - if (local_did.krate.as_u32(), local_did.index) != (krate, index) { - continue; - } + for trait_impls in self.root.impls.decode(self) { + if filter.is_some() && filter != Some(trait_impls.trait_id) { + continue; } - for impl_def_id in dcx.seq() { - callback(impl_def_id); + result.extend(trait_impls.impls.decode(self).map(|index| { + self.local_def_id(index) + })); + + if filter.is_some() { + break; } } } pub fn get_trait_of_item(&self, id: DefIndex) -> Option { - let item_doc = self.entry(id); - let parent_item_id = match self.item_parent_item(item_doc) { - None => return None, - Some(item_id) => item_id, - }; - match self.item_family(self.entry(parent_item_id.index)) { - Family::Trait => Some(parent_item_id), - _ => None - } + self.entry(id).def_key.decode(self).parent.and_then(|parent_index| { + match self.entry(parent_index).kind { + EntryKind::Trait(_) => Some(self.local_def_id(parent_index)), + _ => None + } + }) } pub fn get_native_libraries(&self) -> Vec<(NativeLibraryKind, String)> { - self.get(self.data.root(), root_tag::native_libraries).decode() + self.root.native_libraries.decode(self).collect() } pub fn get_dylib_dependency_formats(&self) -> Vec<(CrateNum, LinkagePreference)> { - let dcx = self.get(self.data.root(), root_tag::dylib_dependency_formats); - - dcx.seq::>().enumerate().flat_map(|(i, link)| { + self.root.dylib_dependency_formats.decode(self).enumerate().flat_map(|(i, link)| { let cnum = CrateNum::new(i + 1); link.map(|link| (self.cnum_map.borrow()[cnum], link)) }).collect() } pub fn get_missing_lang_items(&self) -> Vec { - self.get(self.data.root(), root_tag::lang_items_missing).decode() + self.root.lang_items_missing.decode(self).collect() } - pub fn get_fn_arg_names(&self, id: DefIndex) -> Vec { - self.maybe_get(self.entry(id), item_tag::fn_arg_names) - .map_or(vec![], |mut dcx| 
dcx.decode()) + pub fn get_fn_arg_names(&self, id: DefIndex) -> Vec { + let arg_names = match self.entry(id).kind { + EntryKind::Fn(data) | + EntryKind::ForeignFn(data) => data.decode(self).arg_names, + EntryKind::Method(data) => data.decode(self).fn_data.arg_names, + _ => LazySeq::empty() + }; + arg_names.decode(self).collect() } pub fn get_reachable_ids(&self) -> Vec { - let dcx = self.get(self.data.root(), root_tag::reachable_ids); - - dcx.seq().map(|index| self.local_def_id(index)).collect() + self.root.reachable_ids.decode(self).map(|index| self.local_def_id(index)).collect() } pub fn is_const_fn(&self, id: DefIndex) -> bool { - let constness = match self.entry_data(self.entry(id)) { - EntryData::ImplAssociated(data) => data.constness, - EntryData::Fn(data) => data.constness, + let constness = match self.entry(id).kind { + EntryKind::Method(data) => data.decode(self).fn_data.constness, + EntryKind::Fn(data) => data.decode(self).constness, _ => hir::Constness::NotConst }; constness == hir::Constness::Const } pub fn is_extern_item(&self, id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> bool { - let item_doc = match self.maybe_entry(id) { - Some(doc) => doc, + let item = match self.maybe_entry(id) { + Some(item) => item.decode(self), None => return false, }; - let applicable = match self.item_family(item_doc) { - Family::ImmStatic | - Family::MutStatic | - Family::ForeignImmStatic | - Family::ForeignMutStatic => true, + let applicable = match item.kind { + EntryKind::ImmStatic | + EntryKind::MutStatic | + EntryKind::ForeignImmStatic | + EntryKind::ForeignMutStatic => true, - Family::Fn | Family::ForeignFn => { + EntryKind::Fn(_) | EntryKind::ForeignFn(_) => { self.get_generics(id, tcx).types.is_empty() } @@ -1006,83 +929,53 @@ impl<'a, 'tcx> CrateMetadata { if applicable { attr::contains_extern_indicator(tcx.sess.diagnostic(), - &self.get_attributes(item_doc)) + &self.get_attributes(&item)) } else { false } } pub fn is_foreign_item(&self, id: DefIndex) -> bool { - match self.item_family(self.entry(id)) { - Family::ForeignImmStatic | - Family::ForeignMutStatic | - Family::ForeignFn => true, + match self.entry(id).kind { + EntryKind::ForeignImmStatic | + EntryKind::ForeignMutStatic | + EntryKind::ForeignFn(_) => true, _ => false } } - fn doc_generics(&self, base_doc: rbml::Doc, tcx: TyCtxt<'a, 'tcx, 'tcx>) - -> &'tcx ty::Generics<'tcx> { - let generics = self.get(base_doc, item_tag::generics).typed(tcx).decode(); - tcx.alloc_generics(generics) - } - - fn doc_predicates(&self, base_doc: rbml::Doc, tcx: TyCtxt<'a, 'tcx, 'tcx>, tag: usize) - -> ty::GenericPredicates<'tcx> { - let mut dcx = self.get(base_doc, tag).typed(tcx); - - ty::GenericPredicates { - parent: dcx.decode(), - predicates: (0..dcx.decode::()).map(|_| { - // Handle shorthands first, if we have an usize > 0x80. 
- if dcx.opaque.data[dcx.opaque.position()] & 0x80 != 0 { - let pos = dcx.decode::(); - assert!(pos >= SHORTHAND_OFFSET); - let pos = pos - SHORTHAND_OFFSET; - - let data = self.data.as_slice(); - let doc = rbml::Doc { - data: data, - start: pos, - end: data.len(), - }; - DecodeContext::new(doc, Some(self)).typed(tcx).decode() - } else { - dcx.decode() - } - }).collect() - } - } - pub fn is_defaulted_trait(&self, trait_id: DefIndex) -> bool { - match self.entry_data(self.entry(trait_id)) { - EntryData::Trait(data) => data.has_default_impl, + match self.entry(trait_id).kind { + EntryKind::Trait(data) => data.decode(self).has_default_impl, _ => bug!() } } pub fn is_default_impl(&self, impl_id: DefIndex) -> bool { - self.item_family(self.entry(impl_id)) == Family::DefaultImpl + match self.entry(impl_id).kind { + EntryKind::DefaultImpl(_) => true, + _ => false + } } pub fn closure_kind(&self, closure_id: DefIndex) -> ty::ClosureKind { - match self.entry_data(self.entry(closure_id)) { - EntryData::Closure(data) => data.kind, + match self.entry(closure_id).kind { + EntryKind::Closure(data) => data.decode(self).kind, _ => bug!() } } pub fn closure_ty(&self, closure_id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> ty::ClosureTy<'tcx> { - match self.entry_typed_data(self.entry(closure_id), tcx) { - EntryTypedData::Closure(data) => data.ty, + match self.entry(closure_id).kind { + EntryKind::Closure(data) => data.decode(self).ty.decode((self, tcx)), _ => bug!() } } pub fn def_key(&self, id: DefIndex) -> hir_map::DefKey { debug!("def_key: id={:?}", id); - self.item_def_key(self.entry(id)) + self.entry(id).def_key.decode(self) } // Returns the path leading to the thing with this `id`. Note that @@ -1096,4 +989,140 @@ impl<'a, 'tcx> CrateMetadata { None } } + + /// Imports the codemap from an external crate into the codemap of the crate + /// currently being compiled (the "local crate"). + /// + /// The import algorithm works analogously to how AST items are inlined from an + /// external crate's metadata: + /// For every FileMap in the external codemap, an 'inline' copy is created in the + /// local codemap. The correspondence relation between external and local + /// FileMaps is recorded in the `ImportedFileMap` objects returned from this + /// function. When an item from an external crate is later inlined into this + /// crate, this correspondence information is used to translate the span + /// information of the inlined item so that it refers to the correct positions in + /// the local codemap (see `>`). + /// + /// The import algorithm in the function below will reuse FileMaps already + /// existing in the local codemap. For example, even if the FileMap of some + /// source file of libstd gets imported many times, there will only ever be + /// one FileMap object for the corresponding file in the local codemap. + /// + /// Note that imported FileMaps do not actually contain the source code of the + /// file they represent, just information about length, line breaks, and + /// multibyte characters. This information is enough to generate valid debuginfo + /// for items inlined from other crates.
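To make the reuse-or-rebase step described in the comment above concrete, here is a minimal, self-contained sketch of the offset arithmetic that the imported_filemaps implementation following it performs. The FileMap and ImportedFileMap shapes below are simplified stand-ins for the real syntax_pos and cstore types (only a name, start/end positions and line starts are modelled), so treat this as an illustration of the idea rather than the actual API:

    // Simplified model: a FileMap covers the absolute range [start_pos, end_pos)
    // inside a CodeMap and records the absolute position of each line start.
    struct FileMap {
        name: String,
        start_pos: u32,
        end_pos: u32,
        lines: Vec<u32>,
    }

    // Correspondence between the external crate's FileMap and its local copy.
    struct ImportedFileMap {
        original_start_pos: u32,
        original_end_pos: u32,
        translated_start_pos: u32,
    }

    // Import one external FileMap at the end of the local CodeMap, rebasing the
    // line-start positions from the external frame of reference to the local one.
    fn import_filemap(local_next_pos: u32, external: &FileMap) -> (FileMap, ImportedFileMap) {
        let len = external.end_pos - external.start_pos;
        let local = FileMap {
            name: external.name.clone(),
            start_pos: local_next_pos,
            end_pos: local_next_pos + len,
            lines: external.lines.iter()
                .map(|&p| p - external.start_pos + local_next_pos)
                .collect(),
        };
        let info = ImportedFileMap {
            original_start_pos: external.start_pos,
            original_end_pos: external.end_pos,
            translated_start_pos: local.start_pos,
        };
        (local, info)
    }

    // Translate one position from the external frame to the local frame using
    // the recorded correspondence; positions outside the FileMap do not apply.
    fn translate_pos(info: &ImportedFileMap, pos: u32) -> Option<u32> {
        if pos >= info.original_start_pos && pos < info.original_end_pos {
            Some(pos - info.original_start_pos + info.translated_start_pos)
        } else {
            None
        }
    }

    fn main() {
        let external = FileMap {
            name: "lib.rs".to_string(),
            start_pos: 100,
            end_pos: 160,
            lines: vec![100, 120, 140],
        };
        let (local, info) = import_filemap(1000, &external);
        assert_eq!(local.lines, vec![1000, 1020, 1040]);
        assert_eq!(translate_pos(&info, 125), Some(1025));
    }

The same subtract-then-add step is what later allows a span recorded against the external crate's positions to be rebased onto the local codemap when an item is inlined.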
+ pub fn imported_filemaps(&'a self, local_codemap: &codemap::CodeMap) + -> Ref<'a, Vec> { + { + let filemaps = self.codemap_import_info.borrow(); + if !filemaps.is_empty() { + return filemaps; + } + } + + let external_codemap = self.root.codemap.decode(self); + + let imported_filemaps = external_codemap.map(|filemap_to_import| { + // Try to find an existing FileMap that can be reused for the filemap to + // be imported. A FileMap is reusable if it is exactly the same, just + // positioned at a different offset within the codemap. + let reusable_filemap = { + local_codemap.files + .borrow() + .iter() + .find(|fm| are_equal_modulo_startpos(&fm, &filemap_to_import)) + .map(|rc| rc.clone()) + }; + + match reusable_filemap { + Some(fm) => { + cstore::ImportedFileMap { + original_start_pos: filemap_to_import.start_pos, + original_end_pos: filemap_to_import.end_pos, + translated_filemap: fm + } + } + None => { + // We can't reuse an existing FileMap, so allocate a new one + // containing the information we need. + let syntax_pos::FileMap { + name, + abs_path, + start_pos, + end_pos, + lines, + multibyte_chars, + .. + } = filemap_to_import; + + let source_length = (end_pos - start_pos).to_usize(); + + // Translate line-start positions and multibyte character + // position into frame of reference local to file. + // `CodeMap::new_imported_filemap()` will then translate those + // coordinates to their new global frame of reference when the + // offset of the FileMap is known. + let mut lines = lines.into_inner(); + for pos in &mut lines { + *pos = *pos - start_pos; + } + let mut multibyte_chars = multibyte_chars.into_inner(); + for mbc in &mut multibyte_chars { + mbc.pos = mbc.pos - start_pos; + } + + let local_version = local_codemap.new_imported_filemap(name, + abs_path, + source_length, + lines, + multibyte_chars); + cstore::ImportedFileMap { + original_start_pos: start_pos, + original_end_pos: end_pos, + translated_filemap: local_version + } + } + } + }).collect(); + + // This shouldn't borrow twice, but there is no way to downgrade RefMut to Ref. + *self.codemap_import_info.borrow_mut() = imported_filemaps; + self.codemap_import_info.borrow() + } +} + +fn are_equal_modulo_startpos(fm1: &syntax_pos::FileMap, fm2: &syntax_pos::FileMap) -> bool { + if fm1.name != fm2.name { + return false; + } + + let lines1 = fm1.lines.borrow(); + let lines2 = fm2.lines.borrow(); + + if lines1.len() != lines2.len() { + return false; + } + + for (&line1, &line2) in lines1.iter().zip(lines2.iter()) { + if (line1 - fm1.start_pos) != (line2 - fm2.start_pos) { + return false; + } + } + + let multibytes1 = fm1.multibyte_chars.borrow(); + let multibytes2 = fm2.multibyte_chars.borrow(); + + if multibytes1.len() != multibytes2.len() { + return false; + } + + for (mb1, mb2) in multibytes1.iter().zip(multibytes2.iter()) { + if (mb1.bytes != mb2.bytes) || + ((mb1.pos - fm1.start_pos) != (mb2.pos - fm2.start_pos)) { + return false; + } + } + + true } diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs index e690e519879..185aa9e3b92 100644 --- a/src/librustc_metadata/encoder.rs +++ b/src/librustc_metadata/encoder.rs @@ -8,19 +8,17 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-// Metadata encoding - -#![allow(unused_must_use)] // everything is just a MemWriter, can't fail - -use astencode::encode_inlined_item; -use common::*; use cstore; -use index::IndexData; +use index::Index; +use schema::*; -use rustc::middle::cstore::{InlinedItemRef, LinkMeta, LinkagePreference}; +use rustc::middle::cstore::{InlinedItemRef, LinkMeta}; +use rustc::middle::cstore::{LinkagePreference, NativeLibraryKind}; use rustc::hir::def; use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefIndex, DefId}; use rustc::middle::dependency_format::Linkage; +use rustc::middle::lang_items; +use rustc::mir; use rustc::traits::specialization_graph; use rustc::ty::{self, Ty, TyCtxt}; @@ -33,13 +31,12 @@ use std::hash::Hash; use std::intrinsics; use std::io::prelude::*; use std::io::Cursor; -use std::ops::{Deref, DerefMut}; use std::rc::Rc; use std::u32; use syntax::ast::{self, CRATE_NODE_ID}; use syntax::attr; use syntax; -use rbml; +use syntax_pos; use rustc::hir::{self, PatKind}; use rustc::hir::intravisit::Visitor; @@ -48,7 +45,7 @@ use rustc::hir::intravisit; use super::index_builder::{FromId, IndexBuilder, Untracked}; pub struct EncodeContext<'a, 'tcx: 'a> { - rbml_w: rbml::writer::Encoder<'a>, + opaque: opaque::Encoder<'a>, pub tcx: TyCtxt<'a, 'tcx, 'tcx>, reexports: &'a def::ExportMap, link_meta: &'a LinkMeta, @@ -60,19 +57,6 @@ pub struct EncodeContext<'a, 'tcx: 'a> { predicate_shorthands: FnvHashMap, usize>, } -impl<'a, 'tcx> Deref for EncodeContext<'a, 'tcx> { - type Target = rbml::writer::Encoder<'a>; - fn deref(&self) -> &Self::Target { - &self.rbml_w - } -} - -impl<'a, 'tcx> DerefMut for EncodeContext<'a, 'tcx> { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.rbml_w - } -} - macro_rules! encoder_methods { ($($name:ident($ty:ty);)*) => { $(fn $name(&mut self, value: $ty) -> Result<(), Self::Error> { @@ -109,27 +93,60 @@ impl<'a, 'tcx> Encoder for EncodeContext<'a, 'tcx> { } } +impl<'a, 'tcx, T> SpecializedEncoder> for EncodeContext<'a, 'tcx> { + fn specialized_encode(&mut self, lazy: &Lazy) -> Result<(), Self::Error> { + self.emit_usize(lazy.position) + } +} + +impl<'a, 'tcx, T> SpecializedEncoder> for EncodeContext<'a, 'tcx> { + fn specialized_encode(&mut self, seq: &LazySeq) -> Result<(), Self::Error> { + self.emit_usize(seq.len)?; + self.emit_usize(seq.position) + } +} + impl<'a, 'tcx> SpecializedEncoder> for EncodeContext<'a, 'tcx> { fn specialized_encode(&mut self, ty: &Ty<'tcx>) -> Result<(), Self::Error> { self.encode_with_shorthand(ty, &ty.sty, |ecx| &mut ecx.type_shorthands) } } +impl<'a, 'tcx> SpecializedEncoder> for EncodeContext<'a, 'tcx> { + fn specialized_encode(&mut self, predicates: &ty::GenericPredicates<'tcx>) + -> Result<(), Self::Error> { + predicates.parent.encode(self)?; + predicates.predicates.len().encode(self)?; + for predicate in &predicates.predicates { + self.encode_with_shorthand(predicate, predicate, |ecx| &mut ecx.predicate_shorthands)? 
+ } + Ok(()) + } +} + impl<'a, 'tcx> EncodeContext<'a, 'tcx> { - fn seq(&mut self, iter: I, mut f: F) - where I: IntoIterator, - I::IntoIter: ExactSizeIterator, - F: FnMut(&mut Self, I::Item) -> T, - T: Encodable { - let iter = iter.into_iter(); - self.emit_seq(iter.len(), move |ecx| { - for (i, elem) in iter.enumerate() { - ecx.emit_seq_elt(i, |ecx| { - f(ecx, elem).encode(ecx) - })?; - } - Ok(()) - }).unwrap(); + pub fn position(&self) -> usize { + self.opaque.position() + } + + pub fn lazy(&mut self, value: &T) -> Lazy { + let pos = self.position(); + value.encode(self).unwrap(); + Lazy::with_position(pos) + } + + fn lazy_seq(&mut self, iter: I) -> LazySeq + where I: IntoIterator, T: Encodable { + let pos = self.position(); + let len = iter.into_iter().map(|value| value.encode(self).unwrap()).count(); + LazySeq::with_position_and_length(pos, len) + } + + fn lazy_seq_ref<'b, I, T>(&mut self, iter: I) -> LazySeq + where I: IntoIterator, T: 'b + Encodable { + let pos = self.position(); + let len = iter.into_iter().map(|value| value.encode(self).unwrap()).count(); + LazySeq::with_position_and_length(pos, len) } /// Encode the given value or a previously cached shorthand. @@ -143,9 +160,9 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { return self.emit_usize(shorthand); } - let start = self.mark_stable_position(); + let start = self.position(); variant.encode(self)?; - let len = self.mark_stable_position() - start; + let len = self.position() - start; // The shorthand encoding uses the same usize as the // discriminant, with an offset so they can't conflict. @@ -170,60 +187,19 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { /// For every DefId that we create a metadata item for, we include a /// serialized copy of its DefKey, which allows us to recreate a path. 
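A note on encode_with_shorthand above: the first time a type or predicate is written, its byte position is remembered; any later occurrence is emitted as that position plus SHORTHAND_OFFSET, so a back-reference can never collide with an inline discriminant (the removed doc_predicates decoder earlier in this diff checked the 0x80 bit for exactly this reason). Below is a rough, self-contained sketch of the scheme using strings and a fixed-width integer encoding; the real code uses variable-length integers and caches Ty and Predicate values, so this is only a model:

    use std::collections::HashMap;

    const SHORTHAND_OFFSET: usize = 0x80;

    struct Encoder {
        out: Vec<u8>,
        shorthands: HashMap<String, usize>,
    }

    impl Encoder {
        fn new() -> Encoder {
            Encoder { out: Vec::new(), shorthands: HashMap::new() }
        }

        fn emit_usize(&mut self, n: usize) {
            // Fixed-width for simplicity; rustc uses a variable-length encoding.
            self.out.extend_from_slice(&(n as u64).to_le_bytes());
        }

        fn encode_with_shorthand(&mut self, value: &str) {
            if let Some(&pos) = self.shorthands.get(value) {
                // Seen before: emit only its position, offset so the reader can
                // tell it apart from an inline discriminant.
                self.emit_usize(SHORTHAND_OFFSET + pos);
                return;
            }
            let start = self.out.len();
            self.emit_usize(0); // discriminant 0 = "inline string follows"
            self.emit_usize(value.len());
            self.out.extend_from_slice(value.as_bytes());
            self.shorthands.insert(value.to_string(), start);
        }
    }

    fn read_usize(buf: &[u8], pos: usize) -> usize {
        let mut b = [0u8; 8];
        b.copy_from_slice(&buf[pos..pos + 8]);
        u64::from_le_bytes(b) as usize
    }

    fn decode_at(buf: &[u8], pos: usize) -> (String, usize) {
        let tag = read_usize(buf, pos);
        if tag >= SHORTHAND_OFFSET {
            // Back-reference: decode the value at its original position instead.
            let (value, _) = decode_at(buf, tag - SHORTHAND_OFFSET);
            (value, pos + 8)
        } else {
            let len = read_usize(buf, pos + 8);
            let bytes = buf[pos + 16..pos + 16 + len].to_vec();
            (String::from_utf8(bytes).unwrap(), pos + 16 + len)
        }
    }

    fn main() {
        let mut enc = Encoder::new();
        enc.encode_with_shorthand("Sized");
        enc.encode_with_shorthand("Copy");
        enc.encode_with_shorthand("Sized"); // becomes a back-reference
        let buf = enc.out;
        let (a, next) = decode_at(&buf, 0);
        let (b, next) = decode_at(&buf, next);
        let (c, _) = decode_at(&buf, next);
        assert_eq!((a.as_str(), b.as_str(), c.as_str()), ("Sized", "Copy", "Sized"));
    }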
- fn encode_def_key(&mut self, def_id: DefId) { - self.start_tag(item_tag::def_key); - self.tcx.map.def_key(def_id).encode(self); - self.end_tag(); - } - - // Item info table encoding - fn encode_family(&mut self, f: Family) { - self.start_tag(item_tag::family); - f.encode(self).unwrap(); - self.end_tag(); - } - - fn encode_item_variances(&mut self, def_id: DefId) { - let v = self.tcx.item_variances(def_id); - self.start_tag(item_tag::variances); - v.encode(self); - self.end_tag(); - } - - fn encode_bounds_and_type_for_item(&mut self, def_id: DefId) { + fn encode_def_key(&mut self, def_id: DefId) -> Lazy { let tcx = self.tcx; - self.encode_bounds_and_type(&tcx.lookup_item_type(def_id), - &tcx.lookup_predicates(def_id)); + self.lazy(&tcx.map.def_key(def_id)) } - fn encode_bounds_and_type(&mut self, - scheme: &ty::TypeScheme<'tcx>, - predicates: &ty::GenericPredicates<'tcx>) { - self.encode_generics(&scheme.generics, &predicates); - self.encode_type(scheme.ty); + fn encode_item_variances(&mut self, def_id: DefId) -> LazySeq { + let tcx = self.tcx; + self.lazy_seq(tcx.item_variances(def_id).iter().cloned()) } - fn encode_type(&mut self, typ: Ty<'tcx>) { - self.start_tag(item_tag::ty); - typ.encode(self).unwrap(); - self.end_tag(); - } - - fn encode_variant(&mut self, variant: ty::VariantDef, - struct_ctor: Option) - -> EntryData { - self.start_tag(item_tag::children); - self.seq(&variant.fields, |_, f| { - assert!(f.did.is_local()); - f.did.index - }); - self.end_tag(); - - EntryData::Variant(VariantData { - kind: variant.kind, - disr: variant.disr_val.to_u64_unchecked(), - struct_ctor: struct_ctor - }) + fn encode_item_type(&mut self, def_id: DefId) -> Lazy> { + let tcx = self.tcx; + self.lazy(&tcx.lookup_item_type(def_id).ty) } /// Encode data for the given variant of the given ADT. The @@ -233,97 +209,104 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { /// e.g., the length of the various vectors). 
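Most Entry fields produced by the encode_* helpers above and by the item encoders that follow are Lazy<T> or LazySeq<T> handles: the value (or sequence) is written into the output stream right away, but the handle itself carries only a byte position, plus a length for sequences, which is why the SpecializedEncoder impls for them simply emit_usize. Here is a self-contained sketch of that idea, using plain u32 payloads instead of the real rustc types and serialization traits (all names are simplified stand-ins):

    use std::marker::PhantomData;

    // A Lazy<T> is just an offset into the metadata blob; the value behind it
    // is only decoded when somebody asks for it.
    struct Lazy<T> {
        position: usize,
        _marker: PhantomData<T>,
    }

    // A LazySeq<T> additionally remembers how many elements were written.
    struct LazySeq<T> {
        len: usize,
        position: usize,
        _marker: PhantomData<T>,
    }

    struct Encoder {
        out: Vec<u8>,
    }

    impl Encoder {
        fn position(&self) -> usize {
            self.out.len()
        }

        fn emit_u32(&mut self, v: u32) {
            self.out.extend_from_slice(&v.to_le_bytes());
        }

        // Record where the value starts, write it, and hand back only the offset.
        fn lazy(&mut self, value: u32) -> Lazy<u32> {
            let pos = self.position();
            self.emit_u32(value);
            Lazy { position: pos, _marker: PhantomData }
        }

        fn lazy_seq<I: IntoIterator<Item = u32>>(&mut self, iter: I) -> LazySeq<u32> {
            let pos = self.position();
            let mut len = 0;
            for v in iter {
                self.emit_u32(v);
                len += 1;
            }
            LazySeq { len: len, position: pos, _marker: PhantomData }
        }
    }

    fn read_u32(blob: &[u8], pos: usize) -> u32 {
        let mut b = [0u8; 4];
        b.copy_from_slice(&blob[pos..pos + 4]);
        u32::from_le_bytes(b)
    }

    impl Lazy<u32> {
        fn decode(&self, blob: &[u8]) -> u32 {
            read_u32(blob, self.position)
        }
    }

    impl LazySeq<u32> {
        fn decode(&self, blob: &[u8]) -> Vec<u32> {
            (0..self.len).map(|i| read_u32(blob, self.position + 4 * i)).collect()
        }
    }

    fn main() {
        let mut enc = Encoder { out: Vec::new() };
        let ty = enc.lazy(42);
        let children = enc.lazy_seq(vec![1, 2, 3]);
        let blob = enc.out;
        assert_eq!(ty.decode(&blob), 42);
        assert_eq!(children.decode(&blob), vec![1, 2, 3]);
    }

Storing only positions is what lets the decoder side fetch, say, an item's predicates or children on demand without parsing the whole entry up front.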
fn encode_enum_variant_info(&mut self, (enum_did, Untracked(index)): - (DefId, Untracked)) { + (DefId, Untracked)) -> Entry<'tcx> { let tcx = self.tcx; let def = tcx.lookup_adt_def(enum_did); let variant = &def.variants[index]; - let vid = variant.did; - self.encode_def_key(vid); - self.encode_family(Family::Variant); + let def_id = variant.did; + + let data = VariantData { + kind: variant.kind, + disr: variant.disr_val.to_u64_unchecked(), + struct_ctor: None + }; let enum_id = tcx.map.as_local_node_id(enum_did).unwrap(); let enum_vis = &tcx.map.expect_item(enum_id).vis; - self.encode_visibility(enum_vis); - let attrs = tcx.get_attrs(vid); - self.encode_attributes(&attrs); - self.encode_stability(vid); + Entry { + kind: EntryKind::Variant(self.lazy(&data)), + visibility: enum_vis.simplify(), + def_key: self.encode_def_key(def_id), + attributes: self.encode_attributes(&tcx.get_attrs(def_id)), + children: self.lazy_seq(variant.fields.iter().map(|f| { + assert!(f.did.is_local()); + f.did.index + })), + stability: self.encode_stability(def_id), + deprecation: self.encode_deprecation(def_id), - let data = self.encode_variant(variant, None); + ty: Some(self.encode_item_type(def_id)), + inherent_impls: LazySeq::empty(), + variances: LazySeq::empty(), + generics: Some(self.encode_generics(def_id)), + predicates: Some(self.encode_predicates(def_id)), - self.start_tag(item_tag::data); - data.encode(self).unwrap(); - self.end_tag(); - - self.start_tag(item_tag::typed_data); - EntryTypedData::Other.encode(self).unwrap(); - self.end_tag(); - - self.encode_bounds_and_type_for_item(vid); + ast: None, + mir: None + } } fn encode_info_for_mod(&mut self, FromId(id, (md, attrs, vis)): - FromId<(&hir::Mod, &[ast::Attribute], &hir::Visibility)>) { + FromId<(&hir::Mod, &[ast::Attribute], &hir::Visibility)>) + -> Entry<'tcx> { let tcx = self.tcx; - let def_id = tcx.map.local_def_id(id); - self.encode_def_key(def_id); - self.encode_family(Family::Mod); - self.encode_visibility(vis); - self.encode_stability(def_id); - self.encode_attributes(attrs); - debug!("(encoding info for module) encoding info for module ID {}", id); - // Encode info about all the module children. - self.start_tag(item_tag::children); - self.seq(&md.item_ids, |_, item_id| { - tcx.map.local_def_id(item_id.id).index - }); - self.end_tag(); - - // Encode the reexports of this module, if this module is public. 
- let reexports = match self.reexports.get(&id) { - Some(exports) if *vis == hir::Public => exports.clone(), - _ => vec![] + let data = ModData { + reexports: match self.reexports.get(&id) { + Some(exports) if *vis == hir::Public => { + self.lazy_seq_ref(exports) + } + _ => LazySeq::empty() + } }; - self.start_tag(item_tag::data); - EntryData::Mod(ModData { - reexports: reexports - }).encode(self).unwrap(); - self.end_tag(); + Entry { + kind: EntryKind::Mod(self.lazy(&data)), + visibility: vis.simplify(), + def_key: self.encode_def_key(def_id), + attributes: self.encode_attributes(attrs), + children: self.lazy_seq(md.item_ids.iter().map(|item_id| { + tcx.map.local_def_id(item_id.id).index + })), + stability: self.encode_stability(def_id), + deprecation: self.encode_deprecation(def_id), - self.start_tag(item_tag::typed_data); - EntryTypedData::Other.encode(self).unwrap(); - self.end_tag(); + ty: None, + inherent_impls: LazySeq::empty(), + variances: LazySeq::empty(), + generics: None, + predicates: None, + + ast: None, + mir: None + } } +} - fn encode_visibility(&mut self, visibility: T) { - let vis = if visibility.is_public() { +trait Visibility { + fn simplify(&self) -> ty::Visibility; +} + +impl Visibility for hir::Visibility { + fn simplify(&self) -> ty::Visibility { + if *self == hir::Public { ty::Visibility::Public } else { ty::Visibility::PrivateExternal - }; - self.start_tag(item_tag::visibility); - vis.encode(self).unwrap(); - self.end_tag(); + } } } -trait HasVisibility: Sized { - fn is_public(self) -> bool; -} - -impl<'a> HasVisibility for &'a hir::Visibility { - fn is_public(self) -> bool { - *self == hir::Public - } -} - -impl HasVisibility for ty::Visibility { - fn is_public(self) -> bool { - self == ty::Visibility::Public +impl Visibility for ty::Visibility { + fn simplify(&self) -> ty::Visibility { + if *self == ty::Visibility::Public { + ty::Visibility::Public + } else { + ty::Visibility::PrivateExternal + } } } @@ -350,312 +333,292 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { /// vectors). 
fn encode_field(&mut self, (adt_def_id, Untracked((variant_index, field_index))): - (DefId, Untracked<(usize, usize)>)) { + (DefId, Untracked<(usize, usize)>)) -> Entry<'tcx> { let tcx = self.tcx; - let def = tcx.lookup_adt_def(adt_def_id); - let variant = &def.variants[variant_index]; + let variant = &tcx.lookup_adt_def(adt_def_id).variants[variant_index]; let field = &variant.fields[field_index]; - let nm = field.name; - debug!("encode_field: encoding {} {:?}", nm, field.did); - - self.encode_family(Family::Field); - self.encode_visibility(field.vis); - self.encode_bounds_and_type_for_item(field.did); - self.encode_def_key(field.did); - + let def_id = field.did; let variant_id = tcx.map.as_local_node_id(variant.did).unwrap(); let variant_data = tcx.map.expect_variant_data(variant_id); - self.encode_attributes(&variant_data.fields()[field_index].attrs); - self.encode_stability(field.did); + + Entry { + kind: EntryKind::Field, + visibility: field.vis.simplify(), + def_key: self.encode_def_key(def_id), + attributes: self.encode_attributes(&variant_data.fields()[field_index].attrs), + children: LazySeq::empty(), + stability: self.encode_stability(def_id), + deprecation: self.encode_deprecation(def_id), + + ty: Some(self.encode_item_type(def_id)), + inherent_impls: LazySeq::empty(), + variances: LazySeq::empty(), + generics: Some(self.encode_generics(def_id)), + predicates: Some(self.encode_predicates(def_id)), + + ast: None, + mir: None + } } - fn encode_struct_ctor(&mut self, ctor_def_id: DefId) { - self.encode_def_key(ctor_def_id); - self.encode_family(Family::Struct); - self.encode_visibility(ty::Visibility::Public); - self.encode_bounds_and_type_for_item(ctor_def_id); + fn encode_struct_ctor(&mut self, (adt_def_id, def_id): (DefId, DefId)) + -> Entry<'tcx> { + let variant = self.tcx.lookup_adt_def(adt_def_id).struct_variant(); - self.encode_stability(ctor_def_id); + let data = VariantData { + kind: variant.kind, + disr: variant.disr_val.to_u64_unchecked(), + struct_ctor: Some(def_id.index) + }; + + Entry { + kind: EntryKind::Struct(self.lazy(&data)), + visibility: ty::Visibility::Public, + def_key: self.encode_def_key(def_id), + attributes: LazySeq::empty(), + children: LazySeq::empty(), + stability: self.encode_stability(def_id), + deprecation: self.encode_deprecation(def_id), + + ty: Some(self.encode_item_type(def_id)), + inherent_impls: LazySeq::empty(), + variances: LazySeq::empty(), + generics: Some(self.encode_generics(def_id)), + predicates: Some(self.encode_predicates(def_id)), + + ast: None, + mir: None + } } - fn encode_generics(&mut self, - generics: &ty::Generics<'tcx>, - predicates: &ty::GenericPredicates<'tcx>) - { - self.start_tag(item_tag::generics); - generics.encode(self).unwrap(); - self.end_tag(); - self.encode_predicates(predicates, item_tag::predicates); + fn encode_generics(&mut self, def_id: DefId) -> Lazy> { + let tcx = self.tcx; + self.lazy(tcx.lookup_generics(def_id)) } - fn encode_predicates(&mut self, - predicates: &ty::GenericPredicates<'tcx>, - tag: usize) { - self.start_tag(tag); - predicates.parent.encode(self).unwrap(); - self.seq(&predicates.predicates, |ecx, predicate| { - ecx.encode_with_shorthand(predicate, predicate, - |ecx| &mut ecx.predicate_shorthands).unwrap() - }); - self.end_tag(); + fn encode_predicates(&mut self, def_id: DefId) -> Lazy> { + let tcx = self.tcx; + self.lazy(&tcx.lookup_predicates(def_id)) } - fn encode_info_for_trait_item(&mut self, def_id: DefId) { + fn encode_info_for_trait_item(&mut self, def_id: DefId) -> Entry<'tcx> { let 
tcx = self.tcx; let node_id = tcx.map.as_local_node_id(def_id).unwrap(); let ast_item = tcx.map.expect_trait_item(node_id); let trait_item = tcx.impl_or_trait_item(def_id); - let (family, has_default, typed_data) = match trait_item { + + let container = |has_body| if has_body { + AssociatedContainer::TraitWithDefault + } else { + AssociatedContainer::TraitRequired + }; + + let kind = match trait_item { ty::ConstTraitItem(ref associated_const) => { - self.encode_bounds_and_type_for_item(def_id); - - let trait_def_id = trait_item.container().id(); - encode_inlined_item(self, - InlinedItemRef::TraitItem(trait_def_id, ast_item)); - - (Family::AssociatedConst, - associated_const.has_value, - EntryTypedData::Other) + EntryKind::AssociatedConst(container(associated_const.has_value)) } ty::MethodTraitItem(ref method_ty) => { - self.encode_bounds_and_type_for_item(def_id); - - (Family::Method, - method_ty.has_body, - EntryTypedData::Method(MethodTypedData { - explicit_self: method_ty.explicit_self - })) + let fn_data = if let hir::MethodTraitItem(ref sig, _) = ast_item.node { + FnData { + constness: hir::Constness::NotConst, + arg_names: self.encode_fn_arg_names(&sig.decl) + } + } else { + bug!() + }; + let data = MethodData { + fn_data: fn_data, + container: container(method_ty.has_body), + explicit_self: self.lazy(&method_ty.explicit_self) + }; + EntryKind::Method(self.lazy(&data)) } - ty::TypeTraitItem(ref associated_type) => { - if let Some(ty) = associated_type.ty { - self.encode_type(ty); + ty::TypeTraitItem(_) => { + EntryKind::AssociatedType(container(false)) + } + }; + + Entry { + kind: kind, + visibility: trait_item.vis().simplify(), + def_key: self.encode_def_key(def_id), + attributes: self.encode_attributes(&ast_item.attrs), + children: LazySeq::empty(), + stability: self.encode_stability(def_id), + deprecation: self.encode_deprecation(def_id), + + ty: match trait_item { + ty::ConstTraitItem(_) | + ty::MethodTraitItem(_) => { + Some(self.encode_item_type(def_id)) } + ty::TypeTraitItem(ref associated_type) => { + associated_type.ty.map(|ty| self.lazy(&ty)) + } + }, + inherent_impls: LazySeq::empty(), + variances: LazySeq::empty(), + generics: Some(self.encode_generics(def_id)), + predicates: Some(self.encode_predicates(def_id)), - (Family::AssociatedType, false, EntryTypedData::Other) - } - }; - - self.encode_def_key(def_id); - self.encode_family(family); - self.encode_visibility(trait_item.vis()); - - self.encode_stability(def_id); - self.encode_attributes(&ast_item.attrs); - if let hir::MethodTraitItem(ref sig, _) = ast_item.node { - self.encode_fn_arg_names(&sig.decl); - }; - - self.start_tag(item_tag::data); - EntryData::TraitAssociated(TraitAssociatedData { - has_default: has_default - }).encode(self).unwrap(); - self.end_tag(); - - self.start_tag(item_tag::typed_data); - typed_data.encode(self).unwrap(); - self.end_tag(); - - self.encode_mir(def_id); + ast: if let ty::ConstTraitItem(_) = trait_item { + let trait_def_id = trait_item.container().id(); + Some(self.encode_inlined_item(InlinedItemRef::TraitItem(trait_def_id, ast_item))) + } else { + None + }, + mir: self.encode_mir(def_id) + } } - fn encode_info_for_impl_item(&mut self, def_id: DefId) { + fn encode_info_for_impl_item(&mut self, def_id: DefId) -> Entry<'tcx> { let node_id = self.tcx.map.as_local_node_id(def_id).unwrap(); let ast_item = self.tcx.map.expect_impl_item(node_id); let impl_item = self.tcx.impl_or_trait_item(def_id); let impl_def_id = impl_item.container().id(); - let (family, typed_data) = match impl_item { + 
+ let container = match ast_item.defaultness { + hir::Defaultness::Default => AssociatedContainer::ImplDefault, + hir::Defaultness::Final => AssociatedContainer::ImplFinal + }; + + let kind = match impl_item { ty::ConstTraitItem(_) => { - self.encode_bounds_and_type_for_item(def_id); - - encode_inlined_item(self, - InlinedItemRef::ImplItem(impl_def_id, ast_item)); - self.encode_mir(def_id); - - (Family::AssociatedConst, EntryTypedData::Other) + EntryKind::AssociatedConst(container) } - ty::MethodTraitItem(ref method_type) => { - self.encode_bounds_and_type_for_item(def_id); - - (Family::Method, - EntryTypedData::Method(MethodTypedData { - explicit_self: method_type.explicit_self - })) + ty::MethodTraitItem(ref method_ty) => { + let fn_data = if let hir::ImplItemKind::Method(ref sig, _) = ast_item.node { + FnData { + constness: sig.constness, + arg_names: self.encode_fn_arg_names(&sig.decl) + } + } else { + bug!() + }; + let data = MethodData { + fn_data: fn_data, + container: container, + explicit_self: self.lazy(&method_ty.explicit_self) + }; + EntryKind::Method(self.lazy(&data)) } - ty::TypeTraitItem(ref associated_type) => { - if let Some(ty) = associated_type.ty { - self.encode_type(ty); - } - - (Family::AssociatedType, EntryTypedData::Other) + ty::TypeTraitItem(_) => { + EntryKind::AssociatedType(container) } }; - self.encode_def_key(def_id); - self.encode_family(family); - self.encode_visibility(impl_item.vis()); - self.encode_attributes(&ast_item.attrs); - self.encode_stability(def_id); - - let constness = if let hir::ImplItemKind::Method(ref sig, _) = ast_item.node { - if sig.constness == hir::Constness::Const { - encode_inlined_item( - self, - InlinedItemRef::ImplItem(impl_def_id, ast_item)); - } - + let (ast, mir) = if let ty::ConstTraitItem(_) = impl_item { + (true, true) + } else if let hir::ImplItemKind::Method(ref sig, _) = ast_item.node { let generics = self.tcx.lookup_generics(def_id); let types = generics.parent_types as usize + generics.types.len(); let needs_inline = types > 0 || attr::requests_inline(&ast_item.attrs); - if needs_inline || sig.constness == hir::Constness::Const { - self.encode_mir(def_id); - } - self.encode_fn_arg_names(&sig.decl); - sig.constness + let is_const_fn = sig.constness == hir::Constness::Const; + (is_const_fn, needs_inline || is_const_fn) } else { - hir::Constness::NotConst + (false, false) }; - self.start_tag(item_tag::data); - EntryData::ImplAssociated(ImplAssociatedData { - defaultness: ast_item.defaultness, - constness:constness - }).encode(self).unwrap(); - self.end_tag(); + Entry { + kind: kind, + visibility: impl_item.vis().simplify(), + def_key: self.encode_def_key(def_id), + attributes: self.encode_attributes(&ast_item.attrs), + children: LazySeq::empty(), + stability: self.encode_stability(def_id), + deprecation: self.encode_deprecation(def_id), - self.start_tag(item_tag::typed_data); - typed_data.encode(self).unwrap(); - self.end_tag(); + ty: match impl_item { + ty::ConstTraitItem(_) | + ty::MethodTraitItem(_) => { + Some(self.encode_item_type(def_id)) + } + ty::TypeTraitItem(ref associated_type) => { + associated_type.ty.map(|ty| self.lazy(&ty)) + } + }, + inherent_impls: LazySeq::empty(), + variances: LazySeq::empty(), + generics: Some(self.encode_generics(def_id)), + predicates: Some(self.encode_predicates(def_id)), + + ast: if ast { + Some(self.encode_inlined_item(InlinedItemRef::ImplItem(impl_def_id, ast_item))) + } else { + None + }, + mir: if mir { + self.encode_mir(def_id) + } else { + None + } + } } - fn 
encode_fn_arg_names(&mut self, - decl: &hir::FnDecl) { - self.start_tag(item_tag::fn_arg_names); - - self.seq(&decl.inputs, |_, arg| { + fn encode_fn_arg_names(&mut self, decl: &hir::FnDecl) -> LazySeq { + self.lazy_seq(decl.inputs.iter().map(|arg| { if let PatKind::Binding(_, ref path1, _) = arg.pat.node { path1.node } else { syntax::parse::token::intern("") } - }); - - self.end_tag(); + })) } - fn encode_mir(&mut self, def_id: DefId) { - if let Some(mir) = self.mir_map.map.get(&def_id) { - self.start_tag(item_tag::mir as usize); - mir.encode(self); - self.end_tag(); - } + fn encode_mir(&mut self, def_id: DefId) -> Option>> { + self.mir_map.map.get(&def_id).map(|mir| self.lazy(mir)) } // Encodes the inherent implementations of a structure, enumeration, or trait. - fn encode_inherent_implementations(&mut self, def_id: DefId) { - self.start_tag(item_tag::inherent_impls); + fn encode_inherent_implementations(&mut self, def_id: DefId) -> LazySeq { match self.tcx.inherent_impls.borrow().get(&def_id) { - None => <[DefId]>::encode(&[], self).unwrap(), - Some(implementations) => implementations.encode(self).unwrap() + None => LazySeq::empty(), + Some(implementations) => { + self.lazy_seq(implementations.iter().map(|&def_id| { + assert!(def_id.is_local()); + def_id.index + })) + } } - self.end_tag(); } - fn encode_stability(&mut self, def_id: DefId) { - self.tcx.lookup_stability(def_id).map(|stab| { - self.start_tag(item_tag::stability); - stab.encode(self).unwrap(); - self.end_tag(); - }); - self.tcx.lookup_deprecation(def_id).map(|depr| { - self.start_tag(item_tag::deprecation); - depr.encode(self).unwrap(); - self.end_tag(); - }); + fn encode_stability(&mut self, def_id: DefId) -> Option> { + self.tcx.lookup_stability(def_id).map(|stab| self.lazy(stab)) + } + + fn encode_deprecation(&mut self, def_id: DefId) -> Option> { + self.tcx.lookup_deprecation(def_id).map(|depr| self.lazy(&depr)) } fn encode_info_for_item(&mut self, - (def_id, item): (DefId, &hir::Item)) { + (def_id, item): (DefId, &hir::Item)) -> Entry<'tcx> { let tcx = self.tcx; debug!("encoding info for item at {}", tcx.sess.codemap().span_to_string(item.span)); - let (family, data, typed_data) = match item.node { - hir::ItemStatic(_, m, _) => { - self.encode_bounds_and_type_for_item(def_id); + let kind = match item.node { + hir::ItemStatic(_, hir::MutMutable, _) => EntryKind::MutStatic, + hir::ItemStatic(_, hir::MutImmutable, _) => EntryKind::ImmStatic, + hir::ItemConst(..) => EntryKind::Const, + hir::ItemFn(ref decl, _, constness, ..) => { + let data = FnData { + constness: constness, + arg_names: self.encode_fn_arg_names(&decl) + }; - if m == hir::MutMutable { - (Family::MutStatic, EntryData::Other, EntryTypedData::Other) - } else { - (Family::ImmStatic, EntryData::Other, EntryTypedData::Other) - } - } - hir::ItemConst(..) 
=> { - self.encode_bounds_and_type_for_item(def_id); - encode_inlined_item(self, InlinedItemRef::Item(def_id, item)); - self.encode_mir(def_id); - - (Family::Const, EntryData::Other, EntryTypedData::Other) - } - hir::ItemFn(ref decl, _, constness, _, ref generics, _) => { - let tps_len = generics.ty_params.len(); - self.encode_bounds_and_type_for_item(def_id); - let needs_inline = tps_len > 0 || attr::requests_inline(&item.attrs); - if constness == hir::Constness::Const { - encode_inlined_item(self, InlinedItemRef::Item(def_id, item)); - } - if needs_inline || constness == hir::Constness::Const { - self.encode_mir(def_id); - } - self.encode_fn_arg_names(&decl); - - (Family::Fn, EntryData::Fn(FnData { - constness: constness - }), EntryTypedData::Other) + EntryKind::Fn(self.lazy(&data)) } hir::ItemMod(ref m) => { - self.encode_info_for_mod(FromId(item.id, (m, &item.attrs, &item.vis))); - return; - } - hir::ItemForeignMod(ref fm) => { - // Encode all the items in self module. - self.start_tag(item_tag::children); - self.seq(&fm.items, |_, foreign_item| { - tcx.map.local_def_id(foreign_item.id).index - }); - self.end_tag(); - - (Family::ForeignMod, EntryData::Other, EntryTypedData::Other) - } - hir::ItemTy(..) => { - self.encode_bounds_and_type_for_item(def_id); - - (Family::Type, EntryData::Other, EntryTypedData::Other) - } - hir::ItemEnum(ref enum_definition, _) => { - self.encode_item_variances(def_id); - self.encode_bounds_and_type_for_item(def_id); - - self.start_tag(item_tag::children); - self.seq(&enum_definition.variants, |_, v| { - tcx.map.local_def_id(v.node.data.id()).index - }); - self.end_tag(); - - // Encode inherent implementations for self enumeration. - self.encode_inherent_implementations(def_id); - - (Family::Enum, EntryData::Other, EntryTypedData::Other) + return self.encode_info_for_mod(FromId(item.id, (m, &item.attrs, &item.vis))); } + hir::ItemForeignMod(_) => EntryKind::ForeignMod, + hir::ItemTy(..) => EntryKind::Type, + hir::ItemEnum(..) => EntryKind::Enum, hir::ItemStruct(ref struct_def, _) => { - let def = tcx.lookup_adt_def(def_id); - let variant = def.struct_variant(); - - self.encode_bounds_and_type_for_item(def_id); - - self.encode_item_variances(def_id); + let variant = tcx.lookup_adt_def(def_id).struct_variant(); /* Encode def_ids for each field and method for methods, write all the stuff get_trait_method @@ -665,38 +628,32 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { } else { None }; - let data = self.encode_variant(variant, struct_ctor); - - // Encode inherent implementations for self structure. - self.encode_inherent_implementations(def_id); - - (Family::Struct, data, EntryTypedData::Other) + EntryKind::Struct(self.lazy(&VariantData { + kind: variant.kind, + disr: variant.disr_val.to_u64_unchecked(), + struct_ctor: struct_ctor + })) } hir::ItemUnion(..) => { - self.encode_bounds_and_type_for_item(def_id); + let variant = tcx.lookup_adt_def(def_id).struct_variant(); - self.encode_item_variances(def_id); - - /* Encode def_ids for each field and method - for methods, write all the stuff get_trait_method - needs to know*/ - let def = self.tcx.lookup_adt_def(def_id); - let data = self.encode_variant(def.struct_variant(), None); - - // Encode inherent implementations for self union. - self.encode_inherent_implementations(def_id); - - (Family::Union, data, EntryTypedData::Other) + EntryKind::Union(self.lazy(&VariantData { + kind: variant.kind, + disr: variant.disr_val.to_u64_unchecked(), + struct_ctor: None + })) } hir::ItemDefaultImpl(..) 
=> { - (Family::DefaultImpl, EntryData::Other, - EntryTypedData::Impl(ImplTypedData { - trait_ref: tcx.impl_trait_ref(def_id) - })) + let data = ImplData { + polarity: hir::ImplPolarity::Positive, + parent_impl: None, + coerce_unsized_kind: None, + trait_ref: tcx.impl_trait_ref(def_id).map(|trait_ref| self.lazy(&trait_ref)) + }; + + EntryKind::DefaultImpl(self.lazy(&data)) } hir::ItemImpl(_, polarity, ..) => { - self.encode_bounds_and_type_for_item(def_id); - let trait_ref = tcx.impl_trait_ref(def_id); let parent = if let Some(trait_ref) = trait_ref { let trait_def = tcx.lookup_trait_def(trait_ref.def_id); @@ -710,71 +667,146 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { None }; - self.start_tag(item_tag::children); - self.seq(&tcx.impl_or_trait_items(def_id)[..], |_, &def_id| { - assert!(def_id.is_local()); - def_id.index - }); - self.end_tag(); - - (Family::Impl, - EntryData::Impl(ImplData { + let data = ImplData { polarity: polarity, parent_impl: parent, coerce_unsized_kind: tcx.custom_coerce_unsized_kinds.borrow() - .get(&def_id).cloned() - }), - EntryTypedData::Impl(ImplTypedData { - trait_ref: trait_ref - })) + .get(&def_id).cloned(), + trait_ref: trait_ref.map(|trait_ref| self.lazy(&trait_ref)) + }; + + EntryKind::Impl(self.lazy(&data)) } hir::ItemTrait(..) => { - self.encode_item_variances(def_id); let trait_def = tcx.lookup_trait_def(def_id); - let trait_predicates = tcx.lookup_predicates(def_id); - - self.encode_generics(&trait_def.generics, &trait_predicates); - self.encode_predicates(&tcx.lookup_super_predicates(def_id), - item_tag::super_predicates); - - self.start_tag(item_tag::children); - self.seq(&tcx.impl_or_trait_items(def_id)[..], |_, &def_id| { - assert!(def_id.is_local()); - def_id.index - }); - self.end_tag(); - - // Encode inherent implementations for self trait. - self.encode_inherent_implementations(def_id); - - (Family::Trait, - EntryData::Trait(TraitData { + let data = TraitData { unsafety: trait_def.unsafety, paren_sugar: trait_def.paren_sugar, - has_default_impl: tcx.trait_has_default_impl(def_id) - }), - EntryTypedData::Trait(TraitTypedData { - trait_ref: trait_def.trait_ref - })) + has_default_impl: tcx.trait_has_default_impl(def_id), + trait_ref: self.lazy(&trait_def.trait_ref), + super_predicates: self.lazy(&tcx.lookup_super_predicates(def_id)) + }; + + EntryKind::Trait(self.lazy(&data)) } hir::ItemExternCrate(_) | hir::ItemUse(_) => { bug!("cannot encode info for item {:?}", item) } }; - self.encode_family(family); - self.encode_def_key(def_id); - self.encode_visibility(&item.vis); - self.encode_attributes(&item.attrs); - self.encode_stability(def_id); + Entry { + kind: kind, + visibility: item.vis.simplify(), + def_key: self.encode_def_key(def_id), + attributes: self.encode_attributes(&item.attrs), + children: match item.node { + hir::ItemForeignMod(ref fm) => { + self.lazy_seq(fm.items.iter().map(|foreign_item| { + tcx.map.local_def_id(foreign_item.id).index + })) + } + hir::ItemEnum(..) => { + let def = self.tcx.lookup_adt_def(def_id); + self.lazy_seq(def.variants.iter().map(|v| { + assert!(v.did.is_local()); + v.did.index + })) + } + hir::ItemStruct(..) | + hir::ItemUnion(..) => { + let def = self.tcx.lookup_adt_def(def_id); + self.lazy_seq(def.struct_variant().fields.iter().map(|f| { + assert!(f.did.is_local()); + f.did.index + })) + } + hir::ItemImpl(..) | + hir::ItemTrait(..) 
=> { + self.lazy_seq(tcx.impl_or_trait_items(def_id).iter().map(|&def_id| { + assert!(def_id.is_local()); + def_id.index + })) + } + _ => LazySeq::empty() + }, + stability: self.encode_stability(def_id), + deprecation: self.encode_deprecation(def_id), - self.start_tag(item_tag::data); - data.encode(self).unwrap(); - self.end_tag(); + ty: match item.node { + hir::ItemStatic(..) | + hir::ItemConst(..) | + hir::ItemFn(..) | + hir::ItemTy(..) | + hir::ItemEnum(..) | + hir::ItemStruct(..) | + hir::ItemUnion(..) | + hir::ItemImpl(..) => { + Some(self.encode_item_type(def_id)) + } + _ => None + }, + inherent_impls: self.encode_inherent_implementations(def_id), + variances: match item.node { + hir::ItemEnum(..) | + hir::ItemStruct(..) | + hir::ItemUnion(..) | + hir::ItemTrait(..) => { + self.encode_item_variances(def_id) + } + _ => LazySeq::empty() + }, + generics: match item.node { + hir::ItemStatic(..) | + hir::ItemConst(..) | + hir::ItemFn(..) | + hir::ItemTy(..) | + hir::ItemEnum(..) | + hir::ItemStruct(..) | + hir::ItemUnion(..) | + hir::ItemImpl(..) | + hir::ItemTrait(..) => { + Some(self.encode_generics(def_id)) + } + _ => None + }, + predicates: match item.node { + hir::ItemStatic(..) | + hir::ItemConst(..) | + hir::ItemFn(..) | + hir::ItemTy(..) | + hir::ItemEnum(..) | + hir::ItemStruct(..) | + hir::ItemUnion(..) | + hir::ItemImpl(..) | + hir::ItemTrait(..) => { + Some(self.encode_predicates(def_id)) + } + _ => None + }, - self.start_tag(item_tag::typed_data); - typed_data.encode(self).unwrap(); - self.end_tag(); + ast: match item.node { + hir::ItemConst(..) | + hir::ItemFn(_, _, hir::Constness::Const, ..) => { + Some(self.encode_inlined_item(InlinedItemRef::Item(def_id, item))) + } + _ => None + }, + mir: match item.node { + hir::ItemConst(..) => { + self.encode_mir(def_id) + } + hir::ItemFn(_, _, constness, _, ref generics, _) => { + let tps_len = generics.ty_params.len(); + let needs_inline = tps_len > 0 || attr::requests_inline(&item.attrs); + if needs_inline || constness == hir::Constness::Const { + self.encode_mir(def_id) + } else { + None + } + } + _ => None + } + } } } @@ -822,7 +854,7 @@ impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> { let ctor_def_id = self.tcx.map.local_def_id(struct_def.id()); self.record(ctor_def_id, EncodeContext::encode_struct_ctor, - ctor_def_id); + (def_id, ctor_def_id)); } } } @@ -849,35 +881,42 @@ impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> { impl<'a, 'tcx> EncodeContext<'a, 'tcx> { fn encode_info_for_foreign_item(&mut self, - (def_id, nitem): (DefId, &hir::ForeignItem)) { + (def_id, nitem): (DefId, &hir::ForeignItem)) + -> Entry<'tcx> { let tcx = self.tcx; debug!("writing foreign item {}", tcx.node_path_str(nitem.id)); - self.encode_def_key(def_id); - self.encode_visibility(&nitem.vis); - self.encode_bounds_and_type_for_item(def_id); - let family = match nitem.node { + let kind = match nitem.node { hir::ForeignItemFn(ref fndecl, _) => { - self.encode_fn_arg_names(&fndecl); - - Family::ForeignFn + let data = FnData { + constness: hir::Constness::NotConst, + arg_names: self.encode_fn_arg_names(&fndecl) + }; + EntryKind::ForeignFn(self.lazy(&data)) } - hir::ForeignItemStatic(_, true) => Family::ForeignMutStatic, - hir::ForeignItemStatic(_, false) => Family::ForeignImmStatic + hir::ForeignItemStatic(_, true) => EntryKind::ForeignMutStatic, + hir::ForeignItemStatic(_, false) => EntryKind::ForeignImmStatic }; - self.encode_family(family); - self.start_tag(item_tag::data); - EntryData::Other.encode(self).unwrap(); - self.end_tag(); + Entry { + kind: kind, + 
visibility: nitem.vis.simplify(), + def_key: self.encode_def_key(def_id), + attributes: self.encode_attributes(&nitem.attrs), + children: LazySeq::empty(), + stability: self.encode_stability(def_id), + deprecation: self.encode_deprecation(def_id), - self.start_tag(item_tag::typed_data); - EntryTypedData::Other.encode(self).unwrap(); - self.end_tag(); + ty: Some(self.encode_item_type(def_id)), + inherent_impls: LazySeq::empty(), + variances: LazySeq::empty(), + generics: Some(self.encode_generics(def_id)), + predicates: Some(self.encode_predicates(def_id)), - self.encode_attributes(&nitem.attrs); - self.encode_stability(def_id); + ast: None, + mir: None + } } } @@ -938,68 +977,73 @@ impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> { } impl<'a, 'tcx> EncodeContext<'a, 'tcx> { - fn encode_info_for_anon_ty(&mut self, def_id: DefId) { - self.encode_def_key(def_id); - self.encode_bounds_and_type_for_item(def_id); + fn encode_info_for_anon_ty(&mut self, def_id: DefId) -> Entry<'tcx> { + Entry { + kind: EntryKind::Type, + visibility: ty::Visibility::Public, + def_key: self.encode_def_key(def_id), + attributes: LazySeq::empty(), + children: LazySeq::empty(), + stability: None, + deprecation: None, + + ty: Some(self.encode_item_type(def_id)), + inherent_impls: LazySeq::empty(), + variances: LazySeq::empty(), + generics: Some(self.encode_generics(def_id)), + predicates: Some(self.encode_predicates(def_id)), + + ast: None, + mir: None + } } - fn encode_info_for_closure(&mut self, def_id: DefId) { + fn encode_info_for_closure(&mut self, def_id: DefId) -> Entry<'tcx> { let tcx = self.tcx; - self.encode_def_key(def_id); - self.encode_family(Family::Closure); - self.start_tag(item_tag::data); - EntryData::Closure(ClosureData { - kind: tcx.closure_kind(def_id) - }).encode(self).unwrap(); - self.end_tag(); - - self.start_tag(item_tag::typed_data); - EntryTypedData::Closure(ClosureTypedData { - ty: tcx.tables.borrow().closure_tys[&def_id].clone() - }).encode(self).unwrap(); - self.end_tag(); - - assert!(self.mir_map.map.contains_key(&def_id)); - self.encode_mir(def_id); - } - - fn encode_info_for_items(&mut self) -> IndexData { - let krate = self.tcx.map.krate(); - - // FIXME(eddyb) Avoid wrapping the items in a doc. 
- self.start_tag(0).unwrap(); - - let items = { - let mut index = IndexBuilder::new(self); - index.record(DefId::local(CRATE_DEF_INDEX), - EncodeContext::encode_info_for_mod, - FromId(CRATE_NODE_ID, (&krate.module, &krate.attrs, &hir::Public))); - let mut visitor = EncodeVisitor { - index: index, - }; - krate.visit_all_items(&mut visitor); - visitor.index.into_items() + let data = ClosureData { + kind: tcx.closure_kind(def_id), + ty: self.lazy(&tcx.tables.borrow().closure_tys[&def_id]) }; - self.end_tag(); + Entry { + kind: EntryKind::Closure(self.lazy(&data)), + visibility: ty::Visibility::Public, + def_key: self.encode_def_key(def_id), + attributes: self.encode_attributes(&tcx.get_attrs(def_id)), + children: LazySeq::empty(), + stability: None, + deprecation: None, - items + ty: None, + inherent_impls: LazySeq::empty(), + variances: LazySeq::empty(), + generics: None, + predicates: None, + + ast: None, + mir: self.encode_mir(def_id) + } } - fn encode_item_index(&mut self, index: IndexData) { - self.start_tag(root_tag::index); - index.write_index(&mut self.opaque.cursor); - self.end_tag(); + fn encode_info_for_items(&mut self) -> Index { + let krate = self.tcx.map.krate(); + let mut index = IndexBuilder::new(self); + index.record(DefId::local(CRATE_DEF_INDEX), + EncodeContext::encode_info_for_mod, + FromId(CRATE_NODE_ID, (&krate.module, &krate.attrs, &hir::Public))); + let mut visitor = EncodeVisitor { + index: index, + }; + krate.visit_all_items(&mut visitor); + visitor.index.into_items() } - fn encode_attributes(&mut self, attrs: &[ast::Attribute]) { - self.start_tag(item_tag::attributes); - attrs.encode(self).unwrap(); - self.end_tag(); + fn encode_attributes(&mut self, attrs: &[ast::Attribute]) -> LazySeq { + self.lazy_seq_ref(attrs) } - fn encode_crate_deps(&mut self) { + fn encode_crate_deps(&mut self) -> LazySeq { fn get_ordered_deps(cstore: &cstore::CStore) -> Vec<(CrateNum, Rc)> { // Pull the cnums and name,vers,hash out of cstore @@ -1025,96 +1069,71 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { // the assumption that they are numbered 1 to n. // FIXME (#2166): This is not nearly enough to support correct versioning // but is enough to get transitive crate dependencies working. 
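    // (Editorial note, not in the patch: because the cnums are assumed to run 1..n,
    // the CrateDep entries below carry no cnum at all; a reader reconstructs each
    // crate number purely from the entry's position in the emitted LazySeq.)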
- self.start_tag(root_tag::crate_deps); let deps = get_ordered_deps(self.cstore); - self.seq(&deps, |_, &(_, ref dep)| { - (dep.name(), dep.hash(), dep.explicitly_linked.get()) - }); - self.end_tag(); + self.lazy_seq(deps.iter().map(|&(_, ref dep)| { + CrateDep { + name: syntax::parse::token::intern(dep.name()), + hash: dep.hash(), + explicitly_linked: dep.explicitly_linked.get() + } + })) } - fn encode_lang_items(&mut self) { + fn encode_lang_items(&mut self) + -> (LazySeq<(DefIndex, usize)>, LazySeq) { let tcx = self.tcx; - let lang_items = || { - tcx.lang_items.items().iter().enumerate().filter_map(|(i, &opt_def_id)| { - if let Some(def_id) = opt_def_id { - if def_id.is_local() { - return Some((def_id.index, i)); - } + let lang_items = tcx.lang_items.items().iter(); + (self.lazy_seq(lang_items.enumerate().filter_map(|(i, &opt_def_id)| { + if let Some(def_id) = opt_def_id { + if def_id.is_local() { + return Some((def_id.index, i)); } - None - }) - }; - - let count = lang_items().count(); - let mut lang_items = lang_items(); - - self.start_tag(root_tag::lang_items); - self.seq(0..count, |_, _| lang_items.next().unwrap()); - self.end_tag(); - - self.start_tag(root_tag::lang_items_missing); - tcx.lang_items.missing.encode(self).unwrap(); - self.end_tag(); + } + None + })), self.lazy_seq_ref(&tcx.lang_items.missing)) } - fn encode_native_libraries(&mut self) { + fn encode_native_libraries(&mut self) -> LazySeq<(NativeLibraryKind, String)> { let used_libraries = self.tcx.sess.cstore.used_libraries(); - let libs = || { - used_libraries.iter().filter_map(|&(ref lib, kind)| { - match kind { - cstore::NativeStatic => None, // these libraries are not propagated - cstore::NativeFramework | cstore::NativeUnknown => { - Some((kind, lib)) - } + self.lazy_seq(used_libraries.into_iter().filter_map(|(lib, kind)| { + match kind { + cstore::NativeStatic => None, // these libraries are not propagated + cstore::NativeFramework | cstore::NativeUnknown => { + Some((kind, lib)) } - }) - }; - - let count = libs().count(); - let mut libs = libs(); - - self.start_tag(root_tag::native_libraries); - self.seq(0..count, |_, _| libs.next().unwrap()); - self.end_tag(); + } + })) } - fn encode_codemap(&mut self) { + fn encode_codemap(&mut self) -> LazySeq { let codemap = self.tcx.sess.codemap(); let all_filemaps = codemap.files.borrow(); - let filemaps = || { + self.lazy_seq_ref(all_filemaps.iter().filter(|filemap| { // No need to export empty filemaps, as they can't contain spans // that need translation. // Also no need to re-export imported filemaps, as any downstream // crate will import them from their original source. 
- all_filemaps.iter().filter(|filemap| { - !filemap.lines.borrow().is_empty() && !filemap.is_imported() - }) - }; - - let count = filemaps().count(); - let mut filemaps = filemaps(); - - self.start_tag(root_tag::codemap); - self.seq(0..count, |_, _| filemaps.next().unwrap()); - self.end_tag(); + !filemap.lines.borrow().is_empty() && !filemap.is_imported() + }).map(|filemap| &**filemap)) } /// Serialize the text of the exported macros - fn encode_macro_defs(&mut self) { + fn encode_macro_defs(&mut self) -> LazySeq { let tcx = self.tcx; - self.start_tag(root_tag::macro_defs); - self.seq(&tcx.map.krate().exported_macros, |_, def| { - let body = ::syntax::print::pprust::tts_to_string(&def.body); - (def.name, &def.attrs, def.span, body) - }); - self.end_tag(); + self.lazy_seq(tcx.map.krate().exported_macros.iter().map(|def| { + MacroDef { + name: def.name, + attrs: def.attrs.to_vec(), + span: def.span, + body: ::syntax::print::pprust::tts_to_string(&def.body) + } + })) } } struct ImplVisitor<'a, 'tcx:'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, - impls: FnvHashMap> + impls: FnvHashMap> } impl<'a, 'tcx, 'v> Visitor<'v> for ImplVisitor<'a, 'tcx> { @@ -1124,7 +1143,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for ImplVisitor<'a, 'tcx> { if let Some(trait_ref) = self.tcx.impl_trait_ref(impl_id) { self.impls.entry(trait_ref.def_id) .or_insert(vec![]) - .push(impl_id); + .push(impl_id.index); } } } @@ -1132,22 +1151,21 @@ impl<'a, 'tcx, 'v> Visitor<'v> for ImplVisitor<'a, 'tcx> { impl<'a, 'tcx> EncodeContext<'a, 'tcx> { /// Encodes an index, mapping each trait to its (local) implementations. - fn encode_impls(&mut self) { + fn encode_impls(&mut self) -> LazySeq { let mut visitor = ImplVisitor { tcx: self.tcx, impls: FnvHashMap() }; self.tcx.map.krate().visit_all_items(&mut visitor); - self.start_tag(root_tag::impls); - for (trait_def_id, trait_impls) in visitor.impls { - // FIXME(eddyb) Avoid wrapping the entries in docs. - self.start_tag(0); - (trait_def_id.krate.as_u32(), trait_def_id.index).encode(self).unwrap(); - trait_impls.encode(self).unwrap(); - self.end_tag(); - } - self.end_tag(); + let all_impls: Vec<_> = visitor.impls.into_iter().map(|(trait_def_id, impls)| { + TraitImpls { + trait_id: (trait_def_id.krate.as_u32(), trait_def_id.index), + impls: self.lazy_seq(impls) + } + }).collect(); + + self.lazy_seq(all_impls) } // Encodes all reachable symbols in this crate into the metadata. @@ -1156,20 +1174,16 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { // middle::reachable module but filters out items that either don't have a // symbol associated with them (they weren't translated) or if they're an FFI // definition (as that's not defined in this crate). 
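    // (Editorial note, not in the patch: under the new schema this pass reduces to
    // the `reachable_ids: LazySeq<DefIndex>` field of `CrateRoot`.)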
- fn encode_reachable(&mut self) { - self.start_tag(root_tag::reachable_ids); - + fn encode_reachable(&mut self) -> LazySeq { let reachable = self.reachable; - self.seq(reachable, |ecx, &id| ecx.tcx.map.local_def_id(id).index); - - self.end_tag(); + let tcx = self.tcx; + self.lazy_seq(reachable.iter().map(|&id| tcx.map.local_def_id(id).index)) } - fn encode_dylib_dependency_formats(&mut self) { - self.start_tag(root_tag::dylib_dependency_formats); + fn encode_dylib_dependency_formats(&mut self) -> LazySeq> { match self.tcx.sess.dependency_formats.borrow().get(&config::CrateTypeDylib) { Some(arr) => { - self.seq(arr, |_, slot| { + self.lazy_seq(arr.iter().map(|slot| { match *slot { Linkage::NotLinked | Linkage::IncludedFromDylib => None, @@ -1177,16 +1191,140 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { Linkage::Dynamic => Some(LinkagePreference::RequireDynamic), Linkage::Static => Some(LinkagePreference::RequireStatic), } - }); - } - None => { - <[Option]>::encode(&[], self).unwrap(); + })) } + None => LazySeq::empty() } - self.end_tag(); + } + + fn encode_crate_root(&mut self) -> Lazy { + let mut i = self.position(); + let crate_deps = self.encode_crate_deps(); + let dylib_dependency_formats = self.encode_dylib_dependency_formats(); + let dep_bytes = self.position() - i; + + // Encode the language items. + i = self.position(); + let (lang_items, lang_items_missing) = self.encode_lang_items(); + let lang_item_bytes = self.position() - i; + + // Encode the native libraries used + i = self.position(); + let native_libraries = self.encode_native_libraries(); + let native_lib_bytes = self.position() - i; + + // Encode codemap + i = self.position(); + let codemap = self.encode_codemap(); + let codemap_bytes = self.position() - i; + + // Encode macro definitions + i = self.position(); + let macro_defs = self.encode_macro_defs(); + let macro_defs_bytes = self.position() - i; + + // Encode the def IDs of impls, for coherence checking. + i = self.position(); + let impls = self.encode_impls(); + let impl_bytes = self.position() - i; + + // Encode reachability info. + i = self.position(); + let reachable_ids = self.encode_reachable(); + let reachable_bytes = self.position() - i; + + // Encode and index the items. 
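    // (Editorial note, not in the patch: every IndexBuilder::record call made while
    // encoding the items yields a Lazy<Entry> whose position is remembered;
    // write_index below then dumps those positions as a flat [u32] table, so a
    // per-DefIndex lookup is a single array access.)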
+ i = self.position(); + let items = self.encode_info_for_items(); + let item_bytes = self.position() - i; + + i = self.position(); + let index = items.write_index(&mut self.opaque.cursor); + let index_bytes = self.position() - i; + + let tcx = self.tcx; + let link_meta = self.link_meta; + let is_rustc_macro = tcx.sess.crate_types.borrow().contains(&CrateTypeRustcMacro); + let root = self.lazy(&CrateRoot { + rustc_version: RUSTC_VERSION.to_string(), + name: link_meta.crate_name.clone(), + triple: tcx.sess.opts.target_triple.clone(), + hash: link_meta.crate_hash, + disambiguator: tcx.sess.local_crate_disambiguator().to_string(), + panic_strategy: tcx.sess.opts.cg.panic.clone(), + plugin_registrar_fn: tcx.sess.plugin_registrar_fn.get().map(|id| { + tcx.map.local_def_id(id).index + }), + macro_derive_registrar: if is_rustc_macro { + let id = tcx.sess.derive_registrar_fn.get().unwrap(); + Some(tcx.map.local_def_id(id).index) + } else { + None + }, + + index: index, + crate_deps: crate_deps, + dylib_dependency_formats: dylib_dependency_formats, + native_libraries: native_libraries, + lang_items: lang_items, + lang_items_missing: lang_items_missing, + impls: impls, + reachable_ids: reachable_ids, + macro_defs: macro_defs, + codemap: codemap + }); + + let total_bytes = self.position(); + + if self.tcx.sess.meta_stats() { + let mut zero_bytes = 0; + for e in self.opaque.cursor.get_ref() { + if *e == 0 { + zero_bytes += 1; + } + } + + println!("metadata stats:"); + println!(" dep bytes: {}", dep_bytes); + println!(" lang item bytes: {}", lang_item_bytes); + println!(" native bytes: {}", native_lib_bytes); + println!(" codemap bytes: {}", codemap_bytes); + println!(" macro def bytes: {}", macro_defs_bytes); + println!(" impl bytes: {}", impl_bytes); + println!(" reachable bytes: {}", reachable_bytes); + println!(" item bytes: {}", item_bytes); + println!(" index bytes: {}", index_bytes); + println!(" zero bytes: {}", zero_bytes); + println!(" total bytes: {}", total_bytes); + } + + root } } +// NOTE(eddyb) The following comment was preserved for posterity, even +// though it's no longer relevant as EBML (which uses nested & tagged +// "documents") was replaced with a scheme that can't go out of bounds. +// +// And here we run into yet another obscure archive bug: in which metadata +// loaded from archives may have trailing garbage bytes. Awhile back one of +// our tests was failing sporadically on the OSX 64-bit builders (both nopt +// and opt) by having ebml generate an out-of-bounds panic when looking at +// metadata. +// +// Upon investigation it turned out that the metadata file inside of an rlib +// (and ar archive) was being corrupted. Some compilations would generate a +// metadata file which would end in a few extra bytes, while other +// compilations would not have these extra bytes appended to the end. These +// extra bytes were interpreted by ebml as an extra tag, so they ended up +// being interpreted causing the out-of-bounds. +// +// The root cause of why these extra bytes were appearing was never +// discovered, and in the meantime the solution we're employing is to insert +// the length of the metadata to the start of the metadata. Later on this +// will allow us to slice the metadata to the precise length that we just +// generated regardless of trailing bytes that end up in it. 
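In the new format, the four bytes that follow METADATA_HEADER hold the absolute
position of the CrateRoot rather than a length. The reading side is small enough to
sketch here; this helper is hypothetical and only illustrates the layout written by
encode_metadata below (METADATA_HEADER, Lazy and CrateRoot are the items defined in
schema.rs):

fn locate_crate_root(data: &[u8]) -> Option<Lazy<CrateRoot>> {
    // The header embeds METADATA_VERSION; any other prefix is an incompatible blob.
    if !data.starts_with(METADATA_HEADER) {
        return None;
    }
    // Root position: four big-endian bytes immediately after the header.
    let offset = METADATA_HEADER.len();
    let pos = ((data[offset] as usize) << 24) |
              ((data[offset + 1] as usize) << 16) |
              ((data[offset + 2] as usize) << 8) |
              (data[offset + 3] as usize);
    Some(Lazy::with_position(pos))
}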
+ pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, cstore: &cstore::CStore, reexports: &def::ExportMap, @@ -1194,13 +1332,13 @@ pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, reachable: &NodeSet, mir_map: &MirMap<'tcx>) -> Vec { let mut cursor = Cursor::new(vec![]); - cursor.write_all(&[0, 0, 0, 0]).unwrap(); - cursor.write_all(metadata_encoding_version).unwrap(); - // Will be filed with the length after encoding the crate. + cursor.write_all(METADATA_HEADER).unwrap(); + + // Will be filed with the root position after encoding everything. cursor.write_all(&[0, 0, 0, 0]).unwrap(); - encode_metadata_inner(&mut EncodeContext { - rbml_w: rbml::writer::Encoder::new(&mut cursor), + let root = EncodeContext { + opaque: opaque::Encoder::new(&mut cursor), tcx: tcx, reexports: reexports, link_meta: link_meta, @@ -1209,138 +1347,16 @@ pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, mir_map: mir_map, type_shorthands: Default::default(), predicate_shorthands: Default::default() - }); - - // RBML compacts the encoded bytes whenever appropriate, - // so there are some garbages left after the end of the data. - let meta_len = cursor.position() as usize; - cursor.get_mut().truncate(meta_len); - - // And here we run into yet another obscure archive bug: in which metadata - // loaded from archives may have trailing garbage bytes. Awhile back one of - // our tests was failing sporadically on the OSX 64-bit builders (both nopt - // and opt) by having rbml generate an out-of-bounds panic when looking at - // metadata. - // - // Upon investigation it turned out that the metadata file inside of an rlib - // (and ar archive) was being corrupted. Some compilations would generate a - // metadata file which would end in a few extra bytes, while other - // compilations would not have these extra bytes appended to the end. These - // extra bytes were interpreted by rbml as an extra tag, so they ended up - // being interpreted causing the out-of-bounds. - // - // The root cause of why these extra bytes were appearing was never - // discovered, and in the meantime the solution we're employing is to insert - // the length of the metadata to the start of the metadata. Later on this - // will allow us to slice the metadata to the precise length that we just - // generated regardless of trailing bytes that end up in it. - // - // We also need to store the metadata encoding version here, because - // rlibs don't have it. To get older versions of rustc to ignore - // this metadata, there are 4 zero bytes at the start, which are - // treated as a length of 0 by old compilers. - - let meta_start = 8 + ::common::metadata_encoding_version.len(); - let len = meta_len - meta_start; + }.encode_crate_root(); let mut result = cursor.into_inner(); - result[meta_start - 4] = (len >> 24) as u8; - result[meta_start - 3] = (len >> 16) as u8; - result[meta_start - 2] = (len >> 8) as u8; - result[meta_start - 1] = (len >> 0) as u8; + + // Encode the root position. 
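    // (Editorial note, not in the patch: these four bytes overwrite the placeholder
    // zeros written right after METADATA_HEADER above, in the same big-endian order
    // a reader of the blob has to assume.)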
+ let header = METADATA_HEADER.len(); + let pos = root.position; + result[header + 0] = (pos >> 24) as u8; + result[header + 1] = (pos >> 16) as u8; + result[header + 2] = (pos >> 8) as u8; + result[header + 3] = (pos >> 0) as u8; + result } - -fn encode_metadata_inner(ecx: &mut EncodeContext) { - ecx.wr_tagged_str(root_tag::rustc_version, &rustc_version()); - - let tcx = ecx.tcx; - let link_meta = ecx.link_meta; - - ecx.start_tag(root_tag::crate_info); - let is_rustc_macro = tcx.sess.crate_types.borrow().contains(&CrateTypeRustcMacro); - CrateInfo { - name: link_meta.crate_name.clone(), - triple: tcx.sess.opts.target_triple.clone(), - hash: link_meta.crate_hash, - disambiguator: tcx.sess.local_crate_disambiguator().to_string(), - panic_strategy: tcx.sess.opts.cg.panic.clone(), - plugin_registrar_fn: tcx.sess.plugin_registrar_fn.get().map(|id| { - tcx.map.local_def_id(id).index - }), - macro_derive_registrar: if is_rustc_macro { - let id = tcx.sess.derive_registrar_fn.get().unwrap(); - Some(tcx.map.local_def_id(id).index) - } else { - None - } - }.encode(ecx).unwrap(); - ecx.end_tag(); - - let mut i = ecx.position(); - ecx.encode_crate_deps(); - ecx.encode_dylib_dependency_formats(); - let dep_bytes = ecx.position() - i; - - // Encode the language items. - i = ecx.position(); - ecx.encode_lang_items(); - let lang_item_bytes = ecx.position() - i; - - // Encode the native libraries used - i = ecx.position(); - ecx.encode_native_libraries(); - let native_lib_bytes = ecx.position() - i; - - // Encode codemap - i = ecx.position(); - ecx.encode_codemap(); - let codemap_bytes = ecx.position() - i; - - // Encode macro definitions - i = ecx.position(); - ecx.encode_macro_defs(); - let macro_defs_bytes = ecx.position() - i; - - // Encode the def IDs of impls, for coherence checking. - i = ecx.position(); - ecx.encode_impls(); - let impl_bytes = ecx.position() - i; - - // Encode reachability info. - i = ecx.position(); - ecx.encode_reachable(); - let reachable_bytes = ecx.position() - i; - - // Encode and index the items. - i = ecx.position(); - let items = ecx.encode_info_for_items(); - let item_bytes = ecx.position() - i; - - i = ecx.position(); - ecx.encode_item_index(items); - let index_bytes = ecx.position() - i; - - let total_bytes = ecx.position(); - - if ecx.tcx.sess.meta_stats() { - let mut zero_bytes = 0; - for e in ecx.opaque.cursor.get_ref() { - if *e == 0 { - zero_bytes += 1; - } - } - - println!("metadata stats:"); - println!(" dep bytes: {}", dep_bytes); - println!(" lang item bytes: {}", lang_item_bytes); - println!(" native bytes: {}", native_lib_bytes); - println!(" codemap bytes: {}", codemap_bytes); - println!(" macro def bytes: {}", macro_defs_bytes); - println!(" impl bytes: {}", impl_bytes); - println!(" reachable bytes: {}", reachable_bytes); - println!(" item bytes: {}", item_bytes); - println!(" index bytes: {}", index_bytes); - println!(" zero bytes: {}", zero_bytes); - println!(" total bytes: {}", total_bytes); - } -} diff --git a/src/librustc_metadata/index.rs b/src/librustc_metadata/index.rs index 2c16411c37b..ef83251f51e 100644 --- a/src/librustc_metadata/index.rs +++ b/src/librustc_metadata/index.rs @@ -8,65 +8,13 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
+use schema::*; + use rustc::hir::def_id::{DefId, DefIndex}; -use rbml; use std::io::{Cursor, Write}; use std::slice; use std::u32; -/// As part of the metadata, we generate an index that stores, for -/// each DefIndex, the position of the corresponding RBML document (if -/// any). This is just a big `[u32]` slice, where an entry of -/// `u32::MAX` indicates that there is no RBML document. This little -/// struct just stores the offsets within the metadata of the start -/// and end of this slice. These are actually part of an RBML -/// document, but for looking things up in the metadata, we just -/// discard the RBML positioning and jump directly to the data. -pub struct Index { - data_start: usize, - data_end: usize, -} - -impl Index { - /// Given the RBML doc representing the index, save the offests - /// for later. - pub fn from_rbml(index: rbml::Doc) -> Index { - Index { data_start: index.start, data_end: index.end } - } - - /// Given the metadata, extract out the offset of a particular - /// DefIndex (if any). - #[inline(never)] - pub fn lookup_item(&self, bytes: &[u8], def_index: DefIndex) -> Option { - let words = bytes_to_words(&bytes[self.data_start..self.data_end]); - let index = def_index.as_usize(); - - debug!("lookup_item: index={:?} words.len={:?}", - index, words.len()); - - let position = u32::from_le(words[index]); - if position == u32::MAX { - debug!("lookup_item: position=u32::MAX"); - None - } else { - debug!("lookup_item: position={:?}", position); - Some(position) - } - } - - pub fn iter_enumerated<'a>(&self, bytes: &'a [u8]) - -> impl Iterator + 'a { - let words = bytes_to_words(&bytes[self.data_start..self.data_end]); - words.iter().enumerate().filter_map(|(index, &position)| { - if position == u32::MAX { - None - } else { - Some((DefIndex::new(index), u32::from_le(position))) - } - }) - } -} - /// While we are generating the metadata, we also track the position /// of each DefIndex. It is not required that all definitions appear /// in the metadata, nor that they are serialized in order, and @@ -74,27 +22,27 @@ impl Index { /// `u32::MAX`. Whenever an index is visited, we fill in the /// appropriate spot by calling `record_position`. We should never /// visit the same index twice. 
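/// (Decoding goes through `LazySeq::lookup` further down: entry `i` is the
/// little-endian `u32` stored at byte offset `position + i * 4`, and a value of
/// `u32::MAX` again means "no entry for this DefIndex".)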
-pub struct IndexData { +pub struct Index { positions: Vec, } -impl IndexData { - pub fn new(max_index: usize) -> IndexData { - IndexData { +impl Index { + pub fn new(max_index: usize) -> Index { + Index { positions: vec![u32::MAX; max_index] } } - pub fn record(&mut self, def_id: DefId, position: usize) { + pub fn record(&mut self, def_id: DefId, entry: Lazy) { assert!(def_id.is_local()); - self.record_index(def_id.index, position); + self.record_index(def_id.index, entry); } - pub fn record_index(&mut self, item: DefIndex, position: usize) { + pub fn record_index(&mut self, item: DefIndex, entry: Lazy) { let item = item.as_usize(); - assert!(position < (u32::MAX as usize)); - let position = position as u32; + assert!(entry.position < (u32::MAX as usize)); + let position = entry.position as u32; assert!(self.positions[item] == u32::MAX, "recorded position for item {:?} twice, first at {:?} and now at {:?}", @@ -103,16 +51,52 @@ impl IndexData { self.positions[item] = position.to_le(); } - pub fn write_index(&self, buf: &mut Cursor>) { + pub fn write_index(&self, buf: &mut Cursor>) -> LazySeq { + let pos = buf.position(); buf.write_all(words_to_bytes(&self.positions)).unwrap(); + LazySeq::with_position_and_length(pos as usize, self.positions.len()) + } +} + +impl<'tcx> LazySeq { + /// Given the metadata, extract out the offset of a particular + /// DefIndex (if any). + #[inline(never)] + pub fn lookup(&self, bytes: &[u8], def_index: DefIndex) -> Option>> { + let words = &bytes_to_words(&bytes[self.position..])[..self.len]; + let index = def_index.as_usize(); + + debug!("Index::lookup: index={:?} words.len={:?}", + index, words.len()); + + let position = u32::from_le(words[index]); + if position == u32::MAX { + debug!("Index::lookup: position=u32::MAX"); + None + } else { + debug!("Index::lookup: position={:?}", position); + Some(Lazy::with_position(position as usize)) + } + } + + pub fn iter_enumerated<'a>(&self, bytes: &'a [u8]) + -> impl Iterator>)> + 'a { + let words = &bytes_to_words(&bytes[self.position..])[..self.len]; + words.iter().enumerate().filter_map(|(index, &position)| { + if position == u32::MAX { + None + } else { + let position = u32::from_le(position) as usize; + Some((DefIndex::new(index), Lazy::with_position(position))) + } + }) } } fn bytes_to_words(b: &[u8]) -> &[u32] { - assert!(b.len() % 4 == 0); - unsafe { slice::from_raw_parts(b.as_ptr() as *const u32, b.len()/4) } + unsafe { slice::from_raw_parts(b.as_ptr() as *const u32, b.len() / 4) } } fn words_to_bytes(w: &[u32]) -> &[u8] { - unsafe { slice::from_raw_parts(w.as_ptr() as *const u8, w.len()*4) } + unsafe { slice::from_raw_parts(w.as_ptr() as *const u8, w.len() * 4) } } diff --git a/src/librustc_metadata/index_builder.rs b/src/librustc_metadata/index_builder.rs index 372577e21f1..aeb6f63252c 100644 --- a/src/librustc_metadata/index_builder.rs +++ b/src/librustc_metadata/index_builder.rs @@ -56,7 +56,9 @@ //! easily control precisely what data is given to that fn. use encoder::EncodeContext; -use index::IndexData; +use index::Index; +use schema::*; + use rustc::dep_graph::DepNode; use rustc::hir; use rustc::hir::def_id::DefId; @@ -68,7 +70,7 @@ use std::ops::{Deref, DerefMut}; /// Builder that can encode new items, adding them into the index. /// Item encoding cannot be nested. 
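/// For example, `encode_info_for_items` seeds the builder with the crate root
/// module via `record(DefId::local(CRATE_DEF_INDEX), EncodeContext::encode_info_for_mod, ...)`
/// and then lets `EncodeVisitor` record every other local item.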
pub struct IndexBuilder<'a, 'b: 'a, 'tcx: 'b> { - items: IndexData, + items: Index, pub ecx: &'a mut EncodeContext<'b, 'tcx>, } @@ -88,16 +90,16 @@ impl<'a, 'b, 'tcx> DerefMut for IndexBuilder<'a, 'b, 'tcx> { impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> { pub fn new(ecx: &'a mut EncodeContext<'b, 'tcx>) -> Self { IndexBuilder { - items: IndexData::new(ecx.tcx.map.num_local_def_ids()), + items: Index::new(ecx.tcx.map.num_local_def_ids()), ecx: ecx, } } /// Emit the data for a def-id to the metadata. The function to /// emit the data is `op`, and it will be given `data` as - /// arguments. This `record` function will start/end an RBML tag - /// and record the current offset for use in the index, calling - /// `op` to generate the data in the RBML tag. + /// arguments. This `record` function will call `op` to generate + /// the `Entry` (which may point to other encoded information) + /// and will then record the `Lazy` for use in the index. /// /// In addition, it will setup a dep-graph task to track what data /// `op` accesses to generate the metadata, which is later used by @@ -112,21 +114,17 @@ impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> { /// content system. pub fn record(&mut self, id: DefId, - op: fn(&mut EncodeContext<'b, 'tcx>, DATA), + op: fn(&mut EncodeContext<'b, 'tcx>, DATA) -> Entry<'tcx>, data: DATA) where DATA: DepGraphRead { - let position = self.ecx.mark_stable_position(); - self.items.record(id, position); let _task = self.tcx.dep_graph.in_task(DepNode::MetaData(id)); - // FIXME(eddyb) Avoid wrapping the entries in docs. - self.ecx.start_tag(0).unwrap(); data.read(self.tcx); - op(&mut self.ecx, data); - self.ecx.end_tag().unwrap(); + let entry = op(&mut self.ecx, data); + self.items.record(id, self.ecx.lazy(&entry)); } - pub fn into_items(self) -> IndexData { + pub fn into_items(self) -> Index { self.items } } diff --git a/src/librustc_metadata/lib.rs b/src/librustc_metadata/lib.rs index b46c5be9f8a..4fc5a46762d 100644 --- a/src/librustc_metadata/lib.rs +++ b/src/librustc_metadata/lib.rs @@ -29,11 +29,9 @@ #![feature(rustc_private)] #![feature(specialization)] #![feature(staged_api)] -#![cfg_attr(test, feature(test))] #[macro_use] extern crate log; #[macro_use] extern crate syntax; -#[macro_use] #[no_link] extern crate rustc_bitflags; extern crate syntax_pos; extern crate flate; extern crate serialize as rustc_serialize; // used by deriving @@ -48,24 +46,15 @@ extern crate rustc_llvm; extern crate rustc_macro; extern crate rustc_const_math; -#[cfg(test)] -extern crate test; - -mod rbml { - pub mod writer; - pub mod reader; - pub use self::reader::Doc; -} - mod diagnostics; mod astencode; -mod common; mod index_builder; mod index; mod encoder; mod decoder; mod csearch; +mod schema; pub mod creader; pub mod cstore; diff --git a/src/librustc_metadata/loader.rs b/src/librustc_metadata/loader.rs index 883004b8486..fc94cec916a 100644 --- a/src/librustc_metadata/loader.rs +++ b/src/librustc_metadata/loader.rs @@ -212,8 +212,8 @@ //! no means all of the necessary details. Take a look at the rest of //! metadata::loader or metadata::creader for all the juicy details! 
-use cstore::{MetadataBlob, MetadataVec, MetadataArchive}; -use common::{metadata_encoding_version, rustc_version}; +use cstore::MetadataBlob; +use schema::{METADATA_HEADER, RUSTC_VERSION}; use rustc::hir::svh::Svh; use rustc::session::Session; @@ -382,7 +382,7 @@ impl<'a> Context<'a> { } if !self.rejected_via_version.is_empty() { err.help(&format!("please recompile that crate using this compiler ({})", - rustc_version())); + RUSTC_VERSION)); let mismatches = self.rejected_via_version.iter(); for (i, &CrateMismatch { ref path, ref got }) in mismatches.enumerate() { err.note(&format!("crate `{}` path #{}: {} compiled by {:?}", @@ -510,8 +510,7 @@ impl<'a> Context<'a> { if let Some((ref p, _)) = lib.rlib { err.note(&format!("path: {}", p.display())); } - let crate_info = lib.metadata.get_crate_info(); - note_crate_name(&mut err, &crate_info.name); + note_crate_name(&mut err, &lib.metadata.get_root().name); } err.emit(); None @@ -597,38 +596,37 @@ impl<'a> Context<'a> { } fn crate_matches(&mut self, metadata: &MetadataBlob, libpath: &Path) -> Option { - let crate_rustc_version = metadata.crate_rustc_version(); - if crate_rustc_version != Some(rustc_version()) { - let message = crate_rustc_version.unwrap_or(format!("an unknown compiler")); - info!("Rejecting via version: expected {} got {}", rustc_version(), message); + let root = metadata.get_root(); + if root.rustc_version != RUSTC_VERSION { + info!("Rejecting via version: expected {} got {}", + RUSTC_VERSION, root.rustc_version); self.rejected_via_version.push(CrateMismatch { path: libpath.to_path_buf(), - got: message + got: root.rustc_version }); return None; } - let crate_info = metadata.get_crate_info(); if self.should_match_name { - if self.crate_name != crate_info.name { + if self.crate_name != root.name { info!("Rejecting via crate name"); return None; } } - if crate_info.triple != self.triple { + if root.triple != self.triple { info!("Rejecting via crate triple: expected {} got {}", - self.triple, crate_info.triple); + self.triple, root.triple); self.rejected_via_triple.push(CrateMismatch { path: libpath.to_path_buf(), - got: crate_info.triple + got: root.triple }); return None; } if let Some(myhash) = self.hash { - if *myhash != crate_info.hash { + if *myhash != root.hash { info!("Rejecting via hash: expected {} got {}", - *myhash, crate_info.hash); + *myhash, root.hash); self.rejected_via_hash.push(CrateMismatch { path: libpath.to_path_buf(), got: myhash.to_string() @@ -637,7 +635,7 @@ impl<'a> Context<'a> { } } - Some(crate_info.hash) + Some(root.hash) } @@ -758,11 +756,7 @@ impl ArchiveMetadata { fn verify_decompressed_encoding_version(blob: &MetadataBlob, filename: &Path) -> Result<(), String> { - let data = blob.as_slice_raw(); - if data.len() < 4+metadata_encoding_version.len() || - !<[u8]>::eq(&data[..4], &[0, 0, 0, 0]) || - &data[4..4+metadata_encoding_version.len()] != metadata_encoding_version - { + if !blob.is_compatible() { Err((format!("incompatible metadata version found: '{}'", filename.display()))) } else { @@ -797,7 +791,7 @@ fn get_metadata_section_imp(target: &Target, flavor: CrateFlavor, filename: &Pat filename.display())); } }; - return match ArchiveMetadata::new(archive).map(|ar| MetadataArchive(ar)) { + return match ArchiveMetadata::new(archive).map(|ar| MetadataBlob::Archive(ar)) { None => Err(format!("failed to read rlib metadata: '{}'", filename.display())), Some(blob) => { @@ -832,12 +826,12 @@ fn get_metadata_section_imp(target: &Target, flavor: CrateFlavor, filename: &Pat let cbuf = 
llvm::LLVMGetSectionContents(si.llsi); let csz = llvm::LLVMGetSectionSize(si.llsi) as usize; let cvbuf: *const u8 = cbuf as *const u8; - let vlen = metadata_encoding_version.len(); + let vlen = METADATA_HEADER.len(); debug!("checking {} bytes of metadata-version stamp", vlen); let minsz = cmp::min(vlen, csz); let buf0 = slice::from_raw_parts(cvbuf, minsz); - let version_ok = buf0 == metadata_encoding_version; + let version_ok = buf0 == METADATA_HEADER; if !version_ok { return Err((format!("incompatible metadata version found: '{}'", filename.display()))); @@ -849,7 +843,7 @@ fn get_metadata_section_imp(target: &Target, flavor: CrateFlavor, filename: &Pat let bytes = slice::from_raw_parts(cvbuf1, csz - vlen); match flate::inflate_bytes(bytes) { Ok(inflated) => { - let blob = MetadataVec(inflated); + let blob = MetadataBlob::Inflated(inflated); verify_decompressed_encoding_version(&blob, filename)?; return Ok(blob); } diff --git a/src/librustc_metadata/rbml/reader.rs b/src/librustc_metadata/rbml/reader.rs deleted file mode 100644 index c4cfc32d633..00000000000 --- a/src/librustc_metadata/rbml/reader.rs +++ /dev/null @@ -1,411 +0,0 @@ -// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! Really Bad Markup Language (rbml) is an internal serialization format of rustc. -//! This is not intended to be used by users. -//! -//! Originally based on the Extensible Binary Markup Language -//! (ebml; http://www.matroska.org/technical/specs/rfc/index.html), -//! it is now a separate format tuned for the rust object metadata. -//! -//! # Encoding -//! -//! RBML document consists of the tag, length and data. -//! The encoded data can contain multiple RBML documents concatenated. -//! -//! **Tags** are a hint for the following data. -//! Tags are a number from 0x000 to 0xfff, where 0xf0 through 0xff is reserved. -//! Tags less than 0xf0 are encoded in one literal byte. -//! Tags greater than 0xff are encoded in two big-endian bytes, -//! where the tag number is ORed with 0xf000. (E.g. tag 0x123 = `f1 23`) -//! -//! **Lengths** encode the length of the following data. -//! It is a variable-length unsigned isize, and one of the following forms: -//! -//! - `80` through `fe` for lengths up to 0x7e; -//! - `40 ff` through `7f ff` for lengths up to 0x3fff; -//! - `20 40 00` through `3f ff ff` for lengths up to 0x1fffff; -//! - `10 20 00 00` through `1f ff ff ff` for lengths up to 0xfffffff. -//! -//! The "overlong" form is allowed so that the length can be encoded -//! without the prior knowledge of the encoded data. -//! For example, the length 0 can be represented either by `80`, `40 00`, -//! `20 00 00` or `10 00 00 00`. -//! The encoder tries to minimize the length if possible. -//! Also, some predefined tags listed below are so commonly used that -//! their lengths are omitted ("implicit length"). -//! -//! **Data** can be either binary bytes or zero or more nested RBML documents. -//! Nested documents cannot overflow, and should be entirely contained -//! within a parent document. - -#[cfg(test)] -use test::Bencher; - -use std::fmt; -use std::str; - -macro_rules! 
try_or { - ($e:expr, $r:expr) => ( - match $e { - Ok(x) => x, - Err(_) => return $r - } - ) -} - -#[derive(Clone, Copy)] -pub struct Doc<'a> { - pub data: &'a [u8], - pub start: usize, - pub end: usize, -} - -impl<'doc> Doc<'doc> { - pub fn new(data: &'doc [u8]) -> Doc<'doc> { - Doc { - data: data, - start: 0, - end: data.len(), - } - } - - pub fn at(data: &'doc [u8], start: usize) -> Doc<'doc> { - let elt_tag = tag_at(data, start).unwrap(); - let elt_size = tag_len_at(data, elt_tag.next).unwrap(); - let end = elt_size.next + elt_size.val; - Doc { - data: data, - start: elt_size.next, - end: end, - } - } - - pub fn maybe_child(&self, tag: usize) -> Option> { - let mut pos = self.start; - while pos < self.end { - let elt_tag = try_or!(tag_at(self.data, pos), None); - let elt_size = try_or!(tag_len_at(self.data, elt_tag.next), None); - pos = elt_size.next + elt_size.val; - if elt_tag.val == tag { - return Some(Doc { - data: self.data, - start: elt_size.next, - end: pos, - }); - } - } - None - } - - pub fn child(&self, tag: usize) -> Doc<'doc> { - match self.maybe_child(tag) { - Some(d) => d, - None => { - bug!("failed to find child with tag {:?}", tag); - } - } - } - - pub fn children_of(&self, tag: usize) -> DocsIterator<'doc> { - DocsIterator { d: self.child(tag) } - } -} - -#[derive(Debug)] -pub enum Error { - IntTooBig(usize), - InvalidTag(usize) -} - -impl fmt::Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - // FIXME: this should be a more useful display form - fmt::Debug::fmt(self, f) - } -} - -#[derive(Copy, Clone)] -struct Res { - val: usize, - next: usize, -} - -fn tag_at(data: &[u8], start: usize) -> Result { - let v = data[start] as usize; - if v < 0xf0 { - Ok(Res { - val: v, - next: start + 1, - }) - } else if v > 0xf0 { - Ok(Res { - val: ((v & 0xf) << 8) | data[start + 1] as usize, - next: start + 2, - }) - } else { - // every tag starting with byte 0xf0 is an overlong form, which is prohibited. - Err(Error::InvalidTag(v)) - } -} - -#[inline(never)] -fn vuint_at_slow(data: &[u8], start: usize) -> Result { - let a = data[start]; - if a & 0x80 != 0 { - return Ok(Res { - val: (a & 0x7f) as usize, - next: start + 1, - }); - } - if a & 0x40 != 0 { - return Ok(Res { - val: ((a & 0x3f) as usize) << 8 | (data[start + 1] as usize), - next: start + 2, - }); - } - if a & 0x20 != 0 { - return Ok(Res { - val: ((a & 0x1f) as usize) << 16 | (data[start + 1] as usize) << 8 | - (data[start + 2] as usize), - next: start + 3, - }); - } - if a & 0x10 != 0 { - return Ok(Res { - val: ((a & 0x0f) as usize) << 24 | (data[start + 1] as usize) << 16 | - (data[start + 2] as usize) << 8 | - (data[start + 3] as usize), - next: start + 4, - }); - } - Err(Error::IntTooBig(a as usize)) -} - -fn vuint_at(data: &[u8], start: usize) -> Result { - if data.len() - start < 4 { - return vuint_at_slow(data, start); - } - - // Lookup table for parsing EBML Element IDs as per - // http://ebml.sourceforge.net/specs/ The Element IDs are parsed by - // reading a big endian u32 positioned at data[start]. Using the four - // most significant bits of the u32 we lookup in the table below how - // the element ID should be derived from it. - // - // The table stores tuples (shift, mask) where shift is the number the - // u32 should be right shifted with and mask is the value the right - // shifted value should be masked with. If for example the most - // significant bit is set this means it's a class A ID and the u32 - // should be right shifted with 24 and masked with 0x7f. 
Therefore we - // store (24, 0x7f) at index 0x8 - 0xF (four bit numbers where the most - // significant bit is set). - // - // By storing the number of shifts and masks in a table instead of - // checking in order if the most significant bit is set, the second - // most significant bit is set etc. we can replace up to three - // "and+branch" with a single table lookup which gives us a measured - // speedup of around 2x on x86_64. - static SHIFT_MASK_TABLE: [(usize, u32); 16] = [(0, 0x0), - (0, 0x0fffffff), - (8, 0x1fffff), - (8, 0x1fffff), - (16, 0x3fff), - (16, 0x3fff), - (16, 0x3fff), - (16, 0x3fff), - (24, 0x7f), - (24, 0x7f), - (24, 0x7f), - (24, 0x7f), - (24, 0x7f), - (24, 0x7f), - (24, 0x7f), - (24, 0x7f)]; - - unsafe { - let ptr = data.as_ptr().offset(start as isize) as *const u32; - let val = u32::from_be(*ptr); - - let i = (val >> 28) as usize; - let (shift, mask) = SHIFT_MASK_TABLE[i]; - Ok(Res { - val: ((val >> shift) & mask) as usize, - next: start + ((32 - shift) >> 3), - }) - } -} - -fn tag_len_at(data: &[u8], next: usize) -> Result { - vuint_at(data, next) -} - -pub struct DocsIterator<'a> { - d: Doc<'a>, -} - -impl<'a> Iterator for DocsIterator<'a> { - type Item = Doc<'a>; - - fn next(&mut self) -> Option> { - if self.d.start >= self.d.end { - return None; - } - - let elt_tag = try_or!(tag_at(self.d.data, self.d.start), { - self.d.start = self.d.end; - None - }); - let elt_size = try_or!(tag_len_at(self.d.data, elt_tag.next), { - self.d.start = self.d.end; - None - }); - - let end = elt_size.next + elt_size.val; - let doc = Doc { - data: self.d.data, - start: elt_size.next, - end: end, - }; - - self.d.start = end; - return Some(doc); - } -} - -#[test] -fn test_vuint_at() { - let data = &[ - 0x80, - 0xff, - 0x40, 0x00, - 0x7f, 0xff, - 0x20, 0x00, 0x00, - 0x3f, 0xff, 0xff, - 0x10, 0x00, 0x00, 0x00, - 0x1f, 0xff, 0xff, 0xff - ]; - - let mut res: Res; - - // Class A - res = vuint_at(data, 0).unwrap(); - assert_eq!(res.val, 0); - assert_eq!(res.next, 1); - res = vuint_at(data, res.next).unwrap(); - assert_eq!(res.val, (1 << 7) - 1); - assert_eq!(res.next, 2); - - // Class B - res = vuint_at(data, res.next).unwrap(); - assert_eq!(res.val, 0); - assert_eq!(res.next, 4); - res = vuint_at(data, res.next).unwrap(); - assert_eq!(res.val, (1 << 14) - 1); - assert_eq!(res.next, 6); - - // Class C - res = vuint_at(data, res.next).unwrap(); - assert_eq!(res.val, 0); - assert_eq!(res.next, 9); - res = vuint_at(data, res.next).unwrap(); - assert_eq!(res.val, (1 << 21) - 1); - assert_eq!(res.next, 12); - - // Class D - res = vuint_at(data, res.next).unwrap(); - assert_eq!(res.val, 0); - assert_eq!(res.next, 16); - res = vuint_at(data, res.next).unwrap(); - assert_eq!(res.val, (1 << 28) - 1); - assert_eq!(res.next, 20); -} - -#[bench] -pub fn vuint_at_A_aligned(b: &mut Bencher) { - let data = (0..4 * 100) - .map(|i| { - match i % 2 { - 0 => 0x80, - _ => i as u8, - } - }) - .collect::>(); - let mut sum = 0; - b.iter(|| { - let mut i = 0; - while i < data.len() { - sum += vuint_at(&data, i).unwrap().val; - i += 4; - } - }); -} - -#[bench] -pub fn vuint_at_A_unaligned(b: &mut Bencher) { - let data = (0..4 * 100 + 1) - .map(|i| { - match i % 2 { - 1 => 0x80, - _ => i as u8, - } - }) - .collect::>(); - let mut sum = 0; - b.iter(|| { - let mut i = 1; - while i < data.len() { - sum += vuint_at(&data, i).unwrap().val; - i += 4; - } - }); -} - -#[bench] -pub fn vuint_at_D_aligned(b: &mut Bencher) { - let data = (0..4 * 100) - .map(|i| { - match i % 4 { - 0 => 0x10, - 3 => i as u8, - _ => 0, - } - }) - 
.collect::>(); - let mut sum = 0; - b.iter(|| { - let mut i = 0; - while i < data.len() { - sum += vuint_at(&data, i).unwrap().val; - i += 4; - } - }); -} - -#[bench] -pub fn vuint_at_D_unaligned(b: &mut Bencher) { - let data = (0..4 * 100 + 1) - .map(|i| { - match i % 4 { - 1 => 0x10, - 0 => i as u8, - _ => 0, - } - }) - .collect::>(); - let mut sum = 0; - b.iter(|| { - let mut i = 1; - while i < data.len() { - sum += vuint_at(&data, i).unwrap().val; - i += 4; - } - }); -} diff --git a/src/librustc_metadata/rbml/writer.rs b/src/librustc_metadata/rbml/writer.rs deleted file mode 100644 index 46b63cb1340..00000000000 --- a/src/librustc_metadata/rbml/writer.rs +++ /dev/null @@ -1,134 +0,0 @@ -// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -use std::io::prelude::*; -use std::io::{self, SeekFrom, Cursor}; - -use rustc_serialize::opaque; - -pub type EncodeResult = io::Result<()>; - -// rbml writing -pub struct Encoder<'a> { - pub opaque: opaque::Encoder<'a>, - size_positions: Vec, - relax_limit: usize, // do not move encoded bytes before this position -} - -const NUM_TAGS: usize = 0x1000; - -fn write_tag(w: &mut W, n: usize) -> EncodeResult { - if n < 0xf0 { - w.write_all(&[n as u8]) - } else if 0x100 <= n && n < NUM_TAGS { - w.write_all(&[0xf0 | (n >> 8) as u8, n as u8]) - } else { - Err(io::Error::new(io::ErrorKind::Other, &format!("invalid tag: {}", n)[..])) - } -} - -fn write_sized_vuint(w: &mut W, n: usize, size: usize) -> EncodeResult { - match size { - 1 => w.write_all(&[0x80 | (n as u8)]), - 2 => w.write_all(&[0x40 | ((n >> 8) as u8), n as u8]), - 3 => w.write_all(&[0x20 | ((n >> 16) as u8), (n >> 8) as u8, n as u8]), - 4 => w.write_all(&[0x10 | ((n >> 24) as u8), (n >> 16) as u8, (n >> 8) as u8, n as u8]), - _ => Err(io::Error::new(io::ErrorKind::Other, &format!("isize too big: {}", n)[..])), - } -} - -pub fn write_vuint(w: &mut W, n: usize) -> EncodeResult { - if n < 0x7f { - return write_sized_vuint(w, n, 1); - } - if n < 0x4000 { - return write_sized_vuint(w, n, 2); - } - if n < 0x200000 { - return write_sized_vuint(w, n, 3); - } - if n < 0x10000000 { - return write_sized_vuint(w, n, 4); - } - Err(io::Error::new(io::ErrorKind::Other, &format!("isize too big: {}", n)[..])) -} - -impl<'a> Encoder<'a> { - pub fn new(cursor: &'a mut Cursor>) -> Encoder<'a> { - Encoder { - opaque: opaque::Encoder::new(cursor), - size_positions: vec![], - relax_limit: 0, - } - } - - pub fn start_tag(&mut self, tag_id: usize) -> EncodeResult { - debug!("Start tag {:?}", tag_id); - - // Write the enum ID: - write_tag(&mut self.opaque.cursor, tag_id)?; - - // Write a placeholder four-byte size. - let cur_pos = self.position(); - self.size_positions.push(cur_pos); - self.opaque.cursor.write_all(&[0, 0, 0, 0]) - } - - pub fn end_tag(&mut self) -> EncodeResult { - let last_size_pos = self.size_positions.pop().unwrap(); - let cur_pos = self.position(); - self.opaque.cursor.seek(SeekFrom::Start(last_size_pos as u64))?; - let size = cur_pos - last_size_pos - 4; - - // relax the size encoding for small tags (bigger tags are costly to move). - // we should never try to move the stable positions, however. 
- const RELAX_MAX_SIZE: usize = 0x100; - if size <= RELAX_MAX_SIZE && last_size_pos >= self.relax_limit { - // we can't alter the buffer in place, so have a temporary buffer - let mut buf = [0u8; RELAX_MAX_SIZE]; - { - let data = &self.opaque.cursor.get_ref()[last_size_pos + 4..cur_pos]; - buf[..size].copy_from_slice(data); - } - - // overwrite the size and data and continue - write_vuint(&mut self.opaque.cursor, size)?; - self.opaque.cursor.write_all(&buf[..size])?; - } else { - // overwrite the size with an overlong encoding and skip past the data - write_sized_vuint(&mut self.opaque.cursor, size, 4)?; - self.opaque.cursor.seek(SeekFrom::Start(cur_pos as u64))?; - } - - debug!("End tag (size = {:?})", size); - Ok(()) - } - - pub fn wr_tagged_str(&mut self, tag_id: usize, v: &str) -> EncodeResult { - write_tag(&mut self.opaque.cursor, tag_id)?; - write_vuint(&mut self.opaque.cursor, v.len())?; - self.opaque.cursor.write_all(v.as_bytes()) - } - - pub fn position(&mut self) -> usize { - self.opaque.position() as usize - } - - /// Returns the current position while marking it stable, i.e. - /// generated bytes so far wouldn't be affected by relaxation. - pub fn mark_stable_position(&mut self) -> usize { - let pos = self.position(); - if self.relax_limit < pos { - self.relax_limit = pos; - } - let meta_start = 8 + ::common::metadata_encoding_version.len(); - pos - meta_start - } -} diff --git a/src/librustc_metadata/schema.rs b/src/librustc_metadata/schema.rs new file mode 100644 index 00000000000..b4ea2b19bf0 --- /dev/null +++ b/src/librustc_metadata/schema.rs @@ -0,0 +1,299 @@ +// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use astencode; +use index; + +use rustc::hir; +use rustc::hir::def; +use rustc::hir::def_id::{DefIndex, DefId}; +use rustc::middle::cstore::{LinkagePreference, NativeLibraryKind}; +use rustc::middle::lang_items; +use rustc::mir; +use rustc::ty::{self, Ty}; +use rustc::session::config::PanicStrategy; + +use rustc_serialize as serialize; +use syntax::{ast, attr}; +use syntax_pos::{self, Span}; + +use std::marker::PhantomData; + +pub const RUSTC_VERSION: &'static str = concat!("rustc ", env!("CFG_VERSION")); + +/// Metadata encoding version. +/// NB: increment this if you change the format of metadata such that +/// the rustc version can't be found to compare with `RUSTC_VERSION`. +pub const METADATA_VERSION: u8 = 3; + +/// Metadata header which includes `METADATA_VERSION`. +/// To get older versions of rustc to ignore this metadata, +/// there are 4 zero bytes at the start, which are treated +/// as a length of 0 by old compilers. +/// +/// This header is followed by the position of the `CrateRoot`. +pub const METADATA_HEADER: &'static [u8; 12] = &[ + 0, 0, 0, 0, + b'r', b'u', b's', b't', + 0, 0, 0, METADATA_VERSION +]; + +/// The shorthand encoding uses an enum's variant index `usize` +/// and is offset by this value so it never matches a real variant. +/// This offset is also chosen so that the first byte is never < 0x80. +pub const SHORTHAND_OFFSET: usize = 0x80; + +/// A value of type T referred to by its absolute position +/// in the metadata, and which can be decoded lazily. 
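/// For example, an item's type is stored as `ty: Option<Lazy<Ty<'tcx>>>` in
/// `Entry` below; the `Entry` only records the value's position, and the bytes at
/// that position are decoded on demand.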
+#[must_use] +pub struct Lazy { + pub position: usize, + _marker: PhantomData +} + +impl Lazy { + pub fn with_position(position: usize) -> Lazy { + Lazy { + position: position, + _marker: PhantomData + } + } +} + +impl Copy for Lazy {} +impl Clone for Lazy { + fn clone(&self) -> Self { *self } +} + +impl serialize::UseSpecializedEncodable for Lazy {} +impl serialize::UseSpecializedDecodable for Lazy {} + +/// A sequence of type T referred to by its absolute position +/// in the metadata and length, and which can be decoded lazily. +/// +/// Unlike `Lazy>`, the length is encoded next to the +/// position, not at the position, which means that the length +/// doesn't need to be known before encoding all the elements. +#[must_use] +pub struct LazySeq { + pub len: usize, + pub position: usize, + _marker: PhantomData +} + +impl LazySeq { + pub fn empty() -> LazySeq { + LazySeq::with_position_and_length(0, 0) + } + + pub fn with_position_and_length(position: usize, len: usize) -> LazySeq { + LazySeq { + len: len, + position: position, + _marker: PhantomData + } + } +} + +impl Copy for LazySeq {} +impl Clone for LazySeq { + fn clone(&self) -> Self { *self } +} + +impl serialize::UseSpecializedEncodable for LazySeq {} +impl serialize::UseSpecializedDecodable for LazySeq {} + +#[derive(RustcEncodable, RustcDecodable)] +pub struct CrateRoot { + pub rustc_version: String, + pub name: String, + pub triple: String, + pub hash: hir::svh::Svh, + pub disambiguator: String, + pub panic_strategy: PanicStrategy, + pub plugin_registrar_fn: Option, + pub macro_derive_registrar: Option, + + pub index: LazySeq, + pub crate_deps: LazySeq, + pub dylib_dependency_formats: LazySeq>, + pub native_libraries: LazySeq<(NativeLibraryKind, String)>, + pub lang_items: LazySeq<(DefIndex, usize)>, + pub lang_items_missing: LazySeq, + pub impls: LazySeq, + pub reachable_ids: LazySeq, + pub macro_defs: LazySeq, + pub codemap: LazySeq +} + +#[derive(RustcEncodable, RustcDecodable)] +pub struct CrateDep { + pub name: ast::Name, + pub hash: hir::svh::Svh, + pub explicitly_linked: bool +} + +#[derive(RustcEncodable, RustcDecodable)] +pub struct TraitImpls { + pub trait_id: (u32, DefIndex), + pub impls: LazySeq +} + +#[derive(RustcEncodable, RustcDecodable)] +pub struct MacroDef { + pub name: ast::Name, + pub attrs: Vec, + pub span: Span, + pub body: String +} + +#[derive(RustcEncodable, RustcDecodable)] +pub struct Entry<'tcx> { + pub kind: EntryKind<'tcx>, + pub visibility: ty::Visibility, + pub def_key: Lazy, + pub attributes: LazySeq, + pub children: LazySeq, + pub stability: Option>, + pub deprecation: Option>, + + pub ty: Option>>, + pub inherent_impls: LazySeq, + pub variances: LazySeq, + pub generics: Option>>, + pub predicates: Option>>, + + pub ast: Option>>, + pub mir: Option>> +} + +#[derive(Copy, Clone, RustcEncodable, RustcDecodable)] +pub enum EntryKind<'tcx> { + Const, + ImmStatic, + MutStatic, + ForeignImmStatic, + ForeignMutStatic, + ForeignMod, + Type, + Enum, + Field, + Variant(Lazy), + Struct(Lazy), + Union(Lazy), + Fn(Lazy), + ForeignFn(Lazy), + Mod(Lazy), + Closure(Lazy>), + Trait(Lazy>), + Impl(Lazy>), + DefaultImpl(Lazy>), + Method(Lazy>), + AssociatedType(AssociatedContainer), + AssociatedConst(AssociatedContainer) +} + +#[derive(RustcEncodable, RustcDecodable)] +pub struct ModData { + pub reexports: LazySeq +} + +#[derive(RustcEncodable, RustcDecodable)] +pub struct FnData { + pub constness: hir::Constness, + pub arg_names: LazySeq +} + +#[derive(RustcEncodable, RustcDecodable)] +pub struct VariantData { 
+ pub kind: ty::VariantKind, + pub disr: u64, + + /// If this is a struct's only variant, this + /// is the index of the "struct ctor" item. + pub struct_ctor: Option +} + +#[derive(RustcEncodable, RustcDecodable)] +pub struct TraitData<'tcx> { + pub unsafety: hir::Unsafety, + pub paren_sugar: bool, + pub has_default_impl: bool, + pub trait_ref: Lazy>, + pub super_predicates: Lazy> +} + +#[derive(RustcEncodable, RustcDecodable)] +pub struct ImplData<'tcx> { + pub polarity: hir::ImplPolarity, + pub parent_impl: Option, + pub coerce_unsized_kind: Option, + pub trait_ref: Option>> +} + +/// Describes whether the container of an associated item +/// is a trait or an impl and whether, in a trait, it has +/// a default, or an in impl, whether it's marked "default". +#[derive(Copy, Clone, RustcEncodable, RustcDecodable)] +pub enum AssociatedContainer { + TraitRequired, + TraitWithDefault, + ImplDefault, + ImplFinal +} + +impl AssociatedContainer { + pub fn with_def_id(&self, def_id: DefId) -> ty::ImplOrTraitItemContainer { + match *self { + AssociatedContainer::TraitRequired | + AssociatedContainer::TraitWithDefault => { + ty::TraitContainer(def_id) + } + + AssociatedContainer::ImplDefault | + AssociatedContainer::ImplFinal => { + ty::ImplContainer(def_id) + } + } + } + + pub fn has_body(&self) -> bool { + match *self { + AssociatedContainer::TraitRequired => false, + + AssociatedContainer::TraitWithDefault | + AssociatedContainer::ImplDefault | + AssociatedContainer::ImplFinal => true + } + } + + pub fn defaultness(&self) -> hir::Defaultness { + match *self { + AssociatedContainer::TraitRequired | + AssociatedContainer::TraitWithDefault | + AssociatedContainer::ImplDefault => hir::Defaultness::Default, + + AssociatedContainer::ImplFinal => hir::Defaultness::Final + } + } +} + +#[derive(RustcEncodable, RustcDecodable)] +pub struct MethodData<'tcx> { + pub fn_data: FnData, + pub container: AssociatedContainer, + pub explicit_self: Lazy> +} + +#[derive(RustcEncodable, RustcDecodable)] +pub struct ClosureData<'tcx> { + pub kind: ty::ClosureKind, + pub ty: Lazy> +} diff --git a/src/librustc_typeck/collect.rs b/src/librustc_typeck/collect.rs index d67dcbb4baf..e5d4d4a9dae 100644 --- a/src/librustc_typeck/collect.rs +++ b/src/librustc_typeck/collect.rs @@ -674,6 +674,13 @@ fn convert_associated_type<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, defaultness: hir::Defaultness, ty: Option>) { + let predicates = ty::GenericPredicates { + parent: Some(container.id()), + predicates: vec![] + }; + ccx.tcx.predicates.borrow_mut().insert(ccx.tcx.map.local_def_id(id), + predicates); + let associated_type = Rc::new(ty::AssociatedType { name: name, vis: ty::Visibility::from_hir(vis, id, ccx.tcx), @@ -831,6 +838,9 @@ fn convert_item(ccx: &CrateCtxt, it: &hir::Item) { // Convert all the associated types. for impl_item in impl_items { if let hir::ImplItemKind::Type(ref ty) = impl_item.node { + let type_def_id = ccx.tcx.map.local_def_id(impl_item.id); + generics_of_def_id(ccx, type_def_id); + if opt_trait_ref.is_none() { span_err!(tcx.sess, impl_item.span, E0202, "associated types are not allowed in inherent impls"); @@ -898,6 +908,9 @@ fn convert_item(ccx: &CrateCtxt, it: &hir::Item) { // Convert all the associated types. 
for trait_item in trait_items { if let hir::TypeTraitItem(_, ref opt_ty) = trait_item.node { + let type_def_id = ccx.tcx.map.local_def_id(trait_item.id); + generics_of_def_id(ccx, type_def_id); + let typ = opt_ty.as_ref().map({ |ty| ccx.icx(&trait_predicates).to_ty(&ExplicitRscope, &ty) }); diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 9f208b7bed7..0ae059509bd 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -1168,7 +1168,7 @@ impl<'a, 'tcx> Clean for (DefId, &'a ty::PolyFnSig<'tcx>) { Argument { type_: t.clean(cx), id: ast::CRATE_NODE_ID, - name: names.next().unwrap_or("".to_string()), + name: names.next().map_or("".to_string(), |name| name.to_string()), } }).collect(), }, diff --git a/src/rustc/Cargo.lock b/src/rustc/Cargo.lock index d8a02badcee..69e3eab22e9 100644 --- a/src/rustc/Cargo.lock +++ b/src/rustc/Cargo.lock @@ -219,7 +219,6 @@ dependencies = [ "log 0.0.0", "rustc 0.0.0", "rustc_back 0.0.0", - "rustc_bitflags 0.0.0", "rustc_const_math 0.0.0", "rustc_data_structures 0.0.0", "rustc_errors 0.0.0", diff --git a/src/test/run-pass-fulldeps/issue-11881.rs b/src/test/run-pass-fulldeps/issue-11881.rs index 8369d08db36..914e3dd4932 100644 --- a/src/test/run-pass-fulldeps/issue-11881.rs +++ b/src/test/run-pass-fulldeps/issue-11881.rs @@ -34,14 +34,14 @@ struct Bar { enum WireProtocol { JSON, - RBML, + Opaque, // ... } fn encode_json(val: &T, wr: &mut Cursor>) { write!(wr, "{}", json::as_json(val)); } -fn encode_rbml(val: &T, wr: &mut Cursor>) { +fn encode_opaque(val: &T, wr: &mut Cursor>) { let mut encoder = opaque::Encoder::new(wr); val.encode(&mut encoder); } @@ -52,6 +52,6 @@ pub fn main() { let proto = WireProtocol::JSON; match proto { WireProtocol::JSON => encode_json(&target, &mut wr), - WireProtocol::RBML => encode_rbml(&target, &mut wr) + WireProtocol::Opaque => encode_opaque(&target, &mut wr) } }
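For reference, a decoding counterpart to encode_opaque in that test could look
roughly like the sketch below. The helper is hypothetical and not part of the patch;
it assumes `use rustc_serialize::{opaque, Decodable};` and that rustc_serialize's
opaque::Decoder is constructed from the byte slice plus a starting offset, which is
an assumption here rather than something shown in the diff:

fn decode_opaque<T: Decodable>(data: &[u8]) -> T {
    // Start decoding at offset 0 of the opaque byte stream.
    let mut decoder = opaque::Decoder::new(data, 0);
    match Decodable::decode(&mut decoder) {
        Ok(value) => value,
        Err(_) => panic!("failed to decode opaque value"),
    }
}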