rustc_metadata: replace RBML with a simple and type-safe scheme.
This commit is contained in:
parent 24aef24e1a
commit a96abca2a4
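The diff below removes the RBML (tagged-document) reader/writer from rustc_metadata and instead refers to encoded items by absolute byte positions in the metadata blob. For orientation, a minimal sketch of that position-based scheme, assuming simplified definitions (the real types live in the new schema module and carry more detail; only the constructors used throughout the diff are shown):

    // Hedged sketch, not the exact rustc definitions: a Lazy<T> records the byte
    // position at which a T was encoded; a LazySeq<T> adds an element count.
    // Decoding seeks to `position` and runs the ordinary Decodable impl, so no
    // per-item tag/length framing is needed.
    use std::marker::PhantomData;

    pub struct Lazy<T> {
        pub position: usize,
        _marker: PhantomData<T>,
    }

    impl<T> Lazy<T> {
        pub fn with_position(position: usize) -> Lazy<T> {
            Lazy { position: position, _marker: PhantomData }
        }
    }

    pub struct LazySeq<T> {
        pub position: usize,
        pub len: usize,
        _marker: PhantomData<T>,
    }

    impl<T> LazySeq<T> {
        pub fn with_position_and_length(position: usize, len: usize) -> LazySeq<T> {
            LazySeq { position: position, len: len, _marker: PhantomData }
        }
    }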
@@ -138,11 +138,11 @@ pub trait CrateStore<'tcx> {
fn item_super_predicates<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
-> ty::GenericPredicates<'tcx>;
fn item_generics<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
-> &'tcx ty::Generics<'tcx>;
-> ty::Generics<'tcx>;
fn item_attrs(&self, def_id: DefId) -> Vec<ast::Attribute>;
fn trait_def<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)-> ty::TraitDef<'tcx>;
fn adt_def<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> ty::AdtDefMaster<'tcx>;
fn fn_arg_names(&self, did: DefId) -> Vec<String>;
fn fn_arg_names(&self, did: DefId) -> Vec<ast::Name>;
fn inherent_implementations_for_type(&self, def_id: DefId) -> Vec<DefId>;

// trait info
@@ -299,13 +299,13 @@ impl<'tcx> CrateStore<'tcx> for DummyCrateStore {
fn item_super_predicates<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
-> ty::GenericPredicates<'tcx> { bug!("item_super_predicates") }
fn item_generics<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
-> &'tcx ty::Generics<'tcx> { bug!("item_generics") }
-> ty::Generics<'tcx> { bug!("item_generics") }
fn item_attrs(&self, def_id: DefId) -> Vec<ast::Attribute> { bug!("item_attrs") }
fn trait_def<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)-> ty::TraitDef<'tcx>
{ bug!("trait_def") }
fn adt_def<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> ty::AdtDefMaster<'tcx>
{ bug!("adt_def") }
fn fn_arg_names(&self, did: DefId) -> Vec<String> { bug!("fn_arg_names") }
fn fn_arg_names(&self, did: DefId) -> Vec<ast::Name> { bug!("fn_arg_names") }
fn inherent_implementations_for_type(&self, def_id: DefId) -> Vec<DefId> { vec![] }

// trait info
@@ -737,6 +737,9 @@ pub struct GenericPredicates<'tcx> {
pub predicates: Vec<Predicate<'tcx>>,
}

impl<'tcx> serialize::UseSpecializedEncodable for GenericPredicates<'tcx> {}
impl<'tcx> serialize::UseSpecializedDecodable for GenericPredicates<'tcx> {}

impl<'a, 'gcx, 'tcx> GenericPredicates<'tcx> {
pub fn instantiate(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, substs: &Substs<'tcx>)
-> InstantiatedPredicates<'tcx> {
@@ -2457,7 +2460,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
pub fn lookup_generics(self, did: DefId) -> &'gcx Generics<'gcx> {
lookup_locally_or_in_crate_store(
"generics", did, &self.generics,
|| self.sess.cstore.item_generics(self.global_tcx(), did))
|| self.alloc_generics(self.sess.cstore.item_generics(self.global_tcx(), did)))
}

/// Given the did of an item, returns its full set of predicates.
@@ -13,7 +13,6 @@ flate = { path = "../libflate" }
log = { path = "../liblog" }
rustc = { path = "../librustc" }
rustc_back = { path = "../librustc_back" }
rustc_bitflags = { path = "../librustc_bitflags" }
rustc_const_math = { path = "../librustc_const_math" }
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_errors = { path = "../librustc_errors" }
@@ -13,43 +13,92 @@ use rustc::hir::map as ast_map;
use rustc::hir::intravisit::{Visitor, IdRangeComputingVisitor, IdRange};

use cstore::CrateMetadata;
use decoder::DecodeContext;
use encoder::EncodeContext;
use schema::*;

use rustc::middle::cstore::{InlinedItem, InlinedItemRef};
use rustc::hir::def;
use rustc::middle::const_qualif::ConstQualif;
use rustc::hir::def::{self, Def};
use rustc::hir::def_id::DefId;
use rustc::ty::TyCtxt;
use rustc::ty::{self, TyCtxt, Ty};

use syntax::ast;

use rbml;
use rustc_serialize::{Decodable, Encodable};
use rustc_serialize::Encodable;

// ______________________________________________________________________
// Top-level methods.
#[derive(RustcEncodable, RustcDecodable)]
pub struct Ast<'tcx> {
id_range: IdRange,
item: Lazy<InlinedItem>,
side_tables: LazySeq<(ast::NodeId, TableEntry<'tcx>)>
}

pub fn encode_inlined_item(ecx: &mut EncodeContext, ii: InlinedItemRef) {
ecx.tag(::common::item_tag::ast, |ecx| {
let mut visitor = IdRangeComputingVisitor::new();
#[derive(RustcEncodable, RustcDecodable)]
enum TableEntry<'tcx> {
Def(Def),
NodeType(Ty<'tcx>),
ItemSubsts(ty::ItemSubsts<'tcx>),
Adjustment(ty::adjustment::AutoAdjustment<'tcx>),
ConstQualif(ConstQualif)
}

impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
pub fn encode_inlined_item(&mut self, ii: InlinedItemRef) -> Lazy<Ast<'tcx>> {
let mut id_visitor = IdRangeComputingVisitor::new();
match ii {
InlinedItemRef::Item(_, i) => visitor.visit_item(i),
InlinedItemRef::TraitItem(_, ti) => visitor.visit_trait_item(ti),
InlinedItemRef::ImplItem(_, ii) => visitor.visit_impl_item(ii)
InlinedItemRef::Item(_, i) => id_visitor.visit_item(i),
InlinedItemRef::TraitItem(_, ti) => id_visitor.visit_trait_item(ti),
InlinedItemRef::ImplItem(_, ii) => id_visitor.visit_impl_item(ii)
}
visitor.result().encode(ecx).unwrap();

ii.encode(ecx).unwrap();
let ii_pos = self.position();
ii.encode(self).unwrap();

let mut visitor = SideTableEncodingIdVisitor {
ecx: ecx
let tables_pos = self.position();
let tables_count = {
let mut visitor = SideTableEncodingIdVisitor {
ecx: self,
count: 0
};
match ii {
InlinedItemRef::Item(_, i) => visitor.visit_item(i),
InlinedItemRef::TraitItem(_, ti) => visitor.visit_trait_item(ti),
InlinedItemRef::ImplItem(_, ii) => visitor.visit_impl_item(ii)
}
visitor.count
};
match ii {
InlinedItemRef::Item(_, i) => visitor.visit_item(i),
InlinedItemRef::TraitItem(_, ti) => visitor.visit_trait_item(ti),
InlinedItemRef::ImplItem(_, ii) => visitor.visit_impl_item(ii)
}
});

self.lazy(&Ast {
id_range: id_visitor.result(),
item: Lazy::with_position(ii_pos),
side_tables: LazySeq::with_position_and_length(tables_pos, tables_count)
})
}
}

struct SideTableEncodingIdVisitor<'a, 'b:'a, 'tcx:'b> {
ecx: &'a mut EncodeContext<'b, 'tcx>,
count: usize
}

impl<'a, 'b, 'tcx, 'v> Visitor<'v> for SideTableEncodingIdVisitor<'a, 'b, 'tcx> {
fn visit_id(&mut self, id: ast::NodeId) {
debug!("Encoding side tables for id {}", id);

let tcx = self.ecx.tcx;
let mut encode = |entry: Option<TableEntry>| {
if let Some(entry) = entry {
(id, entry).encode(self.ecx).unwrap();
self.count += 1;
}
};

encode(tcx.expect_def_or_none(id).map(TableEntry::Def));
encode(tcx.node_types().get(&id).cloned().map(TableEntry::NodeType));
encode(tcx.tables.borrow().item_substs.get(&id).cloned().map(TableEntry::ItemSubsts));
encode(tcx.tables.borrow().adjustments.get(&id).cloned().map(TableEntry::Adjustment));
encode(tcx.const_qualif_map.borrow().get(&id).cloned().map(TableEntry::ConstQualif));
}
}

/// Decodes an item from its AST in the cdata's metadata and adds it to the
@@ -58,17 +107,19 @@ pub fn decode_inlined_item<'a, 'tcx>(cdata: &CrateMetadata,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
parent_def_path: ast_map::DefPath,
parent_did: DefId,
ast_doc: rbml::Doc,
ast: Ast<'tcx>,
orig_did: DefId)
-> &'tcx InlinedItem {
debug!("> Decoding inlined fn: {:?}", tcx.item_path_str(orig_did));
let dcx = &mut DecodeContext::new(ast_doc, Some(cdata)).typed(tcx);
dcx.from_id_range = IdRange::decode(dcx).unwrap();
let cnt = dcx.from_id_range.max.as_usize() - dcx.from_id_range.min.as_usize();
dcx.to_id_range.min = tcx.sess.reserve_node_ids(cnt);
dcx.to_id_range.max = ast::NodeId::new(dcx.to_id_range.min.as_usize() + cnt);
let ii = InlinedItem::decode(dcx).unwrap();

let cnt = ast.id_range.max.as_usize() - ast.id_range.min.as_usize();
let start = tcx.sess.reserve_node_ids(cnt);
let id_ranges = [ast.id_range, IdRange {
min: start,
max: ast::NodeId::new(start.as_usize() + cnt)
}];

let ii = ast.item.decode((cdata, tcx, id_ranges));
let ii = ast_map::map_decoded_item(&tcx.map,
parent_def_path,
parent_did,
@@ -83,107 +134,25 @@ pub fn decode_inlined_item<'a, 'tcx>(cdata: &CrateMetadata,
let inlined_did = tcx.map.local_def_id(item_node_id);
tcx.register_item_type(inlined_did, tcx.lookup_item_type(orig_did));

decode_side_tables(dcx, ast_doc);

ii
}

// ______________________________________________________________________
// Encoding and decoding the side tables

impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
fn tag<F>(&mut self,
tag_id: usize,
f: F) where
F: FnOnce(&mut Self),
{
self.start_tag(tag_id).unwrap();
f(self);
self.end_tag().unwrap();
}

fn entry(&mut self, table: Table, id: ast::NodeId) {
table.encode(self).unwrap();
id.encode(self).unwrap();
}
}

struct SideTableEncodingIdVisitor<'a, 'b:'a, 'tcx:'b> {
ecx: &'a mut EncodeContext<'b, 'tcx>,
}

impl<'a, 'b, 'tcx, 'v> Visitor<'v> for SideTableEncodingIdVisitor<'a, 'b, 'tcx> {
fn visit_id(&mut self, id: ast::NodeId) {
encode_side_tables_for_id(self.ecx, id)
}
}

#[derive(RustcEncodable, RustcDecodable, Debug)]
enum Table {
Def,
NodeType,
ItemSubsts,
Adjustment,
ConstQualif
}

fn encode_side_tables_for_id(ecx: &mut EncodeContext, id: ast::NodeId) {
let tcx = ecx.tcx;

debug!("Encoding side tables for id {}", id);

if let Some(def) = tcx.expect_def_or_none(id) {
ecx.entry(Table::Def, id);
def.encode(ecx).unwrap();
}

if let Some(ty) = tcx.node_types().get(&id) {
ecx.entry(Table::NodeType, id);
ty.encode(ecx).unwrap();
}

if let Some(item_substs) = tcx.tables.borrow().item_substs.get(&id) {
ecx.entry(Table::ItemSubsts, id);
item_substs.substs.encode(ecx).unwrap();
}

if let Some(adjustment) = tcx.tables.borrow().adjustments.get(&id) {
ecx.entry(Table::Adjustment, id);
adjustment.encode(ecx).unwrap();
}

if let Some(qualif) = tcx.const_qualif_map.borrow().get(&id) {
ecx.entry(Table::ConstQualif, id);
qualif.encode(ecx).unwrap();
}
}

fn decode_side_tables(dcx: &mut DecodeContext, ast_doc: rbml::Doc) {
while dcx.opaque.position() < ast_doc.end {
let table = Decodable::decode(dcx).unwrap();
let id = Decodable::decode(dcx).unwrap();
debug!("decode_side_tables: entry for id={}, table={:?}", id, table);
match table {
Table::Def => {
let def = Decodable::decode(dcx).unwrap();
dcx.tcx().def_map.borrow_mut().insert(id, def::PathResolution::new(def));
for (id, entry) in ast.side_tables.decode((cdata, tcx, id_ranges)) {
match entry {
TableEntry::Def(def) => {
tcx.def_map.borrow_mut().insert(id, def::PathResolution::new(def));
}
Table::NodeType => {
let ty = Decodable::decode(dcx).unwrap();
dcx.tcx().node_type_insert(id, ty);
TableEntry::NodeType(ty) => {
tcx.node_type_insert(id, ty);
}
Table::ItemSubsts => {
let item_substs = Decodable::decode(dcx).unwrap();
dcx.tcx().tables.borrow_mut().item_substs.insert(id, item_substs);
TableEntry::ItemSubsts(item_substs) => {
tcx.tables.borrow_mut().item_substs.insert(id, item_substs);
}
Table::Adjustment => {
let adj = Decodable::decode(dcx).unwrap();
dcx.tcx().tables.borrow_mut().adjustments.insert(id, adj);
TableEntry::Adjustment(adj) => {
tcx.tables.borrow_mut().adjustments.insert(id, adj);
}
Table::ConstQualif => {
let qualif = Decodable::decode(dcx).unwrap();
dcx.tcx().const_qualif_map.borrow_mut().insert(id, qualif);
TableEntry::ConstQualif(qualif) => {
tcx.const_qualif_map.borrow_mut().insert(id, qualif);
}
}
}

ii
}
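Note how the hunk above collapses the old tag-plus-value pairs into a single typed (NodeId, TableEntry) stream whose start and count are held by a LazySeq. A hedged sketch of how such a sequence can be walked, kept deliberately generic so it makes no claims about the real DecodeContext API (the closure stands in for whatever decodes one element and advances the cursor):

    // Illustrative sketch: a LazySeq is consumed by seeking to `position` and
    // decoding `len` consecutive elements; each element's decoder leaves the
    // cursor at the start of the next one, so no per-element framing is stored.
    fn decode_seq<T, F>(blob: &[u8], position: usize, len: usize, mut decode_one: F) -> Vec<T>
        where F: FnMut(&[u8], &mut usize) -> T
    {
        let mut cursor = position;
        (0..len).map(|_| decode_one(blob, &mut cursor)).collect()
    }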
@@ -1,206 +0,0 @@
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![allow(non_camel_case_types, non_upper_case_globals)]

use rustc::hir;
use rustc::hir::def;
use rustc::hir::def_id::{DefIndex, DefId};
use rustc::ty;
use rustc::session::config::PanicStrategy;

#[derive(Clone, Copy, Debug, PartialEq, RustcEncodable, RustcDecodable)]
pub enum Family {
ImmStatic,
MutStatic,
ForeignImmStatic,
ForeignMutStatic,
Fn,
ForeignFn,
Method,
AssociatedType,
Type,
Mod,
ForeignMod,
Enum,
Variant,
Impl,
DefaultImpl,
Trait,
Struct,
Union,
Field,
Const,
AssociatedConst,
Closure
}

// NB: increment this if you change the format of metadata such that
// rustc_version can't be found.
pub const metadata_encoding_version : &'static [u8] = &[b'r', b'u', b's', b't', 0, 0, 0, 2];

// GAP 0x7c
// GAP 0x108
pub fn rustc_version() -> String {
format!(
"rustc {}",
option_env!("CFG_VERSION").unwrap_or("unknown version")
)
}

#[derive(RustcEncodable, RustcDecodable)]
pub struct CrateInfo {
pub name: String,
pub triple: String,
pub hash: hir::svh::Svh,
pub disambiguator: String,
pub panic_strategy: PanicStrategy,
pub plugin_registrar_fn: Option<DefIndex>,
pub macro_derive_registrar: Option<DefIndex>
}

pub mod root_tag {
pub const rustc_version: usize = 0x10f;

pub const crate_info: usize = 0x104;

pub const index: usize = 0x110;
pub const crate_deps: usize = 0x102;
pub const dylib_dependency_formats: usize = 0x106;
pub const native_libraries: usize = 0x10a;
pub const lang_items: usize = 0x107;
pub const lang_items_missing: usize = 0x76;
pub const impls: usize = 0x109;
pub const reachable_ids: usize = 0x10c;
pub const macro_defs: usize = 0x10e;
pub const codemap: usize = 0xa1;
}

#[derive(RustcEncodable, RustcDecodable)]
pub struct ModData {
pub reexports: Vec<def::Export>
}

#[derive(RustcEncodable, RustcDecodable)]
pub struct VariantData {
pub kind: ty::VariantKind,
pub disr: u64,

/// If this is a struct's only variant, this
/// is the index of the "struct ctor" item.
pub struct_ctor: Option<DefIndex>
}

#[derive(RustcEncodable, RustcDecodable)]
pub struct TraitData {
pub unsafety: hir::Unsafety,
pub paren_sugar: bool,
pub has_default_impl: bool
}

#[derive(RustcEncodable, RustcDecodable)]
pub struct ImplData {
pub polarity: hir::ImplPolarity,
pub parent_impl: Option<DefId>,
pub coerce_unsized_kind: Option<ty::adjustment::CustomCoerceUnsized>,
}

#[derive(RustcEncodable, RustcDecodable)]
pub struct TraitAssociatedData {
pub has_default: bool
}

#[derive(RustcEncodable, RustcDecodable)]
pub struct ImplAssociatedData {
pub defaultness: hir::Defaultness,
pub constness: hir::Constness
}

#[derive(RustcEncodable, RustcDecodable)]
pub struct FnData {
pub constness: hir::Constness
}

#[derive(RustcEncodable, RustcDecodable)]
pub struct ClosureData {
pub kind: ty::ClosureKind
}

#[derive(RustcEncodable, RustcDecodable)]
pub enum EntryData {
Other,
Mod(ModData),
Variant(VariantData),
Trait(TraitData),
Impl(ImplData),
TraitAssociated(TraitAssociatedData),
ImplAssociated(ImplAssociatedData),
Fn(FnData),
Closure(ClosureData)
}

#[derive(RustcEncodable, RustcDecodable)]
pub struct TraitTypedData<'tcx> {
pub trait_ref: ty::TraitRef<'tcx>
}

#[derive(RustcEncodable, RustcDecodable)]
pub struct ImplTypedData<'tcx> {
pub trait_ref: Option<ty::TraitRef<'tcx>>
}

#[derive(RustcEncodable, RustcDecodable)]
pub struct MethodTypedData<'tcx> {
pub explicit_self: ty::ExplicitSelfCategory<'tcx>
}

#[derive(RustcEncodable, RustcDecodable)]
pub struct ClosureTypedData<'tcx> {
pub ty: ty::ClosureTy<'tcx>
}

#[derive(RustcEncodable, RustcDecodable)]
pub enum EntryTypedData<'tcx> {
Other,
Trait(TraitTypedData<'tcx>),
Impl(ImplTypedData<'tcx>),
Method(MethodTypedData<'tcx>),
Closure(ClosureTypedData<'tcx>)
}

pub mod item_tag {
pub const def_key: usize = 0x2c;
pub const family: usize = 0x24;
pub const attributes: usize = 0x101;
pub const visibility: usize = 0x78;
pub const children: usize = 0x7b;
pub const stability: usize = 0x88;
pub const deprecation: usize = 0xa7;

pub const ty: usize = 0x25;
pub const inherent_impls: usize = 0x79;
pub const variances: usize = 0x43;
pub const generics: usize = 0x8f;
pub const predicates: usize = 0x95;
pub const super_predicates: usize = 0xa3;

pub const ast: usize = 0x50;
pub const mir: usize = 0x52;

pub const data: usize = 0x3c;
pub const typed_data: usize = 0x3d;

pub const fn_arg_names: usize = 0x85;
}

/// The shorthand encoding uses an enum's variant index `usize`
/// and is offset by this value so it never matches a real variant.
/// This offset is also chosen so that the first byte is never < 0x80.
pub const SHORTHAND_OFFSET: usize = 0x80;
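SHORTHAND_OFFSET underpins a simple back-reference optimisation: once a value has been encoded, later occurrences can point back at the earlier copy instead of re-encoding it. Because the real variant indices of the encoded enums stay below 0x80, any value at or above 0x80 can be read as an offset position. A hedged sketch of that convention (illustrative only; the actual encoder and decoder live elsewhere and handle more cases):

    const SHORTHAND_OFFSET: usize = 0x80;

    // What a decoder can recover from the first usize it reads.
    enum Tag {
        Variant(usize),  // a plain enum variant index
        Backref(usize),  // position of a previously encoded copy
    }

    fn encode_tag(variant_index: usize, previously_encoded_at: Option<usize>) -> usize {
        match previously_encoded_at {
            Some(pos) => SHORTHAND_OFFSET + pos, // always >= 0x80, so unambiguous
            None => variant_index,               // assumed to be < 0x80
        }
    }

    fn decode_tag(value: usize) -> Tag {
        if value < SHORTHAND_OFFSET {
            Tag::Variant(value)
        } else {
            Tag::Backref(value - SHORTHAND_OFFSET)
        }
    }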
@@ -8,13 +8,11 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![allow(non_camel_case_types)]

//! Validates all used crates and extern libraries and loads their metadata

use common::CrateInfo;
use cstore::{self, CStore, CrateSource, MetadataBlob};
use loader::{self, CratePaths};
use schema::CrateRoot;

use rustc::hir::def_id::{CrateNum, DefIndex};
use rustc::hir::svh::Svh;
@@ -34,12 +32,11 @@ use std::fs;

use syntax::ast;
use syntax::abi::Abi;
use syntax::codemap;
use syntax::parse;
use syntax::attr;
use syntax::parse::token::InternedString;
use syntax::visit;
use syntax_pos::{self, Span, mk_sp, Pos};
use syntax_pos::{self, Span, mk_sp};
use log;

struct LocalCrateReader<'a> {
@@ -148,7 +145,7 @@ impl Deref for PMDSource {

fn deref(&self) -> &MetadataBlob {
match *self {
PMDSource::Registered(ref cmd) => &cmd.data,
PMDSource::Registered(ref cmd) => &cmd.blob,
PMDSource::Owned(ref lib) => &lib.metadata
}
}
@@ -261,28 +258,28 @@ impl<'a> CrateReader<'a> {

fn verify_no_symbol_conflicts(&self,
span: Span,
info: &CrateInfo) {
root: &CrateRoot) {
// Check for (potential) conflicts with the local crate
if self.local_crate_name == info.name &&
self.sess.local_crate_disambiguator() == &info.disambiguator[..] {
if self.local_crate_name == root.name &&
self.sess.local_crate_disambiguator() == &root.disambiguator[..] {
span_fatal!(self.sess, span, E0519,
"the current crate is indistinguishable from one of its \
dependencies: it has the same crate-name `{}` and was \
compiled with the same `-C metadata` arguments. This \
will result in symbol conflicts between the two.",
info.name)
root.name)
}

// Check for conflicts with any crate loaded so far
self.cstore.iter_crate_data(|_, other| {
if other.name() == info.name && // same crate-name
other.disambiguator() == info.disambiguator && // same crate-disambiguator
other.hash() != info.hash { // but different SVH
if other.name() == root.name && // same crate-name
other.disambiguator() == root.disambiguator && // same crate-disambiguator
other.hash() != root.hash { // but different SVH
span_fatal!(self.sess, span, E0523,
"found two different crates with name `{}` that are \
not distinguished by differing `-C metadata`. This \
will result in symbol conflicts between the two.",
info.name)
root.name)
}
});
}
@@ -297,8 +294,8 @@ impl<'a> CrateReader<'a> {
-> (CrateNum, Rc<cstore::CrateMetadata>,
cstore::CrateSource) {
info!("register crate `extern crate {} as {}`", name, ident);
let crate_info = lib.metadata.get_crate_info();
self.verify_no_symbol_conflicts(span, &crate_info);
let crate_root = lib.metadata.get_root();
self.verify_no_symbol_conflicts(span, &crate_root);

// Claim this crate number and cache it
let cnum = self.next_crate_num;
@@ -319,9 +316,9 @@ impl<'a> CrateReader<'a> {

let loader::Library { dylib, rlib, metadata } = lib;

let cnum_map = self.resolve_crate_deps(root, &metadata, cnum, span);
let cnum_map = self.resolve_crate_deps(root, &crate_root, &metadata, cnum, span);

if crate_info.macro_derive_registrar.is_some() {
if crate_root.macro_derive_registrar.is_some() {
self.sess.span_err(span, "crates of the `rustc-macro` crate type \
cannot be linked at runtime");
}
@@ -329,10 +326,9 @@ impl<'a> CrateReader<'a> {
let cmeta = Rc::new(cstore::CrateMetadata {
name: name.to_string(),
extern_crate: Cell::new(None),
info: crate_info,
index: metadata.load_index(),
key_map: metadata.load_key_map(),
data: metadata,
key_map: metadata.load_key_map(crate_root.index),
root: crate_root,
blob: metadata,
cnum_map: RefCell::new(cnum_map),
cnum: cnum,
codemap_import_info: RefCell::new(vec![]),
@@ -416,11 +412,11 @@ impl<'a> CrateReader<'a> {
// Note that we only do this for target triple crates, though, as we
// don't want to match a host crate against an equivalent target one
// already loaded.
let crate_info = library.metadata.get_crate_info();
let root = library.metadata.get_root();
if loader.triple == self.sess.opts.target_triple {
let mut result = LoadResult::Loaded(library);
self.cstore.iter_crate_data(|cnum, data| {
if data.name() == crate_info.name && crate_info.hash == data.hash() {
if data.name() == root.name && root.hash == data.hash() {
assert!(loader.hash.is_none());
info!("load success, going to previous cnum: {}", cnum);
result = LoadResult::Previous(cnum);
@@ -467,6 +463,7 @@ impl<'a> CrateReader<'a> {
// Go through the crate metadata and load any crates that it references
fn resolve_crate_deps(&mut self,
root: &Option<CratePaths>,
crate_root: &CrateRoot,
metadata: &MetadataBlob,
krate: CrateNum,
span: Span)
@@ -474,16 +471,17 @@ impl<'a> CrateReader<'a> {
debug!("resolving deps of external crate");
// The map from crate numbers in the crate we're resolving to local crate
// numbers
let map: FnvHashMap<_, _> = metadata.get_crate_deps().iter().map(|dep| {
let deps = crate_root.crate_deps.decode(metadata);
let map: FnvHashMap<_, _> = deps.enumerate().map(|(crate_num, dep)| {
debug!("resolving dep crate {} hash: `{}`", dep.name, dep.hash);
let (local_cnum, ..) = self.resolve_crate(root,
&dep.name,
&dep.name,
&dep.name.as_str(),
&dep.name.as_str(),
Some(&dep.hash),
span,
PathKind::Dependency,
dep.explicitly_linked);
(dep.cnum, local_cnum)
(CrateNum::new(crate_num + 1), local_cnum)
}).collect();

let max_cnum = map.values().cloned().max().map(|cnum| cnum.as_u32()).unwrap_or(0);
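One detail worth noting in the hunk above: with the dependency list stored as a plain sequence, a dependency's crate number in the foreign crate's metadata is implicit in its position, entry i corresponding to crate number i + 1 (0 being the crate itself). A hedged sketch of the renumbering this map performs, with illustrative plain-integer types in place of CrateNum:

    // Illustrative: pair each foreign crate number (position + 1 in the foreign
    // crate's dependency list) with the crate number the local session resolved
    // that dependency to.
    fn build_cnum_map(resolved_local_cnums: &[u32]) -> Vec<(u32, u32)> {
        resolved_local_cnums
            .iter()
            .enumerate()
            .map(|(i, &local)| ((i as u32) + 1, local))
            .collect()
    }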
@@ -568,21 +566,21 @@ impl<'a> CrateReader<'a> {
let ci = self.extract_crate_info(item).unwrap();
let ekrate = self.read_extension_crate(item.span, &ci);

let crate_info = ekrate.metadata.get_crate_info();
let root = ekrate.metadata.get_root();
let source_name = format!("<{} macros>", item.ident);
let mut ret = Macros {
macro_rules: Vec::new(),
custom_derive_registrar: None,
svh: crate_info.hash,
svh: root.hash,
dylib: None,
};
ekrate.metadata.each_exported_macro(|name, attrs, span, body| {
for def in root.macro_defs.decode(&*ekrate.metadata) {
// NB: Don't use parse::parse_tts_from_source_str because it parses with
// quote_depth > 0.
let mut p = parse::new_parser_from_source_str(&self.sess.parse_sess,
self.local_crate_config.clone(),
source_name.clone(),
body);
def.body);
let lo = p.span.lo;
let body = match p.parse_all_token_trees() {
Ok(body) => body,
@@ -595,13 +593,13 @@ impl<'a> CrateReader<'a> {
let local_span = mk_sp(lo, p.last_span.hi);

// Mark the attrs as used
for attr in &attrs {
for attr in &def.attrs {
attr::mark_used(attr);
}

ret.macro_rules.push(ast::MacroDef {
ident: ast::Ident::with_empty_ctxt(name),
attrs: attrs,
ident: ast::Ident::with_empty_ctxt(def.name),
attrs: def.attrs,
id: ast::DUMMY_NODE_ID,
span: local_span,
imported_from: Some(item.ident),
@@ -613,11 +611,10 @@ impl<'a> CrateReader<'a> {
body: body,
});
self.sess.imported_macro_spans.borrow_mut()
.insert(local_span, (name.as_str().to_string(), span));
true
});
.insert(local_span, (def.name.as_str().to_string(), def.span));
}

match crate_info.macro_derive_registrar {
match root.macro_derive_registrar {
Some(id) => ret.custom_derive_registrar = Some(id),

// If this crate is not a rustc-macro crate then we might be able to
@@ -671,10 +668,10 @@ impl<'a> CrateReader<'a> {
span_fatal!(self.sess, span, E0456, "{}", &message[..]);
}

let crate_info = ekrate.metadata.get_crate_info();
match (ekrate.dylib.as_ref(), crate_info.plugin_registrar_fn) {
let root = ekrate.metadata.get_root();
match (ekrate.dylib.as_ref(), root.plugin_registrar_fn) {
(Some(dylib), Some(reg)) => {
Some((dylib.to_path_buf(), crate_info.hash, reg))
Some((dylib.to_path_buf(), root.hash, reg))
}
(None, Some(_)) => {
span_err!(self.sess, span, E0457,
@@ -1086,133 +1083,3 @@ pub fn read_local_crates(sess: & Session,
dep_graph: &DepGraph) {
LocalCrateReader::new(sess, cstore, defs, krate, local_crate_name).read_crates(dep_graph)
}

/// Imports the codemap from an external crate into the codemap of the crate
/// currently being compiled (the "local crate").
///
/// The import algorithm works analogous to how AST items are inlined from an
/// external crate's metadata:
/// For every FileMap in the external codemap an 'inline' copy is created in the
/// local codemap. The correspondence relation between external and local
/// FileMaps is recorded in the `ImportedFileMap` objects returned from this
/// function. When an item from an external crate is later inlined into this
/// crate, this correspondence information is used to translate the span
/// information of the inlined item so that it refers the correct positions in
/// the local codemap (see `<decoder::DecodeContext as SpecializedDecoder<Span>>`).
///
/// The import algorithm in the function below will reuse FileMaps already
/// existing in the local codemap. For example, even if the FileMap of some
/// source file of libstd gets imported many times, there will only ever be
/// one FileMap object for the corresponding file in the local codemap.
///
/// Note that imported FileMaps do not actually contain the source code of the
/// file they represent, just information about length, line breaks, and
/// multibyte characters. This information is enough to generate valid debuginfo
/// for items inlined from other crates.
pub fn import_codemap(local_codemap: &codemap::CodeMap,
metadata: &MetadataBlob)
-> Vec<cstore::ImportedFileMap> {
let external_codemap = metadata.get_imported_filemaps();

let imported_filemaps = external_codemap.into_iter().map(|filemap_to_import| {
// Try to find an existing FileMap that can be reused for the filemap to
// be imported. A FileMap is reusable if it is exactly the same, just
// positioned at a different offset within the codemap.
let reusable_filemap = {
local_codemap.files
.borrow()
.iter()
.find(|fm| are_equal_modulo_startpos(&fm, &filemap_to_import))
.map(|rc| rc.clone())
};

match reusable_filemap {
Some(fm) => {
cstore::ImportedFileMap {
original_start_pos: filemap_to_import.start_pos,
original_end_pos: filemap_to_import.end_pos,
translated_filemap: fm
}
}
None => {
// We can't reuse an existing FileMap, so allocate a new one
// containing the information we need.
let syntax_pos::FileMap {
name,
abs_path,
start_pos,
end_pos,
lines,
multibyte_chars,
..
} = filemap_to_import;

let source_length = (end_pos - start_pos).to_usize();

// Translate line-start positions and multibyte character
// position into frame of reference local to file.
// `CodeMap::new_imported_filemap()` will then translate those
// coordinates to their new global frame of reference when the
// offset of the FileMap is known.
let mut lines = lines.into_inner();
for pos in &mut lines {
*pos = *pos - start_pos;
}
let mut multibyte_chars = multibyte_chars.into_inner();
for mbc in &mut multibyte_chars {
mbc.pos = mbc.pos - start_pos;
}

let local_version = local_codemap.new_imported_filemap(name,
abs_path,
source_length,
lines,
multibyte_chars);
cstore::ImportedFileMap {
original_start_pos: start_pos,
original_end_pos: end_pos,
translated_filemap: local_version
}
}
}
}).collect();

return imported_filemaps;

fn are_equal_modulo_startpos(fm1: &syntax_pos::FileMap,
fm2: &syntax_pos::FileMap)
-> bool {
if fm1.name != fm2.name {
return false;
}

let lines1 = fm1.lines.borrow();
let lines2 = fm2.lines.borrow();

if lines1.len() != lines2.len() {
return false;
}

for (&line1, &line2) in lines1.iter().zip(lines2.iter()) {
if (line1 - fm1.start_pos) != (line2 - fm2.start_pos) {
return false;
}
}

let multibytes1 = fm1.multibyte_chars.borrow();
let multibytes2 = fm2.multibyte_chars.borrow();

if multibytes1.len() != multibytes2.len() {
return false;
}

for (mb1, mb2) in multibytes1.iter().zip(multibytes2.iter()) {
if (mb1.bytes != mb2.bytes) ||
((mb1.pos - fm1.start_pos) != (mb2.pos - fm2.start_pos)) {
return false;
}
}

true
}
}

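The import_codemap documentation above describes the correspondence that makes inlined spans meaningful locally: each ImportedFileMap pairs the original FileMap's start and end positions with the reused or newly allocated local FileMap. A hedged sketch of the span translation this enables, simplified to raw u32 offsets (the real code works with BytePos values inside the decoder's Span handling):

    // Illustrative: a byte position inside the external crate's codemap is
    // rebased onto the local FileMap that was created (or reused) for it.
    struct ImportedRange {
        original_start: u32,   // FileMap start in the external codemap
        original_end: u32,     // FileMap end in the external codemap
        translated_start: u32, // start of the corresponding local FileMap
    }

    fn translate_pos(imported: &ImportedRange, pos: u32) -> Option<u32> {
        if pos >= imported.original_start && pos < imported.original_end {
            Some(imported.translated_start + (pos - imported.original_start))
        } else {
            None // the position belongs to a different FileMap
        }
    }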
@@ -9,9 +9,9 @@
// except according to those terms.

use cstore;
use common;
use encoder;
use loader;
use schema;

use rustc::middle::cstore::{InlinedItem, CrateStore, CrateSource, ExternCrate};
use rustc::middle::cstore::{NativeLibraryKind, LinkMeta, LinkagePreference};
@@ -97,7 +97,7 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore {
}

fn item_generics<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
-> &'tcx ty::Generics<'tcx>
-> ty::Generics<'tcx>
{
self.dep_graph.read(DepNode::MetaData(def));
self.get_crate_data(def.krate).get_generics(def.index, tcx)
@@ -121,7 +121,7 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore {
self.get_crate_data(def.krate).get_adt_def(def.index, tcx)
}

fn fn_arg_names(&self, did: DefId) -> Vec<String>
fn fn_arg_names(&self, did: DefId) -> Vec<ast::Name>
{
self.dep_graph.read(DepNode::MetaData(did));
self.get_crate_data(did.krate).get_fn_arg_names(did.index)
@@ -140,10 +140,7 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore {
fn inherent_implementations_for_type(&self, def_id: DefId) -> Vec<DefId>
{
self.dep_graph.read(DepNode::MetaData(def_id));
let mut result = vec![];
self.get_crate_data(def_id.krate)
.each_inherent_implementation_for_type(def_id.index, |iid| result.push(iid));
result
self.get_crate_data(def_id.krate).get_inherent_implementations_for_type(def_id.index)
}

fn implementations_of_trait(&self, filter: Option<DefId>) -> Vec<DefId>
@@ -153,9 +150,7 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore {
}
let mut result = vec![];
self.iter_crate_data(|_, cdata| {
cdata.each_implementation_for_trait(filter, &mut |iid| {
result.push(iid)
})
cdata.get_implementations_for_trait(filter, &mut result)
});
result
}
@@ -308,7 +303,7 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore {

fn plugin_registrar_fn(&self, cnum: CrateNum) -> Option<DefId>
{
self.get_crate_data(cnum).info.plugin_registrar_fn.map(|index| DefId {
self.get_crate_data(cnum).root.plugin_registrar_fn.map(|index| DefId {
krate: cnum,
index: index
})
@@ -552,7 +547,7 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore {

fn metadata_encoding_version(&self) -> &[u8]
{
common::metadata_encoding_version
schema::METADATA_HEADER
}

/// Returns a map from a sufficiently visible external item (i.e. an external item that is
@@ -11,12 +11,8 @@
// The crate store - a central repo for information collected about external
// crates and libraries

pub use self::MetadataBlob::*;

use common;
use creader;
use index;
use loader;
use schema;

use rustc::dep_graph::DepGraph;
use rustc::hir::def_id::{CRATE_DEF_INDEX, CrateNum, DefIndex, DefId};
@@ -27,13 +23,12 @@ use rustc::session::config::PanicStrategy;
use rustc_data_structures::indexed_vec::IndexVec;
use rustc::util::nodemap::{FnvHashMap, NodeMap, NodeSet, DefIdMap, FnvHashSet};

use std::cell::{RefCell, Ref, Cell};
use std::cell::{RefCell, Cell};
use std::rc::Rc;
use std::path::PathBuf;
use flate::Bytes;
use syntax::ast::{self, Ident};
use syntax::attr;
use syntax::codemap;
use syntax_pos;

pub use rustc::middle::cstore::{NativeLibraryKind, LinkagePreference};
@@ -47,12 +42,12 @@ pub use rustc::middle::cstore::{CrateSource, LinkMeta};
pub type CrateNumMap = IndexVec<CrateNum, CrateNum>;

pub enum MetadataBlob {
MetadataVec(Bytes),
MetadataArchive(loader::ArchiveMetadata),
Inflated(Bytes),
Archive(loader::ArchiveMetadata),
}

/// Holds information about a syntax_pos::FileMap imported from another crate.
/// See creader::import_codemap() for more information.
/// See `imported_filemaps()` for more information.
pub struct ImportedFileMap {
/// This FileMap's byte-offset within the codemap of its original crate
pub original_start_pos: syntax_pos::BytePos,
@@ -70,13 +65,12 @@ pub struct CrateMetadata {
/// (e.g., by the allocator)
pub extern_crate: Cell<Option<ExternCrate>>,

pub data: MetadataBlob,
pub blob: MetadataBlob,
pub cnum_map: RefCell<CrateNumMap>,
pub cnum: CrateNum,
pub codemap_import_info: RefCell<Vec<ImportedFileMap>>,

pub info: common::CrateInfo,
pub index: index::Index,
pub root: schema::CrateRoot,

/// For each public item in this crate, we encode a key. When the
/// crate is loaded, we read all the keys and put them in this
@@ -294,23 +288,9 @@ impl CStore {
}

impl CrateMetadata {
pub fn name(&self) -> &str { &self.info.name }
pub fn hash(&self) -> Svh { self.info.hash }
pub fn disambiguator(&self) -> &str { &self.info.disambiguator }
pub fn imported_filemaps<'a>(&'a self, codemap: &codemap::CodeMap)
-> Ref<'a, Vec<ImportedFileMap>> {
let filemaps = self.codemap_import_info.borrow();
if filemaps.is_empty() {
drop(filemaps);
let filemaps = creader::import_codemap(codemap, &self.data);

// This shouldn't borrow twice, but there is no way to downgrade RefMut to Ref.
*self.codemap_import_info.borrow_mut() = filemaps;
self.codemap_import_info.borrow()
} else {
filemaps
}
}
pub fn name(&self) -> &str { &self.root.name }
pub fn hash(&self) -> Svh { self.root.hash }
pub fn disambiguator(&self) -> &str { &self.root.disambiguator }

pub fn is_staged_api(&self) -> bool {
self.get_item_attrs(CRATE_DEF_INDEX).iter().any(|attr| {
@@ -349,33 +329,6 @@ impl CrateMetadata {
}

pub fn panic_strategy(&self) -> PanicStrategy {
self.info.panic_strategy.clone()
}
}

impl MetadataBlob {
pub fn as_slice_raw<'a>(&'a self) -> &'a [u8] {
match *self {
MetadataVec(ref vec) => &vec[..],
MetadataArchive(ref ar) => ar.as_slice(),
}
}

pub fn as_slice<'a>(&'a self) -> &'a [u8] {
let slice = self.as_slice_raw();
let len_offset = 4 + common::metadata_encoding_version.len();
if slice.len() < len_offset+4 {
&[] // corrupt metadata
} else {
let len = (((slice[len_offset+0] as u32) << 24) |
((slice[len_offset+1] as u32) << 16) |
((slice[len_offset+2] as u32) << 8) |
((slice[len_offset+3] as u32) << 0)) as usize;
if len <= slice.len() - 4 - len_offset {
&slice[len_offset + 4..len_offset + len + 4]
} else {
&[] // corrupt or old metadata
}
}
self.root.panic_strategy.clone()
}
}

File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -8,65 +8,13 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use schema::*;

use rustc::hir::def_id::{DefId, DefIndex};
use rbml;
use std::io::{Cursor, Write};
use std::slice;
use std::u32;

/// As part of the metadata, we generate an index that stores, for
/// each DefIndex, the position of the corresponding RBML document (if
/// any). This is just a big `[u32]` slice, where an entry of
/// `u32::MAX` indicates that there is no RBML document. This little
/// struct just stores the offsets within the metadata of the start
/// and end of this slice. These are actually part of an RBML
/// document, but for looking things up in the metadata, we just
/// discard the RBML positioning and jump directly to the data.
pub struct Index {
data_start: usize,
data_end: usize,
}

impl Index {
/// Given the RBML doc representing the index, save the offests
/// for later.
pub fn from_rbml(index: rbml::Doc) -> Index {
Index { data_start: index.start, data_end: index.end }
}

/// Given the metadata, extract out the offset of a particular
/// DefIndex (if any).
#[inline(never)]
pub fn lookup_item(&self, bytes: &[u8], def_index: DefIndex) -> Option<u32> {
let words = bytes_to_words(&bytes[self.data_start..self.data_end]);
let index = def_index.as_usize();

debug!("lookup_item: index={:?} words.len={:?}",
index, words.len());

let position = u32::from_le(words[index]);
if position == u32::MAX {
debug!("lookup_item: position=u32::MAX");
None
} else {
debug!("lookup_item: position={:?}", position);
Some(position)
}
}

pub fn iter_enumerated<'a>(&self, bytes: &'a [u8])
-> impl Iterator<Item=(DefIndex, u32)> + 'a {
let words = bytes_to_words(&bytes[self.data_start..self.data_end]);
words.iter().enumerate().filter_map(|(index, &position)| {
if position == u32::MAX {
None
} else {
Some((DefIndex::new(index), u32::from_le(position)))
}
})
}
}

/// While we are generating the metadata, we also track the position
/// of each DefIndex. It is not required that all definitions appear
/// in the metadata, nor that they are serialized in order, and
@@ -74,27 +22,27 @@ impl Index {
/// `u32::MAX`. Whenever an index is visited, we fill in the
/// appropriate spot by calling `record_position`. We should never
/// visit the same index twice.
pub struct IndexData {
pub struct Index {
positions: Vec<u32>,
}

impl IndexData {
pub fn new(max_index: usize) -> IndexData {
IndexData {
impl Index {
pub fn new(max_index: usize) -> Index {
Index {
positions: vec![u32::MAX; max_index]
}
}

pub fn record(&mut self, def_id: DefId, position: usize) {
pub fn record(&mut self, def_id: DefId, entry: Lazy<Entry>) {
assert!(def_id.is_local());
self.record_index(def_id.index, position);
self.record_index(def_id.index, entry);
}

pub fn record_index(&mut self, item: DefIndex, position: usize) {
pub fn record_index(&mut self, item: DefIndex, entry: Lazy<Entry>) {
let item = item.as_usize();

assert!(position < (u32::MAX as usize));
let position = position as u32;
assert!(entry.position < (u32::MAX as usize));
let position = entry.position as u32;

assert!(self.positions[item] == u32::MAX,
"recorded position for item {:?} twice, first at {:?} and now at {:?}",
@@ -103,16 +51,52 @@ impl IndexData {
self.positions[item] = position.to_le();
}

pub fn write_index(&self, buf: &mut Cursor<Vec<u8>>) {
pub fn write_index(&self, buf: &mut Cursor<Vec<u8>>) -> LazySeq<Index> {
let pos = buf.position();
buf.write_all(words_to_bytes(&self.positions)).unwrap();
LazySeq::with_position_and_length(pos as usize, self.positions.len())
}
}

impl<'tcx> LazySeq<Index> {
/// Given the metadata, extract out the offset of a particular
/// DefIndex (if any).
#[inline(never)]
pub fn lookup(&self, bytes: &[u8], def_index: DefIndex) -> Option<Lazy<Entry<'tcx>>> {
let words = &bytes_to_words(&bytes[self.position..])[..self.len];
let index = def_index.as_usize();

debug!("Index::lookup: index={:?} words.len={:?}",
index, words.len());

let position = u32::from_le(words[index]);
if position == u32::MAX {
debug!("Index::lookup: position=u32::MAX");
None
} else {
debug!("Index::lookup: position={:?}", position);
Some(Lazy::with_position(position as usize))
}
}

pub fn iter_enumerated<'a>(&self, bytes: &'a [u8])
-> impl Iterator<Item=(DefIndex, Lazy<Entry<'tcx>>)> + 'a {
let words = &bytes_to_words(&bytes[self.position..])[..self.len];
words.iter().enumerate().filter_map(|(index, &position)| {
if position == u32::MAX {
None
} else {
let position = u32::from_le(position) as usize;
Some((DefIndex::new(index), Lazy::with_position(position)))
}
})
}
}

fn bytes_to_words(b: &[u8]) -> &[u32] {
assert!(b.len() % 4 == 0);
unsafe { slice::from_raw_parts(b.as_ptr() as *const u32, b.len()/4) }
unsafe { slice::from_raw_parts(b.as_ptr() as *const u32, b.len() / 4) }
}

fn words_to_bytes(w: &[u32]) -> &[u8] {
unsafe { slice::from_raw_parts(w.as_ptr() as *const u8, w.len()*4) }
unsafe { slice::from_raw_parts(w.as_ptr() as *const u8, w.len() * 4) }
}

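The index above is nothing more than an array of little-endian u32 byte offsets, one slot per DefIndex, with u32::MAX marking definitions that have no entry. A small worked example of that layout, using plain slices rather than the metadata blob:

    // Illustrative: slot i of the table holds the metadata position of the
    // Entry for DefIndex i, or u32::MAX if that DefIndex was never recorded.
    fn lookup(positions: &[u32], def_index: usize) -> Option<usize> {
        let raw = u32::from_le(positions[def_index]);
        if raw == u32::MAX { None } else { Some(raw as usize) }
    }

    fn example() {
        let mut positions = vec![u32::MAX.to_le(); 4]; // room for DefIndex 0..3
        positions[2] = 900u32.to_le();                 // Entry for DefIndex 2 starts at byte 900
        assert_eq!(lookup(&positions, 2), Some(900));
        assert_eq!(lookup(&positions, 1), None);
    }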
@@ -56,7 +56,9 @@
//! easily control precisely what data is given to that fn.

use encoder::EncodeContext;
use index::IndexData;
use index::Index;
use schema::*;

use rustc::dep_graph::DepNode;
use rustc::hir;
use rustc::hir::def_id::DefId;
@@ -68,7 +70,7 @@ use std::ops::{Deref, DerefMut};
/// Builder that can encode new items, adding them into the index.
/// Item encoding cannot be nested.
pub struct IndexBuilder<'a, 'b: 'a, 'tcx: 'b> {
items: IndexData,
items: Index,
pub ecx: &'a mut EncodeContext<'b, 'tcx>,
}

@@ -88,16 +90,16 @@ impl<'a, 'b, 'tcx> DerefMut for IndexBuilder<'a, 'b, 'tcx> {
impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> {
pub fn new(ecx: &'a mut EncodeContext<'b, 'tcx>) -> Self {
IndexBuilder {
items: IndexData::new(ecx.tcx.map.num_local_def_ids()),
items: Index::new(ecx.tcx.map.num_local_def_ids()),
ecx: ecx,
}
}

/// Emit the data for a def-id to the metadata. The function to
/// emit the data is `op`, and it will be given `data` as
/// arguments. This `record` function will start/end an RBML tag
/// and record the current offset for use in the index, calling
/// `op` to generate the data in the RBML tag.
/// arguments. This `record` function will call `op` to generate
/// the `Entry` (which may point to other encoded information)
/// and will then record the `Lazy<Entry>` for use in the index.
///
/// In addition, it will setup a dep-graph task to track what data
/// `op` accesses to generate the metadata, which is later used by
@@ -112,21 +114,17 @@ impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> {
/// content system.
pub fn record<DATA>(&mut self,
id: DefId,
op: fn(&mut EncodeContext<'b, 'tcx>, DATA),
op: fn(&mut EncodeContext<'b, 'tcx>, DATA) -> Entry<'tcx>,
data: DATA)
where DATA: DepGraphRead
{
let position = self.ecx.mark_stable_position();
self.items.record(id, position);
let _task = self.tcx.dep_graph.in_task(DepNode::MetaData(id));
// FIXME(eddyb) Avoid wrapping the entries in docs.
self.ecx.start_tag(0).unwrap();
data.read(self.tcx);
op(&mut self.ecx, data);
self.ecx.end_tag().unwrap();
let entry = op(&mut self.ecx, data);
self.items.record(id, self.ecx.lazy(&entry));
}

pub fn into_items(self) -> IndexData {
pub fn into_items(self) -> Index {
self.items
}
}

@@ -29,11 +29,9 @@
#![feature(rustc_private)]
#![feature(specialization)]
#![feature(staged_api)]
#![cfg_attr(test, feature(test))]

#[macro_use] extern crate log;
#[macro_use] extern crate syntax;
#[macro_use] #[no_link] extern crate rustc_bitflags;
extern crate syntax_pos;
extern crate flate;
extern crate serialize as rustc_serialize; // used by deriving
@@ -48,24 +46,15 @@ extern crate rustc_llvm;
extern crate rustc_macro;
extern crate rustc_const_math;

#[cfg(test)]
extern crate test;

mod rbml {
pub mod writer;
pub mod reader;
pub use self::reader::Doc;
}

mod diagnostics;

mod astencode;
mod common;
mod index_builder;
mod index;
mod encoder;
mod decoder;
mod csearch;
mod schema;

pub mod creader;
pub mod cstore;

@@ -212,8 +212,8 @@
//! no means all of the necessary details. Take a look at the rest of
//! metadata::loader or metadata::creader for all the juicy details!

use cstore::{MetadataBlob, MetadataVec, MetadataArchive};
use common::{metadata_encoding_version, rustc_version};
use cstore::MetadataBlob;
use schema::{METADATA_HEADER, RUSTC_VERSION};

use rustc::hir::svh::Svh;
use rustc::session::Session;
@@ -382,7 +382,7 @@ impl<'a> Context<'a> {
}
if !self.rejected_via_version.is_empty() {
err.help(&format!("please recompile that crate using this compiler ({})",
rustc_version()));
RUSTC_VERSION));
let mismatches = self.rejected_via_version.iter();
for (i, &CrateMismatch { ref path, ref got }) in mismatches.enumerate() {
err.note(&format!("crate `{}` path #{}: {} compiled by {:?}",
@@ -510,8 +510,7 @@ impl<'a> Context<'a> {
if let Some((ref p, _)) = lib.rlib {
err.note(&format!("path: {}", p.display()));
}
let crate_info = lib.metadata.get_crate_info();
note_crate_name(&mut err, &crate_info.name);
note_crate_name(&mut err, &lib.metadata.get_root().name);
}
err.emit();
None
@@ -597,38 +596,37 @@ impl<'a> Context<'a> {
}

fn crate_matches(&mut self, metadata: &MetadataBlob, libpath: &Path) -> Option<Svh> {
let crate_rustc_version = metadata.crate_rustc_version();
if crate_rustc_version != Some(rustc_version()) {
let message = crate_rustc_version.unwrap_or(format!("an unknown compiler"));
info!("Rejecting via version: expected {} got {}", rustc_version(), message);
let root = metadata.get_root();
if root.rustc_version != RUSTC_VERSION {
info!("Rejecting via version: expected {} got {}",
RUSTC_VERSION, root.rustc_version);
self.rejected_via_version.push(CrateMismatch {
path: libpath.to_path_buf(),
got: message
got: root.rustc_version
});
return None;
}

let crate_info = metadata.get_crate_info();
if self.should_match_name {
if self.crate_name != crate_info.name {
if self.crate_name != root.name {
info!("Rejecting via crate name"); return None;
}
}

if crate_info.triple != self.triple {
if root.triple != self.triple {
info!("Rejecting via crate triple: expected {} got {}",
self.triple, crate_info.triple);
self.triple, root.triple);
self.rejected_via_triple.push(CrateMismatch {
path: libpath.to_path_buf(),
got: crate_info.triple
got: root.triple
});
return None;
}

if let Some(myhash) = self.hash {
if *myhash != crate_info.hash {
if *myhash != root.hash {
info!("Rejecting via hash: expected {} got {}",
*myhash, crate_info.hash);
*myhash, root.hash);
self.rejected_via_hash.push(CrateMismatch {
path: libpath.to_path_buf(),
got: myhash.to_string()
@@ -637,7 +635,7 @@ impl<'a> Context<'a> {
}
}

Some(crate_info.hash)
Some(root.hash)
}

@@ -758,11 +756,7 @@ impl ArchiveMetadata {
fn verify_decompressed_encoding_version(blob: &MetadataBlob, filename: &Path)
-> Result<(), String>
{
let data = blob.as_slice_raw();
if data.len() < 4+metadata_encoding_version.len() ||
!<[u8]>::eq(&data[..4], &[0, 0, 0, 0]) ||
&data[4..4+metadata_encoding_version.len()] != metadata_encoding_version
{
if !blob.is_compatible() {
Err((format!("incompatible metadata version found: '{}'",
filename.display())))
} else {
@@ -797,7 +791,7 @@ fn get_metadata_section_imp(target: &Target, flavor: CrateFlavor, filename: &Pat
filename.display()));
}
};
return match ArchiveMetadata::new(archive).map(|ar| MetadataArchive(ar)) {
return match ArchiveMetadata::new(archive).map(|ar| MetadataBlob::Archive(ar)) {
None => Err(format!("failed to read rlib metadata: '{}'",
filename.display())),
Some(blob) => {
@@ -832,12 +826,12 @@ fn get_metadata_section_imp(target: &Target, flavor: CrateFlavor, filename: &Pat
let cbuf = llvm::LLVMGetSectionContents(si.llsi);
let csz = llvm::LLVMGetSectionSize(si.llsi) as usize;
let cvbuf: *const u8 = cbuf as *const u8;
let vlen = metadata_encoding_version.len();
let vlen = METADATA_HEADER.len();
debug!("checking {} bytes of metadata-version stamp",
vlen);
let minsz = cmp::min(vlen, csz);
let buf0 = slice::from_raw_parts(cvbuf, minsz);
let version_ok = buf0 == metadata_encoding_version;
let version_ok = buf0 == METADATA_HEADER;
if !version_ok {
return Err((format!("incompatible metadata version found: '{}'",
filename.display())));
@@ -849,7 +843,7 @@ fn get_metadata_section_imp(target: &Target, flavor: CrateFlavor, filename: &Pat
let bytes = slice::from_raw_parts(cvbuf1, csz - vlen);
match flate::inflate_bytes(bytes) {
Ok(inflated) => {
let blob = MetadataVec(inflated);
let blob = MetadataBlob::Inflated(inflated);
verify_decompressed_encoding_version(&blob, filename)?;
return Ok(blob);
}

@ -1,411 +0,0 @@
// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Really Bad Markup Language (rbml) is an internal serialization format of rustc.
//! This is not intended to be used by users.
//!
//! Originally based on the Extensible Binary Markup Language
//! (ebml; http://www.matroska.org/technical/specs/rfc/index.html),
//! it is now a separate format tuned for the rust object metadata.
//!
//! # Encoding
//!
//! RBML document consists of the tag, length and data.
//! The encoded data can contain multiple RBML documents concatenated.
//!
//! **Tags** are a hint for the following data.
//! Tags are a number from 0x000 to 0xfff, where 0xf0 through 0xff is reserved.
//! Tags less than 0xf0 are encoded in one literal byte.
//! Tags greater than 0xff are encoded in two big-endian bytes,
//! where the tag number is ORed with 0xf000. (E.g. tag 0x123 = `f1 23`)
//!
//! **Lengths** encode the length of the following data.
//! It is a variable-length unsigned isize, and one of the following forms:
//!
//! - `80` through `fe` for lengths up to 0x7e;
//! - `40 ff` through `7f ff` for lengths up to 0x3fff;
//! - `20 40 00` through `3f ff ff` for lengths up to 0x1fffff;
//! - `10 20 00 00` through `1f ff ff ff` for lengths up to 0xfffffff.
//!
//! The "overlong" form is allowed so that the length can be encoded
//! without the prior knowledge of the encoded data.
//! For example, the length 0 can be represented either by `80`, `40 00`,
//! `20 00 00` or `10 00 00 00`.
//! The encoder tries to minimize the length if possible.
//! Also, some predefined tags listed below are so commonly used that
//! their lengths are omitted ("implicit length").
//!
//! **Data** can be either binary bytes or zero or more nested RBML documents.
//! Nested documents cannot overflow, and should be entirely contained
//! within a parent document.
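
The tag and length rules above reduce to a handful of shifts and masks. As a rough sketch of those byte-level rules only (illustrative, not part of this diff; the helper name `push_tag_and_len` is made up), an encoder that always picks the shortest length form could look like:

    // Illustrative sketch of the RBML framing rules described above.
    fn push_tag_and_len(out: &mut Vec<u8>, tag: usize, len: usize) {
        // Tags below 0xf0 take one literal byte; larger tags are ORed with
        // 0xf000 and written as two big-endian bytes (e.g. 0x123 => `f1 23`).
        if tag < 0xf0 {
            out.push(tag as u8);
        } else {
            out.push(0xf0 | (tag >> 8) as u8);
            out.push(tag as u8);
        }
        // Lengths use the shortest of the four variable-length forms.
        if len <= 0x7e {
            out.push(0x80 | len as u8);
        } else if len <= 0x3fff {
            out.extend_from_slice(&[0x40 | (len >> 8) as u8, len as u8]);
        } else if len <= 0x1fffff {
            out.extend_from_slice(&[0x20 | (len >> 16) as u8, (len >> 8) as u8, len as u8]);
        } else {
            out.extend_from_slice(&[0x10 | (len >> 24) as u8, (len >> 16) as u8,
                                    (len >> 8) as u8, len as u8]);
        }
    }

The deleted writer further below (`write_tag` and `write_sized_vuint`) applies the same rules against an `io::Write` sink.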

#[cfg(test)]
use test::Bencher;

use std::fmt;
use std::str;

macro_rules! try_or {
($e:expr, $r:expr) => (
match $e {
Ok(x) => x,
Err(_) => return $r
}
)
}

#[derive(Clone, Copy)]
pub struct Doc<'a> {
pub data: &'a [u8],
pub start: usize,
pub end: usize,
}

impl<'doc> Doc<'doc> {
pub fn new(data: &'doc [u8]) -> Doc<'doc> {
Doc {
data: data,
start: 0,
end: data.len(),
}
}

pub fn at(data: &'doc [u8], start: usize) -> Doc<'doc> {
let elt_tag = tag_at(data, start).unwrap();
let elt_size = tag_len_at(data, elt_tag.next).unwrap();
let end = elt_size.next + elt_size.val;
Doc {
data: data,
start: elt_size.next,
end: end,
}
}

pub fn maybe_child(&self, tag: usize) -> Option<Doc<'doc>> {
let mut pos = self.start;
while pos < self.end {
let elt_tag = try_or!(tag_at(self.data, pos), None);
let elt_size = try_or!(tag_len_at(self.data, elt_tag.next), None);
pos = elt_size.next + elt_size.val;
if elt_tag.val == tag {
return Some(Doc {
data: self.data,
start: elt_size.next,
end: pos,
});
}
}
None
}

pub fn child(&self, tag: usize) -> Doc<'doc> {
match self.maybe_child(tag) {
Some(d) => d,
None => {
bug!("failed to find child with tag {:?}", tag);
}
}
}

pub fn children_of(&self, tag: usize) -> DocsIterator<'doc> {
DocsIterator { d: self.child(tag) }
}
}

#[derive(Debug)]
pub enum Error {
IntTooBig(usize),
InvalidTag(usize)
}

impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// FIXME: this should be a more useful display form
fmt::Debug::fmt(self, f)
}
}

#[derive(Copy, Clone)]
struct Res {
val: usize,
next: usize,
}

fn tag_at(data: &[u8], start: usize) -> Result<Res, Error> {
let v = data[start] as usize;
if v < 0xf0 {
Ok(Res {
val: v,
next: start + 1,
})
} else if v > 0xf0 {
Ok(Res {
val: ((v & 0xf) << 8) | data[start + 1] as usize,
next: start + 2,
})
} else {
// every tag starting with byte 0xf0 is an overlong form, which is prohibited.
Err(Error::InvalidTag(v))
}
}

#[inline(never)]
fn vuint_at_slow(data: &[u8], start: usize) -> Result<Res, Error> {
let a = data[start];
if a & 0x80 != 0 {
return Ok(Res {
val: (a & 0x7f) as usize,
next: start + 1,
});
}
if a & 0x40 != 0 {
return Ok(Res {
val: ((a & 0x3f) as usize) << 8 | (data[start + 1] as usize),
next: start + 2,
});
}
if a & 0x20 != 0 {
return Ok(Res {
val: ((a & 0x1f) as usize) << 16 | (data[start + 1] as usize) << 8 |
(data[start + 2] as usize),
next: start + 3,
});
}
if a & 0x10 != 0 {
return Ok(Res {
val: ((a & 0x0f) as usize) << 24 | (data[start + 1] as usize) << 16 |
(data[start + 2] as usize) << 8 |
(data[start + 3] as usize),
next: start + 4,
});
}
Err(Error::IntTooBig(a as usize))
}

fn vuint_at(data: &[u8], start: usize) -> Result<Res, Error> {
if data.len() - start < 4 {
return vuint_at_slow(data, start);
}

// Lookup table for parsing EBML Element IDs as per
// http://ebml.sourceforge.net/specs/ The Element IDs are parsed by
// reading a big endian u32 positioned at data[start]. Using the four
// most significant bits of the u32 we lookup in the table below how
// the element ID should be derived from it.
//
// The table stores tuples (shift, mask) where shift is the number the
// u32 should be right shifted with and mask is the value the right
// shifted value should be masked with. If for example the most
// significant bit is set this means it's a class A ID and the u32
// should be right shifted with 24 and masked with 0x7f. Therefore we
// store (24, 0x7f) at index 0x8 - 0xF (four bit numbers where the most
// significant bit is set).
//
// By storing the number of shifts and masks in a table instead of
// checking in order if the most significant bit is set, the second
// most significant bit is set etc. we can replace up to three
// "and+branch" with a single table lookup which gives us a measured
// speedup of around 2x on x86_64.
static SHIFT_MASK_TABLE: [(usize, u32); 16] = [(0, 0x0),
(0, 0x0fffffff),
(8, 0x1fffff),
(8, 0x1fffff),
(16, 0x3fff),
(16, 0x3fff),
(16, 0x3fff),
(16, 0x3fff),
(24, 0x7f),
(24, 0x7f),
(24, 0x7f),
(24, 0x7f),
(24, 0x7f),
(24, 0x7f),
(24, 0x7f),
(24, 0x7f)];

unsafe {
let ptr = data.as_ptr().offset(start as isize) as *const u32;
let val = u32::from_be(*ptr);

let i = (val >> 28) as usize;
let (shift, mask) = SHIFT_MASK_TABLE[i];
Ok(Res {
val: ((val >> shift) & mask) as usize,
next: start + ((32 - shift) >> 3),
})
}
}

fn tag_len_at(data: &[u8], next: usize) -> Result<Res, Error> {
vuint_at(data, next)
}

pub struct DocsIterator<'a> {
d: Doc<'a>,
}

impl<'a> Iterator for DocsIterator<'a> {
type Item = Doc<'a>;

fn next(&mut self) -> Option<Doc<'a>> {
if self.d.start >= self.d.end {
return None;
}

let elt_tag = try_or!(tag_at(self.d.data, self.d.start), {
self.d.start = self.d.end;
None
});
let elt_size = try_or!(tag_len_at(self.d.data, elt_tag.next), {
self.d.start = self.d.end;
None
});

let end = elt_size.next + elt_size.val;
let doc = Doc {
data: self.d.data,
start: elt_size.next,
end: end,
};

self.d.start = end;
return Some(doc);
}
}

#[test]
fn test_vuint_at() {
let data = &[
0x80,
0xff,
0x40, 0x00,
0x7f, 0xff,
0x20, 0x00, 0x00,
0x3f, 0xff, 0xff,
0x10, 0x00, 0x00, 0x00,
0x1f, 0xff, 0xff, 0xff
];

let mut res: Res;

// Class A
res = vuint_at(data, 0).unwrap();
assert_eq!(res.val, 0);
assert_eq!(res.next, 1);
res = vuint_at(data, res.next).unwrap();
assert_eq!(res.val, (1 << 7) - 1);
assert_eq!(res.next, 2);

// Class B
res = vuint_at(data, res.next).unwrap();
assert_eq!(res.val, 0);
assert_eq!(res.next, 4);
res = vuint_at(data, res.next).unwrap();
assert_eq!(res.val, (1 << 14) - 1);
assert_eq!(res.next, 6);

// Class C
res = vuint_at(data, res.next).unwrap();
assert_eq!(res.val, 0);
assert_eq!(res.next, 9);
res = vuint_at(data, res.next).unwrap();
assert_eq!(res.val, (1 << 21) - 1);
assert_eq!(res.next, 12);

// Class D
res = vuint_at(data, res.next).unwrap();
assert_eq!(res.val, 0);
assert_eq!(res.next, 16);
res = vuint_at(data, res.next).unwrap();
assert_eq!(res.val, (1 << 28) - 1);
assert_eq!(res.next, 20);
}

#[bench]
pub fn vuint_at_A_aligned(b: &mut Bencher) {
let data = (0..4 * 100)
.map(|i| {
match i % 2 {
0 => 0x80,
_ => i as u8,
}
})
.collect::<Vec<_>>();
let mut sum = 0;
b.iter(|| {
let mut i = 0;
while i < data.len() {
sum += vuint_at(&data, i).unwrap().val;
i += 4;
}
});
}

#[bench]
pub fn vuint_at_A_unaligned(b: &mut Bencher) {
let data = (0..4 * 100 + 1)
.map(|i| {
match i % 2 {
1 => 0x80,
_ => i as u8,
}
})
.collect::<Vec<_>>();
let mut sum = 0;
b.iter(|| {
let mut i = 1;
while i < data.len() {
sum += vuint_at(&data, i).unwrap().val;
i += 4;
}
});
}

#[bench]
pub fn vuint_at_D_aligned(b: &mut Bencher) {
let data = (0..4 * 100)
.map(|i| {
match i % 4 {
0 => 0x10,
3 => i as u8,
_ => 0,
}
})
.collect::<Vec<_>>();
let mut sum = 0;
b.iter(|| {
let mut i = 0;
while i < data.len() {
sum += vuint_at(&data, i).unwrap().val;
i += 4;
}
});
}

#[bench]
pub fn vuint_at_D_unaligned(b: &mut Bencher) {
let data = (0..4 * 100 + 1)
.map(|i| {
match i % 4 {
1 => 0x10,
0 => i as u8,
_ => 0,
}
})
.collect::<Vec<_>>();
let mut sum = 0;
b.iter(|| {
let mut i = 1;
while i < data.len() {
sum += vuint_at(&data, i).unwrap().val;
i += 4;
}
});
}

@ -1,134 +0,0 @@
// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use std::io::prelude::*;
use std::io::{self, SeekFrom, Cursor};

use rustc_serialize::opaque;

pub type EncodeResult = io::Result<()>;

// rbml writing
pub struct Encoder<'a> {
pub opaque: opaque::Encoder<'a>,
size_positions: Vec<usize>,
relax_limit: usize, // do not move encoded bytes before this position
}

const NUM_TAGS: usize = 0x1000;

fn write_tag<W: Write>(w: &mut W, n: usize) -> EncodeResult {
if n < 0xf0 {
w.write_all(&[n as u8])
} else if 0x100 <= n && n < NUM_TAGS {
w.write_all(&[0xf0 | (n >> 8) as u8, n as u8])
} else {
Err(io::Error::new(io::ErrorKind::Other, &format!("invalid tag: {}", n)[..]))
}
}

fn write_sized_vuint<W: Write>(w: &mut W, n: usize, size: usize) -> EncodeResult {
match size {
1 => w.write_all(&[0x80 | (n as u8)]),
2 => w.write_all(&[0x40 | ((n >> 8) as u8), n as u8]),
3 => w.write_all(&[0x20 | ((n >> 16) as u8), (n >> 8) as u8, n as u8]),
4 => w.write_all(&[0x10 | ((n >> 24) as u8), (n >> 16) as u8, (n >> 8) as u8, n as u8]),
_ => Err(io::Error::new(io::ErrorKind::Other, &format!("isize too big: {}", n)[..])),
}
}

pub fn write_vuint<W: Write>(w: &mut W, n: usize) -> EncodeResult {
if n < 0x7f {
return write_sized_vuint(w, n, 1);
}
if n < 0x4000 {
return write_sized_vuint(w, n, 2);
}
if n < 0x200000 {
return write_sized_vuint(w, n, 3);
}
if n < 0x10000000 {
return write_sized_vuint(w, n, 4);
}
Err(io::Error::new(io::ErrorKind::Other, &format!("isize too big: {}", n)[..]))
}

impl<'a> Encoder<'a> {
pub fn new(cursor: &'a mut Cursor<Vec<u8>>) -> Encoder<'a> {
Encoder {
opaque: opaque::Encoder::new(cursor),
size_positions: vec![],
relax_limit: 0,
}
}

pub fn start_tag(&mut self, tag_id: usize) -> EncodeResult {
debug!("Start tag {:?}", tag_id);

// Write the enum ID:
write_tag(&mut self.opaque.cursor, tag_id)?;

// Write a placeholder four-byte size.
let cur_pos = self.position();
self.size_positions.push(cur_pos);
self.opaque.cursor.write_all(&[0, 0, 0, 0])
}

pub fn end_tag(&mut self) -> EncodeResult {
let last_size_pos = self.size_positions.pop().unwrap();
let cur_pos = self.position();
self.opaque.cursor.seek(SeekFrom::Start(last_size_pos as u64))?;
let size = cur_pos - last_size_pos - 4;

// relax the size encoding for small tags (bigger tags are costly to move).
// we should never try to move the stable positions, however.
const RELAX_MAX_SIZE: usize = 0x100;
if size <= RELAX_MAX_SIZE && last_size_pos >= self.relax_limit {
// we can't alter the buffer in place, so have a temporary buffer
let mut buf = [0u8; RELAX_MAX_SIZE];
{
let data = &self.opaque.cursor.get_ref()[last_size_pos + 4..cur_pos];
buf[..size].copy_from_slice(data);
}

// overwrite the size and data and continue
write_vuint(&mut self.opaque.cursor, size)?;
self.opaque.cursor.write_all(&buf[..size])?;
} else {
// overwrite the size with an overlong encoding and skip past the data
write_sized_vuint(&mut self.opaque.cursor, size, 4)?;
self.opaque.cursor.seek(SeekFrom::Start(cur_pos as u64))?;
}

debug!("End tag (size = {:?})", size);
Ok(())
}

pub fn wr_tagged_str(&mut self, tag_id: usize, v: &str) -> EncodeResult {
write_tag(&mut self.opaque.cursor, tag_id)?;
write_vuint(&mut self.opaque.cursor, v.len())?;
self.opaque.cursor.write_all(v.as_bytes())
}

pub fn position(&mut self) -> usize {
self.opaque.position() as usize
}

/// Returns the current position while marking it stable, i.e.
/// generated bytes so far wouldn't be affected by relaxation.
pub fn mark_stable_position(&mut self) -> usize {
let pos = self.position();
if self.relax_limit < pos {
self.relax_limit = pos;
}
let meta_start = 8 + ::common::metadata_encoding_version.len();
pos - meta_start
}
}

299
src/librustc_metadata/schema.rs
Normal file
@ -0,0 +1,299 @@
// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use astencode;
use index;

use rustc::hir;
use rustc::hir::def;
use rustc::hir::def_id::{DefIndex, DefId};
use rustc::middle::cstore::{LinkagePreference, NativeLibraryKind};
use rustc::middle::lang_items;
use rustc::mir;
use rustc::ty::{self, Ty};
use rustc::session::config::PanicStrategy;

use rustc_serialize as serialize;
use syntax::{ast, attr};
use syntax_pos::{self, Span};

use std::marker::PhantomData;

pub const RUSTC_VERSION: &'static str = concat!("rustc ", env!("CFG_VERSION"));

/// Metadata encoding version.
/// NB: increment this if you change the format of metadata such that
/// the rustc version can't be found to compare with `RUSTC_VERSION`.
pub const METADATA_VERSION: u8 = 3;

/// Metadata header which includes `METADATA_VERSION`.
/// To get older versions of rustc to ignore this metadata,
/// there are 4 zero bytes at the start, which are treated
/// as a length of 0 by old compilers.
///
/// This header is followed by the position of the `CrateRoot`.
pub const METADATA_HEADER: &'static [u8; 12] = &[
0, 0, 0, 0,
b'r', b'u', b's', b't',
0, 0, 0, METADATA_VERSION
];
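
To make the header layout concrete: a reader only needs to compare the fixed prefix and then look at the final byte. A minimal sketch under that assumption (the `read_metadata_version` helper is hypothetical, not an API added by this commit):

    // Hypothetical illustration of the 12-byte header documented above.
    fn read_metadata_version(blob: &[u8]) -> Option<u8> {
        // All bytes except the last are fixed: 4 zero bytes (read as a length
        // of 0 by older compilers), the ASCII bytes "rust", then three zeros.
        let fixed = &METADATA_HEADER[..METADATA_HEADER.len() - 1];
        if blob.starts_with(fixed) {
            Some(blob[METADATA_HEADER.len() - 1])
        } else {
            None
        }
    }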

/// The shorthand encoding uses an enum's variant index `usize`
/// and is offset by this value so it never matches a real variant.
/// This offset is also chosen so that the first byte is never < 0x80.
pub const SHORTHAND_OFFSET: usize = 0x80;
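
The guarantee that a real variant index never reaches 0x80 is what lets a decoder tell an inline value from a shorthand back-reference by inspecting a single integer. A hedged sketch of that branch (the `Decoded` enum and `classify` helper are made up for illustration and are not the decoder from this commit):

    // Hypothetical illustration of the property documented above.
    enum Decoded {
        Variant(usize),   // below SHORTHAND_OFFSET: an ordinary variant index
        Shorthand(usize), // at or above: the position of a previously encoded value
    }

    fn classify(first_word: usize) -> Decoded {
        if first_word < SHORTHAND_OFFSET {
            Decoded::Variant(first_word)
        } else {
            Decoded::Shorthand(first_word - SHORTHAND_OFFSET)
        }
    }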

/// A value of type T referred to by its absolute position
/// in the metadata, and which can be decoded lazily.
#[must_use]
pub struct Lazy<T> {
pub position: usize,
_marker: PhantomData<T>
}

impl<T> Lazy<T> {
pub fn with_position(position: usize) -> Lazy<T> {
Lazy {
position: position,
_marker: PhantomData
}
}
}

impl<T> Copy for Lazy<T> {}
impl<T> Clone for Lazy<T> {
fn clone(&self) -> Self { *self }
}

impl<T> serialize::UseSpecializedEncodable for Lazy<T> {}
impl<T> serialize::UseSpecializedDecodable for Lazy<T> {}
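
Since a `Lazy<T>` stores nothing but an absolute byte position, resolving one amounts to decoding a single `T` starting at that offset in the metadata blob. A conceptual sketch of that idea, with the decode step left as a caller-supplied closure because the real decoder lives elsewhere in `rustc_metadata` (the `resolve_lazy` helper is an assumption, not this commit's API):

    // Sketch only; `resolve_lazy` is not part of this commit.
    fn resolve_lazy<'a, T, F>(metadata: &'a [u8], lazy: Lazy<T>, decode: F) -> T
        where F: FnOnce(&'a [u8]) -> T
    {
        // Jump to the recorded position and decode one value from there.
        decode(&metadata[lazy.position..])
    }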

/// A sequence of type T referred to by its absolute position
/// in the metadata and length, and which can be decoded lazily.
///
/// Unlike `Lazy<Vec<T>>`, the length is encoded next to the
/// position, not at the position, which means that the length
/// doesn't need to be known before encoding all the elements.
#[must_use]
pub struct LazySeq<T> {
pub len: usize,
pub position: usize,
_marker: PhantomData<T>
}

impl<T> LazySeq<T> {
pub fn empty() -> LazySeq<T> {
LazySeq::with_position_and_length(0, 0)
}

pub fn with_position_and_length(position: usize, len: usize) -> LazySeq<T> {
LazySeq {
len: len,
position: position,
_marker: PhantomData
}
}
}

impl<T> Copy for LazySeq<T> {}
impl<T> Clone for LazySeq<T> {
fn clone(&self) -> Self { *self }
}

impl<T> serialize::UseSpecializedEncodable for LazySeq<T> {}
impl<T> serialize::UseSpecializedDecodable for LazySeq<T> {}
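
Because the length lives next to the position rather than in the byte stream itself, a sequence can be streamed out element by element and the `LazySeq` recorded only once the count is known. A sketch under that assumption (`encode_seq` is a made-up helper; only `LazySeq::with_position_and_length` comes from the code above):

    // Illustration only: write the elements, then record (position, len).
    fn encode_seq<T, F>(out: &mut Vec<u8>, items: &[T], mut encode_one: F) -> LazySeq<T>
        where F: FnMut(&mut Vec<u8>, &T)
    {
        let position = out.len();
        for item in items {
            encode_one(out, item);
        }
        LazySeq::with_position_and_length(position, items.len())
    }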

#[derive(RustcEncodable, RustcDecodable)]
pub struct CrateRoot {
pub rustc_version: String,
pub name: String,
pub triple: String,
pub hash: hir::svh::Svh,
pub disambiguator: String,
pub panic_strategy: PanicStrategy,
pub plugin_registrar_fn: Option<DefIndex>,
pub macro_derive_registrar: Option<DefIndex>,

pub index: LazySeq<index::Index>,
pub crate_deps: LazySeq<CrateDep>,
pub dylib_dependency_formats: LazySeq<Option<LinkagePreference>>,
pub native_libraries: LazySeq<(NativeLibraryKind, String)>,
pub lang_items: LazySeq<(DefIndex, usize)>,
pub lang_items_missing: LazySeq<lang_items::LangItem>,
pub impls: LazySeq<TraitImpls>,
pub reachable_ids: LazySeq<DefIndex>,
pub macro_defs: LazySeq<MacroDef>,
pub codemap: LazySeq<syntax_pos::FileMap>
}

#[derive(RustcEncodable, RustcDecodable)]
pub struct CrateDep {
pub name: ast::Name,
pub hash: hir::svh::Svh,
pub explicitly_linked: bool
}

#[derive(RustcEncodable, RustcDecodable)]
pub struct TraitImpls {
pub trait_id: (u32, DefIndex),
pub impls: LazySeq<DefIndex>
}

#[derive(RustcEncodable, RustcDecodable)]
pub struct MacroDef {
pub name: ast::Name,
pub attrs: Vec<ast::Attribute>,
pub span: Span,
pub body: String
}

#[derive(RustcEncodable, RustcDecodable)]
pub struct Entry<'tcx> {
pub kind: EntryKind<'tcx>,
pub visibility: ty::Visibility,
pub def_key: Lazy<hir::map::DefKey>,
pub attributes: LazySeq<ast::Attribute>,
pub children: LazySeq<DefIndex>,
pub stability: Option<Lazy<attr::Stability>>,
pub deprecation: Option<Lazy<attr::Deprecation>>,

pub ty: Option<Lazy<Ty<'tcx>>>,
pub inherent_impls: LazySeq<DefIndex>,
pub variances: LazySeq<ty::Variance>,
pub generics: Option<Lazy<ty::Generics<'tcx>>>,
pub predicates: Option<Lazy<ty::GenericPredicates<'tcx>>>,

pub ast: Option<Lazy<astencode::Ast<'tcx>>>,
pub mir: Option<Lazy<mir::repr::Mir<'tcx>>>
}

#[derive(Copy, Clone, RustcEncodable, RustcDecodable)]
pub enum EntryKind<'tcx> {
Const,
ImmStatic,
MutStatic,
ForeignImmStatic,
ForeignMutStatic,
ForeignMod,
Type,
Enum,
Field,
Variant(Lazy<VariantData>),
Struct(Lazy<VariantData>),
Union(Lazy<VariantData>),
Fn(Lazy<FnData>),
ForeignFn(Lazy<FnData>),
Mod(Lazy<ModData>),
Closure(Lazy<ClosureData<'tcx>>),
Trait(Lazy<TraitData<'tcx>>),
Impl(Lazy<ImplData<'tcx>>),
DefaultImpl(Lazy<ImplData<'tcx>>),
Method(Lazy<MethodData<'tcx>>),
AssociatedType(AssociatedContainer),
AssociatedConst(AssociatedContainer)
}

#[derive(RustcEncodable, RustcDecodable)]
pub struct ModData {
pub reexports: LazySeq<def::Export>
}

#[derive(RustcEncodable, RustcDecodable)]
pub struct FnData {
pub constness: hir::Constness,
pub arg_names: LazySeq<ast::Name>
}

#[derive(RustcEncodable, RustcDecodable)]
pub struct VariantData {
pub kind: ty::VariantKind,
pub disr: u64,

/// If this is a struct's only variant, this
/// is the index of the "struct ctor" item.
pub struct_ctor: Option<DefIndex>
}

#[derive(RustcEncodable, RustcDecodable)]
pub struct TraitData<'tcx> {
pub unsafety: hir::Unsafety,
pub paren_sugar: bool,
pub has_default_impl: bool,
pub trait_ref: Lazy<ty::TraitRef<'tcx>>,
pub super_predicates: Lazy<ty::GenericPredicates<'tcx>>
}

#[derive(RustcEncodable, RustcDecodable)]
pub struct ImplData<'tcx> {
pub polarity: hir::ImplPolarity,
pub parent_impl: Option<DefId>,
pub coerce_unsized_kind: Option<ty::adjustment::CustomCoerceUnsized>,
pub trait_ref: Option<Lazy<ty::TraitRef<'tcx>>>
}

/// Describes whether the container of an associated item
/// is a trait or an impl and whether, in a trait, it has
/// a default, or, in an impl, whether it's marked "default".
#[derive(Copy, Clone, RustcEncodable, RustcDecodable)]
pub enum AssociatedContainer {
TraitRequired,
TraitWithDefault,
ImplDefault,
ImplFinal
}

impl AssociatedContainer {
pub fn with_def_id(&self, def_id: DefId) -> ty::ImplOrTraitItemContainer {
match *self {
AssociatedContainer::TraitRequired |
AssociatedContainer::TraitWithDefault => {
ty::TraitContainer(def_id)
}

AssociatedContainer::ImplDefault |
AssociatedContainer::ImplFinal => {
ty::ImplContainer(def_id)
}
}
}

pub fn has_body(&self) -> bool {
match *self {
AssociatedContainer::TraitRequired => false,

AssociatedContainer::TraitWithDefault |
AssociatedContainer::ImplDefault |
AssociatedContainer::ImplFinal => true
}
}

pub fn defaultness(&self) -> hir::Defaultness {
match *self {
AssociatedContainer::TraitRequired |
AssociatedContainer::TraitWithDefault |
AssociatedContainer::ImplDefault => hir::Defaultness::Default,

AssociatedContainer::ImplFinal => hir::Defaultness::Final
}
}
}

#[derive(RustcEncodable, RustcDecodable)]
pub struct MethodData<'tcx> {
pub fn_data: FnData,
pub container: AssociatedContainer,
pub explicit_self: Lazy<ty::ExplicitSelfCategory<'tcx>>
}

#[derive(RustcEncodable, RustcDecodable)]
pub struct ClosureData<'tcx> {
pub kind: ty::ClosureKind,
pub ty: Lazy<ty::ClosureTy<'tcx>>
}

@ -674,6 +674,13 @@ fn convert_associated_type<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
defaultness: hir::Defaultness,
ty: Option<Ty<'tcx>>)
{
let predicates = ty::GenericPredicates {
parent: Some(container.id()),
predicates: vec![]
};
ccx.tcx.predicates.borrow_mut().insert(ccx.tcx.map.local_def_id(id),
predicates);

let associated_type = Rc::new(ty::AssociatedType {
name: name,
vis: ty::Visibility::from_hir(vis, id, ccx.tcx),
@ -831,6 +838,9 @@ fn convert_item(ccx: &CrateCtxt, it: &hir::Item) {
// Convert all the associated types.
for impl_item in impl_items {
if let hir::ImplItemKind::Type(ref ty) = impl_item.node {
let type_def_id = ccx.tcx.map.local_def_id(impl_item.id);
generics_of_def_id(ccx, type_def_id);

if opt_trait_ref.is_none() {
span_err!(tcx.sess, impl_item.span, E0202,
"associated types are not allowed in inherent impls");
@ -898,6 +908,9 @@ fn convert_item(ccx: &CrateCtxt, it: &hir::Item) {
// Convert all the associated types.
for trait_item in trait_items {
if let hir::TypeTraitItem(_, ref opt_ty) = trait_item.node {
let type_def_id = ccx.tcx.map.local_def_id(trait_item.id);
generics_of_def_id(ccx, type_def_id);

let typ = opt_ty.as_ref().map({
|ty| ccx.icx(&trait_predicates).to_ty(&ExplicitRscope, &ty)
});

@ -1168,7 +1168,7 @@ impl<'a, 'tcx> Clean<FnDecl> for (DefId, &'a ty::PolyFnSig<'tcx>) {
Argument {
type_: t.clean(cx),
id: ast::CRATE_NODE_ID,
name: names.next().unwrap_or("".to_string()),
name: names.next().map_or("".to_string(), |name| name.to_string()),
}
}).collect(),
},

1
src/rustc/Cargo.lock
generated
@ -219,7 +219,6 @@ dependencies = [
"log 0.0.0",
"rustc 0.0.0",
"rustc_back 0.0.0",
"rustc_bitflags 0.0.0",
"rustc_const_math 0.0.0",
"rustc_data_structures 0.0.0",
"rustc_errors 0.0.0",

@ -34,14 +34,14 @@ struct Bar {

enum WireProtocol {
JSON,
RBML,
Opaque,
// ...
}

fn encode_json<T: Encodable>(val: &T, wr: &mut Cursor<Vec<u8>>) {
write!(wr, "{}", json::as_json(val));
}
fn encode_rbml<T: Encodable>(val: &T, wr: &mut Cursor<Vec<u8>>) {
fn encode_opaque<T: Encodable>(val: &T, wr: &mut Cursor<Vec<u8>>) {
let mut encoder = opaque::Encoder::new(wr);
val.encode(&mut encoder);
}
@ -52,6 +52,6 @@ pub fn main() {
let proto = WireProtocol::JSON;
match proto {
WireProtocol::JSON => encode_json(&target, &mut wr),
WireProtocol::RBML => encode_rbml(&target, &mut wr)
WireProtocol::Opaque => encode_opaque(&target, &mut wr)
}
}