Mirror of https://github.com/rust-lang/rust.git, synced 2025-02-04 11:04:03 +00:00
Auto merge of #44529 - alexcrichton:trans-query, r=michaelwoerister
Refactor translation unit partitioning/collection as a query

This commit is targeted at #44486 with the ultimate goal of making the `collect_and_partition_translation_items` function a query. This mostly just involved query-ifying a few other systems along with plumbing the tcx instead of `SharedCrateContext` in a few locations. Currently this only tackles the first bullet of #44486 and doesn't add a dedicated query for a particular codegen unit; I wasn't quite sure how to do that yet but figured this was good to put up.

Closes #44486
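As a rough sketch of the query-ification described above (illustrative only, not code from this commit: the provider body and the collect_and_partition helper are assumptions, while the query name and result type mirror the define_maps! entry added below), the new query might be provided and then consumed straight off the tcx:

// Hypothetical sketch; only the query name and result type come from the diff.
pub fn provide(providers: &mut Providers) {
    providers.collect_and_partition_translation_items = |tcx, cnum| {
        assert_eq!(cnum, LOCAL_CRATE);
        // Collect translation items and partition them into codegen units once;
        // the query system memoizes and dependency-tracks the result.
        let (items, cgus) = collect_and_partition(tcx); // assumed helper
        (Arc::new(items), Arc::new(cgus))
    };
}

// Downstream code then asks the tcx instead of threading a SharedCrateContext around:
let (_trans_items, codegen_units) = tcx.collect_and_partition_translation_items(LOCAL_CRATE);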
This commit is contained in: e8a76d8acc
@@ -71,6 +71,7 @@ use rustc_data_structures::stable_hasher::{StableHasher, HashStable};
use ich::StableHashingContext;
use std::fmt;
use std::hash::Hash;
use syntax_pos::symbol::InternedString;

// erase!() just makes tokens go away. It's used to specify which macro argument
// is repeated (i.e. which sub-expression of the macro we are in) but don't need
@@ -535,7 +536,7 @@ define_dep_nodes!( <'tcx>
[] GetPanicStrategy(CrateNum),
[] IsNoBuiltins(CrateNum),
[] ImplDefaultness(DefId),
[] ExportedSymbols(CrateNum),
[] ExportedSymbolIds(CrateNum),
[] NativeLibraries(CrateNum),
[] PluginRegistrarFn(CrateNum),
[] DeriveRegistrarFn(CrateNum),
@@ -575,6 +576,14 @@ define_dep_nodes!( <'tcx>
[] MaybeUnusedExternCrates,
[] StabilityIndex,
[] AllCrateNums,
[] ExportedSymbols(CrateNum),
[] CollectAndPartitionTranslationItems,
[] ExportName(DefId),
[] ContainsExternIndicator(DefId),
[] IsTranslatedFunction(DefId),
[] CodegenUnit(InternedString),
[] CompileCodegenUnit(InternedString),
[] OutputFilenames,
);

trait DepNodeParams<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> : fmt::Debug {
@@ -92,6 +92,7 @@ pub mod middle {
pub mod dependency_format;
pub mod effect;
pub mod entry;
pub mod exported_symbols;
pub mod free_region;
pub mod intrinsicck;
pub mod lang_items;
@@ -103,6 +104,7 @@ pub mod middle {
pub mod recursion_limit;
pub mod resolve_lifetime;
pub mod stability;
pub mod trans;
pub mod weak_lang_items;
}
@@ -366,8 +366,9 @@ pub trait CrateLoader {
// In order to get this left-to-right dependency ordering, we perform a
// topological sort of all crates putting the leaves at the right-most
// positions.
pub fn used_crates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
prefer: LinkagePreference) -> Vec<(CrateNum, LibSource)> {
pub fn used_crates(tcx: TyCtxt, prefer: LinkagePreference)
-> Vec<(CrateNum, LibSource)>
{
let mut libs = tcx.crates()
.iter()
.cloned()
src/librustc/middle/exported_symbols.rs (new file, 31 lines)
@@ -0,0 +1,31 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

/// The SymbolExportLevel of a symbols specifies from which kinds of crates
/// the symbol will be exported. `C` symbols will be exported from any
/// kind of crate, including cdylibs which export very few things.
/// `Rust` will only be exported if the crate produced is a Rust
/// dylib.
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
pub enum SymbolExportLevel {
C,
Rust,
}

impl SymbolExportLevel {
pub fn is_below_threshold(self, threshold: SymbolExportLevel) -> bool {
if threshold == SymbolExportLevel::Rust {
// We export everything from Rust dylibs
true
} else {
self == SymbolExportLevel::C
}
}
}
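To make the threshold semantics above concrete, a small illustrative example (these assertions are an editorial addition, not part of the commit): a Rust threshold admits every symbol, while a C threshold only admits C-level symbols.

// Illustrative only, following is_below_threshold above.
assert!(SymbolExportLevel::C.is_below_threshold(SymbolExportLevel::Rust));
assert!(SymbolExportLevel::Rust.is_below_threshold(SymbolExportLevel::Rust));
assert!(SymbolExportLevel::C.is_below_threshold(SymbolExportLevel::C));
assert!(!SymbolExportLevel::Rust.is_below_threshold(SymbolExportLevel::C));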
@@ -233,8 +233,8 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> {
} else {
false
};
let is_extern = attr::contains_extern_indicator(&self.tcx.sess.diagnostic(),
&item.attrs);
let def_id = self.tcx.hir.local_def_id(item.id);
let is_extern = self.tcx.contains_extern_indicator(def_id);
if reachable || is_extern {
self.reachable_symbols.insert(search_item);
}
@@ -369,10 +369,6 @@ impl<'a, 'tcx: 'a> ItemLikeVisitor<'tcx> for CollectPrivateImplItemsVisitor<'a,
}
}

pub fn find_reachable<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Rc<NodeSet> {
tcx.reachable_set(LOCAL_CRATE)
}

fn reachable_set<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum) -> Rc<NodeSet> {
debug_assert!(crate_num == LOCAL_CRATE);
src/librustc/middle/trans.rs (new file, 110 lines)
@@ -0,0 +1,110 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use syntax::ast::NodeId;
use syntax::symbol::InternedString;
use ty::Instance;
use util::nodemap::FxHashMap;

#[derive(PartialEq, Eq, Clone, Copy, Debug, Hash)]
pub enum TransItem<'tcx> {
Fn(Instance<'tcx>),
Static(NodeId),
GlobalAsm(NodeId),
}

pub struct CodegenUnit<'tcx> {
/// A name for this CGU. Incremental compilation requires that
/// name be unique amongst **all** crates. Therefore, it should
/// contain something unique to this crate (e.g., a module path)
/// as well as the crate name and disambiguator.
name: InternedString,
items: FxHashMap<TransItem<'tcx>, (Linkage, Visibility)>,
}

#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub enum Linkage {
External,
AvailableExternally,
LinkOnceAny,
LinkOnceODR,
WeakAny,
WeakODR,
Appending,
Internal,
Private,
ExternalWeak,
Common,
}

#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub enum Visibility {
Default,
Hidden,
Protected,
}

impl<'tcx> CodegenUnit<'tcx> {
pub fn new(name: InternedString) -> CodegenUnit<'tcx> {
CodegenUnit {
name: name,
items: FxHashMap(),
}
}

pub fn name(&self) -> &InternedString {
&self.name
}

pub fn set_name(&mut self, name: InternedString) {
self.name = name;
}

pub fn items(&self) -> &FxHashMap<TransItem<'tcx>, (Linkage, Visibility)> {
&self.items
}

pub fn items_mut(&mut self)
-> &mut FxHashMap<TransItem<'tcx>, (Linkage, Visibility)>
{
&mut self.items
}
}

#[derive(Clone, Default)]
pub struct Stats {
pub n_glues_created: usize,
pub n_null_glues: usize,
pub n_real_glues: usize,
pub n_fns: usize,
pub n_inlines: usize,
pub n_closures: usize,
pub n_llvm_insns: usize,
pub llvm_insns: FxHashMap<String, usize>,
// (ident, llvm-instructions)
pub fn_stats: Vec<(String, usize)>,
}

impl Stats {
pub fn extend(&mut self, stats: Stats) {
self.n_glues_created += stats.n_glues_created;
self.n_null_glues += stats.n_null_glues;
self.n_real_glues += stats.n_real_glues;
self.n_fns += stats.n_fns;
self.n_inlines += stats.n_inlines;
self.n_closures += stats.n_closures;
self.n_llvm_insns += stats.n_llvm_insns;

for (k, v) in stats.llvm_insns {
*self.llvm_insns.entry(k).or_insert(0) += v;
}
self.fn_stats.extend(stats.fn_stats);
}
}
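As a hedged illustration of how these new types fit together (the cgu, instance, and per_cgu_stats values are assumed to exist; only the types and methods come from the file above):

// Sketch only: the partitioner records each translation item in a unit
// together with the linkage and visibility it should be emitted with ...
cgu.items_mut().insert(TransItem::Fn(instance), (Linkage::Internal, Visibility::Default));

// ... and per-unit statistics can later be folded into a crate-wide total.
let mut total = Stats::default();
for stats in per_cgu_stats {
    total.extend(stats);
}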
@@ -13,6 +13,7 @@
use dep_graph::DepGraph;
use errors::DiagnosticBuilder;
use session::Session;
use session::config::OutputFilenames;
use middle;
use hir::{TraitCandidate, HirId, ItemLocalId};
use hir::def::{Def, Export};
@@ -64,6 +65,8 @@ use std::mem;
use std::ops::Deref;
use std::iter;
use std::rc::Rc;
use std::sync::mpsc;
use std::sync::Arc;
use syntax::abi;
use syntax::ast::{self, Name, NodeId};
use syntax::attr;
@@ -901,6 +904,16 @@ pub struct GlobalCtxt<'tcx> {
/// error reporting, and so is lazily initialized and generally
/// shouldn't taint the common path (hence the RefCell).
pub all_traits: RefCell<Option<Vec<DefId>>>,

/// A general purpose channel to throw data out the back towards LLVM worker
/// threads.
///
/// This is intended to only get used during the trans phase of the compiler
/// when satisfying the query for a particular codegen unit. Internally in
/// the query it'll send data along this channel to get processed later.
pub tx_to_llvm_workers: mpsc::Sender<Box<Any + Send>>,

output_filenames: Arc<OutputFilenames>,
}

impl<'tcx> GlobalCtxt<'tcx> {
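A minimal sketch of how the tx_to_llvm_workers channel documented above might be used from inside the codegen-unit query (the translated_module value is assumed; the field and its Box<Any + Send> payload type come from the diff):

// Sketch only: hand a finished module off to the LLVM worker coordinator.
tcx.tx_to_llvm_workers
    .send(Box::new(translated_module) as Box<Any + Send>)
    .expect("LLVM worker coordinator has shut down");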
@@ -1025,6 +1038,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
named_region_map: resolve_lifetime::NamedRegionMap,
hir: hir_map::Map<'tcx>,
crate_name: &str,
tx: mpsc::Sender<Box<Any + Send>>,
output_filenames: &OutputFilenames,
f: F) -> R
where F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'tcx>) -> R
{
@@ -1145,6 +1160,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
derive_macros: RefCell::new(NodeMap()),
stability_interner: RefCell::new(FxHashSet()),
all_traits: RefCell::new(None),
tx_to_llvm_workers: tx,
output_filenames: Arc::new(output_filenames.clone()),
}, f)
}

@@ -2218,4 +2235,8 @@ pub fn provide(providers: &mut ty::maps::Providers) {
assert_eq!(cnum, LOCAL_CRATE);
Rc::new(tcx.cstore.postorder_cnums_untracked())
};
providers.output_filenames = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
tcx.output_filenames.clone()
};
}
@@ -23,9 +23,12 @@ use middle::region;
use middle::resolve_lifetime::{Region, ObjectLifetimeDefault};
use middle::stability::{self, DeprecationEntry};
use middle::lang_items::{LanguageItems, LangItem};
use middle::exported_symbols::SymbolExportLevel;
use middle::trans::{CodegenUnit, Stats};
use mir;
use mir::transform::{MirSuite, MirPassIndex};
use session::CompileResult;
use session::config::OutputFilenames;
use traits::specialization_graph;
use ty::{self, CrateInherentImpls, Ty, TyCtxt};
use ty::layout::{Layout, LayoutError};
@@ -48,7 +51,9 @@ use std::mem;
use std::collections::BTreeMap;
use std::ops::Deref;
use std::rc::Rc;
use std::sync::Arc;
use syntax_pos::{Span, DUMMY_SP};
use syntax_pos::symbol::InternedString;
use syntax::attr;
use syntax::ast;
use syntax::symbol::Symbol;
@@ -177,6 +182,15 @@ impl<'tcx, T: Key> Key for ty::ParamEnvAnd<'tcx, T> {
}
}

impl Key for InternedString {
fn map_crate(&self) -> CrateNum {
LOCAL_CRATE
}
fn default_span(&self, _tcx: TyCtxt) -> Span {
DUMMY_SP
}
}

trait Value<'tcx>: Sized {
fn from_cycle_error<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Self;
}
@@ -595,7 +609,7 @@ impl<'tcx> QueryDescription for queries::is_sanitizer_runtime<'tcx> {
}
}

impl<'tcx> QueryDescription for queries::exported_symbols<'tcx> {
impl<'tcx> QueryDescription for queries::exported_symbol_ids<'tcx> {
fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
format!("looking up the exported symbols of a crate")
}
@@ -745,6 +759,36 @@ impl<'tcx> QueryDescription for queries::all_crate_nums<'tcx> {
}
}

impl<'tcx> QueryDescription for queries::exported_symbols<'tcx> {
fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
format!("exported_symbols")
}
}

impl<'tcx> QueryDescription for queries::collect_and_partition_translation_items<'tcx> {
fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
format!("collect_and_partition_translation_items")
}
}

impl<'tcx> QueryDescription for queries::codegen_unit<'tcx> {
fn describe(_tcx: TyCtxt, _: InternedString) -> String {
format!("codegen_unit")
}
}

impl<'tcx> QueryDescription for queries::compile_codegen_unit<'tcx> {
fn describe(_tcx: TyCtxt, _: InternedString) -> String {
format!("compile_codegen_unit")
}
}

impl<'tcx> QueryDescription for queries::output_filenames<'tcx> {
fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
format!("output_filenames")
}
}

// If enabled, send a message to the profile-queries thread
macro_rules! profq_msg {
($tcx:expr, $msg:expr) => {
@@ -1322,7 +1366,7 @@ define_maps! { <'tcx>
[] fn lint_levels: lint_levels_node(CrateNum) -> Rc<lint::LintLevelMap>,

[] fn impl_defaultness: ImplDefaultness(DefId) -> hir::Defaultness,
[] fn exported_symbols: ExportedSymbols(CrateNum) -> Rc<Vec<DefId>>,
[] fn exported_symbol_ids: ExportedSymbolIds(CrateNum) -> Rc<DefIdSet>,
[] fn native_libraries: NativeLibraries(CrateNum) -> Rc<Vec<NativeLibrary>>,
[] fn plugin_registrar_fn: PluginRegistrarFn(CrateNum) -> Option<DefId>,
[] fn derive_registrar_fn: DeriveRegistrarFn(CrateNum) -> Option<DefId>,
@@ -1371,6 +1415,19 @@ define_maps! { <'tcx>

[] fn stability_index: stability_index_node(CrateNum) -> Rc<stability::Index<'tcx>>,
[] fn all_crate_nums: all_crate_nums_node(CrateNum) -> Rc<Vec<CrateNum>>,

[] fn exported_symbols: ExportedSymbols(CrateNum)
-> Arc<Vec<(String, DefId, SymbolExportLevel)>>,
[] fn collect_and_partition_translation_items:
collect_and_partition_translation_items_node(CrateNum)
-> (Arc<DefIdSet>, Arc<Vec<Arc<CodegenUnit<'tcx>>>>),
[] fn export_name: ExportName(DefId) -> Option<Symbol>,
[] fn contains_extern_indicator: ContainsExternIndicator(DefId) -> bool,
[] fn is_translated_function: IsTranslatedFunction(DefId) -> bool,
[] fn codegen_unit: CodegenUnit(InternedString) -> Arc<CodegenUnit<'tcx>>,
[] fn compile_codegen_unit: CompileCodegenUnit(InternedString) -> Stats,
[] fn output_filenames: output_filenames_node(CrateNum)
-> Arc<OutputFilenames>,
}

fn type_param_predicates<'tcx>((item_id, param_id): (DefId, DefId)) -> DepConstructor<'tcx> {
@@ -1484,3 +1541,11 @@ fn stability_index_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
fn all_crate_nums_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
DepConstructor::AllCrateNums
}

fn collect_and_partition_translation_items_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
DepConstructor::CollectAndPartitionTranslationItems
}

fn output_filenames_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
DepConstructor::OutputFilenames
}
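Once these maps are defined, callers simply invoke them as methods on the tcx; a hedged usage sketch (only the query names and signatures come from the diff, the surrounding driver code is assumed):

// Illustrative only: drive codegen through the new queries.
let (_def_ids, codegen_units) = tcx.collect_and_partition_translation_items(LOCAL_CRATE);
for cgu in codegen_units.iter() {
    // compile_codegen_unit is keyed by the unit's interned name and yields Stats.
    let _stats = tcx.compile_codegen_unit(cgu.name().clone());
}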
@@ -121,7 +121,6 @@ mod sty;
#[derive(Clone)]
pub struct CrateAnalysis {
pub access_levels: Rc<AccessLevels>,
pub reachable: Rc<NodeSet>,
pub name: String,
pub glob_map: Option<hir::GlobMap>,
}
@@ -28,7 +28,6 @@ use rustc::mir::transform::{MIR_CONST, MIR_VALIDATED, MIR_OPTIMIZED, Passes};
use rustc::ty::{self, TyCtxt, Resolutions, GlobalArenas};
use rustc::traits;
use rustc::util::common::{ErrorReported, time};
use rustc::util::nodemap::NodeSet;
use rustc_allocator as allocator;
use rustc_borrowck as borrowck;
use rustc_incremental::{self, IncrementalHashesMap};
@@ -47,6 +46,7 @@ use super::Compilation;

use serialize::json;

use std::any::Any;
use std::env;
use std::ffi::{OsString, OsStr};
use std::fs;
@@ -54,6 +54,7 @@ use std::io::{self, Write};
use std::iter;
use std::path::{Path, PathBuf};
use std::rc::Rc;
use std::sync::mpsc;
use syntax::{ast, diagnostics, visit};
use syntax::attr;
use syntax::ext::base::ExtCtxt;
@@ -192,6 +193,7 @@ pub fn compile_input(sess: &Session,
&resolutions,
&expanded_crate,
&hir_map.krate(),
&outputs,
&crate_name),
Ok(()));
}
@@ -215,7 +217,8 @@ pub fn compile_input(sess: &Session,
&arena,
&arenas,
&crate_name,
|tcx, analysis, incremental_hashes_map, result| {
&outputs,
|tcx, analysis, incremental_hashes_map, rx, result| {
{
// Eventually, we will want to track plugins.
let _ignore = tcx.dep_graph.in_ignore();
@@ -243,8 +246,9 @@ pub fn compile_input(sess: &Session,
tcx.print_debug_stats();
}

let trans = phase_4_translate_to_llvm(tcx, analysis, incremental_hashes_map,
&outputs);
let trans = phase_4_translate_to_llvm(tcx,
incremental_hashes_map,
rx);

if log_enabled!(::log::LogLevel::Info) {
println!("Post-trans");
@@ -258,7 +262,7 @@ pub fn compile_input(sess: &Session,
}
}

Ok((outputs, trans, tcx.dep_graph.clone()))
Ok((outputs.clone(), trans, tcx.dep_graph.clone()))
})??
};

@@ -483,6 +487,7 @@ impl<'a, 'tcx> CompileState<'a, 'tcx> {
resolutions: &'a Resolutions,
krate: &'a ast::Crate,
hir_crate: &'a hir::Crate,
output_filenames: &'a OutputFilenames,
crate_name: &'a str)
-> Self {
CompileState {
@@ -495,6 +500,7 @@ impl<'a, 'tcx> CompileState<'a, 'tcx> {
resolutions: Some(resolutions),
expanded_crate: Some(krate),
hir_crate: Some(hir_crate),
output_filenames: Some(output_filenames),
out_file: out_file.as_ref().map(|s| &**s),
..CompileState::empty(input, session, out_dir)
}
@@ -885,7 +891,6 @@ pub fn phase_2_configure_and_expand<F>(sess: &Session,
defs: resolver.definitions,
analysis: ty::CrateAnalysis {
access_levels: Rc::new(AccessLevels::default()),
reachable: Rc::new(NodeSet()),
name: crate_name.to_string(),
glob_map: if resolver.make_glob_map { Some(resolver.glob_map) } else { None },
},
@@ -911,19 +916,21 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
arena: &'tcx DroplessArena,
arenas: &'tcx GlobalArenas<'tcx>,
name: &str,
output_filenames: &OutputFilenames,
f: F)
-> Result<R, CompileIncomplete>
where F: for<'a> FnOnce(TyCtxt<'a, 'tcx, 'tcx>,
ty::CrateAnalysis,
IncrementalHashesMap,
mpsc::Receiver<Box<Any + Send>>,
CompileResult) -> R
{
macro_rules! try_with_f {
($e: expr, ($t: expr, $a: expr, $h: expr)) => {
($e: expr, ($($t:tt)*)) => {
match $e {
Ok(x) => x,
Err(x) => {
f($t, $a, $h, Err(x));
f($($t)*, Err(x));
return Err(x);
}
}
@@ -958,7 +965,7 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
mir::provide(&mut local_providers);
reachable::provide(&mut local_providers);
rustc_privacy::provide(&mut local_providers);
trans::provide(&mut local_providers);
trans::provide_local(&mut local_providers);
typeck::provide(&mut local_providers);
ty::provide(&mut local_providers);
traits::provide(&mut local_providers);
@@ -970,7 +977,7 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,

let mut extern_providers = ty::maps::Providers::default();
cstore::provide(&mut extern_providers);
trans::provide(&mut extern_providers);
trans::provide_extern(&mut extern_providers);
ty::provide_extern(&mut extern_providers);
traits::provide_extern(&mut extern_providers);
// FIXME(eddyb) get rid of this once we replace const_eval with miri.
@@ -1030,6 +1037,8 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
passes.push_pass(MIR_OPTIMIZED, mir::transform::add_call_guards::CriticalCallEdges);
passes.push_pass(MIR_OPTIMIZED, mir::transform::dump_mir::Marker("PreTrans"));

let (tx, rx) = mpsc::channel();

TyCtxt::create_and_enter(sess,
cstore,
local_providers,
@@ -1041,6 +1050,8 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
named_region_map,
hir_map,
name,
tx,
output_filenames,
|tcx| {
let incremental_hashes_map =
time(time_passes,
@@ -1056,7 +1067,8 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
|| stability::check_unstable_api_usage(tcx));

// passes are timed inside typeck
try_with_f!(typeck::check_crate(tcx), (tcx, analysis, incremental_hashes_map));
try_with_f!(typeck::check_crate(tcx),
(tcx, analysis, incremental_hashes_map, rx));

time(time_passes,
"const checking",
@@ -1100,14 +1112,9 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
// lint warnings and so on -- kindck used to do this abort, but
// kindck is gone now). -nmatsakis
if sess.err_count() > 0 {
return Ok(f(tcx, analysis, incremental_hashes_map, sess.compile_status()));
return Ok(f(tcx, analysis, incremental_hashes_map, rx, sess.compile_status()));
}

analysis.reachable =
time(time_passes,
"reachability checking",
|| reachable::find_reachable(tcx));

time(time_passes, "death checking", || middle::dead::check_crate(tcx));

time(time_passes, "unused lib feature checking", || {
@@ -1116,16 +1123,15 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,

time(time_passes, "lint checking", || lint::check_crate(tcx));

return Ok(f(tcx, analysis, incremental_hashes_map, tcx.sess.compile_status()));
return Ok(f(tcx, analysis, incremental_hashes_map, rx, tcx.sess.compile_status()));
})
}

/// Run the translation phase to LLVM, after which the AST and analysis can
/// be discarded.
pub fn phase_4_translate_to_llvm<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
analysis: ty::CrateAnalysis,
incremental_hashes_map: IncrementalHashesMap,
output_filenames: &OutputFilenames)
rx: mpsc::Receiver<Box<Any + Send>>)
-> write::OngoingCrateTranslation {
let time_passes = tcx.sess.time_passes();

@@ -1134,9 +1140,9 @@ pub fn phase_4_translate_to_llvm<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|| ::rustc::middle::dependency_format::calculate(tcx));

let translation =
time(time_passes,
"translation",
move || trans::trans_crate(tcx, analysis, incremental_hashes_map, output_filenames));
time(time_passes, "translation", move || {
trans::trans_crate(tcx, incremental_hashes_map, rx)
});

if tcx.sess.profile_queries() {
profile::dump("profile_queries".to_string())
@@ -645,6 +645,7 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls {
ppm,
state.arena.unwrap(),
state.arenas.unwrap(),
state.output_filenames.unwrap(),
opt_uii.clone(),
state.out_file);
};
@@ -23,7 +23,7 @@ use rustc::cfg::graphviz::LabelledCFG;
use rustc::dep_graph::DepGraph;
use rustc::middle::cstore::CrateStore;
use rustc::session::Session;
use rustc::session::config::Input;
use rustc::session::config::{Input, OutputFilenames};
use rustc_borrowck as borrowck;
use rustc_borrowck::graphviz as borrowck_dot;

@@ -205,6 +205,7 @@ impl PpSourceMode {
resolutions: &Resolutions,
arena: &'tcx DroplessArena,
arenas: &'tcx GlobalArenas<'tcx>,
output_filenames: &OutputFilenames,
id: &str,
f: F)
-> A
@@ -235,7 +236,8 @@ impl PpSourceMode {
arena,
arenas,
id,
|tcx, _, _, _| {
output_filenames,
|tcx, _, _, _, _| {
let empty_tables = ty::TypeckTables::empty(None);
let annotation = TypedAnnotation {
tcx,
@@ -888,6 +890,7 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session,
ppm: PpMode,
arena: &'tcx DroplessArena,
arenas: &'tcx GlobalArenas<'tcx>,
output_filenames: &OutputFilenames,
opt_uii: Option<UserIdentifiedItem>,
ofile: Option<&Path>) {
let dep_graph = DepGraph::new(false);
@@ -902,6 +905,7 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session,
crate_name,
arena,
arenas,
output_filenames,
ppm,
opt_uii,
ofile);
@@ -940,6 +944,7 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session,
resolutions,
arena,
arenas,
output_filenames,
crate_name,
move |annotation, krate| {
debug!("pretty printing source code {:?}", s);
@@ -964,6 +969,7 @@ pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session,
resolutions,
arena,
arenas,
output_filenames,
crate_name,
move |annotation, _| {
debug!("pretty printing source code {:?}", s);
@@ -1007,6 +1013,7 @@ fn print_with_analysis<'tcx, 'a: 'tcx>(sess: &'a Session,
crate_name: &str,
arena: &'tcx DroplessArena,
arenas: &'tcx GlobalArenas<'tcx>,
output_filenames: &OutputFilenames,
ppm: PpMode,
uii: Option<UserIdentifiedItem>,
ofile: Option<&Path>) {
@@ -1028,7 +1035,8 @@ fn print_with_analysis<'tcx, 'a: 'tcx>(sess: &'a Session,
arena,
arenas,
crate_name,
|tcx, _, _, _| {
output_filenames,
|tcx, _, _, _, _| {
match ppm {
PpmMir | PpmMirCFG => {
if let Some(nodeid) = nodeid {
@@ -10,6 +10,9 @@

//! # Standalone Tests for the Inference Module

use std::path::PathBuf;
use std::sync::mpsc;

use driver;
use rustc_lint;
use rustc_resolve::MakeGlobMap;
@@ -26,6 +29,7 @@ use rustc_metadata::cstore::CStore;
use rustc::hir::map as hir_map;
use rustc::mir::transform::Passes;
use rustc::session::{self, config};
use rustc::session::config::{OutputFilenames, OutputTypes};
use std::rc::Rc;
use syntax::ast;
use syntax::abi::Abi;
@@ -133,6 +137,14 @@ fn test_env<F>(source_string: &str,

// run just enough stuff to build a tcx:
let named_region_map = resolve_lifetime::krate(&sess, &*cstore, &hir_map);
let (tx, _rx) = mpsc::channel();
let outputs = OutputFilenames {
out_directory: PathBuf::new(),
out_filestem: String::new(),
single_output_file: None,
extra: String::new(),
outputs: OutputTypes::new(&[]),
};
TyCtxt::create_and_enter(&sess,
&*cstore,
ty::maps::Providers::default(),
@@ -144,6 +156,8 @@ fn test_env<F>(source_string: &str,
named_region_map.unwrap(),
hir_map,
"test_crate",
tx,
&outputs,
|tcx| {
tcx.infer_ctxt().enter(|infcx| {
let mut region_scope_tree = region::ScopeTree::default();
@@ -175,7 +175,7 @@ provide! { <'tcx> tcx, def_id, other, cdata,
extern_crate => { Rc::new(cdata.extern_crate.get()) }
is_no_builtins => { cdata.is_no_builtins() }
impl_defaultness => { cdata.get_impl_defaultness(def_id.index) }
exported_symbols => { Rc::new(cdata.get_exported_symbols()) }
exported_symbol_ids => { Rc::new(cdata.get_exported_symbols()) }
native_libraries => { Rc::new(cdata.get_native_libraries()) }
plugin_registrar_fn => {
cdata.root.plugin_registrar_fn.map(|index| {
@@ -24,6 +24,7 @@ use rustc::middle::lang_items;
use rustc::session::Session;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::subst::Substs;
use rustc::util::nodemap::DefIdSet;

use rustc::mir::Mir;

@@ -1017,7 +1018,7 @@ impl<'a, 'tcx> CrateMetadata {
arg_names.decode(self).collect()
}

pub fn get_exported_symbols(&self) -> Vec<DefId> {
pub fn get_exported_symbols(&self) -> DefIdSet {
self.exported_symbols
.iter()
.map(|&index| self.local_def_id(index))
@@ -612,7 +612,7 @@ pub struct FnType<'tcx> {
impl<'a, 'tcx> FnType<'tcx> {
pub fn of_instance(ccx: &CrateContext<'a, 'tcx>, instance: &ty::Instance<'tcx>)
-> Self {
let fn_ty = instance_ty(ccx.shared(), &instance);
let fn_ty = instance_ty(ccx.tcx(), &instance);
let sig = ty_fn_sig(ccx, fn_ty);
let sig = ccx.tcx().erase_late_bound_regions_and_normalize(&sig);
Self::new(ccx, sig, &[])
@@ -15,16 +15,15 @@ use std::io::prelude::*;
use std::io::{self, BufWriter};
use std::path::{Path, PathBuf};

use context::SharedCrateContext;

use back::archive;
use back::command::Command;
use back::symbol_export::ExportedSymbols;
use rustc::middle::dependency_format::Linkage;
use back::symbol_export;
use rustc::hir::def_id::{LOCAL_CRATE, CrateNum};
use rustc_back::LinkerFlavor;
use rustc::middle::dependency_format::Linkage;
use rustc::session::Session;
use rustc::session::config::{self, CrateType, OptLevel, DebugInfoLevel};
use rustc::ty::TyCtxt;
use rustc_back::LinkerFlavor;
use serialize::{json, Encoder};

/// For all the linkers we support, and information they might
@@ -33,19 +32,18 @@ pub struct LinkerInfo {
exports: HashMap<CrateType, Vec<String>>,
}

impl<'a, 'tcx> LinkerInfo {
pub fn new(scx: &SharedCrateContext<'a, 'tcx>,
exports: &ExportedSymbols) -> LinkerInfo {
impl LinkerInfo {
pub fn new(tcx: TyCtxt) -> LinkerInfo {
LinkerInfo {
exports: scx.sess().crate_types.borrow().iter().map(|&c| {
(c, exported_symbols(scx, exports, c))
exports: tcx.sess.crate_types.borrow().iter().map(|&c| {
(c, exported_symbols(tcx, c))
}).collect(),
}
}

pub fn to_linker(&'a self,
cmd: Command,
sess: &'a Session) -> Box<Linker+'a> {
pub fn to_linker<'a>(&'a self,
cmd: Command,
sess: &'a Session) -> Box<Linker+'a> {
match sess.linker_flavor() {
LinkerFlavor::Msvc => {
Box::new(MsvcLinker {
@@ -734,16 +732,17 @@ impl<'a> Linker for EmLinker<'a> {
}
}

fn exported_symbols(scx: &SharedCrateContext,
exported_symbols: &ExportedSymbols,
crate_type: CrateType)
-> Vec<String> {
fn exported_symbols(tcx: TyCtxt, crate_type: CrateType) -> Vec<String> {
let mut symbols = Vec::new();
exported_symbols.for_each_exported_symbol(LOCAL_CRATE, |name, _, _| {
symbols.push(name.to_owned());
});

let formats = scx.sess().dependency_formats.borrow();
let export_threshold = symbol_export::threshold(tcx);
for &(ref name, _, level) in tcx.exported_symbols(LOCAL_CRATE).iter() {
if level.is_below_threshold(export_threshold) {
symbols.push(name.clone());
}
}

let formats = tcx.sess.dependency_formats.borrow();
let deps = formats[&crate_type].iter();

for (index, dep_format) in deps.enumerate() {
@@ -751,9 +750,11 @@ fn exported_symbols(scx: &SharedCrateContext,
// For each dependency that we are linking to statically ...
if *dep_format == Linkage::Static {
// ... we add its symbol list to our export list.
exported_symbols.for_each_exported_symbol(cnum, |name, _, _| {
symbols.push(name.to_owned());
})
for &(ref name, _, level) in tcx.exported_symbols(cnum).iter() {
if level.is_below_threshold(export_threshold) {
symbols.push(name.clone());
}
}
}
}
@@ -16,6 +16,7 @@ use errors::{FatalError, Handler};
use llvm;
use llvm::archive_ro::ArchiveRO;
use llvm::{ModuleRef, TargetMachineRef, True, False};
use rustc::middle::exported_symbols::SymbolExportLevel;
use rustc::util::common::time;
use rustc::util::common::path2cstr;
use rustc::hir::def_id::LOCAL_CRATE;
@@ -67,8 +68,8 @@ pub fn run(cgcx: &CodegenContext,
let export_threshold =
symbol_export::crates_export_threshold(&cgcx.crate_types);

let symbol_filter = &|&(ref name, _, level): &(String, _, _)| {
if symbol_export::is_below_threshold(level, export_threshold) {
let symbol_filter = &|&(ref name, _, level): &(String, _, SymbolExportLevel)| {
if level.is_below_threshold(export_threshold) {
let mut bytes = Vec::with_capacity(name.len() + 1);
bytes.extend(name.bytes());
Some(CString::new(bytes).unwrap())
@@ -77,8 +78,7 @@ pub fn run(cgcx: &CodegenContext,
}
};

let mut symbol_white_list: Vec<CString> = cgcx.exported_symbols
.exported_symbols(LOCAL_CRATE)
let mut symbol_white_list: Vec<CString> = cgcx.exported_symbols[&LOCAL_CRATE]
.iter()
.filter_map(symbol_filter)
.collect();
@@ -88,9 +88,9 @@ pub fn run(cgcx: &CodegenContext,
// module that we've got.
for &(cnum, ref path) in cgcx.each_linked_rlib_for_lto.iter() {
symbol_white_list.extend(
cgcx.exported_symbols.exported_symbols(cnum)
.iter()
.filter_map(symbol_filter));
cgcx.exported_symbols[&cnum]
.iter()
.filter_map(symbol_filter));

let archive = ArchiveRO::open(&path).expect("wanted an rlib");
let bytecodes = archive.iter().filter_map(|child| {
@@ -8,46 +8,83 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use std::rc::Rc;
use std::sync::Arc;

use base;
use monomorphize::Instance;
use rustc::util::nodemap::{FxHashMap, NodeSet};
use rustc::hir::def_id::{DefId, CrateNum, LOCAL_CRATE, INVALID_CRATE, CRATE_DEF_INDEX};
use rustc::hir::def_id::CrateNum;
use rustc::hir::def_id::{DefId, LOCAL_CRATE, INVALID_CRATE, CRATE_DEF_INDEX};
use rustc::middle::exported_symbols::SymbolExportLevel;
use rustc::session::config;
use rustc::ty::TyCtxt;
use rustc::ty::maps::Providers;
use rustc::util::nodemap::FxHashMap;
use rustc_allocator::ALLOCATOR_METHODS;
use syntax::attr;

/// The SymbolExportLevel of a symbols specifies from which kinds of crates
/// the symbol will be exported. `C` symbols will be exported from any
/// kind of crate, including cdylibs which export very few things.
/// `Rust` will only be exported if the crate produced is a Rust
/// dylib.
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
pub enum SymbolExportLevel {
C,
Rust,
pub type ExportedSymbols = FxHashMap<
CrateNum,
Arc<Vec<(String, DefId, SymbolExportLevel)>>,
>;

pub fn threshold(tcx: TyCtxt) -> SymbolExportLevel {
crates_export_threshold(&tcx.sess.crate_types.borrow())
}

/// The set of symbols exported from each crate in the crate graph.
#[derive(Debug)]
pub struct ExportedSymbols {
pub export_threshold: SymbolExportLevel,
exports: FxHashMap<CrateNum, Vec<(String, DefId, SymbolExportLevel)>>,
local_exports: NodeSet,
pub fn metadata_symbol_name(tcx: TyCtxt) -> String {
format!("rust_metadata_{}_{}",
tcx.crate_name(LOCAL_CRATE),
tcx.crate_disambiguator(LOCAL_CRATE))
}

impl ExportedSymbols {
pub fn empty() -> ExportedSymbols {
ExportedSymbols {
export_threshold: SymbolExportLevel::C,
exports: FxHashMap(),
local_exports: NodeSet(),
}
fn crate_export_threshold(crate_type: config::CrateType) -> SymbolExportLevel {
match crate_type {
config::CrateTypeExecutable |
config::CrateTypeStaticlib |
config::CrateTypeProcMacro |
config::CrateTypeCdylib => SymbolExportLevel::C,
config::CrateTypeRlib |
config::CrateTypeDylib => SymbolExportLevel::Rust,
}
}

pub fn compute<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
local_exported_symbols: &NodeSet)
-> ExportedSymbols {
let export_threshold = crates_export_threshold(&tcx.sess.crate_types.borrow());
pub fn crates_export_threshold(crate_types: &[config::CrateType])
-> SymbolExportLevel {
if crate_types.iter().any(|&crate_type| {
crate_export_threshold(crate_type) == SymbolExportLevel::Rust
}) {
SymbolExportLevel::Rust
} else {
SymbolExportLevel::C
}
}

pub fn provide_local(providers: &mut Providers) {
providers.exported_symbol_ids = |tcx, cnum| {
let export_threshold = threshold(tcx);
Rc::new(tcx.exported_symbols(cnum)
.iter()
.filter_map(|&(_, id, level)| {
if level.is_below_threshold(export_threshold) {
Some(id)
} else {
None
}
})
.collect())
};

providers.is_exported_symbol = |tcx, id| {
// FIXME(#42293) needs red/green to not break a bunch of incremental
// tests
tcx.dep_graph.with_ignore(|| {
tcx.exported_symbol_ids(id.krate).contains(&id)
})
};

providers.exported_symbols = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
let local_exported_symbols = base::find_exported_symbols(tcx);

let mut local_crate: Vec<_> = local_exported_symbols
.iter()
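The threshold computation above can be illustrated with a short hedged example (the crate-type combination is an editorial addition): a build that produces both an rlib and a cdylib gets a Rust export threshold, because any Rust-dylib-like crate type wins.

// Illustrative only, following crates_export_threshold above.
let threshold = crates_export_threshold(&[config::CrateTypeRlib, config::CrateTypeCdylib]);
assert_eq!(threshold, SymbolExportLevel::Rust);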
@@ -62,17 +99,6 @@ impl ExportedSymbols {
})
.collect();

let mut local_exports = local_crate
.iter()
.filter_map(|&(_, def_id, level)| {
if is_below_threshold(level, export_threshold) {
tcx.hir.as_local_node_id(def_id)
} else {
None
}
})
.collect::<NodeSet>();

const INVALID_DEF_ID: DefId = DefId {
krate: INVALID_CRATE,
index: CRATE_DEF_INDEX,
@@ -98,7 +124,6 @@ impl ExportedSymbols {
let disambiguator = tcx.sess.local_crate_disambiguator();
let registrar = tcx.sess.generate_derive_registrar_symbol(disambiguator, idx);
local_crate.push((registrar, def_id, SymbolExportLevel::C));
local_exports.insert(id);
}

if tcx.sess.crate_types.borrow().contains(&config::CrateTypeDylib) {
@@ -106,141 +131,65 @@ impl ExportedSymbols {
INVALID_DEF_ID,
SymbolExportLevel::Rust));
}
Arc::new(local_crate)
};
}

let mut exports = FxHashMap();
exports.insert(LOCAL_CRATE, local_crate);
pub fn provide_extern(providers: &mut Providers) {
providers.exported_symbols = |tcx, cnum| {
// If this crate is a plugin and/or a custom derive crate, then
// we're not even going to link those in so we skip those crates.
if tcx.plugin_registrar_fn(cnum).is_some() ||
tcx.derive_registrar_fn(cnum).is_some() {
return Arc::new(Vec::new())
}

for &cnum in tcx.crates().iter() {
debug_assert!(cnum != LOCAL_CRATE);
// Check to see if this crate is a "special runtime crate". These
// crates, implementation details of the standard library, typically
// have a bunch of `pub extern` and `#[no_mangle]` functions as the
// ABI between them. We don't want their symbols to have a `C`
// export level, however, as they're just implementation details.
// Down below we'll hardwire all of the symbols to the `Rust` export
// level instead.
let special_runtime_crate =
tcx.is_panic_runtime(cnum) || tcx.is_compiler_builtins(cnum);

// If this crate is a plugin and/or a custom derive crate, then
// we're not even going to link those in so we skip those crates.
if tcx.plugin_registrar_fn(cnum).is_some() ||
tcx.derive_registrar_fn(cnum).is_some() {
continue;
}

// Check to see if this crate is a "special runtime crate". These
// crates, implementation details of the standard library, typically
// have a bunch of `pub extern` and `#[no_mangle]` functions as the
// ABI between them. We don't want their symbols to have a `C`
// export level, however, as they're just implementation details.
// Down below we'll hardwire all of the symbols to the `Rust` export
// level instead.
let special_runtime_crate =
tcx.is_panic_runtime(cnum) || tcx.is_compiler_builtins(cnum);

let crate_exports = tcx
.exported_symbols(cnum)
.iter()
.map(|&def_id| {
let name = tcx.symbol_name(Instance::mono(tcx, def_id));
let export_level = if special_runtime_crate {
// We can probably do better here by just ensuring that
// it has hidden visibility rather than public
// visibility, as this is primarily here to ensure it's
// not stripped during LTO.
//
// In general though we won't link right if these
// symbols are stripped, and LTO currently strips them.
if &*name == "rust_eh_personality" ||
&*name == "rust_eh_register_frames" ||
&*name == "rust_eh_unregister_frames" {
SymbolExportLevel::C
} else {
SymbolExportLevel::Rust
}
let crate_exports = tcx
.exported_symbol_ids(cnum)
.iter()
.map(|&def_id| {
let name = tcx.symbol_name(Instance::mono(tcx, def_id));
let export_level = if special_runtime_crate {
// We can probably do better here by just ensuring that
// it has hidden visibility rather than public
// visibility, as this is primarily here to ensure it's
// not stripped during LTO.
//
// In general though we won't link right if these
// symbols are stripped, and LTO currently strips them.
if &*name == "rust_eh_personality" ||
&*name == "rust_eh_register_frames" ||
&*name == "rust_eh_unregister_frames" {
SymbolExportLevel::C
} else {
export_level(tcx, def_id)
};
debug!("EXPORTED SYMBOL (re-export): {} ({:?})", name, export_level);
(str::to_owned(&name), def_id, export_level)
})
.collect();
SymbolExportLevel::Rust
}
} else {
export_level(tcx, def_id)
};
debug!("EXPORTED SYMBOL (re-export): {} ({:?})", name, export_level);
(str::to_owned(&name), def_id, export_level)
})
.collect();

exports.insert(cnum, crate_exports);
}

return ExportedSymbols {
export_threshold,
exports,
local_exports,
};

fn export_level(tcx: TyCtxt,
sym_def_id: DefId)
-> SymbolExportLevel {
let attrs = tcx.get_attrs(sym_def_id);
if attr::contains_extern_indicator(tcx.sess.diagnostic(), &attrs) {
SymbolExportLevel::C
} else {
SymbolExportLevel::Rust
}
}
}

pub fn local_exports(&self) -> &NodeSet {
&self.local_exports
}

pub fn exported_symbols(&self,
cnum: CrateNum)
-> &[(String, DefId, SymbolExportLevel)] {
match self.exports.get(&cnum) {
Some(exports) => exports,
None => &[]
}
}

pub fn for_each_exported_symbol<F>(&self,
cnum: CrateNum,
mut f: F)
where F: FnMut(&str, DefId, SymbolExportLevel)
{
for &(ref name, def_id, export_level) in self.exported_symbols(cnum) {
if is_below_threshold(export_level, self.export_threshold) {
f(&name, def_id, export_level)
}
}
}
Arc::new(crate_exports)
};
}

pub fn metadata_symbol_name(tcx: TyCtxt) -> String {
format!("rust_metadata_{}_{}",
tcx.crate_name(LOCAL_CRATE),
tcx.crate_disambiguator(LOCAL_CRATE))
}

pub fn crate_export_threshold(crate_type: config::CrateType)
-> SymbolExportLevel {
match crate_type {
config::CrateTypeExecutable |
config::CrateTypeStaticlib |
config::CrateTypeProcMacro |
config::CrateTypeCdylib => SymbolExportLevel::C,
config::CrateTypeRlib |
config::CrateTypeDylib => SymbolExportLevel::Rust,
}
}

pub fn crates_export_threshold(crate_types: &[config::CrateType])
-> SymbolExportLevel {
if crate_types.iter().any(|&crate_type| {
crate_export_threshold(crate_type) == SymbolExportLevel::Rust
}) {
SymbolExportLevel::Rust
} else {
fn export_level(tcx: TyCtxt, sym_def_id: DefId) -> SymbolExportLevel {
if tcx.contains_extern_indicator(sym_def_id) {
SymbolExportLevel::C
}
}

pub fn is_below_threshold(level: SymbolExportLevel,
threshold: SymbolExportLevel)
-> bool {
if threshold == SymbolExportLevel::Rust {
// We export everything from Rust dylibs
true
} else {
level == SymbolExportLevel::C
SymbolExportLevel::Rust
}
}
@@ -119,6 +119,30 @@ pub fn provide(providers: &mut Providers) {
*providers = Providers {
def_symbol_name,
symbol_name,

export_name: |tcx, id| {
tcx.get_attrs(id).iter().fold(None, |ia, attr| {
if attr.check_name("export_name") {
if let s @ Some(_) = attr.value_str() {
s
} else {
struct_span_err!(tcx.sess, attr.span, E0558,
"export_name attribute has invalid format")
.span_label(attr.span, "did you mean #[export_name=\"*\"]?")
.emit();
None
}
} else {
ia
}
})
},

contains_extern_indicator: |tcx, id| {
attr::contains_name(&tcx.get_attrs(id), "no_mangle") ||
tcx.export_name(id).is_some()
},

..*providers
};
}
@@ -245,7 +269,7 @@ fn compute_symbol_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, instance: Instance
return tcx.item_name(def_id).to_string();
}

if let Some(name) = attr::find_export_name_attr(tcx.sess.diagnostic(), &attrs) {
if let Some(name) = tcx.export_name(def_id) {
// Use provided name
return name.to_string();
}
@ -18,17 +18,20 @@ use rustc::middle::cstore::{LinkMeta, EncodedMetadata};
|
||||
use rustc::session::config::{self, OutputFilenames, OutputType, OutputTypes, Passes, SomePasses,
|
||||
AllPasses, Sanitizer};
|
||||
use rustc::session::Session;
|
||||
use rustc::util::nodemap::FxHashMap;
|
||||
use time_graph::{self, TimeGraph};
|
||||
use llvm;
|
||||
use llvm::{ModuleRef, TargetMachineRef, PassManagerRef, DiagnosticInfoRef};
|
||||
use llvm::SMDiagnosticRef;
|
||||
use {CrateTranslation, ModuleSource, ModuleTranslation, CompiledModule, ModuleKind};
|
||||
use CrateInfo;
|
||||
use rustc::hir::def_id::CrateNum;
|
||||
use rustc::hir::def_id::{CrateNum, LOCAL_CRATE};
|
||||
use rustc::ty::TyCtxt;
|
||||
use rustc::util::common::{time, time_depth, set_time_depth, path2cstr, print_time_passes_entry};
|
||||
use rustc::util::fs::{link_or_copy, rename_or_copy_remove};
|
||||
use errors::{self, Handler, Level, DiagnosticBuilder, FatalError};
|
||||
use errors::emitter::{Emitter};
|
||||
use syntax::attr;
|
||||
use syntax::ext::hygiene::Mark;
|
||||
use syntax_pos::MultiSpan;
|
||||
use syntax_pos::symbol::Symbol;
|
||||
@ -36,6 +39,7 @@ use context::{is_pie_binary, get_reloc_model};
|
||||
use jobserver::{Client, Acquired};
|
||||
use rustc_demangle;
|
||||
|
||||
use std::any::Any;
|
||||
use std::ffi::CString;
|
||||
use std::fmt;
|
||||
use std::fs;
|
||||
@ -199,8 +203,6 @@ pub fn create_target_machine(sess: &Session) -> TargetMachineRef {
|
||||
|
||||
/// Module-specific configuration for `optimize_and_codegen`.
|
||||
pub struct ModuleConfig {
|
||||
/// LLVM TargetMachine to use for codegen.
|
||||
tm: TargetMachineRef,
|
||||
/// Names of additional optimization passes to run.
|
||||
passes: Vec<String>,
|
||||
/// Some(level) to optimize at a certain level, or None to run
|
||||
@ -233,12 +235,9 @@ pub struct ModuleConfig {
|
||||
obj_is_bitcode: bool,
|
||||
}
|
||||
|
||||
unsafe impl Send for ModuleConfig { }
|
||||
|
||||
impl ModuleConfig {
|
||||
fn new(sess: &Session, passes: Vec<String>) -> ModuleConfig {
|
||||
fn new(passes: Vec<String>) -> ModuleConfig {
|
||||
ModuleConfig {
|
||||
tm: create_target_machine(sess),
|
||||
passes,
|
||||
opt_level: None,
|
||||
opt_size: None,
|
||||
@ -286,40 +285,6 @@ impl ModuleConfig {
|
||||
self.merge_functions = sess.opts.optimize == config::OptLevel::Default ||
|
||||
sess.opts.optimize == config::OptLevel::Aggressive;
|
||||
}
|
||||
|
||||
fn clone(&self, sess: &Session) -> ModuleConfig {
|
||||
ModuleConfig {
|
||||
tm: create_target_machine(sess),
|
||||
passes: self.passes.clone(),
|
||||
opt_level: self.opt_level,
|
||||
opt_size: self.opt_size,
|
||||
|
||||
emit_no_opt_bc: self.emit_no_opt_bc,
|
||||
emit_bc: self.emit_bc,
|
||||
emit_lto_bc: self.emit_lto_bc,
|
||||
emit_ir: self.emit_ir,
|
||||
emit_asm: self.emit_asm,
|
||||
emit_obj: self.emit_obj,
|
||||
obj_is_bitcode: self.obj_is_bitcode,
|
||||
|
||||
no_verify: self.no_verify,
|
||||
no_prepopulate_passes: self.no_prepopulate_passes,
|
||||
no_builtins: self.no_builtins,
|
||||
time_passes: self.time_passes,
|
||||
vectorize_loop: self.vectorize_loop,
|
||||
vectorize_slp: self.vectorize_slp,
|
||||
merge_functions: self.merge_functions,
|
||||
inline_threshold: self.inline_threshold,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for ModuleConfig {
|
||||
fn drop(&mut self) {
|
||||
unsafe {
|
||||
llvm::LLVMRustDisposeTargetMachine(self.tm);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Additional resources used by optimize_and_codegen (not module specific)
|
||||
@ -333,6 +298,11 @@ pub struct CodegenContext {
|
||||
pub opts: Arc<config::Options>,
|
||||
pub crate_types: Vec<config::CrateType>,
|
||||
pub each_linked_rlib_for_lto: Vec<(CrateNum, PathBuf)>,
|
||||
output_filenames: Arc<OutputFilenames>,
|
||||
regular_module_config: Arc<ModuleConfig>,
|
||||
metadata_module_config: Arc<ModuleConfig>,
|
||||
allocator_module_config: Arc<ModuleConfig>,
|
||||
|
||||
// Handler to use for diagnostics produced during codegen.
|
||||
pub diag_emitter: SharedEmitter,
|
||||
// LLVM passes added by plugins.
|
||||
@ -345,7 +315,7 @@ pub struct CodegenContext {
|
||||
// compiling incrementally
|
||||
pub incr_comp_session_dir: Option<PathBuf>,
|
||||
// Channel back to the main control thread to send messages to
|
||||
coordinator_send: Sender<Message>,
|
||||
coordinator_send: Sender<Box<Any + Send>>,
|
||||
// A reference to the TimeGraph so we can register timings. None means that
|
||||
// measuring is disabled.
|
||||
time_graph: Option<TimeGraph>,
|
||||
@ -355,6 +325,14 @@ impl CodegenContext {
|
||||
fn create_diag_handler(&self) -> Handler {
|
||||
Handler::with_emitter(true, false, Box::new(self.diag_emitter.clone()))
|
||||
}
|
||||
|
||||
fn config(&self, kind: ModuleKind) -> &ModuleConfig {
|
||||
match kind {
ModuleKind::Regular => &self.regular_module_config,
ModuleKind::Metadata => &self.metadata_module_config,
ModuleKind::Allocator => &self.allocator_module_config,
}
}
}

struct HandlerFreeVars<'a> {
@ -414,8 +392,8 @@ unsafe extern "C" fn diagnostic_handler(info: DiagnosticInfoRef, user: *mut c_vo
unsafe fn optimize_and_codegen(cgcx: &CodegenContext,
diag_handler: &Handler,
mtrans: ModuleTranslation,
config: ModuleConfig,
output_names: OutputFilenames)
tm: TargetMachineRef,
config: &ModuleConfig)
-> Result<CompiledModule, FatalError>
{
let (llmod, llcx) = match mtrans.source {
@ -425,8 +403,6 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext,
}
};

let tm = config.tm;

let fv = HandlerFreeVars {
cgcx,
diag_handler,
@ -440,7 +416,7 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext,
let module_name = Some(&module_name[..]);

if config.emit_no_opt_bc {
let out = output_names.temp_path_ext("no-opt.bc", module_name);
let out = cgcx.output_filenames.temp_path_ext("no-opt.bc", module_name);
let out = path2cstr(&out);
llvm::LLVMWriteBitcodeToFile(llmod, out.as_ptr());
}
@ -513,7 +489,7 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext,
if cgcx.lto {
time(cgcx.time_passes, "all lto passes", || {
let temp_no_opt_bc_filename =
output_names.temp_path_ext("no-opt.lto.bc", module_name);
cgcx.output_filenames.temp_path_ext("no-opt.lto.bc", module_name);
lto::run(cgcx,
diag_handler,
llmod,
@ -522,7 +498,7 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext,
&temp_no_opt_bc_filename)
})?;
if config.emit_lto_bc {
let out = output_names.temp_path_ext("lto.bc", module_name);
let out = cgcx.output_filenames.temp_path_ext("lto.bc", module_name);
let out = path2cstr(&out);
llvm::LLVMWriteBitcodeToFile(llmod, out.as_ptr());
}
@ -558,8 +534,8 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext,
let write_obj = config.emit_obj && !config.obj_is_bitcode;
let copy_bc_to_obj = config.emit_obj && config.obj_is_bitcode;

let bc_out = output_names.temp_path(OutputType::Bitcode, module_name);
let obj_out = output_names.temp_path(OutputType::Object, module_name);
let bc_out = cgcx.output_filenames.temp_path(OutputType::Bitcode, module_name);
let obj_out = cgcx.output_filenames.temp_path(OutputType::Object, module_name);

if write_bc {
let bc_out_c = path2cstr(&bc_out);
@ -569,7 +545,7 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext,
time(config.time_passes, &format!("codegen passes [{}]", module_name.unwrap()),
|| -> Result<(), FatalError> {
if config.emit_ir {
let out = output_names.temp_path(OutputType::LlvmAssembly, module_name);
let out = cgcx.output_filenames.temp_path(OutputType::LlvmAssembly, module_name);
let out = path2cstr(&out);

extern "C" fn demangle_callback(input_ptr: *const c_char,
@ -610,7 +586,7 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext,
}

if config.emit_asm {
let path = output_names.temp_path(OutputType::Assembly, module_name);
let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);

// We can't use the same module for asm and binary output, because that triggers
// various errors like invalid IR or broken binaries, so we might have to clone the
@ -667,19 +643,34 @@ fn need_crate_bitcode_for_rlib(sess: &Session) -> bool {
sess.opts.output_types.contains_key(&OutputType::Exe)
}

pub fn start_async_translation(sess: &Session,
crate_output: &OutputFilenames,
pub fn start_async_translation(tcx: TyCtxt,
time_graph: Option<TimeGraph>,
crate_name: Symbol,
link: LinkMeta,
metadata: EncodedMetadata,
exported_symbols: Arc<ExportedSymbols>,
no_builtins: bool,
windows_subsystem: Option<String>,
linker_info: LinkerInfo,
crate_info: CrateInfo,
no_integrated_as: bool)
coordinator_receive: Receiver<Box<Any + Send>>)
-> OngoingCrateTranslation {
let sess = tcx.sess;
let crate_output = tcx.output_filenames(LOCAL_CRATE);
let crate_name = tcx.crate_name(LOCAL_CRATE);
let no_builtins = attr::contains_name(&tcx.hir.krate().attrs, "no_builtins");
let subsystem = attr::first_attr_value_str_by_name(&tcx.hir.krate().attrs,
"windows_subsystem");
let windows_subsystem = subsystem.map(|subsystem| {
if subsystem != "windows" && subsystem != "console" {
tcx.sess.fatal(&format!("invalid windows subsystem `{}`, only \
`windows` and `console` are allowed",
subsystem));
}
subsystem.to_string()
});

let no_integrated_as = tcx.sess.opts.cg.no_integrated_as ||
(tcx.sess.target.target.options.no_integrated_as &&
(crate_output.outputs.contains_key(&OutputType::Object) ||
crate_output.outputs.contains_key(&OutputType::Exe)));
let linker_info = LinkerInfo::new(tcx);
let crate_info = CrateInfo::new(tcx);

let output_types_override = if no_integrated_as {
OutputTypes::new(&[(OutputType::Assembly, None)])
} else {
@ -687,9 +678,9 @@ pub fn start_async_translation(sess: &Session,
};

// Figure out what we actually need to build.
let mut modules_config = ModuleConfig::new(sess, sess.opts.cg.passes.clone());
let mut metadata_config = ModuleConfig::new(sess, vec![]);
let mut allocator_config = ModuleConfig::new(sess, vec![]);
let mut modules_config = ModuleConfig::new(sess.opts.cg.passes.clone());
let mut metadata_config = ModuleConfig::new(vec![]);
let mut allocator_config = ModuleConfig::new(vec![]);

if let Some(ref sanitizer) = sess.opts.debugging_opts.sanitizer {
match *sanitizer {
@ -774,17 +765,18 @@ pub fn start_async_translation(sess: &Session,

let (shared_emitter, shared_emitter_main) = SharedEmitter::new();
let (trans_worker_send, trans_worker_receive) = channel();
let (coordinator_send, coordinator_receive) = channel();

let coordinator_thread = start_executing_work(sess,
let coordinator_thread = start_executing_work(tcx,
&crate_info,
shared_emitter,
trans_worker_send,
coordinator_send.clone(),
coordinator_receive,
client,
time_graph.clone(),
exported_symbols.clone());
Arc::new(modules_config),
Arc::new(metadata_config),
Arc::new(allocator_config));

OngoingCrateTranslation {
crate_name,
link,
@ -794,16 +786,12 @@ pub fn start_async_translation(sess: &Session,
no_integrated_as,
crate_info,

regular_module_config: modules_config,
metadata_module_config: metadata_config,
allocator_module_config: allocator_config,

time_graph,
output_filenames: crate_output.clone(),
coordinator_send,
coordinator_send: tcx.tx_to_llvm_workers.clone(),
trans_worker_receive,
shared_emitter_main,
future: coordinator_thread
future: coordinator_thread,
output_filenames: tcx.output_filenames(LOCAL_CRATE),
}
}

@ -1004,8 +992,7 @@ pub fn dump_incremental_data(trans: &CrateTranslation) {

struct WorkItem {
mtrans: ModuleTranslation,
config: ModuleConfig,
output_names: OutputFilenames
tm: TargetMachine,
}

impl fmt::Debug for WorkItem {
@ -1014,15 +1001,15 @@ impl fmt::Debug for WorkItem {
}
}

fn build_work_item(mtrans: ModuleTranslation,
config: ModuleConfig,
output_names: OutputFilenames)
-> WorkItem
{
WorkItem {
mtrans,
config,
output_names,
struct TargetMachine(TargetMachineRef);

unsafe impl Send for TargetMachine {}

impl Drop for TargetMachine {
fn drop(&mut self) {
unsafe {
llvm::LLVMRustDisposeTargetMachine(self.0);
}
}
}

@ -1031,6 +1018,7 @@ fn execute_work_item(cgcx: &CodegenContext, work_item: WorkItem)
{
let diag_handler = cgcx.create_diag_handler();
let module_name = work_item.mtrans.name.clone();
let config = cgcx.config(work_item.mtrans.kind);

let pre_existing = match work_item.mtrans.source {
ModuleSource::Translated(_) => None,
@ -1043,7 +1031,7 @@ fn execute_work_item(cgcx: &CodegenContext, work_item: WorkItem)
.unwrap();
let name = &work_item.mtrans.name;
for (kind, saved_file) in wp.saved_files {
let obj_out = work_item.output_names.temp_path(kind, Some(name));
let obj_out = cgcx.output_filenames.temp_path(kind, Some(name));
let source_file = in_incr_comp_dir(&incr_comp_session_dir,
&saved_file);
debug!("copying pre-existing module `{}` from {:?} to {}",
@ -1066,8 +1054,8 @@ fn execute_work_item(cgcx: &CodegenContext, work_item: WorkItem)
kind: ModuleKind::Regular,
pre_existing: true,
symbol_name_hash: work_item.mtrans.symbol_name_hash,
emit_bc: work_item.config.emit_bc,
emit_obj: work_item.config.emit_obj,
emit_bc: config.emit_bc,
emit_obj: config.emit_obj,
})
} else {
debug!("llvm-optimizing {:?}", module_name);
@ -1076,8 +1064,8 @@ fn execute_work_item(cgcx: &CodegenContext, work_item: WorkItem)
optimize_and_codegen(cgcx,
&diag_handler,
work_item.mtrans,
work_item.config,
work_item.output_names)
work_item.tm.0,
config)
}
}
}
@ -1092,8 +1080,8 @@ enum Message {
TranslationDone {
llvm_work_item: WorkItem,
cost: u64,
is_last: bool,
},
TranslationComplete,
TranslateItem,
}

@ -1110,16 +1098,26 @@ enum MainThreadWorkerState {
LLVMing,
}

fn start_executing_work(sess: &Session,
fn start_executing_work(tcx: TyCtxt,
crate_info: &CrateInfo,
shared_emitter: SharedEmitter,
trans_worker_send: Sender<Message>,
coordinator_send: Sender<Message>,
coordinator_receive: Receiver<Message>,
coordinator_receive: Receiver<Box<Any + Send>>,
jobserver: Client,
time_graph: Option<TimeGraph>,
exported_symbols: Arc<ExportedSymbols>)
modules_config: Arc<ModuleConfig>,
metadata_config: Arc<ModuleConfig>,
allocator_config: Arc<ModuleConfig>)
-> thread::JoinHandle<CompiledModules> {
let coordinator_send = tcx.tx_to_llvm_workers.clone();
let mut exported_symbols = FxHashMap();
exported_symbols.insert(LOCAL_CRATE, tcx.exported_symbols(LOCAL_CRATE));
for &cnum in tcx.crates().iter() {
exported_symbols.insert(cnum, tcx.exported_symbols(cnum));
}
let exported_symbols = Arc::new(exported_symbols);
let sess = tcx.sess;

// First up, convert our jobserver into a helper thread so we can use normal
// mpsc channels to manage our messages and such. Once we've got the helper
// thread then request `n-1` tokens because all of our work items are ready
@ -1132,7 +1130,7 @@ fn start_executing_work(sess: &Session,
// tokens on `rx` above which will get managed in the main loop below.
let coordinator_send2 = coordinator_send.clone();
let helper = jobserver.into_helper_thread(move |token| {
drop(coordinator_send2.send(Message::Token(token)));
drop(coordinator_send2.send(Box::new(Message::Token(token))));
}).expect("failed to spawn helper thread");

let mut each_linked_rlib_for_lto = Vec::new();
@ -1158,6 +1156,10 @@ fn start_executing_work(sess: &Session,
coordinator_send,
diag_emitter: shared_emitter.clone(),
time_graph,
output_filenames: tcx.output_filenames(LOCAL_CRATE),
regular_module_config: modules_config,
metadata_module_config: metadata_config,
allocator_module_config: allocator_config,
};

// This is the "main loop" of parallel work happening for parallel codegen.
@ -1307,7 +1309,7 @@ fn start_executing_work(sess: &Session,
let mut translation_done = false;

// This is the queue of LLVM work items that still need processing.
let mut work_items = Vec::new();
let mut work_items = Vec::<(WorkItem, u64)>::new();

// This are the Jobserver Tokens we currently hold. Does not include
// the implicit Token the compiler process owns no matter what.
@ -1346,7 +1348,8 @@ fn start_executing_work(sess: &Session,
worker: get_worker_id(&mut free_worker_ids),
.. cgcx.clone()
};
maybe_start_llvm_timer(&item, &mut llvm_start_time);
maybe_start_llvm_timer(cgcx.config(item.mtrans.kind),
&mut llvm_start_time);
main_thread_worker_state = MainThreadWorkerState::LLVMing;
spawn_work(cgcx, item);
}
@ -1362,7 +1365,8 @@ fn start_executing_work(sess: &Session,
worker: get_worker_id(&mut free_worker_ids),
.. cgcx.clone()
};
maybe_start_llvm_timer(&item, &mut llvm_start_time);
maybe_start_llvm_timer(cgcx.config(item.mtrans.kind),
&mut llvm_start_time);
main_thread_worker_state = MainThreadWorkerState::LLVMing;
spawn_work(cgcx, item);
} else {
@ -1392,7 +1396,8 @@ fn start_executing_work(sess: &Session,
while work_items.len() > 0 && running < tokens.len() {
let (item, _) = work_items.pop().unwrap();

maybe_start_llvm_timer(&item, &mut llvm_start_time);
maybe_start_llvm_timer(cgcx.config(item.mtrans.kind),
&mut llvm_start_time);

let cgcx = CodegenContext {
worker: get_worker_id(&mut free_worker_ids),
@ -1406,7 +1411,8 @@ fn start_executing_work(sess: &Session,
// Relinquish accidentally acquired extra tokens
tokens.truncate(running);

match coordinator_receive.recv().unwrap() {
let msg = coordinator_receive.recv().unwrap();
match *msg.downcast::<Message>().ok().unwrap() {
// Save the token locally and the next turn of the loop will use
// this to spawn a new unit of work, or it may get dropped
// immediately if we have no more work to spawn.
@ -1433,7 +1439,7 @@ fn start_executing_work(sess: &Session,
}
}

Message::TranslationDone { llvm_work_item, cost, is_last } => {
Message::TranslationDone { llvm_work_item, cost } => {
// We keep the queue sorted by estimated processing cost,
// so that more expensive items are processed earlier. This
// is good for throughput as it gives the main thread more
@ -1449,15 +1455,14 @@ fn start_executing_work(sess: &Session,
};
work_items.insert(insertion_index, (llvm_work_item, cost));

if is_last {
// If this is the last, don't request a token because
// the trans worker thread will be free to handle this
// immediately.
translation_done = true;
} else {
helper.request_token();
}
helper.request_token();
assert_eq!(main_thread_worker_state,
MainThreadWorkerState::Translating);
main_thread_worker_state = MainThreadWorkerState::Idle;
}

Message::TranslationComplete => {
translation_done = true;
assert_eq!(main_thread_worker_state,
MainThreadWorkerState::Translating);
main_thread_worker_state = MainThreadWorkerState::Idle;
@ -1535,11 +1540,11 @@ fn start_executing_work(sess: &Session,
items_in_queue >= max_workers.saturating_sub(workers_running / 2)
}

fn maybe_start_llvm_timer(work_item: &WorkItem,
fn maybe_start_llvm_timer(config: &ModuleConfig,
llvm_start_time: &mut Option<Instant>) {
// We keep track of the -Ztime-passes output manually,
// since the closure-based interface does not fit well here.
if work_item.config.time_passes {
if config.time_passes {
if llvm_start_time.is_none() {
*llvm_start_time = Some(Instant::now());
}
@ -1564,7 +1569,7 @@ fn spawn_work(cgcx: CodegenContext, work: WorkItem) {
// Set up a destructor which will fire off a message that we're done as
// we exit.
struct Bomb {
coordinator_send: Sender<Message>,
coordinator_send: Sender<Box<Any + Send>>,
result: Option<CompiledModule>,
worker_id: usize,
}
@ -1575,10 +1580,10 @@ fn spawn_work(cgcx: CodegenContext, work: WorkItem) {
None => Err(())
};

drop(self.coordinator_send.send(Message::Done {
drop(self.coordinator_send.send(Box::new(Message::Done {
result,
worker_id: self.worker_id,
}));
})));
}
}

@ -1814,17 +1819,12 @@ pub struct OngoingCrateTranslation {
linker_info: LinkerInfo,
no_integrated_as: bool,
crate_info: CrateInfo,

output_filenames: OutputFilenames,
regular_module_config: ModuleConfig,
metadata_module_config: ModuleConfig,
allocator_module_config: ModuleConfig,

time_graph: Option<TimeGraph>,
coordinator_send: Sender<Message>,
coordinator_send: Sender<Box<Any + Send>>,
trans_worker_receive: Receiver<Message>,
shared_emitter_main: SharedEmitterMain,
future: thread::JoinHandle<CompiledModules>,
output_filenames: Arc<OutputFilenames>,
}

impl OngoingCrateTranslation {
@ -1892,38 +1892,21 @@ impl OngoingCrateTranslation {
trans
}

pub fn submit_translated_module_to_llvm(&self,
sess: &Session,
mtrans: ModuleTranslation,
cost: u64,
is_last: bool) {
let module_config = match mtrans.kind {
ModuleKind::Regular => self.regular_module_config.clone(sess),
ModuleKind::Metadata => self.metadata_module_config.clone(sess),
ModuleKind::Allocator => self.allocator_module_config.clone(sess),
};

let llvm_work_item = build_work_item(mtrans,
module_config,
self.output_filenames.clone());

drop(self.coordinator_send.send(Message::TranslationDone {
llvm_work_item,
cost,
is_last
}));
}

pub fn submit_pre_translated_module_to_llvm(&self,
sess: &Session,
mtrans: ModuleTranslation,
is_last: bool) {
tcx: TyCtxt,
mtrans: ModuleTranslation) {
self.wait_for_signal_to_translate_item();
self.check_for_errors(sess);
self.check_for_errors(tcx.sess);

// These are generally cheap and won't through off scheduling.
let cost = 0;
self.submit_translated_module_to_llvm(sess, mtrans, cost, is_last);
submit_translated_module_to_llvm(tcx, mtrans, cost);
}

pub fn translation_finished(&self, tcx: TyCtxt) {
self.wait_for_signal_to_translate_item();
self.check_for_errors(tcx.sess);
drop(self.coordinator_send.send(Box::new(Message::TranslationComplete)));
}

pub fn check_for_errors(&self, sess: &Session) {
@ -1945,3 +1928,16 @@ impl OngoingCrateTranslation {
}
}
}

pub fn submit_translated_module_to_llvm(tcx: TyCtxt,
mtrans: ModuleTranslation,
cost: u64) {
let llvm_work_item = WorkItem {
mtrans,
tm: TargetMachine(create_target_machine(tcx.sess)),
};
drop(tcx.tx_to_llvm_workers.send(Box::new(Message::TranslationDone {
llvm_work_item,
cost,
})));
}

File diff suppressed because it is too large
@ -101,11 +101,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {

fn count_insn(&self, category: &str) {
if self.ccx.sess().trans_stats() {
self.ccx.stats().n_llvm_insns.set(self.ccx.stats().n_llvm_insns.get() + 1);
self.ccx.stats().borrow_mut().n_llvm_insns += 1;
}
if self.ccx.sess().count_llvm_insns() {
let mut h = self.ccx.stats().llvm_insns.borrow_mut();
*h.entry(category.to_string()).or_insert(0) += 1;
*self.ccx.stats()
.borrow_mut()
.llvm_insns
.entry(category.to_string())
.or_insert(0) += 1;
}
}

@ -23,7 +23,6 @@ use monomorphize::{self, Instance};
use rustc::hir::def_id::DefId;
use rustc::ty::TypeFoldable;
use rustc::ty::subst::Substs;
use trans_item::TransItem;
use type_of;

/// Translates a reference to a fn/method item, monomorphizing and
@ -45,7 +44,7 @@ pub fn get_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
assert!(!instance.substs.has_escaping_regions());
assert!(!instance.substs.has_param_types());

let fn_ty = common::instance_ty(ccx.shared(), &instance);
let fn_ty = common::instance_ty(ccx.tcx(), &instance);
if let Some(&llfn) = ccx.instances().borrow().get(&instance) {
return llfn;
}
@ -53,35 +52,34 @@ pub fn get_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
let sym = tcx.symbol_name(instance);
debug!("get_fn({:?}: {:?}) => {}", instance, fn_ty, sym);

// This is subtle and surprising, but sometimes we have to bitcast
// the resulting fn pointer. The reason has to do with external
// functions. If you have two crates that both bind the same C
// library, they may not use precisely the same types: for
// example, they will probably each declare their own structs,
// which are distinct types from LLVM's point of view (nominal
// types).
//
// Now, if those two crates are linked into an application, and
// they contain inlined code, you can wind up with a situation
// where both of those functions wind up being loaded into this
// application simultaneously. In that case, the same function
// (from LLVM's point of view) requires two types. But of course
// LLVM won't allow one function to have two types.
//
// What we currently do, therefore, is declare the function with
// one of the two types (whichever happens to come first) and then
// bitcast as needed when the function is referenced to make sure
// it has the type we expect.
//
// This can occur on either a crate-local or crate-external
// reference. It also occurs when testing libcore and in some
// other weird situations. Annoying.

// Create a fn pointer with the substituted signature.
let fn_ptr_ty = tcx.mk_fn_ptr(common::ty_fn_sig(ccx, fn_ty));
let llptrty = type_of::type_of(ccx, fn_ptr_ty);

let llfn = if let Some(llfn) = declare::get_declared_value(ccx, &sym) {
// This is subtle and surprising, but sometimes we have to bitcast
// the resulting fn pointer. The reason has to do with external
// functions. If you have two crates that both bind the same C
// library, they may not use precisely the same types: for
// example, they will probably each declare their own structs,
// which are distinct types from LLVM's point of view (nominal
// types).
//
// Now, if those two crates are linked into an application, and
// they contain inlined code, you can wind up with a situation
// where both of those functions wind up being loaded into this
// application simultaneously. In that case, the same function
// (from LLVM's point of view) requires two types. But of course
// LLVM won't allow one function to have two types.
//
// What we currently do, therefore, is declare the function with
// one of the two types (whichever happens to come first) and then
// bitcast as needed when the function is referenced to make sure
// it has the type we expect.
//
// This can occur on either a crate-local or crate-external
// reference. It also occurs when testing libcore and in some
// other weird situations. Annoying.
if common::val_ty(llfn) != llptrty {
debug!("get_fn: casting {:?} to {:?}", llfn, llptrty);
consts::ptrcast(llfn, llptrty)
@ -110,12 +108,45 @@ pub fn get_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
attributes::unwind(llfn, true);
}

// Apply an appropriate linkage/visibility value to our item that we
// just declared.
//
// This is sort of subtle. Inside our codegen unit we started off
// compilation by predefining all our own `TransItem` instances. That
// is, everything we're translating ourselves is already defined. That
// means that anything we're actually translating ourselves will have
// hit the above branch in `get_declared_value`. As a result, we're
// guaranteed here that we're declaring a symbol that won't get defined,
// or in other words we're referencing a foreign value.
//
// So because this is a foreign value we blanket apply an external
// linkage directive because it's coming from a different object file.
// The visibility here is where it gets tricky. This symbol could be
// referencing some foreign crate or foreign library (an `extern`
// block) in which case we want to leave the default visibility. We may
// also, though, have multiple codegen units.
//
// In the situation of multiple codegen units this function may be
// referencing a function from another codegen unit. If we're
// indeed referencing a symbol in another codegen unit then we're in one
// of two cases:
//
// * This is a symbol defined in a foreign crate and we're just
// monomorphizing in another codegen unit. In this case this symbols
// is for sure not exported, so both codegen units will be using
// hidden visibility. Hence, we apply a hidden visibility here.
//
// * This is a symbol defined in our local crate. If the symbol in the
// other codegen unit is also not exported then like with the foreign
// case we apply a hidden visibility. If the symbol is exported from
// the foreign object file, however, then we leave this at the
// default visibility as we'll just import it naturally.
unsafe {
llvm::LLVMRustSetLinkage(llfn, llvm::Linkage::ExternalLinkage);

if ccx.crate_trans_items().contains(&TransItem::Fn(instance)) {
if let Some(node_id) = tcx.hir.as_local_node_id(instance_def_id) {
if !ccx.exported_symbols().local_exports().contains(&node_id) {
if ccx.tcx().is_translated_function(instance_def_id) {
if instance_def_id.is_local() {
if !ccx.tcx().is_exported_symbol(instance_def_id) {
llvm::LLVMRustSetVisibility(llfn, llvm::Visibility::Hidden);
}
} else {
@ -148,5 +179,5 @@ pub fn resolve_and_get_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
substs: &'tcx Substs<'tcx>)
-> ValueRef
{
get_fn(ccx, monomorphize::resolve(ccx.shared(), def_id, substs))
get_fn(ccx, monomorphize::resolve(ccx.tcx(), def_id, substs))
}

@ -202,15 +202,13 @@ use rustc::ty::adjustment::CustomCoerceUnsized;
use rustc::mir::{self, Location};
use rustc::mir::visit::Visitor as MirVisitor;

use context::SharedCrateContext;
use common::{def_ty, instance_ty};
use common::{def_ty, instance_ty, type_is_sized};
use monomorphize::{self, Instance};
use rustc::util::nodemap::{FxHashSet, FxHashMap, DefIdMap};

use trans_item::{TransItem, DefPathBasedNames, InstantiationMode};
use trans_item::{TransItem, TransItemExt, DefPathBasedNames, InstantiationMode};

use rustc_data_structures::bitvec::BitVector;
use back::symbol_export::ExportedSymbols;

#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)]
pub enum TransItemCollectionMode {
@ -294,15 +292,14 @@ impl<'tcx> InliningMap<'tcx> {
}
}

pub fn collect_crate_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
exported_symbols: &ExportedSymbols,
pub fn collect_crate_translation_items<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
mode: TransItemCollectionMode)
-> (FxHashSet<TransItem<'tcx>>,
InliningMap<'tcx>) {
// We are not tracking dependencies of this pass as it has to be re-executed
// every time no matter what.
scx.tcx().dep_graph.with_ignore(|| {
let roots = collect_roots(scx, exported_symbols, mode);
tcx.dep_graph.with_ignore(|| {
let roots = collect_roots(tcx, mode);

debug!("Building translation item graph, beginning at roots");
let mut visited = FxHashSet();
@ -310,7 +307,7 @@ pub fn collect_crate_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a, 't
let mut inlining_map = InliningMap::new();

for root in roots {
collect_items_rec(scx,
collect_items_rec(tcx,
root,
&mut visited,
&mut recursion_depths,
@ -323,8 +320,7 @@ pub fn collect_crate_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a, 't

// Find all non-generic items by walking the HIR. These items serve as roots to
// start monomorphizing from.
fn collect_roots<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
exported_symbols: &ExportedSymbols,
fn collect_roots<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
mode: TransItemCollectionMode)
-> Vec<TransItem<'tcx>> {
debug!("Collecting roots");
@ -332,25 +328,24 @@ fn collect_roots<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,

{
let mut visitor = RootCollector {
scx,
tcx,
mode,
exported_symbols,
output: &mut roots,
};

scx.tcx().hir.krate().visit_all_item_likes(&mut visitor);
tcx.hir.krate().visit_all_item_likes(&mut visitor);
}

// We can only translate items that are instantiable - items all of
// whose predicates hold. Luckily, items that aren't instantiable
// can't actually be used, so we can just skip translating them.
roots.retain(|root| root.is_instantiable(scx.tcx()));
roots.retain(|root| root.is_instantiable(tcx));

roots
}

// Collect all monomorphized translation items reachable from `starting_point`
fn collect_items_rec<'a, 'tcx: 'a>(scx: &SharedCrateContext<'a, 'tcx>,
fn collect_items_rec<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
starting_point: TransItem<'tcx>,
visited: &mut FxHashSet<TransItem<'tcx>>,
recursion_depths: &mut DefIdMap<usize>,
@ -359,54 +354,54 @@ fn collect_items_rec<'a, 'tcx: 'a>(scx: &SharedCrateContext<'a, 'tcx>,
// We've been here already, no need to search again.
return;
}
debug!("BEGIN collect_items_rec({})", starting_point.to_string(scx.tcx()));
debug!("BEGIN collect_items_rec({})", starting_point.to_string(tcx));

let mut neighbors = Vec::new();
let recursion_depth_reset;

match starting_point {
TransItem::Static(node_id) => {
let def_id = scx.tcx().hir.local_def_id(node_id);
let instance = Instance::mono(scx.tcx(), def_id);
let def_id = tcx.hir.local_def_id(node_id);
let instance = Instance::mono(tcx, def_id);

// Sanity check whether this ended up being collected accidentally
debug_assert!(should_trans_locally(scx.tcx(), &instance));
debug_assert!(should_trans_locally(tcx, &instance));

let ty = instance_ty(scx, &instance);
visit_drop_use(scx, ty, true, &mut neighbors);
let ty = instance_ty(tcx, &instance);
visit_drop_use(tcx, ty, true, &mut neighbors);

recursion_depth_reset = None;

collect_neighbours(scx, instance, true, &mut neighbors);
collect_neighbours(tcx, instance, true, &mut neighbors);
}
TransItem::Fn(instance) => {
// Sanity check whether this ended up being collected accidentally
debug_assert!(should_trans_locally(scx.tcx(), &instance));
debug_assert!(should_trans_locally(tcx, &instance));

// Keep track of the monomorphization recursion depth
recursion_depth_reset = Some(check_recursion_limit(scx.tcx(),
recursion_depth_reset = Some(check_recursion_limit(tcx,
instance,
recursion_depths));
check_type_length_limit(scx.tcx(), instance);
check_type_length_limit(tcx, instance);

collect_neighbours(scx, instance, false, &mut neighbors);
collect_neighbours(tcx, instance, false, &mut neighbors);
}
TransItem::GlobalAsm(..) => {
recursion_depth_reset = None;
}
}

record_accesses(scx.tcx(), starting_point, &neighbors[..], inlining_map);
record_accesses(tcx, starting_point, &neighbors[..], inlining_map);

for neighbour in neighbors {
collect_items_rec(scx, neighbour, visited, recursion_depths, inlining_map);
collect_items_rec(tcx, neighbour, visited, recursion_depths, inlining_map);
}

if let Some((def_id, depth)) = recursion_depth_reset {
recursion_depths.insert(def_id, depth);
}

debug!("END collect_items_rec({})", starting_point.to_string(scx.tcx()));
debug!("END collect_items_rec({})", starting_point.to_string(tcx));
}

fn record_accesses<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
@ -494,7 +489,7 @@ fn check_type_length_limit<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
}

struct MirNeighborCollector<'a, 'tcx: 'a> {
scx: &'a SharedCrateContext<'a, 'tcx>,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
mir: &'a mir::Mir<'tcx>,
output: &'a mut Vec<TransItem<'tcx>>,
param_substs: &'tcx Substs<'tcx>,
@ -511,49 +506,49 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> {
// have to instantiate all methods of the trait being cast to, so we
// can build the appropriate vtable.
mir::Rvalue::Cast(mir::CastKind::Unsize, ref operand, target_ty) => {
let target_ty = self.scx.tcx().trans_apply_param_substs(self.param_substs,
&target_ty);
let source_ty = operand.ty(self.mir, self.scx.tcx());
let source_ty = self.scx.tcx().trans_apply_param_substs(self.param_substs,
&source_ty);
let (source_ty, target_ty) = find_vtable_types_for_unsizing(self.scx,
let target_ty = self.tcx.trans_apply_param_substs(self.param_substs,
&target_ty);
let source_ty = operand.ty(self.mir, self.tcx);
let source_ty = self.tcx.trans_apply_param_substs(self.param_substs,
&source_ty);
let (source_ty, target_ty) = find_vtable_types_for_unsizing(self.tcx,
source_ty,
target_ty);
// This could also be a different Unsize instruction, like
// from a fixed sized array to a slice. But we are only
// interested in things that produce a vtable.
if target_ty.is_trait() && !source_ty.is_trait() {
create_trans_items_for_vtable_methods(self.scx,
create_trans_items_for_vtable_methods(self.tcx,
target_ty,
source_ty,
self.output);
}
}
mir::Rvalue::Cast(mir::CastKind::ReifyFnPointer, ref operand, _) => {
let fn_ty = operand.ty(self.mir, self.scx.tcx());
let fn_ty = self.scx.tcx().trans_apply_param_substs(self.param_substs,
&fn_ty);
visit_fn_use(self.scx, fn_ty, false, &mut self.output);
let fn_ty = operand.ty(self.mir, self.tcx);
let fn_ty = self.tcx.trans_apply_param_substs(self.param_substs,
&fn_ty);
visit_fn_use(self.tcx, fn_ty, false, &mut self.output);
}
mir::Rvalue::Cast(mir::CastKind::ClosureFnPointer, ref operand, _) => {
let source_ty = operand.ty(self.mir, self.scx.tcx());
let source_ty = self.scx.tcx().trans_apply_param_substs(self.param_substs,
&source_ty);
let source_ty = operand.ty(self.mir, self.tcx);
let source_ty = self.tcx.trans_apply_param_substs(self.param_substs,
&source_ty);
match source_ty.sty {
ty::TyClosure(def_id, substs) => {
let instance = monomorphize::resolve_closure(
self.scx, def_id, substs, ty::ClosureKind::FnOnce);
self.tcx, def_id, substs, ty::ClosureKind::FnOnce);
self.output.push(create_fn_trans_item(instance));
}
_ => bug!(),
}
}
mir::Rvalue::NullaryOp(mir::NullOp::Box, _) => {
let tcx = self.scx.tcx();
let tcx = self.tcx;
let exchange_malloc_fn_def_id = tcx
.lang_items()
.require(ExchangeMallocFnLangItem)
.unwrap_or_else(|e| self.scx.sess().fatal(&e));
.unwrap_or_else(|e| tcx.sess.fatal(&e));
let instance = Instance::mono(tcx, exchange_malloc_fn_def_id);
if should_trans_locally(tcx, &instance) {
self.output.push(create_fn_trans_item(instance));
@ -569,10 +564,10 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> {
debug!("visiting const {:?} @ {:?}", *constant, location);

if let ConstVal::Unevaluated(def_id, substs) = constant.val {
let substs = self.scx.tcx().trans_apply_param_substs(self.param_substs,
&substs);
let instance = monomorphize::resolve(self.scx, def_id, substs);
collect_neighbours(self.scx, instance, true, self.output);
let substs = self.tcx.trans_apply_param_substs(self.param_substs,
&substs);
let instance = monomorphize::resolve(self.tcx, def_id, substs);
collect_neighbours(self.tcx, instance, true, self.output);
}

self.super_const(constant);
@ -584,15 +579,15 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> {
location: Location) {
debug!("visiting terminator {:?} @ {:?}", kind, location);

let tcx = self.scx.tcx();
let tcx = self.tcx;
match *kind {
mir::TerminatorKind::Call { ref func, .. } => {
let callee_ty = func.ty(self.mir, tcx);
let callee_ty = tcx.trans_apply_param_substs(self.param_substs, &callee_ty);

let constness = match (self.const_context, &callee_ty.sty) {
(true, &ty::TyFnDef(def_id, substs)) if self.scx.tcx().is_const_fn(def_id) => {
let instance = monomorphize::resolve(self.scx, def_id, substs);
(true, &ty::TyFnDef(def_id, substs)) if self.tcx.is_const_fn(def_id) => {
let instance = monomorphize::resolve(self.tcx, def_id, substs);
Some(instance)
}
_ => None
@ -602,20 +597,20 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> {
// If this is a const fn, called from a const context, we
// have to visit its body in order to find any fn reifications
// it might contain.
collect_neighbours(self.scx,
collect_neighbours(self.tcx,
const_fn_instance,
true,
self.output);
} else {
visit_fn_use(self.scx, callee_ty, true, &mut self.output);
visit_fn_use(self.tcx, callee_ty, true, &mut self.output);
}
}
mir::TerminatorKind::Drop { ref location, .. } |
mir::TerminatorKind::DropAndReplace { ref location, .. } => {
let ty = location.ty(self.mir, self.scx.tcx())
.to_ty(self.scx.tcx());
let ty = location.ty(self.mir, self.tcx)
.to_ty(self.tcx);
let ty = tcx.trans_apply_param_substs(self.param_substs, &ty);
visit_drop_use(self.scx, ty, true, self.output);
visit_drop_use(self.tcx, ty, true, self.output);
}
mir::TerminatorKind::Goto { .. } |
mir::TerminatorKind::SwitchInt { .. } |
@ -636,7 +631,7 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> {
location: Location) {
debug!("visiting static {:?} @ {:?}", static_.def_id, location);

let tcx = self.scx.tcx();
let tcx = self.tcx;
let instance = Instance::mono(tcx, static_.def_id);
if should_trans_locally(tcx, &instance) {
let node_id = tcx.hir.as_local_node_id(static_.def_id).unwrap();
@ -647,33 +642,33 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> {
}
}

fn visit_drop_use<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
fn visit_drop_use<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
ty: Ty<'tcx>,
is_direct_call: bool,
output: &mut Vec<TransItem<'tcx>>)
{
let instance = monomorphize::resolve_drop_in_place(scx, ty);
visit_instance_use(scx, instance, is_direct_call, output);
let instance = monomorphize::resolve_drop_in_place(tcx, ty);
visit_instance_use(tcx, instance, is_direct_call, output);
}

fn visit_fn_use<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
fn visit_fn_use<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
ty: Ty<'tcx>,
is_direct_call: bool,
output: &mut Vec<TransItem<'tcx>>)
{
if let ty::TyFnDef(def_id, substs) = ty.sty {
let instance = monomorphize::resolve(scx, def_id, substs);
visit_instance_use(scx, instance, is_direct_call, output);
let instance = monomorphize::resolve(tcx, def_id, substs);
visit_instance_use(tcx, instance, is_direct_call, output);
}
}

fn visit_instance_use<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
fn visit_instance_use<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
instance: ty::Instance<'tcx>,
is_direct_call: bool,
output: &mut Vec<TransItem<'tcx>>)
{
debug!("visit_item_use({:?}, is_direct_call={:?})", instance, is_direct_call);
if !should_trans_locally(scx.tcx(), &instance) {
if !should_trans_locally(tcx, &instance) {
return
}

@ -775,15 +770,15 @@ fn should_trans_locally<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, instance: &Instan
///
/// Finally, there is also the case of custom unsizing coercions, e.g. for
/// smart pointers such as `Rc` and `Arc`.
fn find_vtable_types_for_unsizing<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
fn find_vtable_types_for_unsizing<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
source_ty: Ty<'tcx>,
target_ty: Ty<'tcx>)
-> (Ty<'tcx>, Ty<'tcx>) {
let ptr_vtable = |inner_source: Ty<'tcx>, inner_target: Ty<'tcx>| {
if !scx.type_is_sized(inner_source) {
if !type_is_sized(tcx, inner_source) {
(inner_source, inner_target)
} else {
scx.tcx().struct_lockstep_tails(inner_source, inner_target)
tcx.struct_lockstep_tails(inner_source, inner_target)
}
};
match (&source_ty.sty, &target_ty.sty) {
@ -804,7 +799,7 @@ fn find_vtable_types_for_unsizing<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
assert_eq!(source_adt_def, target_adt_def);

let kind =
monomorphize::custom_coerce_unsize_info(scx, source_ty, target_ty);
monomorphize::custom_coerce_unsize_info(tcx, source_ty, target_ty);

let coerce_index = match kind {
CustomCoerceUnsized::Struct(i) => i
@ -816,10 +811,10 @@ fn find_vtable_types_for_unsizing<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
assert!(coerce_index < source_fields.len() &&
source_fields.len() == target_fields.len());

find_vtable_types_for_unsizing(scx,
source_fields[coerce_index].ty(scx.tcx(),
find_vtable_types_for_unsizing(tcx,
source_fields[coerce_index].ty(tcx,
source_substs),
target_fields[coerce_index].ty(scx.tcx(),
target_fields[coerce_index].ty(tcx,
target_substs))
}
_ => bug!("find_vtable_types_for_unsizing: invalid coercion {:?} -> {:?}",
@ -835,7 +830,7 @@ fn create_fn_trans_item<'a, 'tcx>(instance: Instance<'tcx>) -> TransItem<'tcx> {

/// Creates a `TransItem` for each method that is referenced by the vtable for
/// the given trait/impl pair.
fn create_trans_items_for_vtable_methods<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
fn create_trans_items_for_vtable_methods<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
trait_ty: Ty<'tcx>,
impl_ty: Ty<'tcx>,
output: &mut Vec<TransItem<'tcx>>) {
@ -844,19 +839,19 @@ fn create_trans_items_for_vtable_methods<'a, 'tcx>(scx: &SharedCrateContext<'a,

if let ty::TyDynamic(ref trait_ty, ..) = trait_ty.sty {
if let Some(principal) = trait_ty.principal() {
let poly_trait_ref = principal.with_self_ty(scx.tcx(), impl_ty);
let poly_trait_ref = principal.with_self_ty(tcx, impl_ty);
assert!(!poly_trait_ref.has_escaping_regions());

// Walk all methods of the trait, including those of its supertraits
let methods = traits::get_vtable_methods(scx.tcx(), poly_trait_ref);
let methods = traits::get_vtable_methods(tcx, poly_trait_ref);
let methods = methods.filter_map(|method| method)
.map(|(def_id, substs)| monomorphize::resolve(scx, def_id, substs))
.filter(|&instance| should_trans_locally(scx.tcx(), &instance))
.map(|(def_id, substs)| monomorphize::resolve(tcx, def_id, substs))
.filter(|&instance| should_trans_locally(tcx, &instance))
.map(|instance| create_fn_trans_item(instance));
output.extend(methods);
}
// Also add the destructor
visit_drop_use(scx, impl_ty, false, output);
visit_drop_use(tcx, impl_ty, false, output);
}
}

@ -865,8 +860,7 @@ fn create_trans_items_for_vtable_methods<'a, 'tcx>(scx: &SharedCrateContext<'a,
//=-----------------------------------------------------------------------------

struct RootCollector<'b, 'a: 'b, 'tcx: 'a + 'b> {
scx: &'b SharedCrateContext<'a, 'tcx>,
exported_symbols: &'b ExportedSymbols,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
mode: TransItemCollectionMode,
output: &'b mut Vec<TransItem<'tcx>>,
}
@ -886,7 +880,7 @@ impl<'b, 'a, 'v> ItemLikeVisitor<'v> for RootCollector<'b, 'a, 'v> {

hir::ItemImpl(..) => {
if self.mode == TransItemCollectionMode::Eager {
create_trans_items_for_default_impls(self.scx,
create_trans_items_for_default_impls(self.tcx,
item,
self.output);
}
@ -897,25 +891,25 @@ impl<'b, 'a, 'v> ItemLikeVisitor<'v> for RootCollector<'b, 'a, 'v> {
hir::ItemUnion(_, ref generics) => {
if !generics.is_parameterized() {
if self.mode == TransItemCollectionMode::Eager {
let def_id = self.scx.tcx().hir.local_def_id(item.id);
let def_id = self.tcx.hir.local_def_id(item.id);
debug!("RootCollector: ADT drop-glue for {}",
def_id_to_string(self.scx.tcx(), def_id));
def_id_to_string(self.tcx, def_id));

let ty = def_ty(self.scx, def_id, Substs::empty());
visit_drop_use(self.scx, ty, true, self.output);
let ty = def_ty(self.tcx, def_id, Substs::empty());
visit_drop_use(self.tcx, ty, true, self.output);
}
}
}
hir::ItemGlobalAsm(..) => {
debug!("RootCollector: ItemGlobalAsm({})",
def_id_to_string(self.scx.tcx(),
self.scx.tcx().hir.local_def_id(item.id)));
def_id_to_string(self.tcx,
self.tcx.hir.local_def_id(item.id)));
self.output.push(TransItem::GlobalAsm(item.id));
}
hir::ItemStatic(..) => {
debug!("RootCollector: ItemStatic({})",
def_id_to_string(self.scx.tcx(),
self.scx.tcx().hir.local_def_id(item.id)));
def_id_to_string(self.tcx,
self.tcx.hir.local_def_id(item.id)));
self.output.push(TransItem::Static(item.id));
}
hir::ItemConst(..) => {
@ -923,12 +917,11 @@ impl<'b, 'a, 'v> ItemLikeVisitor<'v> for RootCollector<'b, 'a, 'v> {
// actually used somewhere. Just declaring them is insufficient.
}
hir::ItemFn(..) => {
let tcx = self.scx.tcx();
let tcx = self.tcx;
let def_id = tcx.hir.local_def_id(item.id);

if (self.mode == TransItemCollectionMode::Eager ||
!tcx.is_const_fn(def_id) ||
self.exported_symbols.local_exports().contains(&item.id)) &&
!tcx.is_const_fn(def_id) || tcx.is_exported_symbol(def_id)) &&
!item_has_type_parameters(tcx, def_id) {

debug!("RootCollector: ItemFn({})",
@ -949,12 +942,12 @@ impl<'b, 'a, 'v> ItemLikeVisitor<'v> for RootCollector<'b, 'a, 'v> {
fn visit_impl_item(&mut self, ii: &'v hir::ImplItem) {
match ii.node {
hir::ImplItemKind::Method(hir::MethodSig { .. }, _) => {
let tcx = self.scx.tcx();
let tcx = self.tcx;
let def_id = tcx.hir.local_def_id(ii.id);

if (self.mode == TransItemCollectionMode::Eager ||
!tcx.is_const_fn(def_id) ||
self.exported_symbols.local_exports().contains(&ii.id)) &&
tcx.is_exported_symbol(def_id)) &&
!item_has_type_parameters(tcx, def_id) {
debug!("RootCollector: MethodImplItem({})",
def_id_to_string(tcx, def_id));
@ -973,10 +966,9 @@ fn item_has_type_parameters<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId
generics.parent_types as usize + generics.types.len() > 0
}

fn create_trans_items_for_default_impls<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
fn create_trans_items_for_default_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
item: &'tcx hir::Item,
output: &mut Vec<TransItem<'tcx>>) {
let tcx = scx.tcx();
match item.node {
hir::ItemImpl(_,
_,
@ -1009,7 +1001,7 @@ fn create_trans_items_for_default_impls<'a, 'tcx>(scx: &SharedCrateContext<'a, '
}

let instance =
monomorphize::resolve(scx, method.def_id, callee_substs);
monomorphize::resolve(tcx, method.def_id, callee_substs);

let trans_item = create_fn_trans_item(instance);
if trans_item.is_instantiable(tcx) && should_trans_locally(tcx, &instance) {
@ -1025,15 +1017,15 @@ fn create_trans_items_for_default_impls<'a, 'tcx>(scx: &SharedCrateContext<'a, '
}

/// Scan the MIR in order to find function calls, closures, and drop-glue
fn collect_neighbours<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
fn collect_neighbours<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
instance: Instance<'tcx>,
const_context: bool,
output: &mut Vec<TransItem<'tcx>>)
{
let mir = scx.tcx().instance_mir(instance.def);
let mir = tcx.instance_mir(instance.def);

let mut visitor = MirNeighborCollector {
scx,
tcx,
mir: &mir,
output,
param_substs: instance.substs,

@ -26,6 +26,7 @@ use machine;
use monomorphize;
use type_::Type;
use value::Value;
use rustc::traits;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::layout::{Layout, LayoutTyper};
use rustc::ty::subst::{Kind, Subst, Substs};
@ -37,7 +38,7 @@ use std::iter;
use syntax::abi::Abi;
use syntax::attr;
use syntax::symbol::InternedString;
use syntax_pos::Span;
use syntax_pos::{Span, DUMMY_SP};

pub use context::{CrateContext, SharedCrateContext};

@ -140,6 +141,18 @@ pub fn type_is_zero_size<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -
!layout.is_unsized() && layout.size(ccx).bytes() == 0
}

pub fn type_needs_drop<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> bool {
ty.needs_drop(tcx, ty::ParamEnv::empty(traits::Reveal::All))
}

pub fn type_is_sized<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> bool {
ty.is_sized(tcx, ty::ParamEnv::empty(traits::Reveal::All), DUMMY_SP)
}

pub fn type_is_freeze<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> bool {
ty.is_freeze(tcx, ty::ParamEnv::empty(traits::Reveal::All), DUMMY_SP)
}

/*
* A note on nomenclature of linking: "extern", "foreign", and "upcall".
*
@ -573,20 +586,20 @@ pub fn is_inline_instance<'a, 'tcx>(
}

/// Given a DefId and some Substs, produces the monomorphic item type.
pub fn def_ty<'a, 'tcx>(shared: &SharedCrateContext<'a, 'tcx>,
pub fn def_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId,
substs: &'tcx Substs<'tcx>)
-> Ty<'tcx>
{
let ty = shared.tcx().type_of(def_id);
shared.tcx().trans_apply_param_substs(substs, &ty)
let ty = tcx.type_of(def_id);
tcx.trans_apply_param_substs(substs, &ty)
}

/// Return the substituted type of an instance.
pub fn instance_ty<'a, 'tcx>(shared: &SharedCrateContext<'a, 'tcx>,
pub fn instance_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
instance: &ty::Instance<'tcx>)
-> Ty<'tcx>
{
let ty = instance.def.def_ty(shared.tcx());
shared.tcx().trans_apply_param_substs(instance.substs, &ty)
let ty = instance.def.def_ty(tcx);
tcx.trans_apply_param_substs(instance.substs, &ty)
}

@ -16,7 +16,7 @@ use rustc::hir::map as hir_map;
use rustc::middle::const_val::ConstEvalErr;
use {debuginfo, machine};
use base;
use trans_item::TransItem;
use trans_item::{TransItem, TransItemExt};
use common::{self, CrateContext, val_ty};
use declare;
use monomorphize::Instance;
@ -109,7 +109,7 @@ pub fn get_static(ccx: &CrateContext, def_id: DefId) -> ValueRef {
return g;
}

let ty = common::instance_ty(ccx.shared(), &instance);
let ty = common::instance_ty(ccx.tcx(), &instance);
let g = if let Some(id) = ccx.tcx().hir.as_local_node_id(def_id) {

let llty = type_of::type_of(ccx, ty);
@ -130,7 +130,7 @@ pub fn get_static(ccx: &CrateContext, def_id: DefId) -> ValueRef {

let g = declare::define_global(ccx, &sym[..], llty).unwrap();

if !ccx.exported_symbols().local_exports().contains(&id) {
if !ccx.tcx().is_exported_symbol(def_id) {
unsafe {
llvm::LLVMRustSetVisibility(g, llvm::Visibility::Hidden);
}
@ -150,7 +150,7 @@ pub fn get_static(ccx: &CrateContext, def_id: DefId) -> ValueRef {
// extern "C" fn() from being non-null, so we can't just declare a
// static and call it a day. Some linkages (like weak) will make it such
// that the static actually has a null value.
let linkage = match base::llvm_linkage_by_name(&name.as_str()) {
let linkage = match base::linkage_by_name(&name.as_str()) {
Some(linkage) => linkage,
None => {
ccx.sess().span_fatal(span, "invalid linkage specified");
@ -165,7 +165,7 @@ pub fn get_static(ccx: &CrateContext, def_id: DefId) -> ValueRef {
unsafe {
// Declare a symbol `foo` with the desired linkage.
let g1 = declare::declare_global(ccx, &sym, llty2);
llvm::LLVMRustSetLinkage(g1, linkage);
llvm::LLVMRustSetLinkage(g1, base::linkage_to_llvm(linkage));

// Declare an internal global `extern_with_linkage_foo` which
// is initialized with the address of `foo`. If `foo` is
@ -269,7 +269,7 @@ pub fn trans_static<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
};

let instance = Instance::mono(ccx.tcx(), def_id);
let ty = common::instance_ty(ccx.shared(), &instance);
let ty = common::instance_ty(ccx.tcx(), &instance);
let llty = type_of::type_of(ccx, ty);
let g = if val_llty == llty {
g

@ -8,6 +8,7 @@
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use common;
|
||||
use llvm;
|
||||
use llvm::{ContextRef, ModuleRef, ValueRef};
|
||||
use rustc::dep_graph::{DepGraph, DepGraphSafe};
|
||||
@ -16,20 +17,19 @@ use rustc::hir::def_id::DefId;
|
||||
use rustc::traits;
|
||||
use debuginfo;
|
||||
use callee;
|
||||
use back::symbol_export::ExportedSymbols;
|
||||
use base;
|
||||
use declare;
|
||||
use monomorphize::Instance;
|
||||
|
||||
use partitioning::CodegenUnit;
|
||||
use trans_item::TransItem;
|
||||
use type_::Type;
|
||||
use rustc_data_structures::base_n;
|
||||
use rustc::session::config::{self, NoDebugInfo, OutputFilenames};
|
||||
use rustc::middle::trans::Stats;
|
||||
use rustc::session::Session;
|
||||
use rustc::ty::{self, Ty, TyCtxt};
|
||||
use rustc::session::config::{self, NoDebugInfo};
|
||||
use rustc::ty::layout::{LayoutCx, LayoutError, LayoutTyper, TyLayout};
|
||||
use rustc::util::nodemap::{FxHashMap, FxHashSet};
|
||||
use rustc::ty::{self, Ty, TyCtxt};
|
||||
use rustc::util::nodemap::FxHashMap;
|
||||
|
||||
use std::ffi::{CStr, CString};
|
||||
use std::cell::{Cell, RefCell};
|
||||
@ -39,39 +39,8 @@ use std::str;
|
||||
use std::sync::Arc;
|
||||
use std::marker::PhantomData;
|
||||
use syntax::symbol::InternedString;
|
||||
use syntax_pos::DUMMY_SP;
|
||||
use abi::Abi;
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct Stats {
|
||||
pub n_glues_created: Cell<usize>,
|
||||
pub n_null_glues: Cell<usize>,
|
||||
pub n_real_glues: Cell<usize>,
|
||||
pub n_fns: Cell<usize>,
|
||||
pub n_inlines: Cell<usize>,
|
||||
pub n_closures: Cell<usize>,
|
||||
pub n_llvm_insns: Cell<usize>,
|
||||
pub llvm_insns: RefCell<FxHashMap<String, usize>>,
|
||||
// (ident, llvm-instructions)
|
||||
pub fn_stats: RefCell<Vec<(String, usize)> >,
|
||||
}
|
||||
|
||||
impl Stats {
|
||||
pub fn extend(&mut self, stats: Stats) {
|
||||
self.n_glues_created.set(self.n_glues_created.get() + stats.n_glues_created.get());
|
||||
self.n_null_glues.set(self.n_null_glues.get() + stats.n_null_glues.get());
|
||||
self.n_real_glues.set(self.n_real_glues.get() + stats.n_real_glues.get());
|
||||
self.n_fns.set(self.n_fns.get() + stats.n_fns.get());
|
||||
self.n_inlines.set(self.n_inlines.get() + stats.n_inlines.get());
|
||||
self.n_closures.set(self.n_closures.get() + stats.n_closures.get());
|
||||
self.n_llvm_insns.set(self.n_llvm_insns.get() + stats.n_llvm_insns.get());
|
||||
self.llvm_insns.borrow_mut().extend(
|
||||
stats.llvm_insns.borrow().iter()
|
||||
.map(|(key, value)| (key.clone(), value.clone())));
|
||||
self.fn_stats.borrow_mut().append(&mut *stats.fn_stats.borrow_mut());
|
||||
}
|
||||
}
|
||||
|
||||
/// The shared portion of a `CrateContext`. There is one `SharedCrateContext`
|
||||
/// per crate. The data here is shared between all compilation units of the
|
||||
/// crate, so it must not contain references to any LLVM data structures
|
||||
@ -79,10 +48,7 @@ impl Stats {
|
||||
pub struct SharedCrateContext<'a, 'tcx: 'a> {
|
||||
tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
check_overflow: bool,
|
||||
|
||||
use_dll_storage_attrs: bool,
|
||||
|
||||
output_filenames: &'a OutputFilenames,
|
||||
}
|
||||
|
||||
/// The local portion of a `CrateContext`. There is one `LocalCrateContext`
|
||||
@ -92,14 +58,8 @@ pub struct SharedCrateContext<'a, 'tcx: 'a> {
|
||||
pub struct LocalCrateContext<'a, 'tcx: 'a> {
|
||||
llmod: ModuleRef,
|
||||
llcx: ContextRef,
|
||||
stats: Stats,
|
||||
codegen_unit: CodegenUnit<'tcx>,
|
||||
|
||||
/// The translation items of the whole crate.
|
||||
crate_trans_items: Arc<FxHashSet<TransItem<'tcx>>>,
|
||||
|
||||
/// Information about which symbols are exported from the crate.
|
||||
exported_symbols: Arc<ExportedSymbols>,
|
||||
stats: RefCell<Stats>,
|
||||
codegen_unit: Arc<CodegenUnit<'tcx>>,
|
||||
|
||||
/// Cache instances of monomorphic and polymorphic items
|
||||
instances: RefCell<FxHashMap<Instance<'tcx>, ValueRef>>,
|
||||
@ -261,10 +221,7 @@ pub unsafe fn create_context_and_module(sess: &Session, mod_name: &str) -> (Cont
|
||||
}
|
||||
|
||||
impl<'b, 'tcx> SharedCrateContext<'b, 'tcx> {
|
||||
pub fn new(tcx: TyCtxt<'b, 'tcx, 'tcx>,
|
||||
check_overflow: bool,
|
||||
output_filenames: &'b OutputFilenames)
|
||||
-> SharedCrateContext<'b, 'tcx> {
|
||||
pub fn new(tcx: TyCtxt<'b, 'tcx, 'tcx>) -> SharedCrateContext<'b, 'tcx> {
|
||||
// An interesting part of Windows which MSVC forces our hand on (and
|
||||
// apparently MinGW didn't) is the usage of `dllimport` and `dllexport`
|
||||
// attributes in LLVM IR as well as native dependencies (in C these
|
||||
@ -310,27 +267,28 @@ impl<'b, 'tcx> SharedCrateContext<'b, 'tcx> {
|
||||
// start) and then strongly recommending static linkage on MSVC!
|
||||
let use_dll_storage_attrs = tcx.sess.target.target.options.is_like_msvc;
|
||||
|
||||
let check_overflow = tcx.sess.overflow_checks();
|
||||
|
||||
SharedCrateContext {
|
||||
tcx,
|
||||
check_overflow,
|
||||
use_dll_storage_attrs,
|
||||
output_filenames,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn type_needs_drop(&self, ty: Ty<'tcx>) -> bool {
|
||||
ty.needs_drop(self.tcx, ty::ParamEnv::empty(traits::Reveal::All))
|
||||
common::type_needs_drop(self.tcx, ty)
|
||||
}
|
||||
|
||||
pub fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
|
||||
ty.is_sized(self.tcx, ty::ParamEnv::empty(traits::Reveal::All), DUMMY_SP)
|
||||
common::type_is_sized(self.tcx, ty)
|
||||
}
|
||||
|
||||
pub fn type_is_freeze(&self, ty: Ty<'tcx>) -> bool {
|
||||
ty.is_freeze(self.tcx, ty::ParamEnv::empty(traits::Reveal::All), DUMMY_SP)
|
||||
common::type_is_freeze(self.tcx, ty)
|
||||
}
|
||||
|
||||
pub fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> {
|
||||
pub fn tcx(&self) -> TyCtxt<'b, 'tcx, 'tcx> {
|
||||
self.tcx
|
||||
}
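
The signature change on `tcx()` just above (from `&'a self -> TyCtxt<'a, ...>` to `&self -> TyCtxt<'b, ...>`) matters because the context stores data with the longer `'b` lifetime; returning it at that lifetime means callers are not pinned to the borrow of the context itself. A standalone sketch of the same idea with plain references (toy types, not rustc's):

```rust
struct Ctxt<'b> {
    name: &'b str,
}

impl<'b> Ctxt<'b> {
    // Old shape: the result is tied to the borrow of `self`.
    fn name_short<'a>(&'a self) -> &'a str {
        self.name
    }

    // New shape: hand back the full `'b` lifetime stored in the struct.
    fn name_long(&self) -> &'b str {
        self.name
    }
}

fn main() {
    let owned = String::from("crate");
    let long;
    {
        let ctxt = Ctxt { name: &owned };
        long = ctxt.name_long();        // still usable after `ctxt` is gone
        let _short = ctxt.name_short(); // would not outlive this scope
    }
    assert_eq!(long, "crate");
}
```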
|
||||
|
||||
@ -345,17 +303,11 @@ impl<'b, 'tcx> SharedCrateContext<'b, 'tcx> {
|
||||
pub fn use_dll_storage_attrs(&self) -> bool {
|
||||
self.use_dll_storage_attrs
|
||||
}
|
||||
|
||||
pub fn output_filenames(&self) -> &OutputFilenames {
|
||||
self.output_filenames
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> LocalCrateContext<'a, 'tcx> {
|
||||
pub fn new(shared: &SharedCrateContext<'a, 'tcx>,
|
||||
codegen_unit: CodegenUnit<'tcx>,
|
||||
crate_trans_items: Arc<FxHashSet<TransItem<'tcx>>>,
|
||||
exported_symbols: Arc<ExportedSymbols>,)
|
||||
codegen_unit: Arc<CodegenUnit<'tcx>>)
|
||||
-> LocalCrateContext<'a, 'tcx> {
|
||||
unsafe {
|
||||
// Append ".rs" to LLVM module identifier.
|
||||
@ -385,10 +337,8 @@ impl<'a, 'tcx> LocalCrateContext<'a, 'tcx> {
|
||||
let local_ccx = LocalCrateContext {
|
||||
llmod,
|
||||
llcx,
|
||||
stats: Stats::default(),
|
||||
stats: RefCell::new(Stats::default()),
|
||||
codegen_unit,
|
||||
crate_trans_items,
|
||||
exported_symbols,
|
||||
instances: RefCell::new(FxHashMap()),
|
||||
vtables: RefCell::new(FxHashMap()),
|
||||
const_cstr_cache: RefCell::new(FxHashMap()),
|
||||
@ -452,7 +402,7 @@ impl<'a, 'tcx> LocalCrateContext<'a, 'tcx> {
|
||||
}
|
||||
|
||||
pub fn into_stats(self) -> Stats {
|
||||
self.stats
|
||||
self.stats.into_inner()
|
||||
}
|
||||
}
|
||||
|
||||
@ -465,7 +415,7 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> {
|
||||
self.local_ccx
|
||||
}
|
||||
|
||||
pub fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> {
|
||||
pub fn tcx(&self) -> TyCtxt<'b, 'tcx, 'tcx> {
|
||||
self.shared.tcx
|
||||
}
|
||||
|
||||
@ -495,14 +445,6 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> {
|
||||
&self.local().codegen_unit
|
||||
}
|
||||
|
||||
pub fn crate_trans_items(&self) -> &FxHashSet<TransItem<'tcx>> {
|
||||
&self.local().crate_trans_items
|
||||
}
|
||||
|
||||
pub fn exported_symbols(&self) -> &ExportedSymbols {
|
||||
&self.local().exported_symbols
|
||||
}
|
||||
|
||||
pub fn td(&self) -> llvm::TargetDataRef {
|
||||
unsafe { llvm::LLVMRustGetModuleDataLayout(self.llmod()) }
|
||||
}
|
||||
@ -545,7 +487,7 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> {
|
||||
&self.local().lltypes
|
||||
}
|
||||
|
||||
pub fn stats<'a>(&'a self) -> &'a Stats {
|
||||
pub fn stats<'a>(&'a self) -> &'a RefCell<Stats> {
|
||||
&self.local().stats
|
||||
}
|
||||
|
||||
|
@ -822,9 +822,9 @@ pub fn compile_unit_metadata(scc: &SharedCrateContext,
|
||||
|
||||
let gcov_cu_info = [
|
||||
path_to_mdstring(debug_context.llcontext,
|
||||
&scc.output_filenames().with_extension("gcno")),
|
||||
&scc.tcx().output_filenames(LOCAL_CRATE).with_extension("gcno")),
|
||||
path_to_mdstring(debug_context.llcontext,
|
||||
&scc.output_filenames().with_extension("gcda")),
|
||||
&scc.tcx().output_filenames(LOCAL_CRATE).with_extension("gcda")),
|
||||
cu_desc_metadata,
|
||||
];
|
||||
let gcov_metadata = llvm::LLVMMDNodeInContext(debug_context.llcontext,
|
||||
@ -1803,7 +1803,7 @@ pub fn create_global_var_metadata(cx: &CrateContext,
|
||||
};
|
||||
|
||||
let is_local_to_unit = is_node_local_to_unit(cx, node_id);
|
||||
let variable_type = common::def_ty(cx.shared(), node_def_id, Substs::empty());
|
||||
let variable_type = common::def_ty(cx.tcx(), node_def_id, Substs::empty());
|
||||
let type_metadata = type_metadata(cx, variable_type, span);
|
||||
let var_name = tcx.item_name(node_def_id).to_string();
|
||||
let linkage_name = mangled_name_of_item(cx, node_def_id, "");
|
||||
|
@ -428,7 +428,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
// If the method does *not* belong to a trait, proceed
|
||||
if cx.tcx().trait_id_of_impl(impl_def_id).is_none() {
|
||||
let impl_self_ty =
|
||||
common::def_ty(cx.shared(), impl_def_id, instance.substs);
|
||||
common::def_ty(cx.tcx(), impl_def_id, instance.substs);
|
||||
|
||||
// Only "class" methods are generally understood by LLVM,
|
||||
// so avoid methods on other types (e.g. `<*mut T>::null`).
|
||||
|
@ -37,7 +37,8 @@ pub fn is_node_local_to_unit(cx: &CrateContext, node_id: ast::NodeId) -> bool
|
||||
// visible). It might be better to use the `exported_items` set from
// `driver::CrateAnalysis` in the future, but (atm) this set is not
// available in the translation pass.
|
||||
!cx.exported_symbols().local_exports().contains(&node_id)
|
||||
let def_id = cx.tcx().hir.local_def_id(node_id);
|
||||
!cx.tcx().is_exported_symbol(def_id)
|
||||
}
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
|
@ -46,4 +46,28 @@ extern "platform-intrinsic" {
|
||||
unsafe { simd_add(i32x1(0), i32x1(1)); } // ok!
|
||||
```
|
||||
"##,
|
||||
|
||||
E0558: r##"
|
||||
The `export_name` attribute was malformed.
|
||||
|
||||
Erroneous code example:
|
||||
|
||||
```ignore (error-emitted-at-codegen-which-cannot-be-handled-by-compile_fail)
|
||||
#[export_name] // error: export_name attribute has invalid format
|
||||
pub fn something() {}
|
||||
|
||||
fn main() {}
|
||||
```
|
||||
|
||||
The `export_name` attribute expects a string in order to determine the name of
|
||||
the exported symbol. Example:
|
||||
|
||||
```
|
||||
#[export_name = "some_function"] // ok!
|
||||
pub fn something() {}
|
||||
|
||||
fn main() {}
|
||||
```
|
||||
"##,
|
||||
|
||||
}
|
||||
|
@ -14,15 +14,15 @@
|
||||
|
||||
use std;
|
||||
|
||||
use llvm;
|
||||
use llvm::{ValueRef};
|
||||
use rustc::ty::{self, Ty};
|
||||
use rustc::ty::layout::LayoutTyper;
|
||||
use builder::Builder;
|
||||
use common::*;
|
||||
use llvm::{ValueRef};
|
||||
use llvm;
|
||||
use meth;
|
||||
use monomorphize;
|
||||
use rustc::ty::layout::LayoutTyper;
|
||||
use rustc::ty::{self, Ty};
|
||||
use value::Value;
|
||||
use builder::Builder;
|
||||
|
||||
pub fn size_and_align_of_dst<'a, 'tcx>(bcx: &Builder<'a, 'tcx>, t: Ty<'tcx>, info: ValueRef)
|
||||
-> (ValueRef, ValueRef) {
|
||||
|
@ -64,7 +64,6 @@ extern crate serialize;
|
||||
extern crate gcc; // Used to locate MSVC, not gcc :)
|
||||
|
||||
pub use base::trans_crate;
|
||||
pub use back::symbol_names::provide;
|
||||
|
||||
pub use metadata::LlvmMetadataLoader;
|
||||
pub use llvm_util::{init, target_features, print_version, print_passes, print, enable_llvm_debug};
|
||||
@ -72,8 +71,11 @@ pub use llvm_util::{init, target_features, print_version, print_passes, print, e
|
||||
use std::rc::Rc;
|
||||
|
||||
use rustc::hir::def_id::CrateNum;
|
||||
use rustc::util::nodemap::{FxHashSet, FxHashMap};
|
||||
use rustc::middle::cstore::{NativeLibrary, CrateSource, LibSource};
|
||||
use rustc::ty::maps::Providers;
|
||||
use rustc::util::nodemap::{FxHashSet, FxHashMap};
|
||||
|
||||
mod diagnostics;
|
||||
|
||||
pub mod back {
|
||||
mod archive;
|
||||
@ -87,8 +89,6 @@ pub mod back {
|
||||
mod rpath;
|
||||
}
|
||||
|
||||
mod diagnostics;
|
||||
|
||||
mod abi;
|
||||
mod adt;
|
||||
mod allocator;
|
||||
@ -247,3 +247,15 @@ pub struct CrateInfo {
|
||||
}
|
||||
|
||||
__build_diagnostic_array! { librustc_trans, DIAGNOSTICS }
|
||||
|
||||
pub fn provide_local(providers: &mut Providers) {
|
||||
back::symbol_names::provide(providers);
|
||||
back::symbol_export::provide_local(providers);
|
||||
base::provide_local(providers);
|
||||
}
|
||||
|
||||
pub fn provide_extern(providers: &mut Providers) {
|
||||
back::symbol_names::provide(providers);
|
||||
back::symbol_export::provide_extern(providers);
|
||||
base::provide_extern(providers);
|
||||
}
|
||||
|
@ -80,7 +80,7 @@ pub fn get_vtable<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
|
||||
let nullptr = C_null(Type::nil(ccx).ptr_to());
|
||||
|
||||
let mut components: Vec<_> = [
|
||||
callee::get_fn(ccx, monomorphize::resolve_drop_in_place(ccx.shared(), ty)),
|
||||
callee::get_fn(ccx, monomorphize::resolve_drop_in_place(ccx.tcx(), ty)),
|
||||
C_usize(ccx, ccx.size_of(ty)),
|
||||
C_usize(ccx, ccx.align_of(ty) as u64)
|
||||
].iter().cloned().collect();
|
||||
|
@ -265,7 +265,7 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
|
||||
mir::TerminatorKind::Drop { ref location, target, unwind } => {
|
||||
let ty = location.ty(self.mir, bcx.tcx()).to_ty(bcx.tcx());
|
||||
let ty = self.monomorphize(&ty);
|
||||
let drop_fn = monomorphize::resolve_drop_in_place(bcx.ccx.shared(), ty);
|
||||
let drop_fn = monomorphize::resolve_drop_in_place(bcx.ccx.tcx(), ty);
|
||||
|
||||
if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
|
||||
// we don't actually need to drop anything.
|
||||
@ -429,7 +429,7 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
|
||||
|
||||
let (instance, mut llfn) = match callee.ty.sty {
|
||||
ty::TyFnDef(def_id, substs) => {
|
||||
(Some(monomorphize::resolve(bcx.ccx.shared(), def_id, substs)),
|
||||
(Some(monomorphize::resolve(bcx.ccx.tcx(), def_id, substs)),
|
||||
None)
|
||||
}
|
||||
ty::TyFnPtr(_) => {
|
||||
@ -546,7 +546,7 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
|
||||
};
|
||||
|
||||
let callee_ty = common::instance_ty(
|
||||
bcx.ccx.shared(), instance.as_ref().unwrap());
|
||||
bcx.ccx.tcx(), instance.as_ref().unwrap());
|
||||
trans_intrinsic_call(&bcx, callee_ty, &fn_ty, &llargs, dest,
|
||||
terminator.source_info.span);
|
||||
|
||||
|
@ -261,7 +261,7 @@ impl<'a, 'tcx> MirConstContext<'a, 'tcx> {
|
||||
substs: &'tcx Substs<'tcx>,
|
||||
args: IndexVec<mir::Local, Result<Const<'tcx>, ConstEvalErr<'tcx>>>)
|
||||
-> Result<Const<'tcx>, ConstEvalErr<'tcx>> {
|
||||
let instance = monomorphize::resolve(ccx.shared(), def_id, substs);
|
||||
let instance = monomorphize::resolve(ccx.tcx(), def_id, substs);
|
||||
let mir = ccx.tcx().instance_mir(instance.def);
|
||||
MirConstContext::new(ccx, &mir, instance.substs, args).trans()
|
||||
}
|
||||
|
@ -222,7 +222,7 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
|
||||
match operand.ty.sty {
|
||||
ty::TyClosure(def_id, substs) => {
|
||||
let instance = monomorphize::resolve_closure(
|
||||
bcx.ccx.shared(), def_id, substs, ty::ClosureKind::FnOnce);
|
||||
bcx.ccx.tcx(), def_id, substs, ty::ClosureKind::FnOnce);
|
||||
OperandValue::Immediate(callee::get_fn(bcx.ccx, instance))
|
||||
}
|
||||
_ => {
|
||||
|
@ -85,27 +85,26 @@ fn needs_fn_once_adapter_shim(actual_closure_kind: ty::ClosureKind,
|
||||
}
|
||||
|
||||
pub fn resolve_closure<'a, 'tcx> (
|
||||
scx: &SharedCrateContext<'a, 'tcx>,
|
||||
tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
def_id: DefId,
|
||||
substs: ty::ClosureSubsts<'tcx>,
|
||||
requested_kind: ty::ClosureKind)
|
||||
-> Instance<'tcx>
|
||||
{
|
||||
let actual_kind = scx.tcx().closure_kind(def_id);
|
||||
let actual_kind = tcx.closure_kind(def_id);
|
||||
|
||||
match needs_fn_once_adapter_shim(actual_kind, requested_kind) {
|
||||
Ok(true) => fn_once_adapter_instance(scx.tcx(), def_id, substs),
|
||||
Ok(true) => fn_once_adapter_instance(tcx, def_id, substs),
|
||||
_ => Instance::new(def_id, substs.substs)
|
||||
}
|
||||
}
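
`resolve_closure` (and `needs_fn_once_adapter_shim` above it) handle the case where a closure compiled for one closure kind is requested through another trait's calling convention. At the language level, the underlying relation is simply that every `Fn` closure can also be driven through `FnOnce`; a tiny illustration, unrelated to rustc's internal types:

```rust
fn call_once<F: FnOnce() -> i32>(f: F) -> i32 {
    f()
}

fn main() {
    let x = 1;
    let closure = || x + 1; // implements Fn, FnMut and FnOnce
    assert_eq!(call_once(closure), 2);
}
```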
|
||||
|
||||
fn resolve_associated_item<'a, 'tcx>(
|
||||
scx: &SharedCrateContext<'a, 'tcx>,
|
||||
tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
trait_item: &ty::AssociatedItem,
|
||||
trait_id: DefId,
|
||||
rcvr_substs: &'tcx Substs<'tcx>
|
||||
) -> Instance<'tcx> {
|
||||
let tcx = scx.tcx();
|
||||
let def_id = trait_item.def_id;
|
||||
debug!("resolve_associated_item(trait_item={:?}, \
|
||||
trait_id={:?}, \
|
||||
@ -132,7 +131,7 @@ fn resolve_associated_item<'a, 'tcx>(
|
||||
}
|
||||
traits::VtableClosure(closure_data) => {
|
||||
let trait_closure_kind = tcx.lang_items().fn_trait_kind(trait_id).unwrap();
|
||||
resolve_closure(scx, closure_data.closure_def_id, closure_data.substs,
|
||||
resolve_closure(tcx, closure_data.closure_def_id, closure_data.substs,
|
||||
trait_closure_kind)
|
||||
}
|
||||
traits::VtableFnPointer(ref data) => {
|
||||
@ -163,21 +162,21 @@ fn resolve_associated_item<'a, 'tcx>(
|
||||
/// The point where linking happens. Resolve a (def_id, substs)
|
||||
/// pair to an instance.
|
||||
pub fn resolve<'a, 'tcx>(
|
||||
scx: &SharedCrateContext<'a, 'tcx>,
|
||||
tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
def_id: DefId,
|
||||
substs: &'tcx Substs<'tcx>
|
||||
) -> Instance<'tcx> {
|
||||
debug!("resolve(def_id={:?}, substs={:?})",
|
||||
def_id, substs);
|
||||
let result = if let Some(trait_def_id) = scx.tcx().trait_of_item(def_id) {
|
||||
let result = if let Some(trait_def_id) = tcx.trait_of_item(def_id) {
|
||||
debug!(" => associated item, attempting to find impl");
|
||||
let item = scx.tcx().associated_item(def_id);
|
||||
resolve_associated_item(scx, &item, trait_def_id, substs)
|
||||
let item = tcx.associated_item(def_id);
|
||||
resolve_associated_item(tcx, &item, trait_def_id, substs)
|
||||
} else {
|
||||
let item_type = def_ty(scx, def_id, substs);
|
||||
let item_type = def_ty(tcx, def_id, substs);
|
||||
let def = match item_type.sty {
|
||||
ty::TyFnDef(..) if {
|
||||
let f = item_type.fn_sig(scx.tcx());
|
||||
let f = item_type.fn_sig(tcx);
|
||||
f.abi() == Abi::RustIntrinsic ||
|
||||
f.abi() == Abi::PlatformIntrinsic
|
||||
} =>
|
||||
@ -186,9 +185,9 @@ pub fn resolve<'a, 'tcx>(
|
||||
ty::InstanceDef::Intrinsic(def_id)
|
||||
}
|
||||
_ => {
|
||||
if Some(def_id) == scx.tcx().lang_items().drop_in_place_fn() {
|
||||
if Some(def_id) == tcx.lang_items().drop_in_place_fn() {
|
||||
let ty = substs.type_at(0);
|
||||
if scx.type_needs_drop(ty) {
|
||||
if type_needs_drop(tcx, ty) {
|
||||
debug!(" => nontrivial drop glue");
|
||||
ty::InstanceDef::DropGlue(def_id, Some(ty))
|
||||
} else {
|
||||
@ -209,27 +208,27 @@ pub fn resolve<'a, 'tcx>(
|
||||
}
|
||||
|
||||
pub fn resolve_drop_in_place<'a, 'tcx>(
|
||||
scx: &SharedCrateContext<'a, 'tcx>,
|
||||
tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
ty: Ty<'tcx>)
|
||||
-> ty::Instance<'tcx>
|
||||
{
|
||||
let def_id = scx.tcx().require_lang_item(DropInPlaceFnLangItem);
|
||||
let substs = scx.tcx().intern_substs(&[Kind::from(ty)]);
|
||||
resolve(scx, def_id, substs)
|
||||
let def_id = tcx.require_lang_item(DropInPlaceFnLangItem);
|
||||
let substs = tcx.intern_substs(&[Kind::from(ty)]);
|
||||
resolve(tcx, def_id, substs)
|
||||
}
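
`resolve_drop_in_place` feeds the same question the earlier `DropGlue` branch asks: does this type actually need non-trivial drop glue, or can the glue be a no-op? Outside the compiler the same distinction is observable through `std::mem::needs_drop`; a small illustration with made-up types:

```rust
use std::mem::needs_drop;

#[allow(dead_code)]
struct Plain(u32);     // no destructor anywhere: trivial drop, no real glue needed
#[allow(dead_code)]
struct Holder(String); // owns a String, so dropping it must run real code

fn main() {
    assert!(!needs_drop::<Plain>());
    assert!(needs_drop::<Holder>());
}
```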
|
||||
|
||||
pub fn custom_coerce_unsize_info<'scx, 'tcx>(scx: &SharedCrateContext<'scx, 'tcx>,
|
||||
source_ty: Ty<'tcx>,
|
||||
target_ty: Ty<'tcx>)
|
||||
-> CustomCoerceUnsized {
|
||||
pub fn custom_coerce_unsize_info<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
source_ty: Ty<'tcx>,
|
||||
target_ty: Ty<'tcx>)
|
||||
-> CustomCoerceUnsized {
|
||||
let trait_ref = ty::Binder(ty::TraitRef {
|
||||
def_id: scx.tcx().lang_items().coerce_unsized_trait().unwrap(),
|
||||
substs: scx.tcx().mk_substs_trait(source_ty, &[target_ty])
|
||||
def_id: tcx.lang_items().coerce_unsized_trait().unwrap(),
|
||||
substs: tcx.mk_substs_trait(source_ty, &[target_ty])
|
||||
});
|
||||
|
||||
match scx.tcx().trans_fulfill_obligation(DUMMY_SP, trait_ref) {
|
||||
match tcx.trans_fulfill_obligation(DUMMY_SP, trait_ref) {
|
||||
traits::VtableImpl(traits::VtableImplData { impl_def_id, .. }) => {
|
||||
scx.tcx().coerce_unsized_info(impl_def_id).custom_kind.unwrap()
|
||||
tcx.coerce_unsized_info(impl_def_id).custom_kind.unwrap()
|
||||
}
|
||||
vtable => {
|
||||
bug!("invalid CoerceUnsized vtable: {:?}", vtable);
|
||||
|
@ -102,14 +102,12 @@
|
||||
//! source-level module, functions from the same module will be available for
//! inlining, even when they are not marked #[inline].
|
||||
|
||||
use back::symbol_export::ExportedSymbols;
|
||||
use collector::InliningMap;
|
||||
use common;
|
||||
use context::SharedCrateContext;
|
||||
use llvm;
|
||||
use rustc::dep_graph::{DepNode, WorkProductId};
|
||||
use rustc::hir::def_id::DefId;
|
||||
use rustc::hir::map::DefPathData;
|
||||
use rustc::middle::trans::{Linkage, Visibility};
|
||||
use rustc::session::config::NUMBERED_CODEGEN_UNIT_MARKER;
|
||||
use rustc::ty::{self, TyCtxt, InstanceDef};
|
||||
use rustc::ty::item_path::characteristic_def_id_of_type;
|
||||
@ -119,7 +117,9 @@ use std::collections::hash_map::Entry;
|
||||
use std::hash::Hash;
|
||||
use syntax::ast::NodeId;
|
||||
use syntax::symbol::{Symbol, InternedString};
|
||||
use trans_item::{TransItem, InstantiationMode};
|
||||
use trans_item::{TransItem, TransItemExt, InstantiationMode};
|
||||
|
||||
pub use rustc::middle::trans::CodegenUnit;
|
||||
|
||||
pub enum PartitioningStrategy {
|
||||
/// Generate one codegen unit per source-level module.
|
||||
@ -129,57 +129,36 @@ pub enum PartitioningStrategy {
|
||||
FixedUnitCount(usize)
|
||||
}
|
||||
|
||||
pub struct CodegenUnit<'tcx> {
|
||||
/// A name for this CGU. Incremental compilation requires that
/// the name be unique amongst **all** crates. Therefore, it should
/// contain something unique to this crate (e.g., a module path)
/// as well as the crate name and disambiguator.
name: InternedString,
|
||||
pub trait CodegenUnitExt<'tcx> {
|
||||
fn as_codegen_unit(&self) -> &CodegenUnit<'tcx>;
|
||||
|
||||
items: FxHashMap<TransItem<'tcx>, (llvm::Linkage, llvm::Visibility)>,
|
||||
}
|
||||
|
||||
impl<'tcx> CodegenUnit<'tcx> {
|
||||
pub fn new(name: InternedString,
|
||||
items: FxHashMap<TransItem<'tcx>, (llvm::Linkage, llvm::Visibility)>)
|
||||
-> Self {
|
||||
CodegenUnit {
|
||||
name,
|
||||
items,
|
||||
}
|
||||
fn contains_item(&self, item: &TransItem<'tcx>) -> bool {
|
||||
self.items().contains_key(item)
|
||||
}
|
||||
|
||||
pub fn empty(name: InternedString) -> Self {
|
||||
Self::new(name, FxHashMap())
|
||||
fn name<'a>(&'a self) -> &'a InternedString
|
||||
where 'tcx: 'a,
|
||||
{
|
||||
&self.as_codegen_unit().name()
|
||||
}
|
||||
|
||||
pub fn contains_item(&self, item: &TransItem<'tcx>) -> bool {
|
||||
self.items.contains_key(item)
|
||||
fn items(&self) -> &FxHashMap<TransItem<'tcx>, (Linkage, Visibility)> {
|
||||
&self.as_codegen_unit().items()
|
||||
}
|
||||
|
||||
pub fn name(&self) -> &str {
|
||||
&self.name
|
||||
}
|
||||
|
||||
pub fn items(&self) -> &FxHashMap<TransItem<'tcx>, (llvm::Linkage, llvm::Visibility)> {
|
||||
&self.items
|
||||
}
|
||||
|
||||
pub fn work_product_id(&self) -> WorkProductId {
|
||||
fn work_product_id(&self) -> WorkProductId {
|
||||
WorkProductId::from_cgu_name(self.name())
|
||||
}
|
||||
|
||||
pub fn work_product_dep_node(&self) -> DepNode {
|
||||
fn work_product_dep_node(&self) -> DepNode {
|
||||
self.work_product_id().to_dep_node()
|
||||
}
|
||||
|
||||
pub fn compute_symbol_name_hash<'a>(&self,
|
||||
scx: &SharedCrateContext<'a, 'tcx>)
|
||||
-> u64 {
|
||||
fn compute_symbol_name_hash<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> u64 {
|
||||
let mut state = IchHasher::new();
|
||||
let all_items = self.items_in_deterministic_order(scx.tcx());
|
||||
let all_items = self.items_in_deterministic_order(tcx);
|
||||
for (item, (linkage, visibility)) in all_items {
|
||||
let symbol_name = item.symbol_name(scx.tcx());
|
||||
let symbol_name = item.symbol_name(tcx);
|
||||
symbol_name.len().hash(&mut state);
|
||||
symbol_name.hash(&mut state);
|
||||
linkage.hash(&mut state);
|
||||
@ -188,10 +167,10 @@ impl<'tcx> CodegenUnit<'tcx> {
|
||||
state.finish().to_smaller_hash()
|
||||
}
|
||||
|
||||
pub fn items_in_deterministic_order<'a>(&self,
|
||||
tcx: TyCtxt<'a, 'tcx, 'tcx>)
|
||||
-> Vec<(TransItem<'tcx>,
|
||||
(llvm::Linkage, llvm::Visibility))> {
|
||||
fn items_in_deterministic_order<'a>(&self,
|
||||
tcx: TyCtxt<'a, 'tcx, 'tcx>)
|
||||
-> Vec<(TransItem<'tcx>,
|
||||
(Linkage, Visibility))> {
|
||||
// The codegen tests rely on items being processed in the same order as
// they appear in the file, so for local items, we sort by node_id first
|
||||
#[derive(PartialEq, Eq, PartialOrd, Ord)]
|
||||
@ -209,7 +188,7 @@ impl<'tcx> CodegenUnit<'tcx> {
|
||||
}, item.symbol_name(tcx))
|
||||
}
|
||||
|
||||
let items: Vec<_> = self.items.iter().map(|(&i, &l)| (i, l)).collect();
|
||||
let items: Vec<_> = self.items().iter().map(|(&i, &l)| (i, l)).collect();
|
||||
let mut items : Vec<_> = items.iter()
|
||||
.map(|il| (il, item_sort_key(tcx, il.0))).collect();
|
||||
items.sort_by(|&(_, ref key1), &(_, ref key2)| key1.cmp(key2));
|
||||
@ -217,25 +196,26 @@ impl<'tcx> CodegenUnit<'tcx> {
|
||||
}
|
||||
}
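
Both `compute_symbol_name_hash` and `items_in_deterministic_order` exist to keep per-CGU results stable across compilations: items are brought into a fixed order before hashing or emission. A self-contained analogue using `DefaultHasher` instead of the compiler's ICH hasher (names and types here are illustrative only):

```rust
use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};

#[derive(Hash, Clone, Copy)]
enum Linkage { External, Internal }

fn unit_hash(items: &HashMap<String, Linkage>) -> u64 {
    // Sort by symbol name first so the hash does not depend on HashMap order.
    let mut sorted: Vec<_> = items.iter().collect();
    sorted.sort_by(|a, b| a.0.cmp(b.0));

    let mut state = DefaultHasher::new();
    for (symbol_name, linkage) in sorted {
        symbol_name.len().hash(&mut state);
        symbol_name.hash(&mut state);
        linkage.hash(&mut state);
    }
    state.finish()
}

fn main() {
    let mut items = HashMap::new();
    items.insert("foo::main".to_string(), Linkage::External);
    items.insert("foo::helper".to_string(), Linkage::Internal);
    println!("cgu hash: {:x}", unit_hash(&items));
}
```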
|
||||
|
||||
impl<'tcx> CodegenUnitExt<'tcx> for CodegenUnit<'tcx> {
|
||||
fn as_codegen_unit(&self) -> &CodegenUnit<'tcx> {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
// Anything we can't find a proper codegen unit for goes into this.
|
||||
const FALLBACK_CODEGEN_UNIT: &'static str = "__rustc_fallback_codegen_unit";
|
||||
|
||||
pub fn partition<'a, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>,
|
||||
pub fn partition<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
trans_items: I,
|
||||
strategy: PartitioningStrategy,
|
||||
inlining_map: &InliningMap<'tcx>,
|
||||
exported_symbols: &ExportedSymbols)
|
||||
inlining_map: &InliningMap<'tcx>)
|
||||
-> Vec<CodegenUnit<'tcx>>
|
||||
where I: Iterator<Item = TransItem<'tcx>>
|
||||
{
|
||||
let tcx = scx.tcx();
|
||||
|
||||
// In the first step, we place all regular translation items into their
|
||||
// respective 'home' codegen unit. Regular translation items are all
|
||||
// functions and statics defined in the local crate.
|
||||
let mut initial_partitioning = place_root_translation_items(scx,
|
||||
exported_symbols,
|
||||
let mut initial_partitioning = place_root_translation_items(tcx,
|
||||
trans_items);
|
||||
|
||||
debug_dump(tcx, "INITIAL PARTITIONING:", initial_partitioning.codegen_units.iter());
|
||||
@ -269,13 +249,13 @@ pub fn partition<'a, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>,
|
||||
} = post_inlining;
|
||||
|
||||
result.sort_by(|cgu1, cgu2| {
|
||||
(&cgu1.name[..]).cmp(&cgu2.name[..])
|
||||
cgu1.name().cmp(cgu2.name())
|
||||
});
|
||||
|
||||
if scx.sess().opts.enable_dep_node_debug_strs() {
|
||||
if tcx.sess.opts.enable_dep_node_debug_strs() {
|
||||
for cgu in &result {
|
||||
let dep_node = cgu.work_product_dep_node();
|
||||
scx.tcx().dep_graph.register_dep_node_debug_str(dep_node,
|
||||
tcx.dep_graph.register_dep_node_debug_str(dep_node,
|
||||
|| cgu.name().to_string());
|
||||
}
|
||||
}
|
||||
@ -304,15 +284,11 @@ struct PostInliningPartitioning<'tcx> {
|
||||
internalization_candidates: FxHashSet<TransItem<'tcx>>,
|
||||
}
|
||||
|
||||
fn place_root_translation_items<'a, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>,
|
||||
exported_symbols: &ExportedSymbols,
|
||||
fn place_root_translation_items<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
trans_items: I)
|
||||
-> PreInliningPartitioning<'tcx>
|
||||
where I: Iterator<Item = TransItem<'tcx>>
|
||||
{
|
||||
let tcx = scx.tcx();
|
||||
let exported_symbols = exported_symbols.local_exports();
|
||||
|
||||
let mut roots = FxHashSet();
|
||||
let mut codegen_units = FxHashMap();
|
||||
let is_incremental_build = tcx.sess.opts.incremental.is_some();
|
||||
@ -322,7 +298,7 @@ fn place_root_translation_items<'a, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>,
|
||||
let is_root = trans_item.instantiation_mode(tcx) == InstantiationMode::GloballyShared;
|
||||
|
||||
if is_root {
|
||||
let characteristic_def_id = characteristic_def_id_of_trans_item(scx, trans_item);
|
||||
let characteristic_def_id = characteristic_def_id_of_trans_item(tcx, trans_item);
|
||||
let is_volatile = is_incremental_build &&
|
||||
trans_item.is_generic_fn();
|
||||
|
||||
@ -332,29 +308,29 @@ fn place_root_translation_items<'a, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>,
|
||||
};
|
||||
|
||||
let make_codegen_unit = || {
|
||||
CodegenUnit::empty(codegen_unit_name.clone())
|
||||
CodegenUnit::new(codegen_unit_name.clone())
|
||||
};
|
||||
|
||||
let codegen_unit = codegen_units.entry(codegen_unit_name.clone())
|
||||
.or_insert_with(make_codegen_unit);
|
||||
|
||||
let (linkage, visibility) = match trans_item.explicit_linkage(tcx) {
|
||||
Some(explicit_linkage) => (explicit_linkage, llvm::Visibility::Default),
|
||||
Some(explicit_linkage) => (explicit_linkage, Visibility::Default),
|
||||
None => {
|
||||
match trans_item {
|
||||
TransItem::Fn(ref instance) => {
|
||||
let visibility = match instance.def {
|
||||
InstanceDef::Item(def_id) => {
|
||||
if let Some(node_id) = tcx.hir.as_local_node_id(def_id) {
|
||||
if exported_symbols.contains(&node_id) {
|
||||
llvm::Visibility::Default
|
||||
if def_id.is_local() {
|
||||
if tcx.is_exported_symbol(def_id) {
|
||||
Visibility::Default
|
||||
} else {
|
||||
internalization_candidates.insert(trans_item);
|
||||
llvm::Visibility::Hidden
|
||||
Visibility::Hidden
|
||||
}
|
||||
} else {
|
||||
internalization_candidates.insert(trans_item);
|
||||
llvm::Visibility::Hidden
|
||||
Visibility::Hidden
|
||||
}
|
||||
}
|
||||
InstanceDef::FnPtrShim(..) |
|
||||
@ -368,23 +344,24 @@ fn place_root_translation_items<'a, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>,
|
||||
trans_item)
|
||||
}
|
||||
};
|
||||
(llvm::ExternalLinkage, visibility)
|
||||
(Linkage::External, visibility)
|
||||
}
|
||||
TransItem::Static(node_id) |
|
||||
TransItem::GlobalAsm(node_id) => {
|
||||
let visibility = if exported_symbols.contains(&node_id) {
|
||||
llvm::Visibility::Default
|
||||
let def_id = tcx.hir.local_def_id(node_id);
|
||||
let visibility = if tcx.is_exported_symbol(def_id) {
|
||||
Visibility::Default
|
||||
} else {
|
||||
internalization_candidates.insert(trans_item);
|
||||
llvm::Visibility::Hidden
|
||||
Visibility::Hidden
|
||||
};
|
||||
(llvm::ExternalLinkage, visibility)
|
||||
(Linkage::External, visibility)
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
codegen_unit.items.insert(trans_item, (linkage, visibility));
|
||||
codegen_unit.items_mut().insert(trans_item, (linkage, visibility));
|
||||
roots.insert(trans_item);
|
||||
}
|
||||
}
|
||||
@ -394,7 +371,7 @@ fn place_root_translation_items<'a, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>,
|
||||
if codegen_units.is_empty() {
|
||||
let codegen_unit_name = Symbol::intern(FALLBACK_CODEGEN_UNIT).as_str();
|
||||
codegen_units.insert(codegen_unit_name.clone(),
|
||||
CodegenUnit::empty(codegen_unit_name.clone()));
|
||||
CodegenUnit::new(codegen_unit_name.clone()));
|
||||
}
|
||||
|
||||
PreInliningPartitioning {
|
||||
@ -417,17 +394,17 @@ fn merge_codegen_units<'tcx>(initial_partitioning: &mut PreInliningPartitioning<
|
||||
// translation items in a given unit. This could be improved on.
|
||||
while codegen_units.len() > target_cgu_count {
|
||||
// Sort small cgus to the back
|
||||
codegen_units.sort_by_key(|cgu| -(cgu.items.len() as i64));
|
||||
let smallest = codegen_units.pop().unwrap();
|
||||
codegen_units.sort_by_key(|cgu| -(cgu.items().len() as i64));
|
||||
let mut smallest = codegen_units.pop().unwrap();
|
||||
let second_smallest = codegen_units.last_mut().unwrap();
|
||||
|
||||
for (k, v) in smallest.items.into_iter() {
|
||||
second_smallest.items.insert(k, v);
|
||||
for (k, v) in smallest.items_mut().drain() {
|
||||
second_smallest.items_mut().insert(k, v);
|
||||
}
|
||||
}
|
||||
|
||||
for (index, cgu) in codegen_units.iter_mut().enumerate() {
|
||||
cgu.name = numbered_codegen_unit_name(crate_name, index);
|
||||
cgu.set_name(numbered_codegen_unit_name(crate_name, index));
|
||||
}
|
||||
|
||||
// If the initial partitioning contained less than target_cgu_count to begin
|
||||
@ -435,8 +412,8 @@ fn merge_codegen_units<'tcx>(initial_partitioning: &mut PreInliningPartitioning<
|
||||
// we reach the target count
|
||||
while codegen_units.len() < target_cgu_count {
|
||||
let index = codegen_units.len();
|
||||
codegen_units.push(
|
||||
CodegenUnit::empty(numbered_codegen_unit_name(crate_name, index)));
|
||||
let name = numbered_codegen_unit_name(crate_name, index);
|
||||
codegen_units.push(CodegenUnit::new(name));
|
||||
}
|
||||
}
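
The merging strategy used by `merge_codegen_units` is simple: while there are more units than requested, fold the smallest unit into the second-smallest and renumber; if there were too few to begin with, pad with empty units. A standalone sketch of that loop over plain maps (not the compiler's `CodegenUnit` type):

```rust
use std::collections::HashMap;

type Unit = HashMap<String, u32>; // item name -> any per-item data

fn merge_units(mut units: Vec<Unit>, target: usize) -> Vec<Unit> {
    while units.len() > target {
        // Sort the smallest units to the back, then merge the last two.
        units.sort_by_key(|u| std::cmp::Reverse(u.len()));
        let smallest = units.pop().unwrap();
        let second_smallest = units.last_mut().unwrap();
        second_smallest.extend(smallest);
    }
    // If we started below the target, pad with empty units.
    while units.len() < target {
        units.push(Unit::new());
    }
    units
}

fn main() {
    let units = vec![
        HashMap::from([("a".to_string(), 1)]),
        HashMap::from([("b".to_string(), 2), ("c".to_string(), 3)]),
        HashMap::from([("d".to_string(), 4)]),
    ];
    let merged = merge_units(units, 2);
    assert_eq!(merged.len(), 2);
    assert_eq!(merged.iter().map(|u| u.len()).sum::<usize>(), 4);
}
```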
|
||||
|
||||
@ -457,20 +434,17 @@ fn place_inlined_translation_items<'tcx>(initial_partitioning: PreInliningPartit
|
||||
for old_codegen_unit in initial_cgus {
|
||||
// Collect all items that need to be available in this codegen unit
|
||||
let mut reachable = FxHashSet();
|
||||
for root in old_codegen_unit.items.keys() {
|
||||
for root in old_codegen_unit.items().keys() {
|
||||
follow_inlining(*root, inlining_map, &mut reachable);
|
||||
}
|
||||
|
||||
let mut new_codegen_unit = CodegenUnit {
|
||||
name: old_codegen_unit.name,
|
||||
items: FxHashMap(),
|
||||
};
|
||||
let mut new_codegen_unit = CodegenUnit::new(old_codegen_unit.name().clone());
|
||||
|
||||
// Add all translation items that are not already there
|
||||
for trans_item in reachable {
|
||||
if let Some(linkage) = old_codegen_unit.items.get(&trans_item) {
|
||||
if let Some(linkage) = old_codegen_unit.items().get(&trans_item) {
|
||||
// This is a root, just copy it over
|
||||
new_codegen_unit.items.insert(trans_item, *linkage);
|
||||
new_codegen_unit.items_mut().insert(trans_item, *linkage);
|
||||
} else {
|
||||
if roots.contains(&trans_item) {
|
||||
bug!("GloballyShared trans-item inlined into other CGU: \
|
||||
@ -478,8 +452,10 @@ fn place_inlined_translation_items<'tcx>(initial_partitioning: PreInliningPartit
|
||||
}
|
||||
|
||||
// This is a cgu-private copy
|
||||
new_codegen_unit.items.insert(trans_item,
|
||||
(llvm::InternalLinkage, llvm::Visibility::Default));
|
||||
new_codegen_unit.items_mut().insert(
|
||||
trans_item,
|
||||
(Linkage::Internal, Visibility::Default),
|
||||
);
|
||||
}
|
||||
|
||||
if !single_codegen_unit {
|
||||
@ -490,7 +466,7 @@ fn place_inlined_translation_items<'tcx>(initial_partitioning: PreInliningPartit
|
||||
let placement = e.into_mut();
|
||||
debug_assert!(match *placement {
|
||||
TransItemPlacement::SingleCgu { ref cgu_name } => {
|
||||
*cgu_name != new_codegen_unit.name
|
||||
*cgu_name != *new_codegen_unit.name()
|
||||
}
|
||||
TransItemPlacement::MultipleCgus => true,
|
||||
});
|
||||
@ -498,7 +474,7 @@ fn place_inlined_translation_items<'tcx>(initial_partitioning: PreInliningPartit
|
||||
}
|
||||
Entry::Vacant(e) => {
|
||||
e.insert(TransItemPlacement::SingleCgu {
|
||||
cgu_name: new_codegen_unit.name.clone()
|
||||
cgu_name: new_codegen_unit.name().clone()
|
||||
});
|
||||
}
|
||||
}
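
`place_inlined_translation_items` starts from each unit's root items and pulls in everything reachable through the inlining map, giving the unit a CGU-private (`Internal`) copy of those items. The reachability walk itself is a plain transitive closure; sketched standalone (the item names are made up for illustration):

```rust
use std::collections::{HashMap, HashSet};

// Everything reachable from `root` through the inlining map.
fn follow_inlining<'a>(
    root: &'a str,
    inlining_map: &HashMap<&'a str, Vec<&'a str>>,
    reachable: &mut HashSet<&'a str>,
) {
    if !reachable.insert(root) {
        return; // already visited
    }
    if let Some(callees) = inlining_map.get(root) {
        for &callee in callees {
            follow_inlining(callee, inlining_map, reachable);
        }
    }
}

fn main() {
    let mut inlining_map: HashMap<&str, Vec<&str>> = HashMap::new();
    inlining_map.insert("main", vec!["helper"]);
    inlining_map.insert("helper", vec!["tiny"]);

    let mut reachable = HashSet::new();
    follow_inlining("main", &inlining_map, &mut reachable);
    assert!(reachable.contains("tiny")); // pulled in transitively
}
```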
|
||||
@ -536,8 +512,8 @@ fn internalize_symbols<'a, 'tcx>(_tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
// could be accessed from.
|
||||
for cgu in &mut partitioning.codegen_units {
|
||||
for candidate in &partitioning.internalization_candidates {
|
||||
cgu.items.insert(*candidate, (llvm::InternalLinkage,
|
||||
llvm::Visibility::Default));
|
||||
cgu.items_mut().insert(*candidate,
|
||||
(Linkage::Internal, Visibility::Default));
|
||||
}
|
||||
}
|
||||
|
||||
@ -561,10 +537,10 @@ fn internalize_symbols<'a, 'tcx>(_tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
// accessed from outside its defining codegen unit.
|
||||
for cgu in &mut partitioning.codegen_units {
|
||||
let home_cgu = TransItemPlacement::SingleCgu {
|
||||
cgu_name: cgu.name.clone()
|
||||
cgu_name: cgu.name().clone()
|
||||
};
|
||||
|
||||
for (accessee, linkage_and_visibility) in &mut cgu.items {
|
||||
for (accessee, linkage_and_visibility) in cgu.items_mut() {
|
||||
if !partitioning.internalization_candidates.contains(accessee) {
|
||||
// This item is no candidate for internalizing, so skip it.
|
||||
continue
|
||||
@ -587,15 +563,14 @@ fn internalize_symbols<'a, 'tcx>(_tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
|
||||
// If we got here, we did not find any accesses from other CGUs,
|
||||
// so it's fine to make this translation item internal.
|
||||
*linkage_and_visibility = (llvm::InternalLinkage, llvm::Visibility::Default);
|
||||
*linkage_and_visibility = (Linkage::Internal, Visibility::Default);
|
||||
}
|
||||
}
|
||||
}
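
The internalization pass walks each codegen unit and, for every internalization candidate that is never referenced from another unit, downgrades the item to internal linkage. A simplified standalone model of that decision, using maps and strings instead of the compiler's types:

```rust
use std::collections::{HashMap, HashSet};

#[derive(Clone, Copy, PartialEq, Debug)]
enum Linkage { External, Internal }

fn internalize(
    // cgu name -> (item name -> linkage)
    units: &mut HashMap<String, HashMap<String, Linkage>>,
    candidates: &HashSet<String>,
    // item name -> codegen units that reference it
    accessed_from: &HashMap<String, HashSet<String>>,
) {
    for (cgu_name, items) in units.iter_mut() {
        for (item, linkage) in items.iter_mut() {
            if !candidates.contains(item) {
                continue; // not a candidate for internalizing, skip it
            }
            let used_elsewhere = accessed_from
                .get(item)
                .map_or(false, |users| users.iter().any(|u| u != cgu_name));
            if !used_elsewhere {
                // No accesses from other CGUs: safe to make it internal.
                *linkage = Linkage::Internal;
            }
        }
    }
}

fn main() {
    let mut units = HashMap::from([(
        "cgu0".to_string(),
        HashMap::from([("helper".to_string(), Linkage::External)]),
    )]);
    let candidates = HashSet::from(["helper".to_string()]);
    let accessed_from = HashMap::new(); // nobody outside cgu0 uses `helper`
    internalize(&mut units, &candidates, &accessed_from);
    assert_eq!(units["cgu0"]["helper"], Linkage::Internal);
}
```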
|
||||
|
||||
fn characteristic_def_id_of_trans_item<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
|
||||
fn characteristic_def_id_of_trans_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
trans_item: TransItem<'tcx>)
|
||||
-> Option<DefId> {
|
||||
let tcx = scx.tcx();
|
||||
match trans_item {
|
||||
TransItem::Fn(instance) => {
|
||||
let def_id = match instance.def {
|
||||
@ -621,7 +596,7 @@ fn characteristic_def_id_of_trans_item<'a, 'tcx>(scx: &SharedCrateContext<'a, 't
|
||||
if let Some(impl_def_id) = tcx.impl_of_method(def_id) {
|
||||
// This is a method within an inherent impl, find out what the
|
||||
// self-type is:
|
||||
let impl_self_ty = common::def_ty(scx, impl_def_id, instance.substs);
|
||||
let impl_self_ty = common::def_ty(tcx, impl_def_id, instance.substs);
|
||||
if let Some(def_id) = characteristic_def_id_of_type(impl_self_ty) {
|
||||
return Some(def_id);
|
||||
}
|
||||
@ -679,9 +654,9 @@ fn debug_dump<'a, 'b, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
if cfg!(debug_assertions) {
|
||||
debug!("{}", label);
|
||||
for cgu in cgus {
|
||||
debug!("CodegenUnit {}:", cgu.name);
|
||||
debug!("CodegenUnit {}:", cgu.name());
|
||||
|
||||
for (trans_item, linkage) in &cgu.items {
|
||||
for (trans_item, linkage) in cgu.items() {
|
||||
let symbol_name = trans_item.symbol_name(tcx);
|
||||
let symbol_hash_start = symbol_name.rfind('h');
|
||||
let symbol_hash = symbol_hash_start.map(|i| &symbol_name[i ..])
|
||||
|
@ -25,23 +25,19 @@ use llvm;
|
||||
use monomorphize::Instance;
|
||||
use rustc::hir;
|
||||
use rustc::hir::def_id::DefId;
|
||||
use rustc::middle::trans::{Linkage, Visibility};
|
||||
use rustc::traits;
|
||||
use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
|
||||
use rustc::ty::subst::{Subst, Substs};
|
||||
use syntax::ast::{self, NodeId};
|
||||
use syntax::ast;
|
||||
use syntax::attr;
|
||||
use syntax_pos::Span;
|
||||
use syntax_pos::symbol::Symbol;
|
||||
use type_of;
|
||||
use std::fmt::Write;
|
||||
use std::fmt::{self, Write};
|
||||
use std::iter;
|
||||
|
||||
#[derive(PartialEq, Eq, Clone, Copy, Debug, Hash)]
|
||||
pub enum TransItem<'tcx> {
|
||||
Fn(Instance<'tcx>),
|
||||
Static(NodeId),
|
||||
GlobalAsm(NodeId),
|
||||
}
|
||||
pub use rustc::middle::trans::TransItem;
|
||||
|
||||
/// Describes how a translation item will be instantiated in object files.
|
||||
#[derive(PartialEq, Eq, Clone, Copy, Debug, Hash)]
|
||||
@ -55,15 +51,16 @@ pub enum InstantiationMode {
|
||||
LocalCopy,
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> TransItem<'tcx> {
|
||||
pub trait TransItemExt<'a, 'tcx>: fmt::Debug {
|
||||
fn as_trans_item(&self) -> &TransItem<'tcx>;
|
||||
|
||||
pub fn define(&self, ccx: &CrateContext<'a, 'tcx>) {
|
||||
fn define(&self, ccx: &CrateContext<'a, 'tcx>) {
|
||||
debug!("BEGIN IMPLEMENTING '{} ({})' in cgu {}",
|
||||
self.to_string(ccx.tcx()),
|
||||
self.to_raw_string(),
|
||||
ccx.codegen_unit().name());
|
||||
self.to_string(ccx.tcx()),
|
||||
self.to_raw_string(),
|
||||
ccx.codegen_unit().name());
|
||||
|
||||
match *self {
|
||||
match *self.as_trans_item() {
|
||||
TransItem::Static(node_id) => {
|
||||
let tcx = ccx.tcx();
|
||||
let item = tcx.hir.expect_item(node_id);
|
||||
@ -97,10 +94,10 @@ impl<'a, 'tcx> TransItem<'tcx> {
|
||||
ccx.codegen_unit().name());
|
||||
}
|
||||
|
||||
pub fn predefine(&self,
|
||||
ccx: &CrateContext<'a, 'tcx>,
|
||||
linkage: llvm::Linkage,
|
||||
visibility: llvm::Visibility) {
|
||||
fn predefine(&self,
|
||||
ccx: &CrateContext<'a, 'tcx>,
|
||||
linkage: Linkage,
|
||||
visibility: Visibility) {
|
||||
debug!("BEGIN PREDEFINING '{} ({})' in cgu {}",
|
||||
self.to_string(ccx.tcx()),
|
||||
self.to_raw_string(),
|
||||
@ -110,12 +107,12 @@ impl<'a, 'tcx> TransItem<'tcx> {
|
||||
|
||||
debug!("symbol {}", &symbol_name);
|
||||
|
||||
match *self {
|
||||
match *self.as_trans_item() {
|
||||
TransItem::Static(node_id) => {
|
||||
TransItem::predefine_static(ccx, node_id, linkage, visibility, &symbol_name);
|
||||
predefine_static(ccx, node_id, linkage, visibility, &symbol_name);
|
||||
}
|
||||
TransItem::Fn(instance) => {
|
||||
TransItem::predefine_fn(ccx, instance, linkage, visibility, &symbol_name);
|
||||
predefine_fn(ccx, instance, linkage, visibility, &symbol_name);
|
||||
}
|
||||
TransItem::GlobalAsm(..) => {}
|
||||
}
|
||||
@ -126,75 +123,8 @@ impl<'a, 'tcx> TransItem<'tcx> {
|
||||
ccx.codegen_unit().name());
|
||||
}
|
||||
|
||||
fn predefine_static(ccx: &CrateContext<'a, 'tcx>,
|
||||
node_id: ast::NodeId,
|
||||
linkage: llvm::Linkage,
|
||||
visibility: llvm::Visibility,
|
||||
symbol_name: &str) {
|
||||
let def_id = ccx.tcx().hir.local_def_id(node_id);
|
||||
let instance = Instance::mono(ccx.tcx(), def_id);
|
||||
let ty = common::instance_ty(ccx.shared(), &instance);
|
||||
let llty = type_of::type_of(ccx, ty);
|
||||
|
||||
let g = declare::define_global(ccx, symbol_name, llty).unwrap_or_else(|| {
|
||||
ccx.sess().span_fatal(ccx.tcx().hir.span(node_id),
|
||||
&format!("symbol `{}` is already defined", symbol_name))
|
||||
});
|
||||
|
||||
unsafe {
|
||||
llvm::LLVMRustSetLinkage(g, linkage);
|
||||
llvm::LLVMRustSetVisibility(g, visibility);
|
||||
}
|
||||
|
||||
ccx.instances().borrow_mut().insert(instance, g);
|
||||
ccx.statics().borrow_mut().insert(g, def_id);
|
||||
}
|
||||
|
||||
fn predefine_fn(ccx: &CrateContext<'a, 'tcx>,
|
||||
instance: Instance<'tcx>,
|
||||
linkage: llvm::Linkage,
|
||||
visibility: llvm::Visibility,
|
||||
symbol_name: &str) {
|
||||
assert!(!instance.substs.needs_infer() &&
|
||||
!instance.substs.has_param_types());
|
||||
|
||||
let mono_ty = common::instance_ty(ccx.shared(), &instance);
|
||||
let attrs = instance.def.attrs(ccx.tcx());
|
||||
let lldecl = declare::declare_fn(ccx, symbol_name, mono_ty);
|
||||
unsafe { llvm::LLVMRustSetLinkage(lldecl, linkage) };
|
||||
base::set_link_section(ccx, lldecl, &attrs);
|
||||
if linkage == llvm::Linkage::LinkOnceODRLinkage ||
|
||||
linkage == llvm::Linkage::WeakODRLinkage {
|
||||
llvm::SetUniqueComdat(ccx.llmod(), lldecl);
|
||||
}
|
||||
|
||||
// If we're compiling the compiler-builtins crate, e.g. the equivalent of
|
||||
// compiler-rt, then we want to implicitly compile everything with hidden
|
||||
// visibility as we're going to link this object all over the place but
|
||||
// don't want the symbols to get exported.
|
||||
if linkage != llvm::Linkage::InternalLinkage &&
|
||||
linkage != llvm::Linkage::PrivateLinkage &&
|
||||
attr::contains_name(ccx.tcx().hir.krate_attrs(), "compiler_builtins") {
|
||||
unsafe {
|
||||
llvm::LLVMRustSetVisibility(lldecl, llvm::Visibility::Hidden);
|
||||
}
|
||||
} else {
|
||||
unsafe {
|
||||
llvm::LLVMRustSetVisibility(lldecl, visibility);
|
||||
}
|
||||
}
|
||||
|
||||
debug!("predefine_fn: mono_ty = {:?} instance = {:?}", mono_ty, instance);
|
||||
if common::is_inline_instance(ccx.tcx(), &instance) {
|
||||
attributes::inline(lldecl, attributes::InlineAttr::Hint);
|
||||
}
|
||||
attributes::from_fn_attrs(ccx, &attrs, lldecl);
|
||||
|
||||
ccx.instances().borrow_mut().insert(instance, lldecl);
|
||||
}
|
||||
|
||||
pub fn symbol_name(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> ty::SymbolName {
|
||||
match *self {
|
||||
fn symbol_name(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> ty::SymbolName {
|
||||
match *self.as_trans_item() {
|
||||
TransItem::Fn(instance) => tcx.symbol_name(instance),
|
||||
TransItem::Static(node_id) => {
|
||||
let def_id = tcx.hir.local_def_id(node_id);
|
||||
@ -209,8 +139,8 @@ impl<'a, 'tcx> TransItem<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn local_span(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<Span> {
|
||||
match *self {
|
||||
fn local_span(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<Span> {
|
||||
match *self.as_trans_item() {
|
||||
TransItem::Fn(Instance { def, .. }) => {
|
||||
tcx.hir.as_local_node_id(def.def_id())
|
||||
}
|
||||
@ -221,10 +151,10 @@ impl<'a, 'tcx> TransItem<'tcx> {
|
||||
}.map(|node_id| tcx.hir.span(node_id))
|
||||
}
|
||||
|
||||
pub fn instantiation_mode(&self,
|
||||
tcx: TyCtxt<'a, 'tcx, 'tcx>)
|
||||
-> InstantiationMode {
|
||||
match *self {
|
||||
fn instantiation_mode(&self,
|
||||
tcx: TyCtxt<'a, 'tcx, 'tcx>)
|
||||
-> InstantiationMode {
|
||||
match *self.as_trans_item() {
|
||||
TransItem::Fn(ref instance) => {
|
||||
if self.explicit_linkage(tcx).is_none() &&
|
||||
common::requests_inline(tcx, instance)
|
||||
@ -239,8 +169,8 @@ impl<'a, 'tcx> TransItem<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_generic_fn(&self) -> bool {
|
||||
match *self {
|
||||
fn is_generic_fn(&self) -> bool {
|
||||
match *self.as_trans_item() {
|
||||
TransItem::Fn(ref instance) => {
|
||||
instance.substs.types().next().is_some()
|
||||
}
|
||||
@ -249,8 +179,8 @@ impl<'a, 'tcx> TransItem<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn explicit_linkage(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<llvm::Linkage> {
|
||||
let def_id = match *self {
|
||||
fn explicit_linkage(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<Linkage> {
|
||||
let def_id = match *self.as_trans_item() {
|
||||
TransItem::Fn(ref instance) => instance.def_id(),
|
||||
TransItem::Static(node_id) => tcx.hir.local_def_id(node_id),
|
||||
TransItem::GlobalAsm(..) => return None,
|
||||
@ -258,7 +188,7 @@ impl<'a, 'tcx> TransItem<'tcx> {
|
||||
|
||||
let attributes = tcx.get_attrs(def_id);
|
||||
if let Some(name) = attr::first_attr_value_str_by_name(&attributes, "linkage") {
|
||||
if let Some(linkage) = base::llvm_linkage_by_name(&name.as_str()) {
|
||||
if let Some(linkage) = base::linkage_by_name(&name.as_str()) {
|
||||
Some(linkage)
|
||||
} else {
|
||||
let span = tcx.hir.span_if_local(def_id);
|
||||
@ -298,9 +228,9 @@ impl<'a, 'tcx> TransItem<'tcx> {
|
||||
/// Similarly, if a vtable method has such a signature, and therefore can't
|
||||
/// be used, we can just not emit it and have a placeholder (a null pointer,
|
||||
/// which will never be accessed) in its place.
|
||||
pub fn is_instantiable(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> bool {
|
||||
fn is_instantiable(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> bool {
|
||||
debug!("is_instantiable({:?})", self);
|
||||
let (def_id, substs) = match *self {
|
||||
let (def_id, substs) = match *self.as_trans_item() {
|
||||
TransItem::Fn(ref instance) => (instance.def_id(), instance.substs),
|
||||
TransItem::Static(node_id) => (tcx.hir.local_def_id(node_id), Substs::empty()),
|
||||
// global asm never has predicates
|
||||
@ -311,10 +241,10 @@ impl<'a, 'tcx> TransItem<'tcx> {
|
||||
traits::normalize_and_test_predicates(tcx, predicates)
|
||||
}
|
||||
|
||||
pub fn to_string(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> String {
|
||||
fn to_string(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> String {
|
||||
let hir_map = &tcx.hir;
|
||||
|
||||
return match *self {
|
||||
return match *self.as_trans_item() {
|
||||
TransItem::Fn(instance) => {
|
||||
to_string_internal(tcx, "fn ", instance)
|
||||
},
|
||||
@ -340,8 +270,8 @@ impl<'a, 'tcx> TransItem<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn to_raw_string(&self) -> String {
|
||||
match *self {
|
||||
fn to_raw_string(&self) -> String {
|
||||
match *self.as_trans_item() {
|
||||
TransItem::Fn(instance) => {
|
||||
format!("Fn({:?}, {})",
|
||||
instance.def,
|
||||
@ -357,6 +287,77 @@ impl<'a, 'tcx> TransItem<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> TransItemExt<'a, 'tcx> for TransItem<'tcx> {
|
||||
fn as_trans_item(&self) -> &TransItem<'tcx> {
|
||||
self
|
||||
}
|
||||
}
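
Both `TransItemExt` above and `CodegenUnitExt` earlier follow the same pattern: the data type itself now lives in `rustc::middle::trans`, and the trans-crate-only behaviour is reattached through an extension trait whose only required method hands back the underlying value. A minimal standalone sketch of the pattern (toy types, not the compiler's):

```rust
// Pretend this struct is defined in another crate and cannot grow new
// inherent methods there.
struct CodegenUnit {
    name: String,
    items: Vec<String>,
}

// Extension trait: one required accessor, everything else provided on top.
trait CodegenUnitExt {
    fn as_codegen_unit(&self) -> &CodegenUnit;

    fn name(&self) -> &str {
        &self.as_codegen_unit().name
    }

    fn contains_item(&self, item: &str) -> bool {
        self.as_codegen_unit().items.iter().any(|i| i == item)
    }
}

impl CodegenUnitExt for CodegenUnit {
    fn as_codegen_unit(&self) -> &CodegenUnit {
        self
    }
}

fn main() {
    let cgu = CodegenUnit {
        name: "unit0".to_string(),
        items: vec!["foo".to_string()],
    };
    assert_eq!(cgu.name(), "unit0");
    assert!(cgu.contains_item("foo"));
}
```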
|
||||
|
||||
fn predefine_static<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
|
||||
node_id: ast::NodeId,
|
||||
linkage: Linkage,
|
||||
visibility: Visibility,
|
||||
symbol_name: &str) {
|
||||
let def_id = ccx.tcx().hir.local_def_id(node_id);
|
||||
let instance = Instance::mono(ccx.tcx(), def_id);
|
||||
let ty = common::instance_ty(ccx.tcx(), &instance);
|
||||
let llty = type_of::type_of(ccx, ty);
|
||||
|
||||
let g = declare::define_global(ccx, symbol_name, llty).unwrap_or_else(|| {
|
||||
ccx.sess().span_fatal(ccx.tcx().hir.span(node_id),
|
||||
&format!("symbol `{}` is already defined", symbol_name))
|
||||
});
|
||||
|
||||
unsafe {
|
||||
llvm::LLVMRustSetLinkage(g, base::linkage_to_llvm(linkage));
|
||||
llvm::LLVMRustSetVisibility(g, base::visibility_to_llvm(visibility));
|
||||
}
|
||||
|
||||
ccx.instances().borrow_mut().insert(instance, g);
|
||||
ccx.statics().borrow_mut().insert(g, def_id);
|
||||
}
|
||||
|
||||
fn predefine_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
|
||||
instance: Instance<'tcx>,
|
||||
linkage: Linkage,
|
||||
visibility: Visibility,
|
||||
symbol_name: &str) {
|
||||
assert!(!instance.substs.needs_infer() &&
|
||||
!instance.substs.has_param_types());
|
||||
|
||||
let mono_ty = common::instance_ty(ccx.tcx(), &instance);
|
||||
let attrs = instance.def.attrs(ccx.tcx());
|
||||
let lldecl = declare::declare_fn(ccx, symbol_name, mono_ty);
|
||||
unsafe { llvm::LLVMRustSetLinkage(lldecl, base::linkage_to_llvm(linkage)) };
|
||||
base::set_link_section(ccx, lldecl, &attrs);
|
||||
if linkage == Linkage::LinkOnceODR ||
|
||||
linkage == Linkage::WeakODR {
|
||||
llvm::SetUniqueComdat(ccx.llmod(), lldecl);
|
||||
}
|
||||
|
||||
// If we're compiling the compiler-builtins crate, e.g. the equivalent of
|
||||
// compiler-rt, then we want to implicitly compile everything with hidden
|
||||
// visibility as we're going to link this object all over the place but
|
||||
// don't want the symbols to get exported.
|
||||
if linkage != Linkage::Internal && linkage != Linkage::Private &&
|
||||
attr::contains_name(ccx.tcx().hir.krate_attrs(), "compiler_builtins") {
|
||||
unsafe {
|
||||
llvm::LLVMRustSetVisibility(lldecl, llvm::Visibility::Hidden);
|
||||
}
|
||||
} else {
|
||||
unsafe {
|
||||
llvm::LLVMRustSetVisibility(lldecl, base::visibility_to_llvm(visibility));
|
||||
}
|
||||
}
|
||||
|
||||
debug!("predefine_fn: mono_ty = {:?} instance = {:?}", mono_ty, instance);
|
||||
if common::is_inline_instance(ccx.tcx(), &instance) {
|
||||
attributes::inline(lldecl, attributes::InlineAttr::Hint);
|
||||
}
|
||||
attributes::from_fn_attrs(ccx, &attrs, lldecl);
|
||||
|
||||
ccx.instances().borrow_mut().insert(instance, lldecl);
|
||||
}
|
||||
|
||||
//=-----------------------------------------------------------------------------
|
||||
// TransItem String Keys
|
||||
|
@ -176,6 +176,11 @@ pub fn run_core(search_paths: SearchPaths,
|
||||
let arena = DroplessArena::new();
|
||||
let arenas = GlobalArenas::new();
|
||||
let hir_map = hir_map::map_crate(&mut hir_forest, defs);
|
||||
let output_filenames = driver::build_output_filenames(&input,
|
||||
&None,
|
||||
&None,
|
||||
&[],
|
||||
&sess);
|
||||
|
||||
abort_on_err(driver::phase_3_run_analysis_passes(&sess,
|
||||
&*cstore,
|
||||
@ -185,7 +190,8 @@ pub fn run_core(search_paths: SearchPaths,
|
||||
&arena,
|
||||
&arenas,
|
||||
&name,
|
||||
|tcx, analysis, _, result| {
|
||||
&output_filenames,
|
||||
|tcx, analysis, _, _, result| {
|
||||
if let Err(_) = result {
|
||||
sess.fatal("Compilation failed, aborting rustdoc");
|
||||
}
|
||||
|
@ -506,30 +506,6 @@ pub fn find_crate_name(attrs: &[Attribute]) -> Option<Symbol> {
|
||||
first_attr_value_str_by_name(attrs, "crate_name")
|
||||
}
|
||||
|
||||
/// Find the value of #[export_name=*] attribute and check its validity.
|
||||
pub fn find_export_name_attr(diag: &Handler, attrs: &[Attribute]) -> Option<Symbol> {
|
||||
attrs.iter().fold(None, |ia,attr| {
|
||||
if attr.check_name("export_name") {
|
||||
if let s@Some(_) = attr.value_str() {
|
||||
s
|
||||
} else {
|
||||
struct_span_err!(diag, attr.span, E0558,
|
||||
"export_name attribute has invalid format")
|
||||
.span_label(attr.span, "did you mean #[export_name=\"*\"]?")
|
||||
.emit();
|
||||
None
|
||||
}
|
||||
} else {
|
||||
ia
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn contains_extern_indicator(diag: &Handler, attrs: &[Attribute]) -> bool {
|
||||
contains_name(attrs, "no_mangle") ||
|
||||
find_export_name_attr(diag, attrs).is_some()
|
||||
}
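
`contains_extern_indicator` treats an item as externally visible if it carries either of the two attributes shown below on ordinary user code (a usage illustration, not compiler internals):

```rust
// `no_mangle` keeps the item's own name as the exported symbol name.
#[no_mangle]
pub extern "C" fn exported_by_no_mangle() {}

// `export_name` picks an explicit symbol name, and must be given a string
// (otherwise the E0558 error documented earlier in this diff is emitted).
#[export_name = "custom_symbol_name"]
pub extern "C" fn exported_by_export_name() {}

fn main() {}
```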
|
||||
|
||||
#[derive(Copy, Clone, PartialEq)]
|
||||
pub enum InlineAttr {
|
||||
None,
|
||||
|
@ -219,29 +219,6 @@ Erroneous code example:
|
||||
Delete the offending feature attribute.
|
||||
"##,
|
||||
|
||||
E0558: r##"
|
||||
The `export_name` attribute was malformed.
|
||||
|
||||
Erroneous code example:
|
||||
|
||||
```compile_fail,E0558
|
||||
#[export_name] // error: export_name attribute has invalid format
|
||||
pub fn something() {}
|
||||
|
||||
fn main() {}
|
||||
```
|
||||
|
||||
The `export_name` attribute expects a string in order to determine the name of
|
||||
the exported symbol. Example:
|
||||
|
||||
```
|
||||
#[export_name = "some_function"] // ok!
|
||||
pub fn something() {}
|
||||
|
||||
fn main() {}
|
||||
```
|
||||
"##,
|
||||
|
||||
E0565: r##"
|
||||
A literal was used in an attribute that doesn't support literals.