Mirror of https://github.com/rust-lang/rust.git
rustc: Attach an mpsc channel to TyCtxt
This commit attaches a channel for the LLVM worker threads to the `TyCtxt`. The channel will later be used during the codegen query to actually send work to the LLVM workers; for now this commit only plumbs the channel throughout the compiler to ensure it reaches the right consumers.
commit 3021c1d0bf
parent 2eada58706
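Before the diff itself, a minimal standalone sketch of the pattern being plumbed here may help. This is not rustc code: `Ctxt`, `WorkerMsg`, and `tx_to_workers` are made-up names standing in for `GlobalCtxt`, the trans-side `Message` enum, and `tx_to_llvm_workers`. It shows how a type-erased `mpsc::Sender<Box<dyn Any + Send>>` stored on a context can be cloned by anything holding the context, while the receiving side downcasts each message back to its concrete type, the same shape as the `msg.downcast::<Message>()` call in the diff.

```rust
use std::any::Any;
use std::sync::mpsc;
use std::thread;

// Illustrative stand-in for the trans-specific `Message` enum.
enum WorkerMsg {
    Token(usize),
    Done { worker_id: usize },
}

// Illustrative stand-in for `GlobalCtxt`: it only holds the sending half.
struct Ctxt {
    tx_to_workers: mpsc::Sender<Box<dyn Any + Send>>,
}

fn main() {
    // The driver creates the channel, keeps the sender on the context, and
    // hands the receiver to a coordinator thread.
    let (tx, rx) = mpsc::channel::<Box<dyn Any + Send>>();
    let ctxt = Ctxt { tx_to_workers: tx };

    // The coordinator receives type-erased messages and downcasts them back,
    // mirroring `match *msg.downcast::<Message>().ok().unwrap()` in the diff.
    let coordinator = thread::spawn(move || {
        while let Ok(msg) = rx.recv() {
            match *msg.downcast::<WorkerMsg>().ok().unwrap() {
                WorkerMsg::Token(n) => println!("token {}", n),
                WorkerMsg::Done { worker_id } => {
                    println!("worker {} done", worker_id);
                    break;
                }
            }
        }
    });

    // Senders are cheap to clone, which is why call sites can switch from a
    // locally created channel to `tcx.tx_to_llvm_workers.clone()`.
    let tx2 = ctxt.tx_to_workers.clone();
    tx2.send(Box::new(WorkerMsg::Token(1))).unwrap();
    ctxt.tx_to_workers
        .send(Box::new(WorkerMsg::Done { worker_id: 0 }))
        .unwrap();
    coordinator.join().unwrap();
}
```

Presumably the type erasure is what lets the sender live on `TyCtxt` in `librustc` without that crate having to know about the trans-specific `Message` type.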
@@ -64,6 +64,7 @@ use std::mem;
 use std::ops::Deref;
 use std::iter;
 use std::rc::Rc;
+use std::sync::mpsc;
 use syntax::abi;
 use syntax::ast::{self, Name, NodeId};
 use syntax::attr;
@@ -901,6 +902,14 @@ pub struct GlobalCtxt<'tcx> {
     /// error reporting, and so is lazily initialized and generally
     /// shouldn't taint the common path (hence the RefCell).
     pub all_traits: RefCell<Option<Vec<DefId>>>,
+
+    /// A general purpose channel to throw data out the back towards LLVM worker
+    /// threads.
+    ///
+    /// This is intended to only get used during the trans phase of the compiler
+    /// when satisfying the query for a particular codegen unit. Internally in
+    /// the query it'll send data along this channel to get processed later.
+    pub tx_to_llvm_workers: mpsc::Sender<Box<Any + Send>>,
 }

 impl<'tcx> GlobalCtxt<'tcx> {
@@ -1025,6 +1034,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
         named_region_map: resolve_lifetime::NamedRegionMap,
         hir: hir_map::Map<'tcx>,
         crate_name: &str,
+        tx: mpsc::Sender<Box<Any + Send>>,
         f: F) -> R
         where F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'tcx>) -> R
     {
@@ -1145,6 +1155,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
             derive_macros: RefCell::new(NodeMap()),
             stability_interner: RefCell::new(FxHashSet()),
             all_traits: RefCell::new(None),
+            tx_to_llvm_workers: tx,
         }, f)
     }

@@ -46,6 +46,7 @@ use super::Compilation;

 use serialize::json;

+use std::any::Any;
 use std::env;
 use std::ffi::{OsString, OsStr};
 use std::fs;
@@ -53,6 +54,7 @@ use std::io::{self, Write};
 use std::iter;
 use std::path::{Path, PathBuf};
 use std::rc::Rc;
+use std::sync::mpsc;
 use syntax::{ast, diagnostics, visit};
 use syntax::attr;
 use syntax::ext::base::ExtCtxt;
@@ -214,7 +216,7 @@ pub fn compile_input(sess: &Session,
         &arena,
         &arenas,
         &crate_name,
-        |tcx, analysis, incremental_hashes_map, result| {
+        |tcx, analysis, incremental_hashes_map, rx, result| {
         {
             // Eventually, we will want to track plugins.
             let _ignore = tcx.dep_graph.in_ignore();
@@ -242,7 +244,9 @@ pub fn compile_input(sess: &Session,
                 tcx.print_debug_stats();
             }

-            let trans = phase_4_translate_to_llvm(tcx, incremental_hashes_map,
+            let trans = phase_4_translate_to_llvm(tcx,
+                                                  incremental_hashes_map,
+                                                  rx,
                                                   &outputs);

             if log_enabled!(::log::LogLevel::Info) {
@@ -914,6 +918,7 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
     where F: for<'a> FnOnce(TyCtxt<'a, 'tcx, 'tcx>,
                             ty::CrateAnalysis,
                             IncrementalHashesMap,
+                            mpsc::Receiver<Box<Any + Send>>,
                             CompileResult) -> R
 {
     macro_rules! try_with_f {
@@ -1028,6 +1033,8 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
     passes.push_pass(MIR_OPTIMIZED, mir::transform::add_call_guards::CriticalCallEdges);
     passes.push_pass(MIR_OPTIMIZED, mir::transform::dump_mir::Marker("PreTrans"));

+    let (tx, rx) = mpsc::channel();
+
     TyCtxt::create_and_enter(sess,
                              cstore,
                              local_providers,
@@ -1039,6 +1046,7 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
                              named_region_map,
                              hir_map,
                              name,
+                             tx,
                              |tcx| {
         let incremental_hashes_map =
             time(time_passes,
@@ -1109,7 +1117,7 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,

         time(time_passes, "lint checking", || lint::check_crate(tcx));

-        return Ok(f(tcx, analysis, incremental_hashes_map, tcx.sess.compile_status()));
+        return Ok(f(tcx, analysis, incremental_hashes_map, rx, tcx.sess.compile_status()));
     })
 }

@@ -1117,6 +1125,7 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
 /// be discarded.
 pub fn phase_4_translate_to_llvm<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                            incremental_hashes_map: IncrementalHashesMap,
+                                           rx: mpsc::Receiver<Box<Any + Send>>,
                                            output_filenames: &OutputFilenames)
                                            -> write::OngoingCrateTranslation {
     let time_passes = tcx.sess.time_passes();
@@ -1126,9 +1135,9 @@ pub fn phase_4_translate_to_llvm<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
              || ::rustc::middle::dependency_format::calculate(tcx));

     let translation =
-        time(time_passes,
-             "translation",
-             move || trans::trans_crate(tcx, incremental_hashes_map, output_filenames));
+        time(time_passes, "translation", move || {
+            trans::trans_crate(tcx, incremental_hashes_map, rx, output_filenames)
+        });

     if tcx.sess.profile_queries() {
         profile::dump("profile_queries".to_string())
@@ -39,6 +39,7 @@ use context::{is_pie_binary, get_reloc_model};
 use jobserver::{Client, Acquired};
 use rustc_demangle;

+use std::any::Any;
 use std::ffi::CString;
 use std::fmt;
 use std::fs;
@@ -348,7 +349,7 @@ pub struct CodegenContext {
     // compiling incrementally
     pub incr_comp_session_dir: Option<PathBuf>,
     // Channel back to the main control thread to send messages to
-    coordinator_send: Sender<Message>,
+    coordinator_send: Sender<Box<Any + Send>>,
     // A reference to the TimeGraph so we can register timings. None means that
     // measuring is disabled.
     time_graph: Option<TimeGraph>,
@@ -674,7 +675,8 @@ pub fn start_async_translation(tcx: TyCtxt,
                                crate_output: &OutputFilenames,
                                time_graph: Option<TimeGraph>,
                                link: LinkMeta,
-                               metadata: EncodedMetadata)
+                               metadata: EncodedMetadata,
+                               coordinator_receive: Receiver<Box<Any + Send>>)
                                -> OngoingCrateTranslation {
     let sess = tcx.sess;
     let crate_name = tcx.crate_name(LOCAL_CRATE);
@@ -798,13 +800,12 @@ pub fn start_async_translation(tcx: TyCtxt,

     let (shared_emitter, shared_emitter_main) = SharedEmitter::new();
     let (trans_worker_send, trans_worker_receive) = channel();
-    let (coordinator_send, coordinator_receive) = channel();

     let coordinator_thread = start_executing_work(sess,
                                                   &crate_info,
                                                   shared_emitter,
                                                   trans_worker_send,
-                                                  coordinator_send.clone(),
+                                                  tcx.tx_to_llvm_workers.clone(),
                                                   coordinator_receive,
                                                   client,
                                                   time_graph.clone(),
@@ -824,7 +825,7 @@ pub fn start_async_translation(tcx: TyCtxt,

         time_graph,
         output_filenames: crate_output.clone(),
-        coordinator_send,
+        coordinator_send: tcx.tx_to_llvm_workers.clone(),
         trans_worker_receive,
         shared_emitter_main,
         future: coordinator_thread
@@ -1138,8 +1139,8 @@ fn start_executing_work(sess: &Session,
                         crate_info: &CrateInfo,
                         shared_emitter: SharedEmitter,
                         trans_worker_send: Sender<Message>,
-                        coordinator_send: Sender<Message>,
-                        coordinator_receive: Receiver<Message>,
+                        coordinator_send: Sender<Box<Any + Send>>,
+                        coordinator_receive: Receiver<Box<Any + Send>>,
                         jobserver: Client,
                         time_graph: Option<TimeGraph>,
                         exported_symbols: Arc<ExportedSymbols>)
@@ -1156,7 +1157,7 @@ fn start_executing_work(sess: &Session,
     // tokens on `rx` above which will get managed in the main loop below.
     let coordinator_send2 = coordinator_send.clone();
     let helper = jobserver.into_helper_thread(move |token| {
-        drop(coordinator_send2.send(Message::Token(token)));
+        drop(coordinator_send2.send(Box::new(Message::Token(token))));
     }).expect("failed to spawn helper thread");

     let mut each_linked_rlib_for_lto = Vec::new();
@@ -1430,7 +1431,8 @@ fn start_executing_work(sess: &Session,
             // Relinquish accidentally acquired extra tokens
             tokens.truncate(running);

-            match coordinator_receive.recv().unwrap() {
+            let msg = coordinator_receive.recv().unwrap();
+            match *msg.downcast::<Message>().ok().unwrap() {
                 // Save the token locally and the next turn of the loop will use
                 // this to spawn a new unit of work, or it may get dropped
                 // immediately if we have no more work to spawn.
@@ -1588,7 +1590,7 @@ fn spawn_work(cgcx: CodegenContext, work: WorkItem) {
     // Set up a destructor which will fire off a message that we're done as
     // we exit.
     struct Bomb {
-        coordinator_send: Sender<Message>,
+        coordinator_send: Sender<Box<Any + Send>>,
         result: Option<CompiledModule>,
         worker_id: usize,
     }
@@ -1599,10 +1601,10 @@ fn spawn_work(cgcx: CodegenContext, work: WorkItem) {
                 None => Err(())
             };

-            drop(self.coordinator_send.send(Message::Done {
+            drop(self.coordinator_send.send(Box::new(Message::Done {
                 result,
                 worker_id: self.worker_id,
-            }));
+            })));
         }
     }

@@ -1845,7 +1847,7 @@ pub struct OngoingCrateTranslation {
     allocator_module_config: ModuleConfig,

     time_graph: Option<TimeGraph>,
-    coordinator_send: Sender<Message>,
+    coordinator_send: Sender<Box<Any + Send>>,
     trans_worker_receive: Receiver<Message>,
     shared_emitter_main: SharedEmitterMain,
     future: thread::JoinHandle<CompiledModules>,
@@ -1931,11 +1933,11 @@ impl OngoingCrateTranslation {
                                               module_config,
                                               self.output_filenames.clone());

-        drop(self.coordinator_send.send(Message::TranslationDone {
+        drop(self.coordinator_send.send(Box::new(Message::TranslationDone {
             llvm_work_item,
             cost,
             is_last
-        }));
+        })));
     }

     pub fn submit_pre_translated_module_to_llvm(&self,
@@ -79,11 +79,13 @@ use rustc::util::nodemap::{NodeSet, FxHashMap, FxHashSet, DefIdSet};
 use CrateInfo;

 use libc::c_uint;
+use std::any::Any;
 use std::ffi::{CStr, CString};
 use std::str;
 use std::sync::Arc;
 use std::time::{Instant, Duration};
 use std::i32;
+use std::sync::mpsc;
 use syntax_pos::Span;
 use syntax::attr;
 use rustc::hir;
@@ -933,6 +935,7 @@ pub fn find_exported_symbols(tcx: TyCtxt) -> NodeSet {

 pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                              incremental_hashes_map: IncrementalHashesMap,
+                             rx: mpsc::Receiver<Box<Any + Send>>,
                              output_filenames: &OutputFilenames)
                              -> OngoingCrateTranslation {
     check_for_rustc_errors_attr(tcx);
@@ -974,7 +977,8 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                               output_filenames,
                                               time_graph.clone(),
                                               link_meta,
-                                              metadata);
+                                              metadata,
+                                              rx);

     ongoing_translation.submit_pre_translated_module_to_llvm(tcx.sess, metadata_module, true);

@@ -1001,7 +1005,8 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                               output_filenames,
                                               time_graph.clone(),
                                               link_meta,
-                                              metadata);
+                                              metadata,
+                                              rx);

     // Translate an allocator shim, if any
     //