Rollup merge of #133567 - bjorn3:various_cleanups, r=cjgillot

A bunch of cleanups

These are all extracted from a branch of mine that gets rid of driver queries. Most of the commits are not strictly necessary for that, but were found in the process of implementing the removal of driver queries.

Previous PR: https://github.com/rust-lang/rust/pull/132410
Matthias Krüger 2024-12-09 01:56:32 +01:00 committed by GitHub
commit d2881e4eb5
36 changed files with 309 additions and 335 deletions
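The recurring change in the diffs below is mechanical: functions that used to return Result<(), ErrorGuaranteed> (and forced their callers to thread `?` upward) now return nothing and instead emit a fatal error or call abort_if_errors themselves. A minimal before/after sketch of that pattern, using hypothetical stand-in types rather than the real rustc items:

// Stand-ins for rustc_errors types; illustration only.
struct ErrorGuaranteed;

struct Session;
impl Session {
    // Stand-in for DiagCtxt::abort_if_errors: raise a fatal error if any
    // error diagnostics were emitted earlier.
    fn abort_if_errors(&self) {}
}

// Before: every caller had to propagate the guarantee with `?`.
fn link_before(_sess: &Session) -> Result<(), ErrorGuaranteed> {
    // ... do the work, returning Err(guar) if errors were emitted ...
    Ok(())
}

// After: the function handles the error path itself; callers just call it.
fn link_after(sess: &Session) {
    // ... do the work, emitting fatal errors on failure ...
    sess.abort_if_errors();
}

fn main() {
    let sess = Session;
    let _ = link_before(&sess);
    link_after(&sess);
}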

View File

@ -227,8 +227,6 @@ impl CodegenBackend for CraneliftCodegenBackend {
sess: &Session,
outputs: &OutputFilenames,
) -> (CodegenResults, FxIndexMap<WorkProductId, WorkProduct>) {
let _timer = sess.timer("finish_ongoing_codegen");
ongoing_codegen.downcast::<driver::aot::OngoingCodegen>().unwrap().join(sess, outputs)
}
}

View File

@ -36,7 +36,7 @@ use rustc_codegen_ssa::back::write::{
use rustc_codegen_ssa::traits::*;
use rustc_codegen_ssa::{CodegenResults, CompiledModule, ModuleCodegen};
use rustc_data_structures::fx::FxIndexMap;
use rustc_errors::{DiagCtxtHandle, ErrorGuaranteed, FatalError};
use rustc_errors::{DiagCtxtHandle, FatalError};
use rustc_metadata::EncodedMetadata;
use rustc_middle::dep_graph::{WorkProduct, WorkProductId};
use rustc_middle::ty::TyCtxt;
@ -370,19 +370,14 @@ impl CodegenBackend for LlvmCodegenBackend {
(codegen_results, work_products)
}
fn link(
&self,
sess: &Session,
codegen_results: CodegenResults,
outputs: &OutputFilenames,
) -> Result<(), ErrorGuaranteed> {
fn link(&self, sess: &Session, codegen_results: CodegenResults, outputs: &OutputFilenames) {
use rustc_codegen_ssa::back::link::link_binary;
use crate::back::archive::LlvmArchiveBuilderBuilder;
// Run the linker on any artifacts that resulted from the LLVM run.
// This should produce either a finished executable or library.
link_binary(sess, &LlvmArchiveBuilderBuilder, codegen_results, outputs)
link_binary(sess, &LlvmArchiveBuilderBuilder, codegen_results, outputs);
}
}

View File

@ -15,7 +15,7 @@ use rustc_ast::CRATE_NODE_ID;
use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
use rustc_data_structures::memmap::Mmap;
use rustc_data_structures::temp_dir::MaybeTempDir;
use rustc_errors::{DiagCtxtHandle, ErrorGuaranteed, FatalError};
use rustc_errors::{DiagCtxtHandle, FatalError};
use rustc_fs_util::{fix_windows_verbatim_for_gcc, try_canonicalize};
use rustc_hir::def_id::{CrateNum, LOCAL_CRATE};
use rustc_metadata::fs::{METADATA_FILENAME, copy_to_stdout, emit_wrapper_file};
@ -71,7 +71,7 @@ pub fn link_binary(
archive_builder_builder: &dyn ArchiveBuilderBuilder,
codegen_results: CodegenResults,
outputs: &OutputFilenames,
) -> Result<(), ErrorGuaranteed> {
) {
let _timer = sess.timer("link_binary");
let output_metadata = sess.opts.output_types.contains_key(&OutputType::Metadata);
let mut tempfiles_for_stdout_output: Vec<PathBuf> = Vec::new();
@ -119,7 +119,7 @@ pub fn link_binary(
&codegen_results,
RlibFlavor::Normal,
&path,
)?
)
.build(&out_filename);
}
CrateType::Staticlib => {
@ -129,7 +129,7 @@ pub fn link_binary(
&codegen_results,
&out_filename,
&path,
)?;
);
}
_ => {
link_natively(
@ -139,7 +139,7 @@ pub fn link_binary(
&out_filename,
&codegen_results,
path.as_ref(),
)?;
);
}
}
if sess.opts.json_artifact_notifications {
@ -225,8 +225,6 @@ pub fn link_binary(
maybe_remove_temps_from_module(preserve_objects, preserve_dwarf_objects, module);
}
});
Ok(())
}
// Crate type is not passed when calculating the dylibs to include for LTO. In that case all
@ -298,7 +296,7 @@ fn link_rlib<'a>(
codegen_results: &CodegenResults,
flavor: RlibFlavor,
tmpdir: &MaybeTempDir,
) -> Result<Box<dyn ArchiveBuilder + 'a>, ErrorGuaranteed> {
) -> Box<dyn ArchiveBuilder + 'a> {
let mut ab = archive_builder_builder.new_archive_builder(sess);
let trailing_metadata = match flavor {
@ -374,7 +372,7 @@ fn link_rlib<'a>(
{
let path = find_native_static_library(filename.as_str(), true, sess);
let src = read(path)
.map_err(|e| sess.dcx().emit_fatal(errors::ReadFileError { message: e }))?;
.unwrap_or_else(|e| sess.dcx().emit_fatal(errors::ReadFileError { message: e }));
let (data, _) = create_wrapper_file(sess, ".bundled_lib".to_string(), &src);
let wrapper_file = emit_wrapper_file(sess, &data, tmpdir, filename.as_str());
packed_bundled_libs.push(wrapper_file);
@ -392,7 +390,7 @@ fn link_rlib<'a>(
codegen_results.crate_info.used_libraries.iter(),
tmpdir.as_ref(),
true,
)? {
) {
ab.add_archive(&output_path, Box::new(|_| false)).unwrap_or_else(|error| {
sess.dcx().emit_fatal(errors::AddNativeLibrary { library_path: output_path, error });
});
@ -433,7 +431,7 @@ fn link_rlib<'a>(
ab.add_file(&lib)
}
Ok(ab)
ab
}
/// Extract all symbols defined in raw-dylib libraries, collated by library name.
@ -445,7 +443,7 @@ fn link_rlib<'a>(
fn collate_raw_dylibs<'a>(
sess: &Session,
used_libraries: impl IntoIterator<Item = &'a NativeLib>,
) -> Result<Vec<(String, Vec<DllImport>)>, ErrorGuaranteed> {
) -> Vec<(String, Vec<DllImport>)> {
// Use index maps to preserve original order of imports and libraries.
let mut dylib_table = FxIndexMap::<String, FxIndexMap<Symbol, &DllImport>>::default();
@ -469,15 +467,13 @@ fn collate_raw_dylibs<'a>(
}
}
}
if let Some(guar) = sess.dcx().has_errors() {
return Err(guar);
}
Ok(dylib_table
sess.dcx().abort_if_errors();
dylib_table
.into_iter()
.map(|(name, imports)| {
(name, imports.into_iter().map(|(_, import)| import.clone()).collect())
})
.collect())
.collect()
}
fn create_dll_import_libs<'a>(
@ -486,8 +482,8 @@ fn create_dll_import_libs<'a>(
used_libraries: impl IntoIterator<Item = &'a NativeLib>,
tmpdir: &Path,
is_direct_dependency: bool,
) -> Result<Vec<PathBuf>, ErrorGuaranteed> {
Ok(collate_raw_dylibs(sess, used_libraries)?
) -> Vec<PathBuf> {
collate_raw_dylibs(sess, used_libraries)
.into_iter()
.map(|(raw_dylib_name, raw_dylib_imports)| {
let name_suffix = if is_direct_dependency { "_imports" } else { "_imports_indirect" };
@ -537,7 +533,7 @@ fn create_dll_import_libs<'a>(
output_path
})
.collect())
.collect()
}
/// Create a static archive.
@ -557,7 +553,7 @@ fn link_staticlib(
codegen_results: &CodegenResults,
out_filename: &Path,
tempdir: &MaybeTempDir,
) -> Result<(), ErrorGuaranteed> {
) {
info!("preparing staticlib to {:?}", out_filename);
let mut ab = link_rlib(
sess,
@ -565,7 +561,7 @@ fn link_staticlib(
codegen_results,
RlibFlavor::StaticlibBase,
tempdir,
)?;
);
let mut all_native_libs = vec![];
let res = each_linked_rlib(
@ -656,8 +652,6 @@ fn link_staticlib(
print_native_static_libs(sess, &print.out, &all_native_libs, &all_rust_dylibs);
}
}
Ok(())
}
/// Use `thorin` (rust implementation of a dwarf packaging utility) to link DWARF objects into a
@ -773,7 +767,7 @@ fn link_natively(
out_filename: &Path,
codegen_results: &CodegenResults,
tmpdir: &Path,
) -> Result<(), ErrorGuaranteed> {
) {
info!("preparing {:?} to {:?}", crate_type, out_filename);
let (linker_path, flavor) = linker_and_flavor(sess);
let self_contained_components = self_contained_components(sess, crate_type);
@ -797,7 +791,7 @@ fn link_natively(
temp_filename,
codegen_results,
self_contained_components,
)?;
);
linker::disable_localization(&mut cmd);
@ -1177,8 +1171,6 @@ fn link_natively(
ab.add_file(temp_filename);
ab.build(out_filename);
}
Ok(())
}
fn strip_symbols_with_external_utility(
@ -2232,7 +2224,7 @@ fn linker_with_args(
out_filename: &Path,
codegen_results: &CodegenResults,
self_contained_components: LinkSelfContainedComponents,
) -> Result<Command, ErrorGuaranteed> {
) -> Command {
let self_contained_crt_objects = self_contained_components.is_crt_objects_enabled();
let cmd = &mut *super::linker::get_linker(
sess,
@ -2356,7 +2348,7 @@ fn linker_with_args(
codegen_results.crate_info.used_libraries.iter(),
tmpdir,
true,
)? {
) {
cmd.add_object(&output_path);
}
// As with add_upstream_native_libraries, we need to add the upstream raw-dylib symbols in case
@ -2388,7 +2380,7 @@ fn linker_with_args(
native_libraries_from_nonstatics,
tmpdir,
false,
)? {
) {
cmd.add_object(&output_path);
}
@ -2435,7 +2427,7 @@ fn linker_with_args(
// to it and remove the option. Currently the last holdout is wasm32-unknown-emscripten.
add_post_link_args(cmd, sess, flavor);
Ok(cmd.take_cmd())
cmd.take_cmd()
}
fn add_order_independent_options(

View File

@ -1883,7 +1883,11 @@ impl Translate for SharedEmitter {
}
impl Emitter for SharedEmitter {
fn emit_diagnostic(&mut self, mut diag: rustc_errors::DiagInner) {
fn emit_diagnostic(
&mut self,
mut diag: rustc_errors::DiagInner,
_registry: &rustc_errors::registry::Registry,
) {
// Check that we aren't missing anything interesting when converting to
// the cut-down local `DiagInner`.
assert_eq!(diag.span, MultiSpan::new());
@ -2028,8 +2032,6 @@ pub struct OngoingCodegen<B: ExtraBackendMethods> {
impl<B: ExtraBackendMethods> OngoingCodegen<B> {
pub fn join(self, sess: &Session) -> (CodegenResults, FxIndexMap<WorkProductId, WorkProduct>) {
let _timer = sess.timer("finish_ongoing_codegen");
self.shared_emitter_main.check(sess, true);
let compiled_modules = sess.time("join_worker_thread", || match self.coordinator.join() {
Ok(Ok(compiled_modules)) => compiled_modules,

View File

@ -4,7 +4,6 @@ use std::hash::Hash;
use rustc_ast::expand::allocator::AllocatorKind;
use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::sync::{DynSend, DynSync};
use rustc_errors::ErrorGuaranteed;
use rustc_metadata::EncodedMetadata;
use rustc_metadata::creader::MetadataLoaderDyn;
use rustc_middle::dep_graph::{WorkProduct, WorkProductId};
@ -84,13 +83,8 @@ pub trait CodegenBackend {
) -> (CodegenResults, FxIndexMap<WorkProductId, WorkProduct>);
/// This is called on the returned [`CodegenResults`] from [`join_codegen`](Self::join_codegen).
fn link(
&self,
sess: &Session,
codegen_results: CodegenResults,
outputs: &OutputFilenames,
) -> Result<(), ErrorGuaranteed> {
link_binary(sess, &ArArchiveBuilderBuilder, codegen_results, outputs)
fn link(&self, sess: &Session, codegen_results: CodegenResults, outputs: &OutputFilenames) {
link_binary(sess, &ArArchiveBuilderBuilder, codegen_results, outputs);
}
/// Returns `true` if this backend can be safely called from multiple threads.

View File

@ -99,10 +99,7 @@ impl Expander {
/// If this function is intended to be used with command line arguments,
/// `argv[0]` must be removed prior to calling it manually.
#[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable
pub fn arg_expand_all(
early_dcx: &EarlyDiagCtxt,
at_args: &[String],
) -> Result<Vec<String>, ErrorGuaranteed> {
pub fn arg_expand_all(early_dcx: &EarlyDiagCtxt, at_args: &[String]) -> Vec<String> {
let mut expander = Expander::default();
let mut result = Ok(());
for arg in at_args {
@ -110,7 +107,10 @@ pub fn arg_expand_all(
result = Err(early_dcx.early_err(format!("failed to load argument file: {err}")));
}
}
result.map(|()| expander.finish())
if let Err(guar) = result {
guar.raise_fatal();
}
expander.finish()
}
/// Gets the raw unprocessed command-line arguments as Unicode strings, without doing any further

View File

@ -42,9 +42,7 @@ use rustc_data_structures::profiling::{
};
use rustc_errors::emitter::stderr_destination;
use rustc_errors::registry::Registry;
use rustc_errors::{
ColorConfig, DiagCtxt, ErrCode, ErrorGuaranteed, FatalError, PResult, markdown,
};
use rustc_errors::{ColorConfig, DiagCtxt, ErrCode, FatalError, PResult, markdown};
use rustc_feature::find_gated_cfg;
use rustc_interface::util::{self, get_codegen_backend};
use rustc_interface::{Linker, Queries, interface, passes};
@ -271,14 +269,14 @@ impl<'a> RunCompiler<'a> {
}
/// Parse args and run the compiler.
pub fn run(self) -> interface::Result<()> {
pub fn run(self) {
run_compiler(
self.at_args,
self.callbacks,
self.file_loader,
self.make_codegen_backend,
self.using_internal_features,
)
);
}
}
@ -290,7 +288,7 @@ fn run_compiler(
Box<dyn FnOnce(&config::Options) -> Box<dyn CodegenBackend> + Send>,
>,
using_internal_features: Arc<std::sync::atomic::AtomicBool>,
) -> interface::Result<()> {
) {
let mut default_early_dcx = EarlyDiagCtxt::new(ErrorOutputType::default());
// Throw away the first argument, the name of the binary.
@ -303,9 +301,11 @@ fn run_compiler(
// the compiler with @empty_file as argv[0] and no more arguments.
let at_args = at_args.get(1..).unwrap_or_default();
let args = args::arg_expand_all(&default_early_dcx, at_args)?;
let args = args::arg_expand_all(&default_early_dcx, at_args);
let Some(matches) = handle_options(&default_early_dcx, &args) else { return Ok(()) };
let Some(matches) = handle_options(&default_early_dcx, &args) else {
return;
};
let sopts = config::build_session_options(&mut default_early_dcx, &matches);
// fully initialize ice path static once unstable options are available as context
@ -313,7 +313,7 @@ fn run_compiler(
if let Some(ref code) = matches.opt_str("explain") {
handle_explain(&default_early_dcx, diagnostics_registry(), code, sopts.color);
return Ok(());
return;
}
let (odir, ofile) = make_output(&matches);
@ -338,7 +338,7 @@ fn run_compiler(
expanded_args: args,
};
let has_input = match make_input(&default_early_dcx, &matches.free)? {
let has_input = match make_input(&default_early_dcx, &matches.free) {
Some(input) => {
config.input = input;
true // has input: normal compilation
@ -358,7 +358,7 @@ fn run_compiler(
// printing some information without compiling, or exiting immediately
// after parsing, etc.
let early_exit = || {
if let Some(guar) = sess.dcx().has_errors() { Err(guar) } else { Ok(()) }
sess.dcx().abort_if_errors();
};
// This implements `-Whelp`. It should be handled very early, like
@ -389,22 +389,25 @@ fn run_compiler(
}
let linker = compiler.enter(|queries| {
let early_exit = || early_exit().map(|_| None);
let early_exit = || {
sess.dcx().abort_if_errors();
None
};
// Parse the crate root source code (doesn't parse submodules yet)
// Everything else is parsed during macro expansion.
queries.parse()?;
queries.parse();
// If pretty printing is requested: Figure out the representation, print it and exit
if let Some(pp_mode) = sess.opts.pretty {
if pp_mode.needs_ast_map() {
queries.global_ctxt()?.enter(|tcx| {
queries.global_ctxt().enter(|tcx| {
tcx.ensure().early_lint_checks(());
pretty::print(sess, pp_mode, pretty::PrintExtra::NeedsAstMap { tcx });
passes::write_dep_info(tcx);
});
} else {
let krate = queries.parse()?;
let krate = queries.parse();
pretty::print(sess, pp_mode, pretty::PrintExtra::AfterParsing {
krate: &*krate.borrow(),
});
@ -423,17 +426,17 @@ fn run_compiler(
}
// Make sure name resolution and macro expansion is run.
queries.global_ctxt()?.enter(|tcx| tcx.resolver_for_lowering());
queries.global_ctxt().enter(|tcx| tcx.resolver_for_lowering());
if let Some(metrics_dir) = &sess.opts.unstable_opts.metrics_dir {
queries.global_ctxt()?.enter(|tcxt| dump_feature_usage_metrics(tcxt, metrics_dir));
queries.global_ctxt().enter(|tcxt| dump_feature_usage_metrics(tcxt, metrics_dir));
}
if callbacks.after_expansion(compiler, queries) == Compilation::Stop {
return early_exit();
}
queries.global_ctxt()?.enter(|tcx| {
queries.global_ctxt().enter(|tcx| {
passes::write_dep_info(tcx);
if sess.opts.output_types.contains_key(&OutputType::DepInfo)
@ -446,24 +449,21 @@ fn run_compiler(
return early_exit();
}
tcx.analysis(())?;
tcx.ensure().analysis(());
if callbacks.after_analysis(compiler, tcx) == Compilation::Stop {
return early_exit();
}
Ok(Some(Linker::codegen_and_build_linker(tcx, &*compiler.codegen_backend)?))
Some(Linker::codegen_and_build_linker(tcx, &*compiler.codegen_backend))
})
})?;
});
// Linking is done outside the `compiler.enter()` so that the
// `GlobalCtxt` within `Queries` can be freed as early as possible.
if let Some(linker) = linker {
let _timer = sess.timer("link");
linker.link(sess, codegen_backend)?
linker.link(sess, codegen_backend);
}
Ok(())
})
}
@ -496,21 +496,17 @@ fn make_output(matches: &getopts::Matches) -> (Option<PathBuf>, Option<OutFileNa
/// Extract input (string or file and optional path) from matches.
/// This handles reading from stdin if `-` is provided.
fn make_input(
early_dcx: &EarlyDiagCtxt,
free_matches: &[String],
) -> Result<Option<Input>, ErrorGuaranteed> {
fn make_input(early_dcx: &EarlyDiagCtxt, free_matches: &[String]) -> Option<Input> {
match free_matches {
[] => Ok(None), // no input: we will exit early,
[] => None, // no input: we will exit early,
[ifile] if ifile == "-" => {
// read from stdin as `Input::Str`
let mut input = String::new();
if io::stdin().read_to_string(&mut input).is_err() {
// Immediately stop compilation if there was an issue reading
// the input (for example if the input stream is not UTF-8).
let reported = early_dcx
.early_err("couldn't read from stdin, as it did not contain valid UTF-8");
return Err(reported);
early_dcx
.early_fatal("couldn't read from stdin, as it did not contain valid UTF-8");
}
let name = match env::var("UNSTABLE_RUSTDOC_TEST_PATH") {
@ -526,9 +522,9 @@ fn make_input(
Err(_) => FileName::anon_source_code(&input),
};
Ok(Some(Input::Str { name, input }))
Some(Input::Str { name, input })
}
[ifile] => Ok(Some(Input::File(PathBuf::from(ifile)))),
[ifile] => Some(Input::File(PathBuf::from(ifile))),
[ifile1, ifile2, ..] => early_dcx.early_fatal(format!(
"multiple input filenames provided (first two filenames are `{}` and `{}`)",
ifile1, ifile2
@ -663,9 +659,7 @@ fn process_rlink(sess: &Session, compiler: &interface::Compiler) {
};
}
};
if compiler.codegen_backend.link(sess, codegen_results, &outputs).is_err() {
FatalError.raise();
}
compiler.codegen_backend.link(sess, codegen_results, &outputs);
} else {
dcx.emit_fatal(RlinkNotAFile {});
}
@ -1608,7 +1602,8 @@ pub fn main() -> ! {
let exit_code = catch_with_exit_code(|| {
RunCompiler::new(&args::raw_args(&early_dcx)?, &mut callbacks)
.set_using_internal_features(using_internal_features)
.run()
.run();
Ok(())
});
if let Some(format) = callbacks.time_passes {

View File

@ -222,8 +222,8 @@ impl<'tcx> PrintExtra<'tcx> {
}
pub fn print<'tcx>(sess: &Session, ppm: PpMode, ex: PrintExtra<'tcx>) {
if ppm.needs_analysis() && ex.tcx().analysis(()).is_err() {
FatalError.raise();
if ppm.needs_analysis() {
ex.tcx().ensure().analysis(());
}
let (src, src_name) = get_source(sess);

View File

@ -12,6 +12,7 @@ use rustc_span::SourceFile;
use rustc_span::source_map::SourceMap;
use crate::emitter::FileWithAnnotatedLines;
use crate::registry::Registry;
use crate::snippet::Line;
use crate::translation::{Translate, to_fluent_args};
use crate::{
@ -45,7 +46,7 @@ impl Translate for AnnotateSnippetEmitter {
impl Emitter for AnnotateSnippetEmitter {
/// The entry point for the diagnostics generation
fn emit_diagnostic(&mut self, mut diag: DiagInner) {
fn emit_diagnostic(&mut self, mut diag: DiagInner, _registry: &Registry) {
let fluent_args = to_fluent_args(diag.args.iter());
let mut suggestions = diag.suggestions.unwrap_tag();

View File

@ -27,6 +27,7 @@ use termcolor::{Buffer, BufferWriter, Color, ColorChoice, ColorSpec, StandardStr
use tracing::{debug, instrument, trace, warn};
use crate::diagnostic::DiagLocation;
use crate::registry::Registry;
use crate::snippet::{
Annotation, AnnotationColumn, AnnotationType, Line, MultilineAnnotation, Style, StyledString,
};
@ -181,7 +182,7 @@ pub type DynEmitter = dyn Emitter + DynSend;
/// Emitter trait for emitting errors.
pub trait Emitter: Translate {
/// Emit a structured diagnostic.
fn emit_diagnostic(&mut self, diag: DiagInner);
fn emit_diagnostic(&mut self, diag: DiagInner, registry: &Registry);
/// Emit a notification that an artifact has been output.
/// Currently only supported for the JSON format.
@ -189,7 +190,7 @@ pub trait Emitter: Translate {
/// Emit a report about future breakage.
/// Currently only supported for the JSON format.
fn emit_future_breakage_report(&mut self, _diags: Vec<DiagInner>) {}
fn emit_future_breakage_report(&mut self, _diags: Vec<DiagInner>, _registry: &Registry) {}
/// Emit list of unused externs.
/// Currently only supported for the JSON format.
@ -500,7 +501,7 @@ impl Emitter for HumanEmitter {
self.sm.as_deref()
}
fn emit_diagnostic(&mut self, mut diag: DiagInner) {
fn emit_diagnostic(&mut self, mut diag: DiagInner, _registry: &Registry) {
let fluent_args = to_fluent_args(diag.args.iter());
let mut suggestions = diag.suggestions.unwrap_tag();
@ -561,7 +562,7 @@ impl Emitter for SilentEmitter {
None
}
fn emit_diagnostic(&mut self, mut diag: DiagInner) {
fn emit_diagnostic(&mut self, mut diag: DiagInner, _registry: &Registry) {
if self.emit_fatal_diagnostic && diag.level == Level::Fatal {
if let Some(fatal_note) = &self.fatal_note {
diag.sub(Level::Note, fatal_note.clone(), MultiSpan::new());

View File

@ -44,7 +44,6 @@ mod tests;
pub struct JsonEmitter {
#[setters(skip)]
dst: IntoDynSyncSend<Box<dyn Write + Send>>,
registry: Option<Registry>,
#[setters(skip)]
sm: Lrc<SourceMap>,
fluent_bundle: Option<Lrc<FluentBundle>>,
@ -74,7 +73,6 @@ impl JsonEmitter {
) -> JsonEmitter {
JsonEmitter {
dst: IntoDynSyncSend(dst),
registry: None,
sm,
fluent_bundle: None,
fallback_bundle,
@ -121,8 +119,8 @@ impl Translate for JsonEmitter {
}
impl Emitter for JsonEmitter {
fn emit_diagnostic(&mut self, diag: crate::DiagInner) {
let data = Diagnostic::from_errors_diagnostic(diag, self);
fn emit_diagnostic(&mut self, diag: crate::DiagInner, registry: &Registry) {
let data = Diagnostic::from_errors_diagnostic(diag, self, registry);
let result = self.emit(EmitTyped::Diagnostic(data));
if let Err(e) = result {
panic!("failed to print diagnostics: {e:?}");
@ -137,7 +135,7 @@ impl Emitter for JsonEmitter {
}
}
fn emit_future_breakage_report(&mut self, diags: Vec<crate::DiagInner>) {
fn emit_future_breakage_report(&mut self, diags: Vec<crate::DiagInner>, registry: &Registry) {
let data: Vec<FutureBreakageItem<'_>> = diags
.into_iter()
.map(|mut diag| {
@ -151,7 +149,7 @@ impl Emitter for JsonEmitter {
}
FutureBreakageItem {
diagnostic: EmitTyped::Diagnostic(Diagnostic::from_errors_diagnostic(
diag, self,
diag, self, registry,
)),
}
})
@ -291,7 +289,11 @@ struct UnusedExterns<'a> {
impl Diagnostic {
/// Converts from `rustc_errors::DiagInner` to `Diagnostic`.
fn from_errors_diagnostic(diag: crate::DiagInner, je: &JsonEmitter) -> Diagnostic {
fn from_errors_diagnostic(
diag: crate::DiagInner,
je: &JsonEmitter,
registry: &Registry,
) -> Diagnostic {
let args = to_fluent_args(diag.args.iter());
let sugg_to_diag = |sugg: &CodeSuggestion| {
let translated_message =
@ -344,7 +346,7 @@ impl Diagnostic {
let code = if let Some(code) = diag.code {
Some(DiagnosticCode {
code: code.to_string(),
explanation: je.registry.as_ref().unwrap().try_find_description(code).ok(),
explanation: registry.try_find_description(code).ok(),
})
} else if let Some(IsLint { name, .. }) = &diag.is_lint {
Some(DiagnosticCode { code: name.to_string(), explanation: None })
@ -382,7 +384,7 @@ impl Diagnostic {
} else {
OutputTheme::Ascii
})
.emit_diagnostic(diag);
.emit_diagnostic(diag, registry);
let buf = Arc::try_unwrap(buf.0).unwrap().into_inner().unwrap();
let buf = String::from_utf8(buf).unwrap();

View File

@ -55,7 +55,6 @@ pub use diagnostic_impls::{
};
pub use emitter::ColorConfig;
use emitter::{DynEmitter, Emitter, is_case_difference, is_different};
use registry::Registry;
use rustc_data_structures::AtomicRef;
use rustc_data_structures::fx::{FxHashSet, FxIndexMap, FxIndexSet};
use rustc_data_structures::stable_hasher::{Hash128, StableHasher};
@ -77,6 +76,8 @@ pub use snippet::Style;
pub use termcolor::{Color, ColorSpec, WriteColor};
use tracing::debug;
use crate::registry::Registry;
pub mod annotate_snippet_emitter_writer;
pub mod codes;
mod diagnostic;
@ -483,6 +484,8 @@ impl<'a> std::ops::Deref for DiagCtxtHandle<'a> {
struct DiagCtxtInner {
flags: DiagCtxtFlags,
registry: Registry,
/// The error guarantees from all emitted errors. The length gives the error count.
err_guars: Vec<ErrorGuaranteed>,
/// The error guarantee from all emitted lint errors. The length gives the
@ -619,9 +622,7 @@ impl Drop for DiagCtxtInner {
// Important: it is sound to produce an `ErrorGuaranteed` when emitting
// delayed bugs because they are guaranteed to be emitted here if
// necessary.
if self.err_guars.is_empty() {
self.flush_delayed()
}
self.flush_delayed();
// Sanity check: did we use some of the expensive `trimmed_def_paths` functions
// unexpectedly, that is, without producing diagnostics? If so, for debugging purposes, we
@ -664,6 +665,11 @@ impl DiagCtxt {
self
}
pub fn with_registry(mut self, registry: Registry) -> Self {
self.inner.get_mut().registry = registry;
self
}
pub fn new(emitter: Box<DynEmitter>) -> Self {
Self { inner: Lock::new(DiagCtxtInner::new(emitter)) }
}
@ -694,7 +700,7 @@ impl DiagCtxt {
struct FalseEmitter;
impl Emitter for FalseEmitter {
fn emit_diagnostic(&mut self, _: DiagInner) {
fn emit_diagnostic(&mut self, _: DiagInner, _: &Registry) {
unimplemented!("false emitter must only used during `wrap_emitter`")
}
@ -759,6 +765,7 @@ impl DiagCtxt {
let mut inner = self.inner.borrow_mut();
let DiagCtxtInner {
flags: _,
registry: _,
err_guars,
lint_err_guars,
delayed_bugs,
@ -964,7 +971,7 @@ impl<'a> DiagCtxtHandle<'a> {
self.inner.borrow().has_errors_or_delayed_bugs()
}
pub fn print_error_count(&self, registry: &Registry) {
pub fn print_error_count(&self) {
let mut inner = self.inner.borrow_mut();
// Any stashed diagnostics should have been handled by
@ -1014,7 +1021,7 @@ impl<'a> DiagCtxtHandle<'a> {
.emitted_diagnostic_codes
.iter()
.filter_map(|&code| {
if registry.try_find_description(code).is_ok() {
if inner.registry.try_find_description(code).is_ok() {
Some(code.to_string())
} else {
None
@ -1075,10 +1082,10 @@ impl<'a> DiagCtxtHandle<'a> {
}
pub fn emit_future_breakage_report(&self) {
let mut inner = self.inner.borrow_mut();
let inner = &mut *self.inner.borrow_mut();
let diags = std::mem::take(&mut inner.future_breakage_diagnostics);
if !diags.is_empty() {
inner.emitter.emit_future_breakage_report(diags);
inner.emitter.emit_future_breakage_report(diags, &inner.registry);
}
}
@ -1409,6 +1416,7 @@ impl DiagCtxtInner {
fn new(emitter: Box<DynEmitter>) -> Self {
Self {
flags: DiagCtxtFlags { can_emit_warnings: true, ..Default::default() },
registry: Registry::new(&[]),
err_guars: Vec::new(),
lint_err_guars: Vec::new(),
delayed_bugs: Vec::new(),
@ -1582,7 +1590,7 @@ impl DiagCtxtInner {
}
self.has_printed = true;
self.emitter.emit_diagnostic(diagnostic);
self.emitter.emit_diagnostic(diagnostic, &self.registry);
}
if is_error {
@ -1695,7 +1703,13 @@ impl DiagCtxtInner {
// eventually happened.
assert!(self.stashed_diagnostics.is_empty());
if !self.err_guars.is_empty() {
// If an error happened already. We shouldn't expose delayed bugs.
return;
}
if self.delayed_bugs.is_empty() {
// Nothing to do.
return;
}
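Several of the hunks above (the annotate-snippet, human, JSON and shared emitters, plus rustc_errors itself) share one theme: the error-code Registry now lives on DiagCtxtInner and is passed into each emit_diagnostic call, instead of being stored as an Option on the JSON emitter. A small sketch of that shape, with hypothetical stand-in types rather than the real rustc_errors items:

use std::collections::HashMap;

// Hypothetical stand-ins for rustc_errors types; illustration only.
struct Registry {
    long_descriptions: HashMap<&'static str, &'static str>,
}

impl Registry {
    fn try_find_description(&self, code: &str) -> Option<&'static str> {
        self.long_descriptions.get(code).copied()
    }
}

struct DiagInner {
    code: Option<&'static str>,
    message: String,
}

// The registry is now a call-time argument, so emitters no longer carry one.
trait Emitter {
    fn emit_diagnostic(&mut self, diag: DiagInner, registry: &Registry);
}

struct JsonLikeEmitter;

impl Emitter for JsonLikeEmitter {
    fn emit_diagnostic(&mut self, diag: DiagInner, registry: &Registry) {
        // Look up the long-form explanation through the registry handed in
        // by the diagnostic context, as the reworked JsonEmitter does.
        let explanation = diag.code.and_then(|c| registry.try_find_description(c));
        println!("{:?}: {} (explanation: {:?})", diag.code, diag.message, explanation);
    }
}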

View File

@ -114,7 +114,6 @@ use rustc_data_structures::fx::{FxHashSet, FxIndexSet};
use rustc_data_structures::svh::Svh;
use rustc_data_structures::unord::{UnordMap, UnordSet};
use rustc_data_structures::{base_n, flock};
use rustc_errors::ErrorGuaranteed;
use rustc_fs_util::{LinkOrCopy, link_or_copy, try_canonicalize};
use rustc_middle::bug;
use rustc_session::config::CrateType;
@ -212,9 +211,9 @@ pub fn in_incr_comp_dir(incr_comp_session_dir: &Path, file_name: &str) -> PathBu
/// The garbage collection will take care of it.
///
/// [`rustc_interface::queries::dep_graph`]: ../../rustc_interface/struct.Queries.html#structfield.dep_graph
pub(crate) fn prepare_session_directory(sess: &Session) -> Result<(), ErrorGuaranteed> {
pub(crate) fn prepare_session_directory(sess: &Session) {
if sess.opts.incremental.is_none() {
return Ok(());
return;
}
let _timer = sess.timer("incr_comp_prepare_session_directory");
@ -224,7 +223,7 @@ pub(crate) fn prepare_session_directory(sess: &Session) -> Result<(), ErrorGuara
// {incr-comp-dir}/{crate-name-and-disambiguator}
let crate_dir = crate_path(sess);
debug!("crate-dir: {}", crate_dir.display());
create_dir(sess, &crate_dir, "crate")?;
create_dir(sess, &crate_dir, "crate");
// Hack: canonicalize the path *after creating the directory*
// because, on windows, long paths can cause problems;
@ -233,7 +232,7 @@ pub(crate) fn prepare_session_directory(sess: &Session) -> Result<(), ErrorGuara
let crate_dir = match try_canonicalize(&crate_dir) {
Ok(v) => v,
Err(err) => {
return Err(sess.dcx().emit_err(errors::CanonicalizePath { path: crate_dir, err }));
sess.dcx().emit_fatal(errors::CanonicalizePath { path: crate_dir, err });
}
};
@ -248,11 +247,11 @@ pub(crate) fn prepare_session_directory(sess: &Session) -> Result<(), ErrorGuara
// Lock the new session directory. If this fails, return an
// error without retrying
let (directory_lock, lock_file_path) = lock_directory(sess, &session_dir)?;
let (directory_lock, lock_file_path) = lock_directory(sess, &session_dir);
// Now that we have the lock, we can actually create the session
// directory
create_dir(sess, &session_dir, "session")?;
create_dir(sess, &session_dir, "session");
// Find a suitable source directory to copy from. Ignore those that we
// have already tried before.
@ -266,7 +265,7 @@ pub(crate) fn prepare_session_directory(sess: &Session) -> Result<(), ErrorGuara
);
sess.init_incr_comp_session(session_dir, directory_lock);
return Ok(());
return;
};
debug!("attempting to copy data from source: {}", source_directory.display());
@ -280,7 +279,7 @@ pub(crate) fn prepare_session_directory(sess: &Session) -> Result<(), ErrorGuara
}
sess.init_incr_comp_session(session_dir, directory_lock);
return Ok(());
return;
} else {
debug!("copying failed - trying next directory");
@ -459,21 +458,17 @@ fn generate_session_dir_path(crate_dir: &Path) -> PathBuf {
directory_path
}
fn create_dir(sess: &Session, path: &Path, dir_tag: &str) -> Result<(), ErrorGuaranteed> {
fn create_dir(sess: &Session, path: &Path, dir_tag: &str) {
match std_fs::create_dir_all(path) {
Ok(()) => {
debug!("{} directory created successfully", dir_tag);
Ok(())
}
Err(err) => Err(sess.dcx().emit_err(errors::CreateIncrCompDir { tag: dir_tag, path, err })),
Err(err) => sess.dcx().emit_fatal(errors::CreateIncrCompDir { tag: dir_tag, path, err }),
}
}
/// Allocate the lock-file and lock it.
fn lock_directory(
sess: &Session,
session_dir: &Path,
) -> Result<(flock::Lock, PathBuf), ErrorGuaranteed> {
fn lock_directory(sess: &Session, session_dir: &Path) -> (flock::Lock, PathBuf) {
let lock_file_path = lock_file_path(session_dir);
debug!("lock_directory() - lock_file: {}", lock_file_path.display());
@ -484,15 +479,15 @@ fn lock_directory(
true,
) {
// the lock should be exclusive
Ok(lock) => Ok((lock, lock_file_path)),
Ok(lock) => (lock, lock_file_path),
Err(lock_err) => {
let is_unsupported_lock = flock::Lock::error_unsupported(&lock_err);
Err(sess.dcx().emit_err(errors::CreateLock {
sess.dcx().emit_fatal(errors::CreateLock {
lock_err,
session_dir,
is_unsupported_lock,
is_cargo: rustc_session::utils::was_invoked_from_cargo(),
}))
});
}
}
}

View File

@ -11,7 +11,6 @@ use rustc_serialize::Decodable;
use rustc_serialize::opaque::MemDecoder;
use rustc_session::Session;
use rustc_session::config::IncrementalStateAssertion;
use rustc_span::ErrorGuaranteed;
use tracing::{debug, warn};
use super::data::*;
@ -182,7 +181,7 @@ fn load_dep_graph(sess: &Session) -> LoadResult<(Arc<SerializedDepGraph>, WorkPr
/// If we are not in incremental compilation mode, returns `None`.
/// Otherwise, tries to load the query result cache from disk,
/// creating an empty cache if it could not be loaded.
pub fn load_query_result_cache(sess: &Session) -> Option<OnDiskCache<'_>> {
pub fn load_query_result_cache(sess: &Session) -> Option<OnDiskCache> {
if sess.opts.incremental.is_none() {
return None;
}
@ -194,19 +193,19 @@ pub fn load_query_result_cache(sess: &Session) -> Option<OnDiskCache<'_>> {
LoadResult::Ok { data: (bytes, start_pos) } => {
let cache = OnDiskCache::new(sess, bytes, start_pos).unwrap_or_else(|()| {
sess.dcx().emit_warn(errors::CorruptFile { path: &path });
OnDiskCache::new_empty(sess.source_map())
OnDiskCache::new_empty()
});
Some(cache)
}
_ => Some(OnDiskCache::new_empty(sess.source_map())),
_ => Some(OnDiskCache::new_empty()),
}
}
/// Setups the dependency graph by loading an existing graph from disk and set up streaming of a
/// new graph to an incremental session directory.
pub fn setup_dep_graph(sess: &Session) -> Result<DepGraph, ErrorGuaranteed> {
pub fn setup_dep_graph(sess: &Session) -> DepGraph {
// `load_dep_graph` can only be called after `prepare_session_directory`.
prepare_session_directory(sess)?;
prepare_session_directory(sess);
let res = sess.opts.build_dep_graph().then(|| load_dep_graph(sess));
@ -222,10 +221,9 @@ pub fn setup_dep_graph(sess: &Session) -> Result<DepGraph, ErrorGuaranteed> {
});
}
Ok(res
.and_then(|result| {
let (prev_graph, prev_work_products) = result.open(sess);
build_dep_graph(sess, prev_graph, prev_work_products)
})
.unwrap_or_else(DepGraph::new_disabled))
res.and_then(|result| {
let (prev_graph, prev_work_products) = result.open(sess);
build_dep_graph(sess, prev_graph, prev_work_products)
})
.unwrap_or_else(DepGraph::new_disabled)
}

View File

@ -5,9 +5,9 @@ use std::sync::Arc;
use rustc_ast::{LitKind, MetaItemKind, token};
use rustc_codegen_ssa::traits::CodegenBackend;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::jobserver;
use rustc_data_structures::stable_hasher::StableHasher;
use rustc_data_structures::sync::Lrc;
use rustc_data_structures::{defer, jobserver};
use rustc_errors::registry::Registry;
use rustc_errors::{DiagCtxtHandle, ErrorGuaranteed};
use rustc_lint::LintStore;
@ -441,7 +441,7 @@ pub fn run_compiler<R: Send>(config: Config, f: impl FnOnce(&Compiler) -> R + Se
temps_dir,
},
bundle,
config.registry.clone(),
config.registry,
locale_resources,
config.lint_caps,
target,
@ -492,32 +492,34 @@ pub fn run_compiler<R: Send>(config: Config, f: impl FnOnce(&Compiler) -> R + Se
// There are two paths out of `f`.
// - Normal exit.
// - Panic, e.g. triggered by `abort_if_errors`.
// - Panic, e.g. triggered by `abort_if_errors` or a fatal error.
//
// We must run `finish_diagnostics` in both cases.
let res = {
// If `f` panics, `finish_diagnostics` will run during
// unwinding because of the `defer`.
let sess_abort_guard = defer(|| {
compiler.sess.finish_diagnostics(&config.registry);
});
let res = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| f(&compiler)));
let res = f(&compiler);
compiler.sess.finish_diagnostics();
// If `f` doesn't panic, `finish_diagnostics` will run
// normally when `sess_abort_guard` is dropped.
drop(sess_abort_guard);
// If error diagnostics have been emitted, we can't return an
// error directly, because the return type of this function
// is `R`, not `Result<R, E>`. But we need to communicate the
// errors' existence to the caller, otherwise the caller might
// mistakenly think that no errors occurred and return a zero
// exit code. So we abort (panic) instead, similar to if `f`
// had panicked.
// If error diagnostics have been emitted, we can't return an
// error directly, because the return type of this function
// is `R`, not `Result<R, E>`. But we need to communicate the
// errors' existence to the caller, otherwise the caller might
// mistakenly think that no errors occurred and return a zero
// exit code. So we abort (panic) instead, similar to if `f`
// had panicked.
if res.is_ok() {
compiler.sess.dcx().abort_if_errors();
}
res
// Also make sure to flush delayed bugs as if we panicked, the
// bugs would be flushed by the Drop impl of DiagCtxt while
// unwinding, which would result in an abort with
// "panic in a destructor during cleanup".
compiler.sess.dcx().flush_delayed();
let res = match res {
Ok(res) => res,
// Resume unwinding if a panic happened.
Err(err) => std::panic::resume_unwind(err),
};
let prof = compiler.sess.prof.clone();
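The interface.rs hunk above interleaves the old defer-based cleanup with its replacement, which can be hard to read; a compact sketch of the control flow the new code ends up with, using stand-in functions rather than the real Session/DiagCtxt methods:

use std::panic::{AssertUnwindSafe, catch_unwind, resume_unwind};

// Hypothetical stand-ins for Session::finish_diagnostics and the DiagCtxt methods.
fn finish_diagnostics() {}
fn abort_if_errors() {}
fn flush_delayed() {}

fn run_with_session<R>(f: impl FnOnce() -> R) -> R {
    // Run the caller's closure, catching any panic so diagnostics can still
    // be finished afterwards.
    let res = catch_unwind(AssertUnwindSafe(f));

    // Runs on both the normal and the panicking path.
    finish_diagnostics();

    // On the normal path, emitted-but-unreturned errors must still abort,
    // so callers cannot mistake a failed run for success.
    if res.is_ok() {
        abort_if_errors();
    }

    // Flush delayed bugs here: if we panicked, letting the DiagCtxt Drop impl
    // flush them during unwinding would abort with
    // "panic in a destructor during cleanup".
    flush_delayed();

    match res {
        Ok(value) => value,
        Err(payload) => resume_unwind(payload),
    }
}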

View File

@ -33,15 +33,15 @@ use rustc_session::output::{collect_crate_types, filename_for_input, find_crate_
use rustc_session::search_paths::PathKind;
use rustc_session::{Limit, Session};
use rustc_span::symbol::{Symbol, sym};
use rustc_span::{FileName, SourceFileHash, SourceFileHashAlgorithm};
use rustc_span::{ErrorGuaranteed, FileName, SourceFileHash, SourceFileHashAlgorithm};
use rustc_target::spec::PanicStrategy;
use rustc_trait_selection::traits;
use tracing::{info, instrument};
use crate::interface::{Compiler, Result};
use crate::interface::Compiler;
use crate::{errors, proc_macro_decls, util};
pub(crate) fn parse<'a>(sess: &'a Session) -> Result<ast::Crate> {
pub(crate) fn parse<'a>(sess: &'a Session) -> ast::Crate {
let krate = sess
.time("parse_crate", || {
let mut parser = unwrap_or_emit_fatal(match &sess.io.input {
@ -52,13 +52,16 @@ pub(crate) fn parse<'a>(sess: &'a Session) -> Result<ast::Crate> {
});
parser.parse_crate_mod()
})
.map_err(|parse_error| parse_error.emit())?;
.unwrap_or_else(|parse_error| {
let guar: ErrorGuaranteed = parse_error.emit();
guar.raise_fatal();
});
if sess.opts.unstable_opts.input_stats {
input_stats::print_ast_stats(&krate, "PRE EXPANSION AST STATS", "ast-stats-1");
}
Ok(krate)
krate
}
fn pre_expansion_lint<'a>(
@ -712,7 +715,7 @@ pub(crate) fn create_global_ctxt<'tcx>(
gcx_cell: &'tcx OnceLock<GlobalCtxt<'tcx>>,
arena: &'tcx WorkerLocal<Arena<'tcx>>,
hir_arena: &'tcx WorkerLocal<rustc_hir::Arena<'tcx>>,
) -> Result<&'tcx GlobalCtxt<'tcx>> {
) -> &'tcx GlobalCtxt<'tcx> {
let sess = &compiler.sess;
rustc_builtin_macros::cmdline_attrs::inject(
@ -733,7 +736,7 @@ pub(crate) fn create_global_ctxt<'tcx>(
sess.cfg_version,
);
let outputs = util::build_output_filenames(&pre_configured_attrs, sess);
let dep_graph = setup_dep_graph(sess)?;
let dep_graph = setup_dep_graph(sess);
let cstore =
FreezeLock::new(Box::new(CStore::new(compiler.codegen_backend.metadata_loader())) as _);
@ -796,7 +799,7 @@ pub(crate) fn create_global_ctxt<'tcx>(
feed.crate_for_resolver(tcx.arena.alloc(Steal::new((krate, pre_configured_attrs))));
feed.output_filenames(Arc::new(outputs));
});
Ok(qcx)
qcx
})
}
@ -906,7 +909,7 @@ fn run_required_analyses(tcx: TyCtxt<'_>) {
/// Runs the type-checking, region checking and other miscellaneous analysis
/// passes on the crate.
fn analysis(tcx: TyCtxt<'_>, (): ()) -> Result<()> {
fn analysis(tcx: TyCtxt<'_>, (): ()) {
run_required_analyses(tcx);
let sess = tcx.sess;
@ -920,7 +923,7 @@ fn analysis(tcx: TyCtxt<'_>, (): ()) -> Result<()> {
// But we exclude lint errors from this, because lint errors are typically
// less serious and we're more likely to want to continue (#87337).
if let Some(guar) = sess.dcx().has_errors_excluding_lint_errors() {
return Err(guar);
guar.raise_fatal();
}
sess.time("misc_checking_3", || {
@ -1048,8 +1051,6 @@ fn analysis(tcx: TyCtxt<'_>, (): ()) -> Result<()> {
})
}
}
Ok(())
}
/// Check for the `#[rustc_error]` annotation, which forces an error in codegen. This is used
@ -1091,12 +1092,12 @@ fn check_for_rustc_errors_attr(tcx: TyCtxt<'_>) {
pub(crate) fn start_codegen<'tcx>(
codegen_backend: &dyn CodegenBackend,
tcx: TyCtxt<'tcx>,
) -> Result<Box<dyn Any>> {
) -> Box<dyn Any> {
// Don't do code generation if there were any errors. Likewise if
// there were any delayed bugs, because codegen will likely cause
// more ICEs, obscuring the original problem.
if let Some(guar) = tcx.sess.dcx().has_errors_or_delayed_bugs() {
return Err(guar);
guar.raise_fatal();
}
// Hook for UI tests.
@ -1124,7 +1125,7 @@ pub(crate) fn start_codegen<'tcx>(
}
}
Ok(codegen)
codegen
}
fn get_recursion_limit(krate_attrs: &[ast::Attribute], sess: &Session) -> Limit {

View File

@ -16,7 +16,7 @@ use rustc_session::Session;
use rustc_session::config::{self, OutputFilenames, OutputType};
use crate::errors::FailedWritingFile;
use crate::interface::{Compiler, Result};
use crate::interface::Compiler;
use crate::passes;
/// Represent the result of a query.
@ -27,19 +27,17 @@ use crate::passes;
/// [`compute`]: Self::compute
pub struct Query<T> {
/// `None` means no value has been computed yet.
result: RefCell<Option<Result<Steal<T>>>>,
result: RefCell<Option<Steal<T>>>,
}
impl<T> Query<T> {
fn compute<F: FnOnce() -> Result<T>>(&self, f: F) -> Result<QueryResult<'_, T>> {
RefMut::filter_map(
fn compute<F: FnOnce() -> T>(&self, f: F) -> QueryResult<'_, T> {
QueryResult(RefMut::map(
self.result.borrow_mut(),
|r: &mut Option<Result<Steal<T>>>| -> Option<&mut Steal<T>> {
r.get_or_insert_with(|| f().map(Steal::new)).as_mut().ok()
|r: &mut Option<Steal<T>>| -> &mut Steal<T> {
r.get_or_insert_with(|| Steal::new(f()))
},
)
.map_err(|r| *r.as_ref().unwrap().as_ref().map(|_| ()).unwrap_err())
.map(QueryResult)
))
}
}
@ -95,13 +93,13 @@ impl<'tcx> Queries<'tcx> {
}
}
pub fn parse(&self) -> Result<QueryResult<'_, ast::Crate>> {
pub fn parse(&self) -> QueryResult<'_, ast::Crate> {
self.parse.compute(|| passes::parse(&self.compiler.sess))
}
pub fn global_ctxt(&'tcx self) -> Result<QueryResult<'tcx, &'tcx GlobalCtxt<'tcx>>> {
pub fn global_ctxt(&'tcx self) -> QueryResult<'tcx, &'tcx GlobalCtxt<'tcx>> {
self.gcx.compute(|| {
let krate = self.parse()?.steal();
let krate = self.parse().steal();
passes::create_global_ctxt(
self.compiler,
@ -126,8 +124,8 @@ impl Linker {
pub fn codegen_and_build_linker(
tcx: TyCtxt<'_>,
codegen_backend: &dyn CodegenBackend,
) -> Result<Linker> {
let ongoing_codegen = passes::start_codegen(codegen_backend, tcx)?;
) -> Linker {
let ongoing_codegen = passes::start_codegen(codegen_backend, tcx);
// This must run after monomorphization so that all generic types
// have been instantiated.
@ -141,7 +139,7 @@ impl Linker {
tcx.sess.code_stats.print_vtable_sizes(crate_name);
}
Ok(Linker {
Linker {
dep_graph: tcx.dep_graph.clone(),
output_filenames: Arc::clone(tcx.output_filenames(())),
crate_hash: if tcx.needs_crate_hash() {
@ -150,16 +148,17 @@ impl Linker {
None
},
ongoing_codegen,
})
}
}
pub fn link(self, sess: &Session, codegen_backend: &dyn CodegenBackend) -> Result<()> {
let (codegen_results, work_products) =
codegen_backend.join_codegen(self.ongoing_codegen, sess, &self.output_filenames);
pub fn link(self, sess: &Session, codegen_backend: &dyn CodegenBackend) {
let (codegen_results, work_products) = sess.time("finish_ongoing_codegen", || {
codegen_backend.join_codegen(self.ongoing_codegen, sess, &self.output_filenames)
});
if let Some(guar) = sess.dcx().has_errors() {
return Err(guar);
}
sess.dcx().abort_if_errors();
let _timer = sess.timer("link");
sess.time("serialize_work_products", || {
rustc_incremental::save_work_product_index(sess, &self.dep_graph, work_products)
@ -178,7 +177,7 @@ impl Linker {
.keys()
.any(|&i| i == OutputType::Exe || i == OutputType::Metadata)
{
return Ok(());
return;
}
if sess.opts.unstable_opts.no_link {
@ -189,10 +188,10 @@ impl Linker {
&codegen_results,
&*self.output_filenames,
)
.map_err(|error| {
.unwrap_or_else(|error| {
sess.dcx().emit_fatal(FailedWritingFile { path: &rlink_file, error })
})?;
return Ok(());
});
return;
}
let _timer = sess.prof.verbose_generic_activity("link_crate");
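The queries.rs hunk above drops the Result wrapper from Query<T>; what remains is plain compute-once memoization over a RefCell<Option<Steal<T>>>. A small sketch of that shape, with a hypothetical stand-in Steal rather than the rustc_data_structures type:

use std::cell::{RefCell, RefMut};

// Hypothetical stand-in for rustc's Steal<T>; illustration only.
struct Steal<T>(Option<T>);
impl<T> Steal<T> {
    fn new(value: T) -> Self {
        Steal(Some(value))
    }
    fn steal(&mut self) -> T {
        self.0.take().expect("value already stolen")
    }
}

struct Query<T> {
    // `None` until the query has been computed once.
    result: RefCell<Option<Steal<T>>>,
}

impl<T> Query<T> {
    // Run `f` on the first call, then keep handing out a mutable handle to
    // the cached (stealable) value on every later call.
    fn compute<F: FnOnce() -> T>(&self, f: F) -> RefMut<'_, Steal<T>> {
        RefMut::map(self.result.borrow_mut(), |r| r.get_or_insert_with(|| Steal::new(f())))
    }
}

fn main() {
    let q = Query { result: RefCell::new(None) };
    let krate = q.compute(|| String::from("parsed crate")).steal();
    assert_eq!(krate, "parsed crate");
}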

View File

@ -276,7 +276,7 @@ rustc_queries! {
}
/// The root query triggering all analysis passes like typeck or borrowck.
query analysis(key: ()) -> Result<(), ErrorGuaranteed> {
query analysis(key: ()) {
eval_always
desc { "running analysis passes on this crate" }
}

View File

@ -23,7 +23,7 @@ use rustc_session::Session;
use rustc_span::hygiene::{
ExpnId, HygieneDecodeContext, HygieneEncodeContext, SyntaxContext, SyntaxContextData,
};
use rustc_span::source_map::{SourceMap, Spanned};
use rustc_span::source_map::Spanned;
use rustc_span::{
BytePos, CachingSourceMapView, ExpnData, ExpnHash, Pos, RelativeBytePos, SourceFile, Span,
SpanDecoder, SpanEncoder, StableSourceFileId, Symbol,
@ -49,7 +49,7 @@ const SYMBOL_PREINTERNED: u8 = 2;
/// previous compilation session. This data will eventually include the results
/// of a few selected queries (like `typeck` and `mir_optimized`) and
/// any side effects that have been emitted during a query.
pub struct OnDiskCache<'sess> {
pub struct OnDiskCache {
// The complete cache data in serialized form.
serialized_data: RwLock<Option<Mmap>>,
@ -57,7 +57,6 @@ pub struct OnDiskCache<'sess> {
// session.
current_side_effects: Lock<FxHashMap<DepNodeIndex, QuerySideEffects>>,
source_map: &'sess SourceMap,
file_index_to_stable_id: FxHashMap<SourceFileIndex, EncodedSourceFileId>,
// Caches that are populated lazily during decoding.
@ -151,12 +150,12 @@ impl EncodedSourceFileId {
}
}
impl<'sess> OnDiskCache<'sess> {
impl OnDiskCache {
/// Creates a new `OnDiskCache` instance from the serialized data in `data`.
///
/// The serialized cache has some basic integrity checks, if those checks indicate that the
/// on-disk data is corrupt, an error is returned.
pub fn new(sess: &'sess Session, data: Mmap, start_pos: usize) -> Result<Self, ()> {
pub fn new(sess: &Session, data: Mmap, start_pos: usize) -> Result<Self, ()> {
assert!(sess.opts.incremental.is_some());
let mut decoder = MemDecoder::new(&data, start_pos)?;
@ -175,7 +174,6 @@ impl<'sess> OnDiskCache<'sess> {
serialized_data: RwLock::new(Some(data)),
file_index_to_stable_id: footer.file_index_to_stable_id,
file_index_to_file: Default::default(),
source_map: sess.source_map(),
current_side_effects: Default::default(),
query_result_index: footer.query_result_index.into_iter().collect(),
prev_side_effects_index: footer.side_effects_index.into_iter().collect(),
@ -187,12 +185,11 @@ impl<'sess> OnDiskCache<'sess> {
})
}
pub fn new_empty(source_map: &'sess SourceMap) -> Self {
pub fn new_empty() -> Self {
Self {
serialized_data: RwLock::new(None),
file_index_to_stable_id: Default::default(),
file_index_to_file: Default::default(),
source_map,
current_side_effects: Default::default(),
query_result_index: Default::default(),
prev_side_effects_index: Default::default(),
@ -423,7 +420,7 @@ impl<'sess> OnDiskCache<'sess> {
}
fn with_decoder<'a, 'tcx, T, F: for<'s> FnOnce(&mut CacheDecoder<'s, 'tcx>) -> T>(
&'sess self,
&self,
tcx: TyCtxt<'tcx>,
pos: AbsoluteBytePos,
f: F,
@ -436,7 +433,6 @@ impl<'sess> OnDiskCache<'sess> {
tcx,
opaque: MemDecoder::new(serialized_data.as_deref().unwrap_or(&[]), pos.to_usize())
.unwrap(),
source_map: self.source_map,
file_index_to_file: &self.file_index_to_file,
file_index_to_stable_id: &self.file_index_to_stable_id,
alloc_decoding_session: self.alloc_decoding_state.new_decoding_session(),
@ -457,7 +453,6 @@ impl<'sess> OnDiskCache<'sess> {
pub struct CacheDecoder<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
opaque: MemDecoder<'a>,
source_map: &'a SourceMap,
file_index_to_file: &'a Lock<FxHashMap<SourceFileIndex, Lrc<SourceFile>>>,
file_index_to_stable_id: &'a FxHashMap<SourceFileIndex, EncodedSourceFileId>,
alloc_decoding_session: AllocDecodingSession<'a>,
@ -470,8 +465,7 @@ pub struct CacheDecoder<'a, 'tcx> {
impl<'a, 'tcx> CacheDecoder<'a, 'tcx> {
#[inline]
fn file_index_to_file(&self, index: SourceFileIndex) -> Lrc<SourceFile> {
let CacheDecoder { tcx, file_index_to_file, file_index_to_stable_id, source_map, .. } =
*self;
let CacheDecoder { tcx, file_index_to_file, file_index_to_stable_id, .. } = *self;
Lrc::clone(file_index_to_file.borrow_mut().entry(index).or_insert_with(|| {
let source_file_id = &file_index_to_stable_id[&index];
@ -490,7 +484,8 @@ impl<'a, 'tcx> CacheDecoder<'a, 'tcx> {
self.tcx.import_source_files(source_file_cnum);
}
source_map
tcx.sess
.source_map()
.source_file_by_stable_id(source_file_id.stable_source_file_id)
.expect("failed to lookup `SourceFile` in new context")
}))

View File

@ -66,7 +66,7 @@ pub struct QuerySystem<'tcx> {
/// Do not access this directly. It is only meant to be used by
/// `DepGraph::try_mark_green()` and the query infrastructure.
/// This is `None` if we are not incremental compilation mode
pub on_disk_cache: Option<OnDiskCache<'tcx>>,
pub on_disk_cache: Option<OnDiskCache>,
pub fns: QuerySystemFns<'tcx>,

View File

@ -198,12 +198,12 @@ trait QueryConfigRestored<'tcx> {
-> Self::RestoredValue;
}
pub fn query_system<'tcx>(
pub fn query_system<'a>(
local_providers: Providers,
extern_providers: ExternProviders,
on_disk_cache: Option<OnDiskCache<'tcx>>,
on_disk_cache: Option<OnDiskCache>,
incremental: bool,
) -> QuerySystem<'tcx> {
) -> QuerySystem<'a> {
QuerySystem {
states: Default::default(),
arenas: Default::default(),

View File

@ -19,7 +19,6 @@ use rustc_errors::emitter::{
DynEmitter, HumanEmitter, HumanReadableErrorType, OutputTheme, stderr_destination,
};
use rustc_errors::json::JsonEmitter;
use rustc_errors::registry::Registry;
use rustc_errors::{
Diag, DiagCtxt, DiagCtxtHandle, DiagMessage, Diagnostic, ErrorGuaranteed, FatalAbort,
FluentBundle, LazyFallbackBundle, TerminalUrl, fallback_fluent_bundle,
@ -276,11 +275,11 @@ impl Session {
}
/// Invoked all the way at the end to finish off diagnostics printing.
pub fn finish_diagnostics(&self, registry: &Registry) -> Option<ErrorGuaranteed> {
pub fn finish_diagnostics(&self) -> Option<ErrorGuaranteed> {
let mut guar = None;
guar = guar.or(self.check_miri_unleashed_features());
guar = guar.or(self.dcx().emit_stashed_diagnostics());
self.dcx().print_error_count(registry);
self.dcx().print_error_count();
if self.opts.json_future_incompat {
self.dcx().emit_future_breakage_report();
}
@ -880,7 +879,6 @@ impl Session {
#[allow(rustc::bad_opt_access)]
fn default_emitter(
sopts: &config::Options,
registry: rustc_errors::registry::Registry,
source_map: Lrc<SourceMap>,
bundle: Option<Lrc<FluentBundle>>,
fallback_bundle: LazyFallbackBundle,
@ -943,7 +941,6 @@ fn default_emitter(
json_rendered,
color_config,
)
.registry(Some(registry))
.fluent_bundle(bundle)
.ui_testing(sopts.unstable_opts.ui_testing)
.ignored_directories_in_source_blocks(
@ -999,11 +996,11 @@ pub fn build_session(
sopts.unstable_opts.translate_directionality_markers,
);
let source_map = rustc_span::source_map::get_source_map().unwrap();
let emitter =
default_emitter(&sopts, registry, Lrc::clone(&source_map), bundle, fallback_bundle);
let emitter = default_emitter(&sopts, Lrc::clone(&source_map), bundle, fallback_bundle);
let mut dcx =
DiagCtxt::new(emitter).with_flags(sopts.unstable_opts.dcx_flags(can_emit_warnings));
let mut dcx = DiagCtxt::new(emitter)
.with_flags(sopts.unstable_opts.dcx_flags(can_emit_warnings))
.with_registry(registry);
if let Some(ice_file) = ice_file {
dcx = dcx.with_ice_file(ice_file);
}

View File

@ -342,8 +342,9 @@ macro_rules! run_driver {
/// Runs the compiler against given target and tests it with `test_function`
pub fn run(&mut self) -> Result<C, CompilerError<B>> {
let compiler_result = rustc_driver::catch_fatal_errors(|| {
RunCompiler::new(&self.args.clone(), self).run()
let compiler_result = rustc_driver::catch_fatal_errors(|| -> interface::Result::<()> {
RunCompiler::new(&self.args.clone(), self).run();
Ok(())
});
match (compiler_result, self.result.take()) {
(Ok(Ok(())), Some(ControlFlow::Continue(value))) => Ok(value),

View File

@ -51,6 +51,7 @@ pub mod source_map;
use source_map::{SourceMap, SourceMapInputs};
pub use self::caching_source_map_view::CachingSourceMapView;
use crate::fatal_error::FatalError;
pub mod edition;
use edition::Edition;
@ -2614,6 +2615,10 @@ impl ErrorGuaranteed {
pub fn unchecked_error_guaranteed() -> Self {
ErrorGuaranteed(())
}
pub fn raise_fatal(self) -> ! {
FatalError.raise()
}
}
impl<E: rustc_serialize::Encoder> Encodable<E> for ErrorGuaranteed {

View File

@ -5,12 +5,12 @@ use std::{io, mem};
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap};
use rustc_data_structures::sync::Lrc;
use rustc_data_structures::unord::UnordSet;
use rustc_errors::TerminalUrl;
use rustc_errors::codes::*;
use rustc_errors::emitter::{
DynEmitter, HumanEmitter, HumanReadableErrorType, OutputTheme, stderr_destination,
};
use rustc_errors::json::JsonEmitter;
use rustc_errors::{ErrorGuaranteed, TerminalUrl};
use rustc_feature::UnstableFeatures;
use rustc_hir::def::Res;
use rustc_hir::def_id::{DefId, DefIdMap, DefIdSet, LocalDefId};
@ -326,7 +326,7 @@ pub(crate) fn run_global_ctxt(
show_coverage: bool,
render_options: RenderOptions,
output_format: OutputFormat,
) -> Result<(clean::Crate, RenderOptions, Cache), ErrorGuaranteed> {
) -> (clean::Crate, RenderOptions, Cache) {
// Certain queries assume that some checks were run elsewhere
// (see https://github.com/rust-lang/rust/pull/73566#issuecomment-656954425),
// so type-check everything other than function bodies in this crate before running lints.
@ -340,9 +340,7 @@ pub(crate) fn run_global_ctxt(
tcx.hir().try_par_for_each_module(|module| tcx.ensure().check_mod_type_wf(module))
});
if let Some(guar) = tcx.dcx().has_errors() {
return Err(guar);
}
tcx.dcx().abort_if_errors();
tcx.sess.time("missing_docs", || rustc_lint::check_crate(tcx));
tcx.sess.time("check_mod_attrs", || {
@ -446,11 +444,9 @@ pub(crate) fn run_global_ctxt(
LinkCollector { cx: &mut ctxt, visited_links: visited, ambiguous_links: ambiguous };
collector.resolve_ambiguities();
if let Some(guar) = tcx.dcx().has_errors() {
return Err(guar);
}
tcx.dcx().abort_if_errors();
Ok((krate, ctxt.render_options, ctxt.cache))
(krate, ctxt.render_options, ctxt.cache)
}
/// Due to <https://github.com/rust-lang/rust/pull/73566>,

View File

@ -16,7 +16,7 @@ pub(crate) use markdown::test as test_markdown;
use rustc_ast as ast;
use rustc_data_structures::fx::{FxHashMap, FxIndexMap, FxIndexSet};
use rustc_errors::emitter::HumanReadableErrorType;
use rustc_errors::{ColorConfig, DiagCtxtHandle, ErrorGuaranteed, FatalError};
use rustc_errors::{ColorConfig, DiagCtxtHandle};
use rustc_hir::CRATE_HIR_ID;
use rustc_hir::def_id::LOCAL_CRATE;
use rustc_interface::interface;
@ -89,11 +89,7 @@ fn get_doctest_dir() -> io::Result<TempDir> {
TempFileBuilder::new().prefix("rustdoctest").tempdir()
}
pub(crate) fn run(
dcx: DiagCtxtHandle<'_>,
input: Input,
options: RustdocOptions,
) -> Result<(), ErrorGuaranteed> {
pub(crate) fn run(dcx: DiagCtxtHandle<'_>, input: Input, options: RustdocOptions) {
let invalid_codeblock_attributes_name = crate::lint::INVALID_CODEBLOCK_ATTRIBUTES.name;
// See core::create_config for what's going on here.
@ -167,7 +163,7 @@ pub(crate) fn run(
Err(error) => return crate::wrap_return(dcx, Err(error)),
};
let args_path = temp_dir.path().join("rustdoc-cfgs");
crate::wrap_return(dcx, generate_args_file(&args_path, &options))?;
crate::wrap_return(dcx, generate_args_file(&args_path, &options));
let CreateRunnableDocTests {
standalone_tests,
@ -179,7 +175,7 @@ pub(crate) fn run(
..
} = interface::run_compiler(config, |compiler| {
compiler.enter(|queries| {
let collector = queries.global_ctxt()?.enter(|tcx| {
let collector = queries.global_ctxt().enter(|tcx| {
let crate_name = tcx.crate_name(LOCAL_CRATE).to_string();
let crate_attrs = tcx.hir().attrs(CRATE_HIR_ID);
let opts = scrape_test_config(crate_name, crate_attrs, args_path);
@ -196,13 +192,11 @@ pub(crate) fn run(
collector
});
if compiler.sess.dcx().has_errors().is_some() {
FatalError.raise();
}
compiler.sess.dcx().abort_if_errors();
Ok(collector)
collector
})
})?;
});
run_tests(opts, &rustdoc_options, &unused_extern_reports, standalone_tests, mergeable_tests);
@ -246,8 +240,6 @@ pub(crate) fn run(
eprintln!("{unused_extern_json}");
}
}
Ok(())
}
pub(crate) fn run_tests(

View File

@ -76,7 +76,7 @@ use std::process;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
use rustc_errors::{DiagCtxtHandle, ErrorGuaranteed, FatalError};
use rustc_errors::DiagCtxtHandle;
use rustc_interface::interface;
use rustc_middle::ty::TyCtxt;
use rustc_session::config::{ErrorOutputType, RustcOptGroup, make_crate_type_option};
@ -179,7 +179,8 @@ pub fn main() {
let exit_code = rustc_driver::catch_with_exit_code(|| {
let at_args = rustc_driver::args::raw_args(&early_dcx)?;
main_args(&mut early_dcx, &at_args, using_internal_features)
main_args(&mut early_dcx, &at_args, using_internal_features);
Ok(())
});
process::exit(exit_code);
}
@ -699,13 +700,10 @@ fn usage(argv0: &str) {
);
}
/// A result type used by several functions under `main()`.
type MainResult = Result<(), ErrorGuaranteed>;
pub(crate) fn wrap_return(dcx: DiagCtxtHandle<'_>, res: Result<(), String>) -> MainResult {
pub(crate) fn wrap_return(dcx: DiagCtxtHandle<'_>, res: Result<(), String>) {
match res {
Ok(()) => dcx.has_errors().map_or(Ok(()), Err),
Err(err) => Err(dcx.err(err)),
Ok(()) => dcx.abort_if_errors(),
Err(err) => dcx.fatal(err),
}
}
@ -714,17 +712,17 @@ fn run_renderer<'tcx, T: formats::FormatRenderer<'tcx>>(
renderopts: config::RenderOptions,
cache: formats::cache::Cache,
tcx: TyCtxt<'tcx>,
) -> MainResult {
) {
match formats::run_format::<T>(krate, renderopts, cache, tcx) {
Ok(_) => tcx.dcx().has_errors().map_or(Ok(()), Err),
Ok(_) => tcx.dcx().abort_if_errors(),
Err(e) => {
let mut msg =
tcx.dcx().struct_err(format!("couldn't generate documentation: {}", e.error));
tcx.dcx().struct_fatal(format!("couldn't generate documentation: {}", e.error));
let file = e.file.display().to_string();
if !file.is_empty() {
msg.note(format!("failed to create or modify \"{file}\""));
}
Err(msg.emit())
msg.emit();
}
}
}
@ -759,7 +757,7 @@ fn main_args(
early_dcx: &mut EarlyDiagCtxt,
at_args: &[String],
using_internal_features: Arc<AtomicBool>,
) -> MainResult {
) {
// Throw away the first argument, the name of the binary.
// In case of at_args being empty, as might be the case by
// passing empty argument array to execve under some platforms,
@ -770,7 +768,7 @@ fn main_args(
// the compiler with @empty_file as argv[0] and no more arguments.
let at_args = at_args.get(1..).unwrap_or_default();
let args = rustc_driver::args::arg_expand_all(early_dcx, at_args)?;
let args = rustc_driver::args::arg_expand_all(early_dcx, at_args);
let mut options = getopts::Options::new();
for option in opts() {
@ -788,7 +786,7 @@ fn main_args(
let (input, options, render_options) =
match config::Options::from_matches(early_dcx, &matches, args) {
Some(opts) => opts,
None => return Ok(()),
None => return,
};
let dcx =
@ -853,11 +851,11 @@ fn main_args(
if sess.opts.describe_lints {
rustc_driver::describe_lints(sess);
return Ok(());
return;
}
compiler.enter(|queries| {
let Ok(mut gcx) = queries.global_ctxt() else { FatalError.raise() };
let mut gcx = queries.global_ctxt();
if sess.dcx().has_errors().is_some() {
sess.dcx().fatal("Compilation failed, aborting rustdoc");
}
@ -865,7 +863,7 @@ fn main_args(
gcx.enter(|tcx| {
let (krate, render_opts, mut cache) = sess.time("run_global_ctxt", || {
core::run_global_ctxt(tcx, show_coverage, render_options, output_format)
})?;
});
info!("finished with rustc");
if let Some(options) = scrape_examples_options {
@ -884,10 +882,10 @@ fn main_args(
if show_coverage {
// if we ran coverage, bail early, we don't need to also generate docs at this point
// (also we didn't load in any of the useful passes)
return Ok(());
return;
} else if run_check {
// Since we're in "check" mode, no need to generate anything beyond this point.
return Ok(());
return;
}
info!("going to format");

View File

@ -2,6 +2,7 @@
use rustc_data_structures::sync::{Lock, Lrc};
use rustc_errors::emitter::Emitter;
use rustc_errors::registry::Registry;
use rustc_errors::translation::{Translate, to_fluent_args};
use rustc_errors::{Applicability, DiagCtxt, DiagInner, LazyFallbackBundle};
use rustc_parse::{source_str_to_stream, unwrap_or_emit_fatal};
@ -155,7 +156,7 @@ impl Translate for BufferEmitter {
}
impl Emitter for BufferEmitter {
fn emit_diagnostic(&mut self, diag: DiagInner) {
fn emit_diagnostic(&mut self, diag: DiagInner, _registry: &Registry) {
let mut buffer = self.buffer.borrow_mut();
let fluent_args = to_fluent_args(diag.args.iter());

View File

@ -7,7 +7,6 @@ use rustc_data_structures::fx::FxIndexMap;
use rustc_errors::DiagCtxtHandle;
use rustc_hir::intravisit::{self, Visitor};
use rustc_hir::{self as hir};
use rustc_interface::interface;
use rustc_macros::{Decodable, Encodable};
use rustc_middle::hir::nested_filter;
use rustc_middle::ty::{self, TyCtxt};
@ -275,7 +274,7 @@ pub(crate) fn run(
tcx: TyCtxt<'_>,
options: ScrapeExamplesOptions,
bin_crate: bool,
) -> interface::Result<()> {
) {
let inner = move || -> Result<(), String> {
// Generates source files for examples
renderopts.no_emit_shared = true;
@ -329,8 +328,6 @@ pub(crate) fn run(
if let Err(e) = inner() {
tcx.dcx().fatal(e);
}
Ok(())
}
// Note: the DiagCtxt must be passed in explicitly because sess isn't available while parsing

View File

@ -236,7 +236,8 @@ pub fn main() {
let mut args: Vec<String> = orig_args.clone();
pass_sysroot_env_if_given(&mut args, sys_root_env);
return rustc_driver::RunCompiler::new(&args, &mut DefaultCallbacks).run();
rustc_driver::RunCompiler::new(&args, &mut DefaultCallbacks).run();
return Ok(());
}
if orig_args.iter().any(|a| a == "--version" || a == "-V") {
@ -296,12 +297,13 @@ pub fn main() {
args.extend(clippy_args);
rustc_driver::RunCompiler::new(&args, &mut ClippyCallbacks { clippy_args_var })
.set_using_internal_features(using_internal_features)
.run()
.run();
} else {
rustc_driver::RunCompiler::new(&args, &mut RustcCallbacks { clippy_args_var })
.set_using_internal_features(using_internal_features)
.run()
.run();
}
return Ok(());
}))
}

View File

@ -289,7 +289,8 @@ fn run_compiler(
let exit_code = rustc_driver::catch_with_exit_code(move || {
rustc_driver::RunCompiler::new(&args, callbacks)
.set_using_internal_features(using_internal_features)
.run()
.run();
Ok(())
});
std::process::exit(exit_code)
}
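For reference only, a minimal out-of-tree driver wired up the same way as the callers above; this is a sketch, not part of the diff. `MyCallbacks` and the argument handling are hypothetical placeholders, and the closure ends with `Ok(())` because `RunCompiler::run()` returns `()` and reports failures through the diagnostic context.

#![feature(rustc_private)]
extern crate rustc_driver;

// Illustrative callbacks type; every trait method falls back to its default.
struct MyCallbacks;
impl rustc_driver::Callbacks for MyCallbacks {}

fn main() {
    // Forward the process arguments (including argv[0]) to the compiler.
    let args: Vec<String> = std::env::args().collect();
    let exit_code = rustc_driver::catch_with_exit_code(|| {
        // run() aborts via the DiagCtxt on errors, so nothing to propagate here.
        rustc_driver::RunCompiler::new(&args, &mut MyCallbacks).run();
        Ok(())
    });
    std::process::exit(exit_code);
}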

View File

@ -3,6 +3,7 @@ use std::sync::atomic::{AtomicBool, Ordering};
use rustc_data_structures::sync::{IntoDynSyncSend, Lrc};
use rustc_errors::emitter::{DynEmitter, Emitter, HumanEmitter, SilentEmitter, stderr_destination};
use rustc_errors::registry::Registry;
use rustc_errors::translation::Translate;
use rustc_errors::{ColorConfig, Diag, DiagCtxt, DiagInner, Level as DiagnosticLevel};
use rustc_session::parse::ParseSess as RawParseSess;
@ -38,10 +39,10 @@ struct SilentOnIgnoredFilesEmitter {
}
impl SilentOnIgnoredFilesEmitter {
fn handle_non_ignoreable_error(&mut self, diag: DiagInner) {
fn handle_non_ignoreable_error(&mut self, diag: DiagInner, registry: &Registry) {
self.has_non_ignorable_parser_errors = true;
self.can_reset.store(false, Ordering::Release);
self.emitter.emit_diagnostic(diag);
self.emitter.emit_diagnostic(diag, registry);
}
}
@ -60,9 +61,9 @@ impl Emitter for SilentOnIgnoredFilesEmitter {
None
}
fn emit_diagnostic(&mut self, diag: DiagInner) {
fn emit_diagnostic(&mut self, diag: DiagInner, registry: &Registry) {
if diag.level() == DiagnosticLevel::Fatal {
return self.handle_non_ignoreable_error(diag);
return self.handle_non_ignoreable_error(diag, registry);
}
if let Some(primary_span) = &diag.span.primary_span() {
let file_name = self.source_map.span_to_filename(*primary_span);
@ -80,7 +81,7 @@ impl Emitter for SilentOnIgnoredFilesEmitter {
}
};
}
self.handle_non_ignoreable_error(diag);
self.handle_non_ignoreable_error(diag, registry);
}
}
@ -358,7 +359,7 @@ mod tests {
None
}
fn emit_diagnostic(&mut self, _diag: DiagInner) {
fn emit_diagnostic(&mut self, _diag: DiagInner, _registry: &Registry) {
self.num_emitted_errors.fetch_add(1, Ordering::Release);
}
}
@ -412,6 +413,7 @@ mod tests {
SourceMapFileName::Real(RealFileName::LocalPath(PathBuf::from("foo.rs"))),
source,
);
let registry = Registry::new(&[]);
let mut emitter = build_emitter(
Lrc::clone(&num_emitted_errors),
Lrc::clone(&can_reset_errors),
@ -420,7 +422,7 @@ mod tests {
);
let span = MultiSpan::from_span(mk_sp(BytePos(0), BytePos(1)));
let fatal_diagnostic = build_diagnostic(DiagnosticLevel::Fatal, Some(span));
emitter.emit_diagnostic(fatal_diagnostic);
emitter.emit_diagnostic(fatal_diagnostic, &registry);
assert_eq!(num_emitted_errors.load(Ordering::Acquire), 1);
assert_eq!(can_reset_errors.load(Ordering::Acquire), false);
}
@ -437,6 +439,7 @@ mod tests {
SourceMapFileName::Real(RealFileName::LocalPath(PathBuf::from("foo.rs"))),
source,
);
let registry = Registry::new(&[]);
let mut emitter = build_emitter(
Lrc::clone(&num_emitted_errors),
Lrc::clone(&can_reset_errors),
@ -445,7 +448,7 @@ mod tests {
);
let span = MultiSpan::from_span(mk_sp(BytePos(0), BytePos(1)));
let non_fatal_diagnostic = build_diagnostic(DiagnosticLevel::Warning, Some(span));
emitter.emit_diagnostic(non_fatal_diagnostic);
emitter.emit_diagnostic(non_fatal_diagnostic, &registry);
assert_eq!(num_emitted_errors.load(Ordering::Acquire), 0);
assert_eq!(can_reset_errors.load(Ordering::Acquire), true);
}
@ -461,6 +464,7 @@ mod tests {
SourceMapFileName::Real(RealFileName::LocalPath(PathBuf::from("foo.rs"))),
source,
);
let registry = Registry::new(&[]);
let mut emitter = build_emitter(
Lrc::clone(&num_emitted_errors),
Lrc::clone(&can_reset_errors),
@ -469,7 +473,7 @@ mod tests {
);
let span = MultiSpan::from_span(mk_sp(BytePos(0), BytePos(1)));
let non_fatal_diagnostic = build_diagnostic(DiagnosticLevel::Warning, Some(span));
emitter.emit_diagnostic(non_fatal_diagnostic);
emitter.emit_diagnostic(non_fatal_diagnostic, &registry);
assert_eq!(num_emitted_errors.load(Ordering::Acquire), 1);
assert_eq!(can_reset_errors.load(Ordering::Acquire), false);
}
@ -497,6 +501,7 @@ mod tests {
SourceMapFileName::Real(RealFileName::LocalPath(PathBuf::from("fatal.rs"))),
fatal_source,
);
let registry = Registry::new(&[]);
let mut emitter = build_emitter(
Lrc::clone(&num_emitted_errors),
Lrc::clone(&can_reset_errors),
@ -508,9 +513,9 @@ mod tests {
let bar_diagnostic = build_diagnostic(DiagnosticLevel::Warning, Some(bar_span));
let foo_diagnostic = build_diagnostic(DiagnosticLevel::Warning, Some(foo_span));
let fatal_diagnostic = build_diagnostic(DiagnosticLevel::Fatal, None);
emitter.emit_diagnostic(bar_diagnostic);
emitter.emit_diagnostic(foo_diagnostic);
emitter.emit_diagnostic(fatal_diagnostic);
emitter.emit_diagnostic(bar_diagnostic, &registry);
emitter.emit_diagnostic(foo_diagnostic, &registry);
emitter.emit_diagnostic(fatal_diagnostic, &registry);
assert_eq!(num_emitted_errors.load(Ordering::Acquire), 2);
assert_eq!(can_reset_errors.load(Ordering::Acquire), false);
}

View File

@ -15,16 +15,16 @@ extern crate rustc_span;
extern crate rustc_symbol_mangling;
extern crate rustc_target;
use std::any::Any;
use rustc_codegen_ssa::traits::CodegenBackend;
use rustc_codegen_ssa::{CodegenResults, CrateInfo};
use rustc_data_structures::fx::FxIndexMap;
use rustc_errors::ErrorGuaranteed;
use rustc_metadata::EncodedMetadata;
use rustc_middle::dep_graph::{WorkProduct, WorkProductId};
use rustc_middle::ty::TyCtxt;
use rustc_session::config::OutputFilenames;
use rustc_session::Session;
use std::any::Any;
use rustc_session::config::OutputFilenames;
struct TheBackend;
@ -60,17 +60,12 @@ impl CodegenBackend for TheBackend {
(*codegen_results, FxIndexMap::default())
}
fn link(
&self,
sess: &Session,
codegen_results: CodegenResults,
outputs: &OutputFilenames,
) -> Result<(), ErrorGuaranteed> {
use rustc_session::{
config::{CrateType, OutFileName},
output::out_filename,
};
fn link(&self, sess: &Session, codegen_results: CodegenResults, outputs: &OutputFilenames) {
use std::io::Write;
use rustc_session::config::{CrateType, OutFileName};
use rustc_session::output::out_filename;
let crate_name = codegen_results.crate_info.local_crate_name;
for &crate_type in sess.opts.crate_types.iter() {
if crate_type != CrateType::Rlib {
@ -88,7 +83,6 @@ impl CodegenBackend for TheBackend {
}
}
}
Ok(())
}
}
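As a usage note (an aside, not part of the diff above): a backend like `TheBackend` is ordinarily built as a dynamic library and handed to the compiler with `-Zcodegen-backend=/path/to/the/dylib`, which is how fulldeps tests of this kind typically exercise a custom `CodegenBackend`.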

View File

@ -12,7 +12,7 @@ extern crate rustc_interface;
use rustc_interface::interface;
struct TestCalls<'a> {
count: &'a mut u32
count: &'a mut u32,
}
impl rustc_driver::Callbacks for TestCalls<'_> {
@ -24,8 +24,9 @@ impl rustc_driver::Callbacks for TestCalls<'_> {
fn main() {
let mut count = 1;
let args = vec!["compiler-calls".to_string(), "foo.rs".to_string()];
rustc_driver::catch_fatal_errors(|| {
rustc_driver::RunCompiler::new(&args, &mut TestCalls { count: &mut count }).run().ok();
rustc_driver::catch_fatal_errors(|| -> interface::Result<()> {
rustc_driver::RunCompiler::new(&args, &mut TestCalls { count: &mut count }).run();
Ok(())
})
.ok();
assert_eq!(count, 2);

View File

@ -47,7 +47,8 @@ fn main() {
rustc_args.push("-Zpolonius".to_owned());
let mut callbacks = CompilerCalls::default();
// Call the Rust compiler with our callbacks.
rustc_driver::RunCompiler::new(&rustc_args, &mut callbacks).run()
rustc_driver::RunCompiler::new(&rustc_args, &mut callbacks).run();
Ok(())
});
std::process::exit(exit_code);
}

View File

@ -17,8 +17,7 @@ extern crate rustc_span;
use std::path::{Path, PathBuf};
use rustc_interface::Linker;
use rustc_interface::interface;
use rustc_interface::{Linker, interface};
use rustc_session::config::{Input, Options, OutFileName, OutputType, OutputTypes};
use rustc_span::FileName;
@ -79,11 +78,11 @@ fn compile(code: String, output: PathBuf, sysroot: PathBuf, linker: Option<&Path
interface::run_compiler(config, |compiler| {
let linker = compiler.enter(|queries| {
queries.global_ctxt()?.enter(|tcx| {
tcx.analysis(())?;
queries.global_ctxt().enter(|tcx| {
let _ = tcx.analysis(());
Linker::codegen_and_build_linker(tcx, &*compiler.codegen_backend)
})
});
linker.unwrap().link(&compiler.sess, &*compiler.codegen_backend).unwrap();
linker.link(&compiler.sess, &*compiler.codegen_backend);
});
}