Auto merge of #136158 - cuviper:stable-next, r=cuviper

[stable] Prepare Rust 1.84.1 point release

- [Fix ICE 132920 in duplicate-crate diagnostics.](https://github.com/rust-lang/rust/pull/133304/)
- [Fix errors for overlapping impls in incremental rebuilds.](https://github.com/rust-lang/rust/pull/133828/)
- [Fix slow compilation related to the next-generation trait solver.](https://github.com/rust-lang/rust/pull/135618/)
- [Fix debuginfo when LLVM's location discriminator value limit is exceeded.](https://github.com/rust-lang/rust/pull/135643/)
- Fixes for building Rust from source:
  - [Only try to distribute `llvm-objcopy` if llvm tools are enabled.](https://github.com/rust-lang/rust/pull/134240/)
  - [Add Profile Override for Non-Git Sources.](https://github.com/rust-lang/rust/pull/135433/)
  - [Resolve symlinks of LLVM tool binaries before copying them.](https://github.com/rust-lang/rust/pull/135585/)
  - [Make it possible to use ci-rustc on tarball sources.](https://github.com/rust-lang/rust/pull/135722/)

cc `@rust-lang/release`
r? ghost
This commit is contained in:
bors 2025-01-27 23:20:59 +00:00
commit e71f9a9a98
32 changed files with 421 additions and 133 deletions

View File

@ -1,3 +1,18 @@
Version 1.84.1 (2025-01-30)
==========================
<a id="1.84.1"></a>
- [Fix ICE 132920 in duplicate-crate diagnostics.](https://github.com/rust-lang/rust/pull/133304/)
- [Fix errors for overlapping impls in incremental rebuilds.](https://github.com/rust-lang/rust/pull/133828/)
- [Fix slow compilation related to the next-generation trait solver.](https://github.com/rust-lang/rust/pull/135618/)
- [Fix debuginfo when LLVM's location discriminator value limit is exceeded.](https://github.com/rust-lang/rust/pull/135643/)
- Fixes for building Rust from source:
- [Only try to distribute `llvm-objcopy` if llvm tools are enabled.](https://github.com/rust-lang/rust/pull/134240/)
- [Add Profile Override for Non-Git Sources.](https://github.com/rust-lang/rust/pull/135433/)
- [Resolve symlinks of LLVM tool binaries before copying them.](https://github.com/rust-lang/rust/pull/135585/)
- [Make it possible to use ci-rustc on tarball sources.](https://github.com/rust-lang/rust/pull/135722/)
Version 1.84.0 (2025-01-09) Version 1.84.0 (2025-01-09)
========================== ==========================

View File

@ -113,15 +113,15 @@ fn make_mir_scope<'gcc, 'tcx>(
let scope_data = &mir.source_scopes[scope]; let scope_data = &mir.source_scopes[scope];
let parent_scope = if let Some(parent) = scope_data.parent_scope { let parent_scope = if let Some(parent) = scope_data.parent_scope {
make_mir_scope(cx, _instance, mir, variables, debug_context, instantiated, parent); make_mir_scope(cx, _instance, mir, variables, debug_context, instantiated, parent);
debug_context.scopes[parent].unwrap() debug_context.scopes[parent]
} else { } else {
// The root is the function itself. // The root is the function itself.
let file = cx.sess().source_map().lookup_source_file(mir.span.lo()); let file = cx.sess().source_map().lookup_source_file(mir.span.lo());
debug_context.scopes[scope] = Some(DebugScope { debug_context.scopes[scope] = DebugScope {
file_start_pos: file.start_pos, file_start_pos: file.start_pos,
file_end_pos: file.end_position(), file_end_pos: file.end_position(),
..debug_context.scopes[scope].unwrap() ..debug_context.scopes[scope]
}); };
instantiated.insert(scope); instantiated.insert(scope);
return; return;
}; };
@ -130,7 +130,7 @@ fn make_mir_scope<'gcc, 'tcx>(
if !vars.contains(scope) && scope_data.inlined.is_none() { if !vars.contains(scope) && scope_data.inlined.is_none() {
// Do not create a DIScope if there are no variables defined in this // Do not create a DIScope if there are no variables defined in this
// MIR `SourceScope`, and it's not `inlined`, to avoid debuginfo bloat. // MIR `SourceScope`, and it's not `inlined`, to avoid debuginfo bloat.
debug_context.scopes[scope] = Some(parent_scope); debug_context.scopes[scope] = parent_scope;
instantiated.insert(scope); instantiated.insert(scope);
return; return;
} }
@ -157,12 +157,12 @@ fn make_mir_scope<'gcc, 'tcx>(
// TODO(tempdragon): dbg_scope: Add support for scope extension here. // TODO(tempdragon): dbg_scope: Add support for scope extension here.
inlined_at.or(p_inlined_at); inlined_at.or(p_inlined_at);
debug_context.scopes[scope] = Some(DebugScope { debug_context.scopes[scope] = DebugScope {
dbg_scope, dbg_scope,
inlined_at, inlined_at,
file_start_pos: loc.file.start_pos, file_start_pos: loc.file.start_pos,
file_end_pos: loc.file.end_position(), file_end_pos: loc.file.end_position(),
}); };
instantiated.insert(scope); instantiated.insert(scope);
} }
@ -232,12 +232,12 @@ impl<'gcc, 'tcx> DebugInfoCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
} }
// Initialize fn debug context (including scopes). // Initialize fn debug context (including scopes).
let empty_scope = Some(DebugScope { let empty_scope = DebugScope {
dbg_scope: self.dbg_scope_fn(instance, fn_abi, Some(llfn)), dbg_scope: self.dbg_scope_fn(instance, fn_abi, Some(llfn)),
inlined_at: None, inlined_at: None,
file_start_pos: BytePos(0), file_start_pos: BytePos(0),
file_end_pos: BytePos(0), file_end_pos: BytePos(0),
}); };
let mut fn_debug_context = FunctionDebugContext { let mut fn_debug_context = FunctionDebugContext {
scopes: IndexVec::from_elem(empty_scope, mir.source_scopes.as_slice()), scopes: IndexVec::from_elem(empty_scope, mir.source_scopes.as_slice()),
inlined_function_scopes: Default::default(), inlined_function_scopes: Default::default(),

View File

@ -9,7 +9,7 @@ use rustc_middle::mir::{Body, SourceScope};
use rustc_middle::ty::layout::{FnAbiOf, HasTypingEnv}; use rustc_middle::ty::layout::{FnAbiOf, HasTypingEnv};
use rustc_middle::ty::{self, Instance}; use rustc_middle::ty::{self, Instance};
use rustc_session::config::DebugInfo; use rustc_session::config::DebugInfo;
use rustc_span::{BytePos, hygiene}; use rustc_span::{BytePos, DUMMY_SP, hygiene};
use super::metadata::file_metadata; use super::metadata::file_metadata;
use super::utils::DIB; use super::utils::DIB;
@ -85,23 +85,15 @@ fn make_mir_scope<'ll, 'tcx>(
discriminators, discriminators,
parent, parent,
); );
if let Some(parent_scope) = debug_context.scopes[parent] { debug_context.scopes[parent]
parent_scope
} else {
// If the parent scope could not be represented then no children
// can be either.
debug_context.scopes[scope] = None;
instantiated.insert(scope);
return;
}
} else { } else {
// The root is the function itself. // The root is the function itself.
let file = cx.sess().source_map().lookup_source_file(mir.span.lo()); let file = cx.sess().source_map().lookup_source_file(mir.span.lo());
debug_context.scopes[scope] = Some(DebugScope { debug_context.scopes[scope] = DebugScope {
file_start_pos: file.start_pos, file_start_pos: file.start_pos,
file_end_pos: file.end_position(), file_end_pos: file.end_position(),
..debug_context.scopes[scope].unwrap() ..debug_context.scopes[scope]
}); };
instantiated.insert(scope); instantiated.insert(scope);
return; return;
}; };
@ -112,7 +104,7 @@ fn make_mir_scope<'ll, 'tcx>(
{ {
// Do not create a DIScope if there are no variables defined in this // Do not create a DIScope if there are no variables defined in this
// MIR `SourceScope`, and it's not `inlined`, to avoid debuginfo bloat. // MIR `SourceScope`, and it's not `inlined`, to avoid debuginfo bloat.
debug_context.scopes[scope] = Some(parent_scope); debug_context.scopes[scope] = parent_scope;
instantiated.insert(scope); instantiated.insert(scope);
return; return;
} }
@ -145,14 +137,7 @@ fn make_mir_scope<'ll, 'tcx>(
}, },
}; };
let mut debug_scope = Some(DebugScope { let inlined_at = scope_data.inlined.map(|(_, callsite_span)| {
dbg_scope,
inlined_at: parent_scope.inlined_at,
file_start_pos: loc.file.start_pos,
file_end_pos: loc.file.end_position(),
});
if let Some((_, callsite_span)) = scope_data.inlined {
let callsite_span = hygiene::walk_chain_collapsed(callsite_span, mir.span); let callsite_span = hygiene::walk_chain_collapsed(callsite_span, mir.span);
let callsite_scope = parent_scope.adjust_dbg_scope_for_span(cx, callsite_span); let callsite_scope = parent_scope.adjust_dbg_scope_for_span(cx, callsite_span);
let loc = cx.dbg_loc(callsite_scope, parent_scope.inlined_at, callsite_span); let loc = cx.dbg_loc(callsite_scope, parent_scope.inlined_at, callsite_span);
@ -175,29 +160,29 @@ fn make_mir_scope<'ll, 'tcx>(
// Note further that we can't key this hashtable on the span itself, // Note further that we can't key this hashtable on the span itself,
// because these spans could have distinct SyntaxContexts. We have // because these spans could have distinct SyntaxContexts. We have
// to key on exactly what we're giving to LLVM. // to key on exactly what we're giving to LLVM.
let inlined_at = match discriminators.entry(callsite_span.lo()) { match discriminators.entry(callsite_span.lo()) {
Entry::Occupied(mut o) => { Entry::Occupied(mut o) => {
*o.get_mut() += 1; *o.get_mut() += 1;
// NB: We have to emit *something* here or we'll fail LLVM IR verification
// in at least some circumstances (see issue #135322) so if the required
// discriminant cannot be encoded fall back to the dummy location.
unsafe { llvm::LLVMRustDILocationCloneWithBaseDiscriminator(loc, *o.get()) } unsafe { llvm::LLVMRustDILocationCloneWithBaseDiscriminator(loc, *o.get()) }
.unwrap_or_else(|| {
cx.dbg_loc(callsite_scope, parent_scope.inlined_at, DUMMY_SP)
})
} }
Entry::Vacant(v) => { Entry::Vacant(v) => {
v.insert(0); v.insert(0);
Some(loc) loc
}
};
match inlined_at {
Some(inlined_at) => {
debug_scope.as_mut().unwrap().inlined_at = Some(inlined_at);
}
None => {
// LLVM has a maximum discriminator that it can encode (currently
// it uses 12 bits for 4096 possible values). If we exceed that
// there is little we can do but drop the debug info.
debug_scope = None;
}
} }
} }
});
debug_context.scopes[scope] = debug_scope; debug_context.scopes[scope] = DebugScope {
dbg_scope,
inlined_at: inlined_at.or(parent_scope.inlined_at),
file_start_pos: loc.file.start_pos,
file_end_pos: loc.file.end_position(),
};
instantiated.insert(scope); instantiated.insert(scope);
} }

View File

@ -294,12 +294,12 @@ impl<'ll, 'tcx> DebugInfoCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> {
} }
// Initialize fn debug context (including scopes). // Initialize fn debug context (including scopes).
let empty_scope = Some(DebugScope { let empty_scope = DebugScope {
dbg_scope: self.dbg_scope_fn(instance, fn_abi, Some(llfn)), dbg_scope: self.dbg_scope_fn(instance, fn_abi, Some(llfn)),
inlined_at: None, inlined_at: None,
file_start_pos: BytePos(0), file_start_pos: BytePos(0),
file_end_pos: BytePos(0), file_end_pos: BytePos(0),
}); };
let mut fn_debug_context = FunctionDebugContext { let mut fn_debug_context = FunctionDebugContext {
scopes: IndexVec::from_elem(empty_scope, &mir.source_scopes), scopes: IndexVec::from_elem(empty_scope, &mir.source_scopes),
inlined_function_scopes: Default::default(), inlined_function_scopes: Default::default(),

View File

@ -20,9 +20,7 @@ use crate::traits::*;
pub struct FunctionDebugContext<'tcx, S, L> { pub struct FunctionDebugContext<'tcx, S, L> {
/// Maps from source code to the corresponding debug info scope. /// Maps from source code to the corresponding debug info scope.
/// May be None if the backend is not capable of representing the scope for pub scopes: IndexVec<mir::SourceScope, DebugScope<S, L>>,
/// some reason.
pub scopes: IndexVec<mir::SourceScope, Option<DebugScope<S, L>>>,
/// Maps from an inlined function to its debug info declaration. /// Maps from an inlined function to its debug info declaration.
pub inlined_function_scopes: FxHashMap<Instance<'tcx>, S>, pub inlined_function_scopes: FxHashMap<Instance<'tcx>, S>,
@ -233,7 +231,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
&self, &self,
source_info: mir::SourceInfo, source_info: mir::SourceInfo,
) -> Option<(Bx::DIScope, Option<Bx::DILocation>, Span)> { ) -> Option<(Bx::DIScope, Option<Bx::DILocation>, Span)> {
let scope = &self.debug_context.as_ref()?.scopes[source_info.scope]?; let scope = &self.debug_context.as_ref()?.scopes[source_info.scope];
let span = hygiene::walk_chain_collapsed(source_info.span, self.mir.span); let span = hygiene::walk_chain_collapsed(source_info.span, self.mir.span);
Some((scope.adjust_dbg_scope_for_span(self.cx, span), scope.inlined_at, span)) Some((scope.adjust_dbg_scope_for_span(self.cx, span), scope.inlined_at, span))
} }

View File

@ -302,7 +302,11 @@ impl<D: Deps> DepGraph<D> {
OP: FnOnce() -> R, OP: FnOnce() -> R,
{ {
match self.data() { match self.data() {
Some(data) => data.with_anon_task(cx, dep_kind, op), Some(data) => {
let (result, index) = data.with_anon_task_inner(cx, dep_kind, op);
self.read_index(index);
(result, index)
}
None => (op(), self.next_virtual_depnode_index()), None => (op(), self.next_virtual_depnode_index()),
} }
} }
@ -397,7 +401,16 @@ impl<D: Deps> DepGraphData<D> {
/// Executes something within an "anonymous" task, that is, a task the /// Executes something within an "anonymous" task, that is, a task the
/// `DepNode` of which is determined by the list of inputs it read from. /// `DepNode` of which is determined by the list of inputs it read from.
pub(crate) fn with_anon_task<Tcx: DepContext<Deps = D>, OP, R>( ///
/// NOTE: this does not actually count as a read of the DepNode here.
/// Using the result of this task without reading the DepNode will result
/// in untracked dependencies which may lead to ICEs as nodes are
/// incorrectly marked green.
///
/// FIXME: This could perhaps return a `WithDepNode` to ensure that the
/// user of this function actually performs the read; we'll have to see
/// how to make that work with `anon` in `execute_job_incr`, though.
pub(crate) fn with_anon_task_inner<Tcx: DepContext<Deps = D>, OP, R>(
&self, &self,
cx: Tcx, cx: Tcx,
dep_kind: DepKind, dep_kind: DepKind,

View File

@ -520,9 +520,11 @@ where
let (result, dep_node_index) = let (result, dep_node_index) =
qcx.start_query(job_id, query.depth_limit(), Some(&diagnostics), || { qcx.start_query(job_id, query.depth_limit(), Some(&diagnostics), || {
if query.anon() { if query.anon() {
return dep_graph_data.with_anon_task(*qcx.dep_context(), query.dep_kind(), || { return dep_graph_data.with_anon_task_inner(
query.compute(qcx, key) *qcx.dep_context(),
}); query.dep_kind(),
|| query.compute(qcx, key),
);
} }
// `to_dep_node` is expensive for some `DepKind`s. // `to_dep_node` is expensive for some `DepKind`s.

View File

@ -1808,24 +1808,6 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
StringPart::highlighted("cargo tree".to_string()), StringPart::highlighted("cargo tree".to_string()),
StringPart::normal("` to explore your dependency tree".to_string()), StringPart::normal("` to explore your dependency tree".to_string()),
]); ]);
// FIXME: this is a giant hack for the benefit of this specific diagnostic. Because
// we're so nested in method calls before the error gets emitted, bubbling a single bit
// flag informing the top level caller to stop adding extra detail to the diagnostic,
// would actually be harder to follow. So we do something naughty here: we consume the
// diagnostic, emit it and leave in its place a "delayed bug" that will continue being
// modified but won't actually be printed to end users. This *is not ideal*, but allows
// us to reduce the verbosity of an error that is already quite verbose and increase its
// specificity. Below we modify the main message as well, in a way that *could* break if
// the implementation of Diagnostics change significantly, but that would be caught with
// a make test failure when this diagnostic is tested.
err.primary_message(format!(
"{} because the trait comes from a different crate version",
err.messages[0].0.as_str().unwrap(),
));
let diag = err.clone();
err.downgrade_to_delayed_bug();
self.tcx.dcx().emit_diagnostic(diag);
return true; return true;
} }

View File

@ -6,7 +6,7 @@
use std::fmt::Debug; use std::fmt::Debug;
use rustc_data_structures::fx::FxIndexSet; use rustc_data_structures::fx::{FxHashSet, FxIndexSet};
use rustc_errors::{Diag, EmissionGuarantee}; use rustc_errors::{Diag, EmissionGuarantee};
use rustc_hir::def::DefKind; use rustc_hir::def::DefKind;
use rustc_hir::def_id::DefId; use rustc_hir::def_id::DefId;
@ -117,6 +117,16 @@ pub fn overlapping_impls(
return None; return None;
} }
if tcx.next_trait_solver_in_coherence() {
overlap(
tcx,
TrackAmbiguityCauses::Yes,
skip_leak_check,
impl1_def_id,
impl2_def_id,
overlap_mode,
)
} else {
let _overlap_with_bad_diagnostics = overlap( let _overlap_with_bad_diagnostics = overlap(
tcx, tcx,
TrackAmbiguityCauses::No, TrackAmbiguityCauses::No,
@ -139,6 +149,7 @@ pub fn overlapping_impls(
) )
.unwrap(); .unwrap();
Some(overlap) Some(overlap)
}
} }
fn fresh_impl_header<'tcx>(infcx: &InferCtxt<'tcx>, impl_def_id: DefId) -> ty::ImplHeader<'tcx> { fn fresh_impl_header<'tcx>(infcx: &InferCtxt<'tcx>, impl_def_id: DefId) -> ty::ImplHeader<'tcx> {
@ -616,6 +627,7 @@ fn compute_intercrate_ambiguity_causes<'tcx>(
} }
struct AmbiguityCausesVisitor<'a, 'tcx> { struct AmbiguityCausesVisitor<'a, 'tcx> {
cache: FxHashSet<Goal<'tcx, ty::Predicate<'tcx>>>,
causes: &'a mut FxIndexSet<IntercrateAmbiguityCause<'tcx>>, causes: &'a mut FxIndexSet<IntercrateAmbiguityCause<'tcx>>,
} }
@ -625,6 +637,10 @@ impl<'a, 'tcx> ProofTreeVisitor<'tcx> for AmbiguityCausesVisitor<'a, 'tcx> {
} }
fn visit_goal(&mut self, goal: &InspectGoal<'_, 'tcx>) { fn visit_goal(&mut self, goal: &InspectGoal<'_, 'tcx>) {
if !self.cache.insert(goal.goal()) {
return;
}
let infcx = goal.infcx(); let infcx = goal.infcx();
for cand in goal.candidates() { for cand in goal.candidates() {
cand.visit_nested_in_probe(self); cand.visit_nested_in_probe(self);
@ -749,5 +765,10 @@ fn search_ambiguity_causes<'tcx>(
goal: Goal<'tcx, ty::Predicate<'tcx>>, goal: Goal<'tcx, ty::Predicate<'tcx>>,
causes: &mut FxIndexSet<IntercrateAmbiguityCause<'tcx>>, causes: &mut FxIndexSet<IntercrateAmbiguityCause<'tcx>>,
) { ) {
infcx.probe(|_| infcx.visit_proof_tree(goal, &mut AmbiguityCausesVisitor { causes })); infcx.probe(|_| {
infcx.visit_proof_tree(goal, &mut AmbiguityCausesVisitor {
cache: Default::default(),
causes,
})
});
} }

View File

@ -1390,10 +1390,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
where where
OP: FnOnce(&mut Self) -> R, OP: FnOnce(&mut Self) -> R,
{ {
let (result, dep_node) = self.tcx().dep_graph.with_anon_task(self.tcx(), dep_kinds::TraitSelect, || op(self))
self.tcx().dep_graph.with_anon_task(self.tcx(), dep_kinds::TraitSelect, || op(self));
self.tcx().dep_graph.read_index(dep_node);
(result, dep_node)
} }
/// filter_impls filters candidates that have a positive impl for a negative /// filter_impls filters candidates that have a positive impl for a negative

View File

@ -511,7 +511,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
// This is for global caching, so we properly track query dependencies. // This is for global caching, so we properly track query dependencies.
// Everything that affects the `result` should be performed within this // Everything that affects the `result` should be performed within this
// `with_anon_task` closure. If computing this goal depends on something // `with_cached_task` closure. If computing this goal depends on something
// not tracked by the cache key and from outside of this anon task, it // not tracked by the cache key and from outside of this anon task, it
// must not be added to the global cache. Notably, this is the case for // must not be added to the global cache. Notably, this is the case for
// trait solver cycles participants. // trait solver cycles participants.

View File

@ -1145,7 +1145,12 @@ def bootstrap(args):
else: else:
config_toml = '' config_toml = ''
profile = RustBuild.get_toml_static(config_toml, 'profile') profile = RustBuild.get_toml_static(config_toml, "profile")
is_non_git_source = not os.path.exists(os.path.join(rust_root, ".git"))
if profile is None and is_non_git_source:
profile = "dist"
if profile is not None: if profile is not None:
# Allows creating alias for profile names, allowing # Allows creating alias for profile names, allowing
# profiles to be renamed while maintaining back compatibility # profiles to be renamed while maintaining back compatibility

View File

@ -1800,7 +1800,13 @@ impl Step for Assemble {
// When using `download-ci-llvm`, some of the tools // When using `download-ci-llvm`, some of the tools
// may not exist, so skip trying to copy them. // may not exist, so skip trying to copy them.
if src_path.exists() { if src_path.exists() {
builder.copy_link(&src_path, &libdir_bin.join(&tool_exe)); // There is a chance that these tools are being installed from an external LLVM.
// Use `Builder::resolve_symlink_and_copy` instead of `Builder::copy_link` to ensure
// we are copying the original file not the symlinked path, which causes issues for
// tarball distribution.
//
// See https://github.com/rust-lang/rust/issues/135554.
builder.resolve_symlink_and_copy(&src_path, &libdir_bin.join(&tool_exe));
} }
} }
} }

View File

@ -471,7 +471,7 @@ impl Step for Rustc {
} }
} }
{ if builder.config.llvm_enabled(compiler.host) && builder.config.llvm_tools_enabled {
let src_dir = builder.sysroot_target_bindir(compiler, host); let src_dir = builder.sysroot_target_bindir(compiler, host);
let llvm_objcopy = exe("llvm-objcopy", compiler.host); let llvm_objcopy = exe("llvm-objcopy", compiler.host);
let rust_objcopy = exe("rust-objcopy", compiler.host); let rust_objcopy = exe("rust-objcopy", compiler.host);

View File

@ -2815,10 +2815,10 @@ impl Config {
allowed_paths.push(":!library"); allowed_paths.push(":!library");
} }
let commit = if self.rust_info.is_managed_git_subrepository() {
// Look for a version to compare to based on the current commit. // Look for a version to compare to based on the current commit.
// Only commits merged by bors will have CI artifacts. // Only commits merged by bors will have CI artifacts.
let commit = match self.last_modified_commit(&allowed_paths, "download-rustc", if_unchanged) match self.last_modified_commit(&allowed_paths, "download-rustc", if_unchanged) {
{
Some(commit) => commit, Some(commit) => commit,
None => { None => {
if if_unchanged { if if_unchanged {
@ -2830,6 +2830,11 @@ impl Config {
println!("HELP: or fetch enough history to include one upstream commit"); println!("HELP: or fetch enough history to include one upstream commit");
crate::exit!(1); crate::exit!(1);
} }
}
} else {
channel::read_commit_info_file(&self.src)
.map(|info| info.sha.trim().to_owned())
.expect("git-commit-info is missing in the project root")
}; };
if CiEnv::is_ci() && { if CiEnv::is_ci() && {
@ -2858,10 +2863,8 @@ impl Config {
let if_unchanged = || { let if_unchanged = || {
if self.rust_info.is_from_tarball() { if self.rust_info.is_from_tarball() {
// Git is needed for running "if-unchanged" logic. // Git is needed for running "if-unchanged" logic.
println!( println!("ERROR: 'if-unchanged' is only compatible with Git managed sources.");
"WARNING: 'if-unchanged' has no effect on tarball sources; ignoring `download-ci-llvm`." crate::exit!(1);
);
return false;
} }
// Fetching the LLVM submodule is unnecessary for self-tests. // Fetching the LLVM submodule is unnecessary for self-tests.
@ -2903,6 +2906,11 @@ impl Config {
option_name: &str, option_name: &str,
if_unchanged: bool, if_unchanged: bool,
) -> Option<String> { ) -> Option<String> {
assert!(
self.rust_info.is_managed_git_subrepository(),
"Can't run `Config::last_modified_commit` on a non-git source."
);
// Look for a version to compare to based on the current commit. // Look for a version to compare to based on the current commit.
// Only commits merged by bors will have CI artifacts. // Only commits merged by bors will have CI artifacts.
let commit = get_closest_merge_commit(Some(&self.src), &self.git_config(), &[]).unwrap(); let commit = get_closest_merge_commit(Some(&self.src), &self.git_config(), &[]).unwrap();

View File

@ -1681,6 +1681,14 @@ Executed at: {executed_at}"#,
paths paths
} }
/// Copies a file from `src` to `dst`.
///
/// If `src` is a symlink, `src` will be resolved to the actual path
/// and copied to `dst` instead of the symlink itself.
pub fn resolve_symlink_and_copy(&self, src: &Path, dst: &Path) {
self.copy_link_internal(src, dst, true);
}
/// Links a file from `src` to `dst`. /// Links a file from `src` to `dst`.
/// Attempts to use hard links if possible, falling back to copying. /// Attempts to use hard links if possible, falling back to copying.
/// You can neither rely on this being a copy nor it being a link, /// You can neither rely on this being a copy nor it being a link,

View File

@ -1 +1 @@
1.84.0 1.84.1

View File

@ -0,0 +1,25 @@
//@ revisions: cfail1 cfail2
//@ compile-flags: -Znext-solver
//@ check-pass

// Incremental-rebuild regression test for the next-generation trait solver:
// both revisions must pass `check`, while the only change between them is
// the definition of `Error` (see the `cfg`-gated structs below).

pub trait Future {
    // Associated type resolved via the trait solver in both revisions.
    type Error;
    fn poll() -> Self::Error;
}

struct S;

impl Future for S {
    // `Self::Error` points at whichever `Error` the active revision defines.
    type Error = Error;
    fn poll() -> Self::Error {
        todo!()
    }
}

// The single difference between the two revisions: the tuple-struct field
// list of `Error` changes, invalidating cached results that mention it.
#[cfg(cfail1)]
pub struct Error(());

#[cfg(cfail2)]
pub struct Error();

fn main() {}

View File

@ -1,4 +1,4 @@
error[E0277]: the trait bound `foo::Struct: Trait` is not satisfied because the trait comes from a different crate version error[E0277]: the trait bound `foo::Struct: Trait` is not satisfied
--> foo-current.rs:13:19 --> foo-current.rs:13:19
| |
13 | check_trait::<foo::Struct>(); 13 | check_trait::<foo::Struct>();
@ -23,6 +23,11 @@ note: there are multiple different versions of crate `foo` in the dependency gra
| --------------- this is the found trait | --------------- this is the found trait
= note: two types coming from two different versions of the same crate are different types even if they look the same = note: two types coming from two different versions of the same crate are different types even if they look the same
= help: you can use `cargo tree` to explore your dependency tree = help: you can use `cargo tree` to explore your dependency tree
note: required by a bound in `check_trait`
--> foo-current.rs:10:19
|
10 | fn check_trait<T: Trait>() {}
| ^^^^^ required by this bound in `check_trait`
error: aborting due to 1 previous error error: aborting due to 1 previous error

View File

@ -1,8 +1,10 @@
error[E0277]: the trait bound `dep_2_reexport::Type: Trait` is not satisfied because the trait comes from a different crate version error[E0277]: the trait bound `dep_2_reexport::Type: Trait` is not satisfied
--> replaced --> replaced
| |
LL | do_something(Type); LL | do_something(Type);
| ^^^^ the trait `Trait` is not implemented for `dep_2_reexport::Type` | ------------ ^^^^ the trait `Trait` is not implemented for `dep_2_reexport::Type`
| |
| required by a bound introduced by this call
| |
note: there are multiple different versions of crate `dependency` in the dependency graph note: there are multiple different versions of crate `dependency` in the dependency graph
--> replaced --> replaced
@ -27,6 +29,11 @@ LL | pub trait Trait {
| --------------- this is the found trait | --------------- this is the found trait
= note: two types coming from two different versions of the same crate are different types even if they look the same = note: two types coming from two different versions of the same crate are different types even if they look the same
= help: you can use `cargo tree` to explore your dependency tree = help: you can use `cargo tree` to explore your dependency tree
note: required by a bound in `do_something`
--> replaced
|
LL | pub fn do_something<X: Trait>(_: X) {}
| ^^^^^ required by this bound in `do_something`
error[E0599]: no method named `foo` found for struct `dep_2_reexport::Type` in the current scope error[E0599]: no method named `foo` found for struct `dep_2_reexport::Type` in the current scope
--> replaced --> replaced
@ -77,11 +84,13 @@ LL | use dependency::{Trait, do_something};
LL | pub trait Trait { LL | pub trait Trait {
| --------------- this is the trait that was imported | --------------- this is the trait that was imported
error[E0277]: the trait bound `OtherType: Trait` is not satisfied because the trait comes from a different crate version error[E0277]: the trait bound `OtherType: Trait` is not satisfied
--> replaced --> replaced
| |
LL | do_something(OtherType); LL | do_something(OtherType);
| ^^^^^^^^^ the trait `Trait` is not implemented for `OtherType` | ------------ ^^^^^^^^^ the trait `Trait` is not implemented for `OtherType`
| |
| required by a bound introduced by this call
| |
note: there are multiple different versions of crate `dependency` in the dependency graph note: there are multiple different versions of crate `dependency` in the dependency graph
--> replaced --> replaced
@ -106,6 +115,11 @@ LL | pub struct OtherType;
LL | pub trait Trait { LL | pub trait Trait {
| --------------- this is the found trait | --------------- this is the found trait
= help: you can use `cargo tree` to explore your dependency tree = help: you can use `cargo tree` to explore your dependency tree
note: required by a bound in `do_something`
--> replaced
|
LL | pub fn do_something<X: Trait>(_: X) {}
| ^^^^^ required by this bound in `do_something`
error: aborting due to 4 previous errors error: aborting due to 4 previous errors

View File

@ -0,0 +1,2 @@
// Support crate ("minibevy") for the duplicate-crate-version repro: it is
// compiled twice with different `-C metadata` values (see rmake.rs), so two
// distinct versions of `Resource` and `Ray2d` coexist in one build graph.
pub trait Resource {}

pub struct Ray2d;

View File

@ -0,0 +1 @@
pub type Ray = minibevy::Ray2d;

View File

@ -0,0 +1,14 @@
extern crate minibevy;
extern crate minirapier;

// `Resource` is imported from one version of `minibevy`, while `minirapier`
// was compiled against the other version (see rmake.rs).
use minibevy::Resource;
use minirapier::Ray;

fn insert_resource<R: Resource>(_resource: R) {}

struct Res;
impl Resource for Res {}

fn main() {
    // The unconstrained `.into()` leaves the argument type ambiguous; the
    // resulting diagnostic has to talk about traits coming from two versions
    // of the same crate, which used to ICE (issue #132920 — see rmake.rs).
    // Expected outcome: a normal compile error, no compiler panic.
    insert_resource(Res.into());
}

View File

@ -0,0 +1,45 @@
// Non-regression test for issue #132920 where multiple versions of the same crate are present in
// the dependency graph, and an unexpected error in a dependent crate caused an ICE in the
// unsatisfied bounds diagnostics for traits present in multiple crate versions.
//
// Setup:
// - two versions of the same crate: minibevy_a and minibevy_b
// - minirapier: depends on minibevy_a
// - repro: depends on minirapier and minibevy_b
use run_make_support::rustc;
/// Mimics a `cargo check`-style build graph that contains two versions of
/// the same crate, then verifies that compiling the final crate produces a
/// regular error instead of panicking the compiler.
fn main() {
    // Emit metadata for two distinct "versions" of minibevy: identical
    // source, but different `-C metadata` and output filenames, so rustc
    // treats them as different crates in the dependency graph.
    for (metadata, filename_suffix) in [("a", "-a"), ("b", "-b")] {
        rustc()
            .input("minibevy.rs")
            .crate_name("minibevy")
            .crate_type("lib")
            .emit("metadata")
            .metadata(metadata)
            .extra_filename(filename_suffix)
            .run();
    }

    // minirapier links against version "a" of minibevy.
    rustc()
        .input("minirapier.rs")
        .crate_name("minirapier")
        .crate_type("lib")
        .emit("metadata")
        .extern_("minibevy", "libminibevy-a.rmeta")
        .run();

    // The repro crate mixes version "b" with minirapier (built against "a").
    // Printing the `type annotations needed` error here used to ICE; the
    // build must fail without the compiler-panic banner on stderr.
    rustc()
        .input("repro.rs")
        .extern_("minibevy", "libminibevy-b.rmeta")
        .extern_("minirapier", "libminirapier.rmeta")
        .run_fail()
        .assert_stderr_not_contains("error: the compiler unexpectedly panicked. this is a bug");
}

View File

@ -0,0 +1,3 @@
fn main() {
    // `big_function` is generated with ~1500 callsites that all share one
    // macro-expansion span (see rmake.rs); calling it exercises the
    // debuginfo discriminator-overflow path under LTO.
    other::big_function();
}

View File

@ -0,0 +1 @@
proc::declare_big_function!();

View File

@ -0,0 +1,7 @@
extern crate proc_macro;
use proc_macro::TokenStream;
#[proc_macro]
pub fn declare_big_function(_input: TokenStream) -> TokenStream {
include_str!("./generated.rs").parse().unwrap()
}

View File

@ -0,0 +1,65 @@
//! Regression test for <https://github.com/rust-lang/rust/issues/135332>.
//!
//! We can't simply drop debuginfo location spans when LLVM's location discriminator value limit is
//! reached. Otherwise, with `-Z verify-llvm-ir` and fat LTO, LLVM will report a broken module for
//!
//! ```text
//! inlinable function call in a function with debug info must have a !dbg location
//! ```
//@ ignore-cross-compile
//@ needs-dynamic-linking
//@ only-nightly (requires unstable rustc flag)
#![deny(warnings)]
use run_make_support::{dynamic_lib_name, rfs, rust_lib_name, rustc};
// Synthesize a function that will have a large (`n`) number of functions
// MIR-inlined into it. When combined with a proc-macro, all of these inline
// callsites will have the same span, forcing rustc to use the DWARF
// discriminator to distinguish between them. LLVM's capacity to store that
// discriminator is not infinite (currently it allocates 12 bits for a
// maximum value of 4096) so if this function gets big enough rustc's error
// handling path will be exercised.
/// Renders the source of a crate exposing `big_function`, whose body is a
/// single `vec![...]` literal containing `n` nested `vec!` element
/// expressions — the raw material for the massive-inlining scenario
/// described above.
fn generate_program(n: u32) -> String {
    // One un-indented element row per requested entry.
    let rows: String = (1..=n)
        .map(|i| format!("vec![\"string{}\".to_owned()],\n", i))
        .collect();
    format!(
        "pub type BigType = Vec<Vec<String>>;\n\npub fn big_function() -> BigType {{\n    vec![\n{rows}    ]\n}}\n"
    )
}
fn main() {
    // The reported threshold is around 1366 (4096/3), but let's bump it to
    // around 1500 to be less sensitive.
    rfs::write("generated.rs", generate_program(1500));
    // Build the proc macro that pastes `generated.rs` into its caller.
    rustc()
        .input("proc.rs")
        .crate_type("proc-macro")
        .edition("2021")
        .arg("-Cdebuginfo=line-tables-only")
        .run();
    // `other` expands the macro; at -O3 the ~1500 `vec!` callsites — which
    // all share one span — get MIR-inlined into `big_function`, forcing the
    // DWARF discriminator past LLVM's 12-bit limit.
    rustc()
        .extern_("proc", dynamic_lib_name("proc"))
        .input("other.rs")
        .crate_type("rlib")
        .edition("2021")
        .opt_level("3")
        .arg("-Cdebuginfo=line-tables-only")
        .run();
    // Fat LTO with -Zverify-llvm-ir is the regression check: if rustc simply
    // dropped the debug location on overflow, LLVM would reject the module
    // with "inlinable function call in a function with debug info must have
    // a !dbg location".
    rustc()
        .extern_("other", rust_lib_name("other"))
        .input("main.rs")
        .edition("2021")
        .opt_level("3")
        .arg("-Cdebuginfo=line-tables-only")
        .arg("-Clto=fat")
        .arg("-Zverify-llvm-ir")
        .run();
}

View File

@ -1,7 +1,5 @@
WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, '^0.Named(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), "'a")], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit), .. } WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, '^0.Named(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), "'a")], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit), .. }
WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, '^0.Named(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), "'a")], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit), .. } WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, '^0.Named(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), "'a")], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit), .. }
WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, '^0.Named(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), "'a")], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit), .. }
WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, '^0.Named(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), "'a")], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit), .. }
error[E0119]: conflicting implementations of trait `Overlap<for<'a> fn(&'a (), ())>` for type `for<'a> fn(&'a (), ())` error[E0119]: conflicting implementations of trait `Overlap<for<'a> fn(&'a (), ())>` for type `for<'a> fn(&'a (), ())`
--> $DIR/associated-type.rs:32:1 --> $DIR/associated-type.rs:32:1
| |

View File

@ -1,7 +1,5 @@
WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, '^0.Named(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), "'a")], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit), .. } WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, '^0.Named(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), "'a")], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit), .. }
WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, '^0.Named(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), "'a")], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit), .. } WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, '^0.Named(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), "'a")], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit), .. }
WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, '^0.Named(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), "'a")], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit), .. }
WARN rustc_infer::infer::relate::generalize may incompletely handle alias type: AliasTy { args: [*const ?1t, '^0.Named(DefId(0:27 ~ associated_type[f554]::{impl#3}::'a#1), "'a")], def_id: DefId(0:5 ~ associated_type[f554]::ToUnit::Unit), .. }
error[E0119]: conflicting implementations of trait `Overlap<for<'a> fn(&'a (), ())>` for type `for<'a> fn(&'a (), ())` error[E0119]: conflicting implementations of trait `Overlap<for<'a> fn(&'a (), ())>` for type `for<'a> fn(&'a (), ())`
--> $DIR/associated-type.rs:32:1 --> $DIR/associated-type.rs:32:1
| |

View File

@ -0,0 +1,56 @@
// Computing the ambiguity causes for the overlap ended up
// causing an exponential blowup when recursing into the normalization
// goals for `<Box<?t> as RecursiveSuper>::Assoc`. This test
// takes multiple minutes when doing so and less than a second
// otherwise.
//@ compile-flags: -Znext-solver=coherence
trait RecursiveSuper: // NOTE: only trailing comments here — this file's line numbers are pinned by its .stderr
    Super<
        A0 = Self::Assoc, // Each `A* = Self::Assoc` equality feeds another normalization
        A1 = Self::Assoc, // goal for `<Box<?t> as RecursiveSuper>::Assoc`; with 16 of them
        A2 = Self::Assoc, // per level, computing ambiguity causes used to blow up
        A3 = Self::Assoc, // exponentially (see the header comment above).
        A4 = Self::Assoc,
        A5 = Self::Assoc,
        A6 = Self::Assoc,
        A7 = Self::Assoc,
        A8 = Self::Assoc,
        A9 = Self::Assoc,
        A10 = Self::Assoc,
        A11 = Self::Assoc,
        A12 = Self::Assoc,
        A13 = Self::Assoc,
        A14 = Self::Assoc,
        A15 = Self::Assoc,
    >
{
    type Assoc; // the single associated type every `A*` above is equated to
}
trait Super { // 16 associated types, all constrained by `RecursiveSuper`'s supertrait bound above
    type A0;
    type A1;
    type A2;
    type A3;
    type A4;
    type A5;
    type A6;
    type A7;
    type A8;
    type A9;
    type A10;
    type A11;
    type A12;
    type A13;
    type A14;
    type A15;
}
trait Overlap {} // marker trait: the two impls below overlap, triggering the ambiguity-causes visitor
impl<T: RecursiveSuper> Overlap for T {} // blanket impl; conflicts with the `Box` impl below
impl<T> Overlap for Box<T> {}
//~^ ERROR conflicting implementations of trait `Overlap` for type `Box<_>`
fn main() {}

View File

@ -0,0 +1,14 @@
error[E0119]: conflicting implementations of trait `Overlap` for type `Box<_>`
--> $DIR/ambiguity-causes-visitor-hang.rs:53:1
|
LL | impl<T: RecursiveSuper> Overlap for T {}
| ------------------------------------- first implementation here
LL | impl<T> Overlap for Box<T> {}
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ conflicting implementation for `Box<_>`
|
= note: downstream crates may implement trait `Super` for type `std::boxed::Box<_>`
= note: downstream crates may implement trait `RecursiveSuper` for type `std::boxed::Box<_>`
error: aborting due to 1 previous error
For more information about this error, try `rustc --explain E0119`.