Auto merge of #138523 - fmease:rollup-j2j5h59, r=fmease

Rollup of 9 pull requests

Successful merges:

 - #138056 (rustc_target: Add target features for LoongArch v1.1)
 - #138451 (Build GCC on CI with GCC, not Clang)
 - #138454 (Improve post-merge workflow)
 - #138460 (Pass struct field HirId when check_expr_struct_fields)
 - #138474 (Refactor is_snake_case.)
 - #138482 (Fix HIR printing of parameters)
 - #138507 (Mirror NetBSD sources)
 - #138511 (Make `Parser::parse_expr_cond` public)
 - #138518 (Fix typo in hir lowering lint diag)

r? `@ghost`
`@rustbot` modify labels: rollup
bors 2025-03-15 02:22:46 +00:00
commit 282865097d
38 changed files with 493 additions and 217 deletions

View File

@ -35,8 +35,13 @@ jobs:
cd src/ci/citool
echo "Post-merge analysis result" > output.log
printf "*This is an experimental post-merge analysis report. You can ignore it.*\n\n" > output.log
printf "<details>\n<summary>Post-merge report</summary>\n\n" >> output.log
cargo run --release post-merge-report ${PARENT_COMMIT} ${{ github.sha }} >> output.log
printf "</details>\n" >> output.log
cat output.log
gh pr comment ${HEAD_PR} -F output.log

View File

@ -1516,7 +1516,14 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
fn lower_fn_params_to_names(&mut self, decl: &FnDecl) -> &'hir [Ident] {
self.arena.alloc_from_iter(decl.inputs.iter().map(|param| match param.pat.kind {
PatKind::Ident(_, ident, _) => self.lower_ident(ident),
_ => Ident::new(kw::Empty, self.lower_span(param.pat.span)),
PatKind::Wild => Ident::new(kw::Underscore, self.lower_span(param.pat.span)),
_ => {
self.dcx().span_delayed_bug(
param.pat.span,
"non-ident/wild param pat must trigger an error",
);
Ident::new(kw::Empty, self.lower_span(param.pat.span))
}
}))
}
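
To make the lowering change above easier to read at a glance, here is a small standalone sketch (the enum and function are invented for illustration, not rustc code) of the new mapping from parameter patterns to printable names:

```rust
// Illustrative only: a simplified model of the pattern kinds involved,
// not rustc's real data structures.
enum ParamPat<'a> {
    Ident(&'a str),
    Wild,
    Other,
}

// Mirrors the intent of `lower_fn_params_to_names` after this change.
fn param_name(pat: &ParamPat<'_>) -> String {
    match pat {
        ParamPat::Ident(name) => (*name).to_string(),
        // `_` now lowers to a real underscore instead of an empty identifier.
        ParamPat::Wild => "_".to_string(),
        // Any other pattern should already have produced an error; an empty
        // placeholder keeps later consumers from printing a bogus name.
        ParamPat::Other => String::new(),
    }
}

fn main() {
    assert_eq!(param_name(&ParamPat::Ident("a")), "a");
    assert_eq!(param_name(&ParamPat::Wild), "_");
    assert_eq!(param_name(&ParamPat::Other), "");
}
```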

View File

@ -411,14 +411,16 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
Applicability::MachineApplicable,
);
if !is_dyn_compatible {
diag.note(format!("`{trait_name}` it is dyn-incompatible, so it can't be `dyn`"));
diag.note(format!(
"`{trait_name}` is dyn-incompatible, otherwise a trait object could be used"
));
} else {
// No ampersand in suggestion if it's borrowed already
let (dyn_str, paren_dyn_str) =
if borrowed { ("dyn ", "(dyn ") } else { ("&dyn ", "&(dyn ") };
let sugg = if let hir::TyKind::TraitObject([_, _, ..], _) = self_ty.kind {
// There are more than one trait bound, we need surrounding parentheses.
// There is more than one trait bound, we need surrounding parentheses.
vec![
(self_ty.span.shrink_to_lo(), paren_dyn_str.to_string()),
(self_ty.span.shrink_to_hi(), ")".to_string()),

View File

@ -2148,9 +2148,11 @@ impl<'a> State<'a> {
s.print_implicit_self(&decl.implicit_self);
} else {
if let Some(arg_name) = arg_names.get(i) {
s.word(arg_name.to_string());
s.word(":");
s.space();
if arg_name.name != kw::Empty {
s.word(arg_name.to_string());
s.word(":");
s.space();
}
} else if let Some(body_id) = body_id {
s.ann.nested(s, Nested::BodyParamPat(body_id, i));
s.word(":");

View File

@ -2060,7 +2060,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// struct-like enums (yet...), but it's definitely not
// a bug to have constructed one.
if adt_kind != AdtKind::Enum {
tcx.check_stability(v_field.did, Some(expr.hir_id), field.span, None);
tcx.check_stability(v_field.did, Some(field.hir_id), field.span, None);
}
self.field_ty(field.span, v_field, args)

View File

@ -19,7 +19,7 @@ use rustc_middle::ty::visit::TypeVisitableExt;
use rustc_middle::ty::{self, IsSuggestable, Ty, TyCtxt};
use rustc_middle::{bug, span_bug};
use rustc_session::Session;
use rustc_span::{DUMMY_SP, Ident, Span, Symbol, kw, sym};
use rustc_span::{DUMMY_SP, Ident, Span, kw, sym};
use rustc_trait_selection::error_reporting::infer::{FailureCode, ObligationCauseExt};
use rustc_trait_selection::infer::InferCtxtExt;
use rustc_trait_selection::traits::{self, ObligationCauseCode, ObligationCtxt, SelectionContext};
@ -2679,7 +2679,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
params.get(is_method as usize..params.len() - sig.decl.c_variadic as usize)?;
debug_assert_eq!(params.len(), fn_inputs.len());
Some((
fn_inputs.zip(params.iter().map(|param| FnParam::Name(param))).collect(),
fn_inputs.zip(params.iter().map(|&param| FnParam::Name(param))).collect(),
generics,
))
}
@ -2710,23 +2710,14 @@ impl<'tcx> Visitor<'tcx> for FindClosureArg<'tcx> {
#[derive(Clone, Copy)]
enum FnParam<'hir> {
Param(&'hir hir::Param<'hir>),
Name(&'hir Ident),
Name(Ident),
}
impl FnParam<'_> {
fn span(&self) -> Span {
match self {
Self::Param(x) => x.span,
Self::Name(x) => x.span,
}
}
fn name(&self) -> Option<Symbol> {
match self {
Self::Param(x) if let hir::PatKind::Binding(_, _, ident, _) = x.pat.kind => {
Some(ident.name)
}
Self::Name(x) if x.name != kw::Empty => Some(x.name),
_ => None,
Self::Param(param) => param.span,
Self::Name(ident) => ident.span,
}
}
@ -2734,8 +2725,23 @@ impl FnParam<'_> {
struct D<'a>(FnParam<'a>, usize);
impl fmt::Display for D<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if let Some(name) = self.0.name() {
write!(f, "`{name}`")
// A "unique" param name is one that (a) exists, and (b) is guaranteed to be unique
// among the parameters, i.e. `_` does not count.
let unique_name = match self.0 {
FnParam::Param(param)
if let hir::PatKind::Binding(_, _, ident, _) = param.pat.kind =>
{
Some(ident.name)
}
FnParam::Name(ident)
if ident.name != kw::Empty && ident.name != kw::Underscore =>
{
Some(ident.name)
}
_ => None,
};
if let Some(unique_name) = unique_name {
write!(f, "`{unique_name}`")
} else {
write!(f, "parameter #{}", self.1 + 1)
}
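
A minimal sketch of the fallback labelling shown above (the signature and names are invented for illustration): a parameter is referred to by name only when that name is neither empty nor `_`, otherwise by its 1-based position:

```rust
// Illustrative only: how the diagnostic chooses a label for a parameter.
fn param_label(name: Option<&str>, index: usize) -> String {
    match name {
        Some(n) if !n.is_empty() && n != "_" => format!("`{n}`"),
        _ => format!("parameter #{}", index + 1),
    }
}

fn main() {
    assert_eq!(param_label(Some("count"), 0), "`count`");
    assert_eq!(param_label(Some("_"), 1), "parameter #2");
    assert_eq!(param_label(None, 2), "parameter #3");
}
```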

View File

@ -1422,7 +1422,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
self.tcx.check_stability(
variant.fields[FieldIdx::from_usize(i)].did,
Some(pat.hir_id),
Some(subpat.hir_id),
subpat.span,
None,
);
@ -1686,7 +1686,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
.get(&ident)
.map(|(i, f)| {
self.write_field_index(field.hir_id, *i);
self.tcx.check_stability(f.did, Some(pat.hir_id), span, None);
self.tcx.check_stability(f.did, Some(field.hir_id), span, None);
self.field_ty(span, f, args)
})
.unwrap_or_else(|| {

View File

@ -274,18 +274,13 @@ impl NonSnakeCase {
let ident = ident.trim_start_matches('\'');
let ident = ident.trim_matches('_');
let mut allow_underscore = true;
ident.chars().all(|c| {
allow_underscore = match c {
'_' if !allow_underscore => return false,
'_' => false,
// It would be more obvious to use `c.is_lowercase()`,
// but some characters do not have a lowercase form
c if !c.is_uppercase() => true,
_ => return false,
};
true
})
if ident.contains("__") {
return false;
}
// This correctly handles letters in languages with and without
// cases, as well as numbers and underscores.
!ident.chars().any(char::is_uppercase)
}
let name = ident.name.as_str();
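
A standalone sketch of the refactored check (the function name is invented; the body copies the hunk above), showing what it accepts and rejects:

```rust
// Illustrative only: leading lifetimes (`'`) and surrounding underscores are
// stripped first; the name then fails on a double underscore or any uppercase
// letter, so characters without a case (digits, CJK, ...) are accepted.
fn is_snake_case_body(ident: &str) -> bool {
    let ident = ident.trim_start_matches('\'');
    let ident = ident.trim_matches('_');
    if ident.contains("__") {
        return false;
    }
    !ident.chars().any(char::is_uppercase)
}

fn main() {
    assert!(is_snake_case_body("foo_bar"));
    assert!(is_snake_case_body("_leading"));
    assert!(is_snake_case_body("变量")); // uncased script passes
    assert!(!is_snake_case_body("fooBar"));
    assert!(!is_snake_case_body("foo__bar"));
}
```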

View File

@ -281,8 +281,9 @@ impl<'tcx> TyCtxt<'tcx> {
}
pub fn hir_body_param_names(self, id: BodyId) -> impl Iterator<Item = Ident> {
self.hir_body(id).params.iter().map(|arg| match arg.pat.kind {
self.hir_body(id).params.iter().map(|param| match param.pat.kind {
PatKind::Binding(_, _, ident, _) => ident,
PatKind::Wild => Ident::new(kw::Underscore, param.pat.span),
_ => Ident::empty(),
})
}

View File

@ -2588,7 +2588,8 @@ impl<'a> Parser<'a> {
}
/// Parses the condition of a `if` or `while` expression.
fn parse_expr_cond(&mut self) -> PResult<'a, P<Expr>> {
// Public because it is used in rustfmt forks such as https://github.com/tucant/rustfmt/blob/30c83df9e1db10007bdd16dafce8a86b404329b2/src/parse/macros/html.rs#L57 for custom if expressions.
pub fn parse_expr_cond(&mut self) -> PResult<'a, P<Expr>> {
let attrs = self.parse_outer_attributes()?;
let (mut cond, _) =
self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL | Restrictions::ALLOW_LET, attrs)?;

View File

@ -603,13 +603,18 @@ static CSKY_FEATURES: &[(&str, Stability, ImpliedFeatures)] = &[
static LOONGARCH_FEATURES: &[(&str, Stability, ImpliedFeatures)] = &[
// tidy-alphabetical-start
("d", Unstable(sym::loongarch_target_feature), &["f"]),
("div32", Unstable(sym::loongarch_target_feature), &[]),
("f", Unstable(sym::loongarch_target_feature), &[]),
("frecipe", Unstable(sym::loongarch_target_feature), &[]),
("lam-bh", Unstable(sym::loongarch_target_feature), &[]),
("lamcas", Unstable(sym::loongarch_target_feature), &[]),
("lasx", Unstable(sym::loongarch_target_feature), &["lsx"]),
("lbt", Unstable(sym::loongarch_target_feature), &[]),
("ld-seq-sa", Unstable(sym::loongarch_target_feature), &[]),
("lsx", Unstable(sym::loongarch_target_feature), &["d"]),
("lvz", Unstable(sym::loongarch_target_feature), &[]),
("relax", Unstable(sym::loongarch_target_feature), &[]),
("scq", Unstable(sym::loongarch_target_feature), &[]),
("ual", Unstable(sym::loongarch_target_feature), &[]),
// tidy-alphabetical-end
];
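
For context on how these names are consumed, here is a rough sketch of user code enabling one of the newly listed features; this assumes a nightly compiler targeting `loongarch64-unknown-linux-gnu`, and `div32` is used purely as an example:

```rust
// Nightly-only sketch, assuming a LoongArch target; the function body is a
// placeholder.
#![feature(loongarch_target_feature)]

#[target_feature(enable = "div32")]
unsafe fn uses_div32(a: u32, b: u32) -> u32 {
    // With the feature enabled, the compiler may use the corresponding
    // LoongArch v1.1 instructions here.
    a / b
}

fn main() {
    // The caller must guarantee that the CPU actually supports the feature.
    let q = unsafe { uses_div32(10, 3) };
    assert_eq!(q, 3);
}
```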

View File

@ -1998,7 +1998,10 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
.iter()
.enumerate()
.map(|(i, ident)| {
if ident.name.is_empty() || ident.name == kw::SelfLower {
if ident.name.is_empty()
|| ident.name == kw::Underscore
|| ident.name == kw::SelfLower
{
format!("arg{i}")
} else {
format!("{ident}")

View File

@ -2481,7 +2481,7 @@ impl Step for Gcc {
fn run(self, builder: &Builder<'_>) -> Self::Output {
let tarball = Tarball::new(builder, "gcc", &self.target.triple);
let output = builder.ensure(super::gcc::Gcc { target: self.target });
tarball.add_file(output.libgccjit, ".", 0o644);
tarball.add_file(output.libgccjit, "lib", 0o644);
tarball.generate()
}
}

View File

@ -63,11 +63,7 @@ impl Step for Gcc {
}
build_gcc(&metadata, builder, target);
let lib_alias = metadata.install_dir.join("lib/libgccjit.so.0");
if !lib_alias.exists() {
t!(builder.symlink_file(&libgccjit_path, lib_alias));
}
create_lib_alias(builder, &libgccjit_path);
t!(metadata.stamp.write());
@ -75,6 +71,15 @@ impl Step for Gcc {
}
}
/// Creates a libgccjit.so.0 alias next to libgccjit.so if it does not
/// already exist
fn create_lib_alias(builder: &Builder<'_>, libgccjit: &PathBuf) {
let lib_alias = libgccjit.parent().unwrap().join("libgccjit.so.0");
if !lib_alias.exists() {
t!(builder.symlink_file(libgccjit, lib_alias));
}
}
pub struct Meta {
stamp: BuildStamp,
out_dir: PathBuf,
@ -109,8 +114,10 @@ fn try_download_gcc(builder: &Builder<'_>, target: TargetSelection) -> Option<Pa
builder.config.download_ci_gcc(&sha, &root);
t!(gcc_stamp.write());
}
// FIXME: put libgccjit.so into a lib directory in dist::Gcc
Some(root.join("libgccjit.so"))
let libgccjit = root.join("lib").join("libgccjit.so");
create_lib_alias(builder, &libgccjit);
Some(libgccjit)
}
#[cfg(test)]
@ -177,6 +184,14 @@ fn libgccjit_built_path(install_dir: &Path) -> PathBuf {
}
fn build_gcc(metadata: &Meta, builder: &Builder<'_>, target: TargetSelection) {
if builder.build.cc_tool(target).is_like_clang()
|| builder.build.cxx_tool(target).is_like_clang()
{
panic!(
"Attempting to build GCC using Clang, which is known to misbehave. Please use GCC as the host C/C++ compiler. "
);
}
let Meta { stamp: _, out_dir, install_dir, root } = metadata;
t!(fs::create_dir_all(out_dir));
@ -203,18 +218,13 @@ fn build_gcc(metadata: &Meta, builder: &Builder<'_>, target: TargetSelection) {
let mut configure_cmd = command(src_dir.join("configure"));
configure_cmd
.current_dir(out_dir)
// On CI, we compile GCC with Clang.
// The -Wno-everything flag is needed to make GCC compile with Clang 19.
// `-g -O2` are the default flags that are otherwise used by Make.
// FIXME(kobzol): change the flags once we have [gcc] configuration in config.toml.
.env("CXXFLAGS", "-Wno-everything -g -O2")
.env("CFLAGS", "-Wno-everything -g -O2")
.arg("--enable-host-shared")
.arg("--enable-languages=jit")
.arg("--enable-languages=c,jit,lto")
.arg("--enable-checking=release")
.arg("--disable-bootstrap")
.arg("--disable-multilib")
.arg(format!("--prefix={}", install_dir.display()));
let cc = builder.build.cc(target).display().to_string();
let cc = builder
.build

View File

@ -27,6 +27,7 @@ use std::{env, fs, io, str};
use build_helper::ci::gha;
use build_helper::exit;
use cc::Tool;
use termcolor::{ColorChoice, StandardStream, WriteColor};
use utils::build_stamp::BuildStamp;
use utils::channel::GitInfo;
@ -1218,6 +1219,16 @@ Executed at: {executed_at}"#,
self.cc.borrow()[&target].path().into()
}
/// Returns the internal `cc::Tool` for the C compiler.
fn cc_tool(&self, target: TargetSelection) -> Tool {
self.cc.borrow()[&target].clone()
}
/// Returns the internal `cc::Tool` for the C++ compiler.
fn cxx_tool(&self, target: TargetSelection) -> Tool {
self.cxx.borrow()[&target].clone()
}
/// Returns C flags that `cc-rs` thinks should be enabled for the
/// specified target by default.
fn cc_handled_clags(&self, target: TargetSelection, c: CLang) -> Vec<String> {

View File

@ -1,8 +1,8 @@
use std::cmp::Reverse;
use std::collections::HashMap;
use std::collections::{HashMap, HashSet};
use std::path::PathBuf;
use anyhow::Context;
use build_helper::metrics::{JsonRoot, TestOutcome};
use build_helper::metrics::{JsonRoot, TestOutcome, TestSuiteMetadata};
use crate::jobs::JobDatabase;
use crate::metrics::get_test_suites;
@ -13,8 +13,10 @@ type JobName = String;
/// Computes a post merge CI analysis report between the `parent` and `current` commits.
pub fn post_merge_report(job_db: JobDatabase, parent: Sha, current: Sha) -> anyhow::Result<()> {
let jobs = download_all_metrics(&job_db, &parent, &current)?;
let diffs = aggregate_test_diffs(&jobs)?;
report_test_changes(diffs);
let aggregated_test_diffs = aggregate_test_diffs(&jobs)?;
println!("Comparing {parent} (base) -> {current} (this PR)\n");
report_test_diffs(aggregated_test_diffs);
Ok(())
}
@ -54,7 +56,16 @@ Maybe it was newly added?"#,
Ok(jobs)
}
/// Downloads job metrics of the given job for the given commit.
/// Caches the result on the local disk.
fn download_job_metrics(job_name: &str, sha: &str) -> anyhow::Result<JsonRoot> {
let cache_path = PathBuf::from(".citool-cache").join(sha).join(job_name).join("metrics.json");
if let Some(cache_entry) =
std::fs::read_to_string(&cache_path).ok().and_then(|data| serde_json::from_str(&data).ok())
{
return Ok(cache_entry);
}
let url = get_metrics_url(job_name, sha);
let mut response = ureq::get(&url).call()?;
if !response.status().is_success() {
@ -68,6 +79,13 @@ fn download_job_metrics(job_name: &str, sha: &str) -> anyhow::Result<JsonRoot> {
.body_mut()
.read_json()
.with_context(|| anyhow::anyhow!("cannot deserialize metrics from {url}"))?;
// Ignore errors if cache cannot be created
if std::fs::create_dir_all(cache_path.parent().unwrap()).is_ok() {
if let Ok(serialized) = serde_json::to_string(&data) {
let _ = std::fs::write(&cache_path, &serialized);
}
}
Ok(data)
}
@ -76,72 +94,30 @@ fn get_metrics_url(job_name: &str, sha: &str) -> String {
format!("https://ci-artifacts.rust-lang.org/rustc-builds{suffix}/{sha}/metrics-{job_name}.json")
}
/// Represents a difference in the outcome of tests between a base and a current commit.
/// Maps test diffs to jobs that contained them.
#[derive(Debug)]
struct AggregatedTestDiffs {
diffs: HashMap<TestDiff, Vec<JobName>>,
}
fn aggregate_test_diffs(
jobs: &HashMap<JobName, JobMetrics>,
) -> anyhow::Result<Vec<AggregatedTestDiffs>> {
let mut job_diffs = vec![];
) -> anyhow::Result<AggregatedTestDiffs> {
let mut diffs: HashMap<TestDiff, Vec<JobName>> = HashMap::new();
// Aggregate test suites
for (name, metrics) in jobs {
if let Some(parent) = &metrics.parent {
let tests_parent = aggregate_tests(parent);
let tests_current = aggregate_tests(&metrics.current);
let test_diffs = calculate_test_diffs(tests_parent, tests_current);
if !test_diffs.is_empty() {
job_diffs.push((name.clone(), test_diffs));
for diff in calculate_test_diffs(tests_parent, tests_current) {
diffs.entry(diff).or_default().push(name.to_string());
}
}
}
// Aggregate jobs with the same diff, as often the same diff will appear in many jobs
let job_diffs: HashMap<Vec<(Test, TestOutcomeDiff)>, Vec<String>> =
job_diffs.into_iter().fold(HashMap::new(), |mut acc, (job, diffs)| {
acc.entry(diffs).or_default().push(job);
acc
});
Ok(job_diffs
.into_iter()
.map(|(test_diffs, jobs)| AggregatedTestDiffs { jobs, test_diffs })
.collect())
}
fn calculate_test_diffs(
reference: TestSuiteData,
current: TestSuiteData,
) -> Vec<(Test, TestOutcomeDiff)> {
let mut diffs = vec![];
for (test, outcome) in &current.tests {
match reference.tests.get(test) {
Some(before) => {
if before != outcome {
diffs.push((
test.clone(),
TestOutcomeDiff::ChangeOutcome {
before: before.clone(),
after: outcome.clone(),
},
));
}
}
None => diffs.push((test.clone(), TestOutcomeDiff::Added(outcome.clone()))),
}
}
for (test, outcome) in &reference.tests {
if !current.tests.contains_key(test) {
diffs.push((test.clone(), TestOutcomeDiff::Missing { before: outcome.clone() }));
}
}
diffs
}
/// Represents a difference in the outcome of tests between a base and a current commit.
#[derive(Debug)]
struct AggregatedTestDiffs {
/// All jobs that had the exact same test diffs.
jobs: Vec<String>,
test_diffs: Vec<(Test, TestOutcomeDiff)>,
Ok(AggregatedTestDiffs { diffs })
}
#[derive(Eq, PartialEq, Hash, Debug)]
@ -151,6 +127,47 @@ enum TestOutcomeDiff {
Added(TestOutcome),
}
#[derive(Eq, PartialEq, Hash, Debug)]
struct TestDiff {
test: Test,
diff: TestOutcomeDiff,
}
fn calculate_test_diffs(parent: TestSuiteData, current: TestSuiteData) -> HashSet<TestDiff> {
let mut diffs = HashSet::new();
for (test, outcome) in &current.tests {
match parent.tests.get(test) {
Some(before) => {
if before != outcome {
diffs.insert(TestDiff {
test: test.clone(),
diff: TestOutcomeDiff::ChangeOutcome {
before: before.clone(),
after: outcome.clone(),
},
});
}
}
None => {
diffs.insert(TestDiff {
test: test.clone(),
diff: TestOutcomeDiff::Added(outcome.clone()),
});
}
}
}
for (test, outcome) in &parent.tests {
if !current.tests.contains_key(test) {
diffs.insert(TestDiff {
test: test.clone(),
diff: TestOutcomeDiff::Missing { before: outcome.clone() },
});
}
}
diffs
}
/// Aggregates test suite executions from all bootstrap invocations in a given CI job.
#[derive(Default)]
struct TestSuiteData {
@ -160,6 +177,7 @@ struct TestSuiteData {
#[derive(Hash, PartialEq, Eq, Debug, Clone)]
struct Test {
name: String,
is_doctest: bool,
}
/// Extracts all tests from the passed metrics and map them to their outcomes.
@ -168,7 +186,10 @@ fn aggregate_tests(metrics: &JsonRoot) -> TestSuiteData {
let test_suites = get_test_suites(&metrics);
for suite in test_suites {
for test in &suite.tests {
let test_entry = Test { name: normalize_test_name(&test.name) };
// Poor man's detection of doctests based on the "(line XYZ)" suffix
let is_doctest = matches!(suite.metadata, TestSuiteMetadata::CargoPackage { .. })
&& test.name.contains("(line");
let test_entry = Test { name: normalize_test_name(&test.name), is_doctest };
tests.insert(test_entry, test.outcome.clone());
}
}
@ -181,16 +202,13 @@ fn normalize_test_name(name: &str) -> String {
}
/// Prints test changes in Markdown format to stdout.
fn report_test_changes(mut diffs: Vec<AggregatedTestDiffs>) {
fn report_test_diffs(diff: AggregatedTestDiffs) {
println!("## Test differences");
if diffs.is_empty() {
if diff.diffs.is_empty() {
println!("No test diffs found");
return;
}
// Sort diffs in decreasing order by diff count
diffs.sort_by_key(|entry| Reverse(entry.test_diffs.len()));
fn format_outcome(outcome: &TestOutcome) -> String {
match outcome {
TestOutcome::Passed => "pass".to_string(),
@ -219,36 +237,79 @@ fn report_test_changes(mut diffs: Vec<AggregatedTestDiffs>) {
}
}
let max_diff_count = 10;
let max_job_count = 5;
let max_test_count = 10;
fn format_job_group(group: u64) -> String {
format!("**J{group}**")
}
for diff in diffs.iter().take(max_diff_count) {
let mut jobs = diff.jobs.clone();
jobs.sort();
// It would be quite noisy to repeat the jobs that contained the test changes after/next to
// every test diff. At the same time, grouping the test diffs by
// [unique set of jobs that contained them] also doesn't work well, because the test diffs
// would have to be duplicated several times.
// Instead, we create a set of unique job groups, and then print a job group after each test.
// We then print the job groups at the end, as a sort of index.
let mut grouped_diffs: Vec<(&TestDiff, u64)> = vec![];
let mut job_list_to_group: HashMap<&[JobName], u64> = HashMap::new();
let mut job_index: Vec<&[JobName]> = vec![];
let jobs = jobs.iter().take(max_job_count).map(|j| format!("`{j}`")).collect::<Vec<_>>();
let original_diff_count = diff.diffs.len();
let diffs = diff
.diffs
.into_iter()
.filter(|(diff, _)| !diff.test.is_doctest)
.map(|(diff, mut jobs)| {
jobs.sort();
(diff, jobs)
})
.collect::<Vec<_>>();
let doctest_count = original_diff_count.saturating_sub(diffs.len());
let extra_jobs = diff.jobs.len().saturating_sub(max_job_count);
let suffix = if extra_jobs > 0 {
format!(" (and {extra_jobs} {})", pluralize("other", extra_jobs))
} else {
String::new()
let max_diff_count = 100;
for (diff, jobs) in diffs.iter().take(max_diff_count) {
let jobs = &*jobs;
let job_group = match job_list_to_group.get(jobs.as_slice()) {
Some(id) => *id,
None => {
let id = job_index.len() as u64;
job_index.push(jobs);
job_list_to_group.insert(jobs, id);
id
}
};
println!("- {}{suffix}", jobs.join(","));
grouped_diffs.push((diff, job_group));
}
let extra_tests = diff.test_diffs.len().saturating_sub(max_test_count);
for (test, outcome_diff) in diff.test_diffs.iter().take(max_test_count) {
println!(" - {}: {}", test.name, format_diff(&outcome_diff));
}
if extra_tests > 0 {
println!(" - (and {extra_tests} additional {})", pluralize("tests", extra_tests));
}
// Sort diffs by job group and test name
grouped_diffs.sort_by(|(d1, g1), (d2, g2)| g1.cmp(&g2).then(d1.test.name.cmp(&d2.test.name)));
for (diff, job_group) in grouped_diffs {
println!(
"- `{}`: {} ({})",
diff.test.name,
format_diff(&diff.diff),
format_job_group(job_group)
);
}
let extra_diffs = diffs.len().saturating_sub(max_diff_count);
if extra_diffs > 0 {
println!("\n(and {extra_diffs} additional {})", pluralize("diff", extra_diffs));
println!("\n(and {extra_diffs} additional {})", pluralize("test diff", extra_diffs));
}
if doctest_count > 0 {
println!(
"\nAdditionally, {doctest_count} doctest {} were found. These are ignored, as they are noisy.",
pluralize("diff", doctest_count)
);
}
// Now print the job group index
println!("\n**Job group index**\n");
for (group, jobs) in job_index.into_iter().enumerate() {
println!(
"- {}: {}",
format_job_group(group as u64),
jobs.iter().map(|j| format!("`{j}`")).collect::<Vec<_>>().join(", ")
);
}
}
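
The comments above describe the job-group index; the following standalone sketch (job and test names invented, not citool code) shows the same grouping idea in miniature:

```rust
use std::collections::HashMap;

// Identical job lists share one numeric group id, the id is printed next to
// each test diff, and the full job lists are printed once as an index.
fn main() {
    let diffs: Vec<(&str, Vec<&str>)> = vec![
        ("ui/foo.rs", vec!["x86_64-gnu", "aarch64-gnu"]),
        ("ui/bar.rs", vec!["x86_64-gnu", "aarch64-gnu"]),
        ("ui/baz.rs", vec!["dist-x86_64-msvc"]),
    ];

    let mut job_list_to_group: HashMap<Vec<&str>, usize> = HashMap::new();
    let mut job_index: Vec<Vec<&str>> = Vec::new();

    for (test, jobs) in &diffs {
        let group = *job_list_to_group.entry(jobs.clone()).or_insert_with(|| {
            job_index.push(jobs.clone());
            job_index.len() - 1
        });
        println!("- `{test}`: change (**J{group}**)");
    }

    println!("\n**Job group index**");
    for (group, jobs) in job_index.iter().enumerate() {
        println!("- **J{group}**: {}", jobs.join(", "));
    }
}
```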

View File

@ -101,7 +101,9 @@ ENV SCRIPT python3 ../x.py build --set rust.debug=true opt-dist && \
./build/$HOSTS/stage0-tools-bin/opt-dist linux-ci -- python3 ../x.py dist \
--host $HOSTS --target $HOSTS \
--include-default-paths \
build-manifest bootstrap gcc
build-manifest bootstrap && \
# Use GCC for building GCC, as it seems to behave badly when built with Clang
CC=/rustroot/bin/cc CXX=/rustroot/bin/c++ python3 ../x.py dist gcc
ENV CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_LINKER=clang
# This is the only builder which will create source tarballs

View File

@ -41,8 +41,6 @@ cd netbsd
mkdir -p /x-tools/x86_64-unknown-netbsd/sysroot
# URL=https://ci-mirrors.rust-lang.org/rustc
# Hashes come from https://cdn.netbsd.org/pub/NetBSD/security/hashes/NetBSD-9.0_hashes.asc
SRC_SHA=2c791ae009a6929c6fc893ec5df7e62910ee8207e0b2159d6937309c03efe175b6ae1e445829a13d041b6851334ad35c521f2fa03c97675d4a05f1fafe58ede0
GNUSRC_SHA=3710085a73feecf6a843415271ec794c90146b03f6bbd30f07c9e0c79febf8995d557e40194f1e05db655e4f5ef2fae97563f8456fceaae65d4ea98857a83b1c
@ -51,22 +49,16 @@ SYSSRC_SHA=60b9ddf4cc6402256473e2e1eefeabd9001aa4e205208715ecc6d6fc3f5b400e46994
BASE_SHA=b5926b107cebf40c3c19b4f6cd039b610987dd7f819e7cdde3bd1e5230a856906e7930b15ab242d52ced9f0bda01d574be59488b8dbb95fa5df2987d0a70995f
COMP_SHA=38ea54f30d5fc2afea87e5096f06873e00182789e8ad9cec0cb3e9f7c538c1aa4779e63fd401a36ba02676158e83fa5c95e8e87898db59c1914fb206aecd82d2
# FIXME: the archive URL is being used temporarily while the CDN is down.
# We should serve this from our own CDN
# SOURCE_URL=https://cdn.netbsd.org/pub/NetBSD/NetBSD-9.0/source/sets
SOURCE_URL=http://archive.netbsd.org/pub/NetBSD-archive/NetBSD-9.0/source/sets
download src.tgz "$SOURCE_URL/src.tgz" "$SRC_SHA" tar xzf src.tgz
download gnusrc.tgz "$SOURCE_URL/gnusrc.tgz" "$GNUSRC_SHA" tar xzf gnusrc.tgz
download sharesrc.tgz "$SOURCE_URL/sharesrc.tgz" "$SHARESRC_SHA" tar xzf sharesrc.tgz
download syssrc.tgz "$SOURCE_URL/syssrc.tgz" "$SYSSRC_SHA" tar xzf syssrc.tgz
SOURCE_URL=https://ci-mirrors.rust-lang.org/rustc/2025-03-14-netbsd-9.0-src
download src.tgz "$SOURCE_URL-src.tgz" "$SRC_SHA" tar xzf src.tgz
download gnusrc.tgz "$SOURCE_URL-gnusrc.tgz" "$GNUSRC_SHA" tar xzf gnusrc.tgz
download sharesrc.tgz "$SOURCE_URL-sharesrc.tgz" "$SHARESRC_SHA" tar xzf sharesrc.tgz
download syssrc.tgz "$SOURCE_URL-syssrc.tgz" "$SYSSRC_SHA" tar xzf syssrc.tgz
# FIXME: the archive URL is being used temporarily while the CDN is down.
# We should serve this from our own CDN
# BINARY_URL=https://cdn.netbsd.org/pub/NetBSD/NetBSD-9.0/amd64/binary/sets
BINARY_URL=http://archive.netbsd.org/pub/NetBSD-archive/NetBSD-9.0/amd64/binary/sets
download base.tar.xz "$BINARY_URL/base.tar.xz" "$BASE_SHA" \
BINARY_URL=https://ci-mirrors.rust-lang.org/rustc/2025-03-14-netbsd-9.0-amd64-binary
download base.tar.xz "$BINARY_URL-base.tar.xz" "$BASE_SHA" \
tar xJf base.tar.xz -C /x-tools/x86_64-unknown-netbsd/sysroot ./usr/include ./usr/lib ./lib
download comp.tar.xz "$BINARY_URL/comp.tar.xz" "$COMP_SHA" \
download comp.tar.xz "$BINARY_URL-comp.tar.xz" "$COMP_SHA" \
tar xJf comp.tar.xz -C /x-tools/x86_64-unknown-netbsd/sysroot ./usr/include ./usr/lib
cd usr/src

View File

@ -1,3 +1,5 @@
use std::path::Path;
use anyhow::Context;
use camino::{Utf8Path, Utf8PathBuf};
@ -86,36 +88,57 @@ llvm-config = "{llvm_config}"
log::info!("Using following `config.toml` for running tests:\n{config_content}");
// Simulate a stage 0 compiler with the extracted optimized dist artifacts.
std::fs::write("config.toml", config_content)?;
with_backed_up_file(Path::new("config.toml"), &config_content, || {
let x_py = env.checkout_path().join("x.py");
let mut args = vec![
env.python_binary(),
x_py.as_str(),
"test",
"--build",
env.host_tuple(),
"--stage",
"0",
"tests/assembly",
"tests/codegen",
"tests/codegen-units",
"tests/incremental",
"tests/mir-opt",
"tests/pretty",
"tests/run-make/glibc-symbols-x86_64-unknown-linux-gnu",
"tests/ui",
"tests/crashes",
];
for test_path in env.skipped_tests() {
args.extend(["--skip", test_path]);
}
cmd(&args)
.env("COMPILETEST_FORCE_STAGE0", "1")
// Also run dist-only tests
.env("COMPILETEST_ENABLE_DIST_TESTS", "1")
.run()
.context("Cannot execute tests")
})
}
let x_py = env.checkout_path().join("x.py");
let mut args = vec![
env.python_binary(),
x_py.as_str(),
"test",
"--build",
env.host_tuple(),
"--stage",
"0",
"tests/assembly",
"tests/codegen",
"tests/codegen-units",
"tests/incremental",
"tests/mir-opt",
"tests/pretty",
"tests/run-make/glibc-symbols-x86_64-unknown-linux-gnu",
"tests/ui",
"tests/crashes",
];
for test_path in env.skipped_tests() {
args.extend(["--skip", test_path]);
/// Backs up `path` (if it exists), writes `contents` into it, runs `func`, and then restores
/// the original contents of the file.
fn with_backed_up_file<F>(path: &Path, contents: &str, func: F) -> anyhow::Result<()>
where
F: FnOnce() -> anyhow::Result<()>,
{
let original_contents =
if path.is_file() { Some(std::fs::read_to_string(path)?) } else { None };
// Overwrite it with new contents
std::fs::write(path, contents)?;
let ret = func();
if let Some(original_contents) = original_contents {
std::fs::write(path, original_contents)?;
}
cmd(&args)
.env("COMPILETEST_FORCE_STAGE0", "1")
// Also run dist-only tests
.env("COMPILETEST_ENABLE_DIST_TESTS", "1")
.run()
.context("Cannot execute tests")
ret
}
/// Tries to find the version of the dist artifacts (either nightly, beta, or 1.XY.Z).
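
A hypothetical caller of `with_backed_up_file` (not part of this commit, and assuming `anyhow` is in scope, as it is in this file):

```rust
// Temporarily swap in a different config.toml, run a closure, then restore
// the original file.
use std::path::Path;

fn run_with_temp_config() -> anyhow::Result<()> {
    with_backed_up_file(Path::new("config.toml"), "# temporary contents\n", || {
        // ... whatever needs the temporary config runs here ...
        Ok(())
    })
}
```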

View File

@ -0,0 +1,38 @@
#[prelude_import]
use ::std::prelude::rust_2015::*;
#[macro_use]
extern crate std;
//@ pretty-compare-only
//@ pretty-mode:hir
//@ pp-exact:hir-fn-params.pp
// This tests the pretty-printing of various kinds of function parameters.
//---------------------------------------------------------------------------
// Normal functions and methods.
fn normal_fn(_: u32, a: u32) { }
struct S;
impl S {
fn method(_: u32, a: u32) { }
}
//---------------------------------------------------------------------------
// More exotic forms, which get a different pretty-printing path. In the past,
// anonymous params and `_` params printed incorrectly, e.g. `fn(u32, _: u32)`
// was printed as `fn(: u32, : u32)`.
//
// Ideally we would also test invalid patterns, e.g. `fn(1: u32, &a: u32)`,
// because they had similar problems. But the pretty-printing tests currently
// can't contain compile errors.
fn bare_fn(x: fn(u32, _: u32, a: u32)) { }
extern "C" {
unsafe fn foreign_fn(_: u32, a: u32);
}
trait T {
fn trait_fn(u32, _: u32, a: u32);
}

View File

@ -0,0 +1,34 @@
//@ pretty-compare-only
//@ pretty-mode:hir
//@ pp-exact:hir-fn-params.pp
// This tests the pretty-printing of various kinds of function parameters.
//---------------------------------------------------------------------------
// Normal functions and methods.
fn normal_fn(_: u32, a: u32) {}
struct S;
impl S {
fn method(_: u32, a: u32) {}
}
//---------------------------------------------------------------------------
// More exotic forms, which get a different pretty-printing path. In the past,
// anonymous params and `_` params printed incorrectly, e.g. `fn(u32, _: u32)`
// was printed as `fn(: u32, : u32)`.
//
// Ideally we would also test invalid patterns, e.g. `fn(1: u32, &a: u32)`,
// because they had similar problems. But the pretty-printing tests currently
// can't contain compile errors.
fn bare_fn(x: fn(u32, _: u32, a: u32)) {}
extern "C" {
fn foreign_fn(_: u32, a: u32);
}
trait T {
fn trait_fn(u32, _: u32, a: u32);
}

View File

@ -40,6 +40,7 @@
//@ revisions: loongarch64
//@[loongarch64] compile-flags: --target loongarch64-unknown-linux-gnu
//@[loongarch64] needs-llvm-components: loongarch
//@[loongarch64] min-llvm-version: 20
//FIXME: wasm is disabled due to <https://github.com/rust-lang/rust/issues/115666>.
//FIXME @ revisions: wasm
//FIXME @[wasm] compile-flags: --target wasm32-unknown-unknown

View File

@ -1,35 +1,35 @@
error: invalid register `$r0`: constant zero cannot be used as an operand for inline asm
--> $DIR/bad-reg.rs:22:18
--> $DIR/bad-reg.rs:23:18
|
LL | asm!("", out("$r0") _);
| ^^^^^^^^^^^^
error: invalid register `$tp`: reserved for TLS
--> $DIR/bad-reg.rs:24:18
--> $DIR/bad-reg.rs:25:18
|
LL | asm!("", out("$tp") _);
| ^^^^^^^^^^^^
error: invalid register `$sp`: the stack pointer cannot be used as an operand for inline asm
--> $DIR/bad-reg.rs:26:18
--> $DIR/bad-reg.rs:27:18
|
LL | asm!("", out("$sp") _);
| ^^^^^^^^^^^^
error: invalid register `$r21`: reserved by the ABI
--> $DIR/bad-reg.rs:28:18
--> $DIR/bad-reg.rs:29:18
|
LL | asm!("", out("$r21") _);
| ^^^^^^^^^^^^^
error: invalid register `$fp`: the frame pointer cannot be used as an operand for inline asm
--> $DIR/bad-reg.rs:30:18
--> $DIR/bad-reg.rs:31:18
|
LL | asm!("", out("$fp") _);
| ^^^^^^^^^^^^
error: invalid register `$r31`: $r31 is used internally by LLVM and cannot be used as an operand for inline asm
--> $DIR/bad-reg.rs:32:18
--> $DIR/bad-reg.rs:33:18
|
LL | asm!("", out("$r31") _);
| ^^^^^^^^^^^^^

View File

@ -1,59 +1,59 @@
error: invalid register `$r0`: constant zero cannot be used as an operand for inline asm
--> $DIR/bad-reg.rs:22:18
--> $DIR/bad-reg.rs:23:18
|
LL | asm!("", out("$r0") _);
| ^^^^^^^^^^^^
error: invalid register `$tp`: reserved for TLS
--> $DIR/bad-reg.rs:24:18
--> $DIR/bad-reg.rs:25:18
|
LL | asm!("", out("$tp") _);
| ^^^^^^^^^^^^
error: invalid register `$sp`: the stack pointer cannot be used as an operand for inline asm
--> $DIR/bad-reg.rs:26:18
--> $DIR/bad-reg.rs:27:18
|
LL | asm!("", out("$sp") _);
| ^^^^^^^^^^^^
error: invalid register `$r21`: reserved by the ABI
--> $DIR/bad-reg.rs:28:18
--> $DIR/bad-reg.rs:29:18
|
LL | asm!("", out("$r21") _);
| ^^^^^^^^^^^^^
error: invalid register `$fp`: the frame pointer cannot be used as an operand for inline asm
--> $DIR/bad-reg.rs:30:18
--> $DIR/bad-reg.rs:31:18
|
LL | asm!("", out("$fp") _);
| ^^^^^^^^^^^^
error: invalid register `$r31`: $r31 is used internally by LLVM and cannot be used as an operand for inline asm
--> $DIR/bad-reg.rs:32:18
--> $DIR/bad-reg.rs:33:18
|
LL | asm!("", out("$r31") _);
| ^^^^^^^^^^^^^
error: register class `freg` requires at least one of the following target features: d, f
--> $DIR/bad-reg.rs:36:26
--> $DIR/bad-reg.rs:37:26
|
LL | asm!("/* {} */", in(freg) f);
| ^^^^^^^^^^
error: register class `freg` requires at least one of the following target features: d, f
--> $DIR/bad-reg.rs:38:26
--> $DIR/bad-reg.rs:39:26
|
LL | asm!("/* {} */", out(freg) _);
| ^^^^^^^^^^^
error: register class `freg` requires at least one of the following target features: d, f
--> $DIR/bad-reg.rs:40:26
--> $DIR/bad-reg.rs:41:26
|
LL | asm!("/* {} */", in(freg) d);
| ^^^^^^^^^^
error: register class `freg` requires at least one of the following target features: d, f
--> $DIR/bad-reg.rs:42:26
--> $DIR/bad-reg.rs:43:26
|
LL | asm!("/* {} */", out(freg) d);
| ^^^^^^^^^^^

View File

@ -1,6 +1,7 @@
//@ add-core-stubs
//@ needs-asm-support
//@ revisions: loongarch64_lp64d loongarch64_lp64s
//@ min-llvm-version: 20
//@[loongarch64_lp64d] compile-flags: --target loongarch64-unknown-linux-gnu
//@[loongarch64_lp64d] needs-llvm-components: loongarch
//@[loongarch64_lp64s] compile-flags: --target loongarch64-unknown-none-softfloat

View File

@ -60,6 +60,7 @@ LL | cfg!(target_feature = "_UNEXPECTED_VALUE");
`d32`
`deflate-conversion`
`dit`
`div32`
`doloop`
`dotprod`
`dpb`
@ -133,8 +134,11 @@ LL | cfg!(target_feature = "_UNEXPECTED_VALUE");
`jsconv`
`kl`
`lahfsahf`
`lam-bh`
`lamcas`
`lasx`
`lbt`
`ld-seq-sa`
`leoncasa`
`lor`
`lse`
@ -190,6 +194,7 @@ LL | cfg!(target_feature = "_UNEXPECTED_VALUE");
`reserve-x18`
`rtm`
`sb`
`scq`
`sha`
`sha2`
`sha3`

View File

@ -4,7 +4,7 @@ error[E0782]: expected a type, found a trait
LL | fn id<F>(f: Copy) -> usize {
| ^^^^
|
= note: `Copy` it is dyn-incompatible, so it can't be `dyn`
= note: `Copy` is dyn-incompatible, otherwise a trait object could be used
help: use a new generic type parameter, constrained by `Copy`
|
LL - fn id<F>(f: Copy) -> usize {

View File

@ -4,7 +4,7 @@ error[E0782]: expected a type, found a trait
LL | trait A { fn g(b: B) -> B; }
| ^
|
= note: `B` it is dyn-incompatible, so it can't be `dyn`
= note: `B` is dyn-incompatible, otherwise a trait object could be used
help: use a new generic type parameter, constrained by `B`
|
LL - trait A { fn g(b: B) -> B; }
@ -32,7 +32,7 @@ error[E0782]: expected a type, found a trait
LL | trait B { fn f(a: A) -> A; }
| ^
|
= note: `A` it is dyn-incompatible, so it can't be `dyn`
= note: `A` is dyn-incompatible, otherwise a trait object could be used
help: use a new generic type parameter, constrained by `A`
|
LL - trait B { fn f(a: A) -> A; }

View File

@ -0,0 +1,12 @@
//@ check-pass
struct Point {
#[deprecated = "x is deprecated"]
_x: i32,
_y: i32,
}
fn main() {
let p = Point { _x: 1, _y: 2 }; //~ WARNING use of deprecated field `Point::_x`
// Before the fix, this reported a warning
let Point { #[expect(deprecated)]_x, .. } = p;
}

View File

@ -0,0 +1,10 @@
warning: use of deprecated field `Point::_x`: x is deprecated
--> $DIR/check-struct-pat-fields-stability-issue-138319.rs:9:21
|
LL | let p = Point { _x: 1, _y: 2 };
| ^^^^^
|
= note: `#[warn(deprecated)]` on by default
warning: 1 warning emitted

View File

@ -4,7 +4,7 @@ error[E0782]: expected a type, found a trait
LL | fn concrete(b: B) -> B;
| ^
|
= note: `B` it is dyn-incompatible, so it can't be `dyn`
= note: `B` is dyn-incompatible, otherwise a trait object could be used
help: use a new generic type parameter, constrained by `B`
|
LL - fn concrete(b: B) -> B;
@ -32,7 +32,7 @@ error[E0782]: expected a type, found a trait
LL | fn f(a: A) -> A;
| ^
|
= note: `A` it is dyn-incompatible, so it can't be `dyn`
= note: `A` is dyn-incompatible, otherwise a trait object could be used
help: use a new generic type parameter, constrained by `A`
|
LL - fn f(a: A) -> A;

View File

@ -4,7 +4,7 @@ error[E0782]: expected a type, found a trait
LL | fn g(new: B) -> B;
| ^
|
= note: `B` it is dyn-incompatible, so it can't be `dyn`
= note: `B` is dyn-incompatible, otherwise a trait object could be used
help: use a new generic type parameter, constrained by `B`
|
LL - fn g(new: B) -> B;

View File

@ -4,7 +4,7 @@ error[E0782]: expected a type, found a trait
LL | fn guard(_s: Copy) -> bool {
| ^^^^
|
= note: `Copy` it is dyn-incompatible, so it can't be `dyn`
= note: `Copy` is dyn-incompatible, otherwise a trait object could be used
help: use a new generic type parameter, constrained by `Copy`
|
LL - fn guard(_s: Copy) -> bool {

View File

@ -4,7 +4,7 @@ error[E0782]: expected a type, found a trait
LL | fn g(b: B) -> B;
| ^
|
= note: `B` it is dyn-incompatible, so it can't be `dyn`
= note: `B` is dyn-incompatible, otherwise a trait object could be used
help: use a new generic type parameter, constrained by `B`
|
LL - fn g(b: B) -> B;
@ -32,7 +32,7 @@ error[E0782]: expected a type, found a trait
LL | fn f(a: A) -> A;
| ^
|
= note: `A` it is dyn-incompatible, so it can't be `dyn`
= note: `A` is dyn-incompatible, otherwise a trait object could be used
help: use a new generic type parameter, constrained by `A`
|
LL - fn f(a: A) -> A;

View File

@ -0,0 +1,39 @@
//@ check-pass
fn _foo() {
_Bar { //~ WARNING use of deprecated struct `_Bar`: reason
#[expect(deprecated)]
foo: 0,
};
}
#[deprecated = "reason"]
struct _Bar {
foo: u32,
}
fn _foo2() {
#[expect(deprecated)]
_Bar2 {
foo2: 0,
};
}
#[deprecated = "reason"]
struct _Bar2 {
foo2: u32,
}
fn _foo3() {
_Bar3 {
#[expect(deprecated)]
foo3: 0,
};
}
struct _Bar3 {
#[deprecated = "reason"]
foo3: u32,
}
fn main() {}

View File

@ -0,0 +1,10 @@
warning: use of deprecated struct `_Bar`: reason
--> $DIR/check-stability-issue-138319.rs:3:5
|
LL | _Bar {
| ^^^^
|
= note: `#[warn(deprecated)]` on by default
warning: 1 warning emitted

View File

@ -4,7 +4,7 @@ error[E0782]: expected a type, found a trait
LL | fn f(a: A) -> A;
| ^
|
= note: `A` it is dyn-incompatible, so it can't be `dyn`
= note: `A` is dyn-incompatible, otherwise a trait object could be used
help: use a new generic type parameter, constrained by `A`
|
LL - fn f(a: A) -> A;
@ -32,7 +32,7 @@ error[E0782]: expected a type, found a trait
LL | fn f(b: B) -> B;
| ^
|
= note: `B` it is dyn-incompatible, so it can't be `dyn`
= note: `B` is dyn-incompatible, otherwise a trait object could be used
help: use a new generic type parameter, constrained by `B`
|
LL - fn f(b: B) -> B;
@ -60,7 +60,7 @@ error[E0782]: expected a type, found a trait
LL | fn f(&self, c: C) -> C;
| ^
|
= note: `C` it is dyn-incompatible, so it can't be `dyn`
= note: `C` is dyn-incompatible, otherwise a trait object could be used
help: use a new generic type parameter, constrained by `C`
|
LL - fn f(&self, c: C) -> C;

View File

@ -23,7 +23,7 @@ LL | let f = |_, _| ();
help: provide the argument
|
LL - f(f);
LL + f(/* */, /* */);
LL + f(/* _ */, /* _ */);
|
error: aborting due to 2 previous errors