Auto merge of #138127 - compiler-errors:rollup-kcarqrz, r=compiler-errors

Rollup of 17 pull requests

Successful merges:

 - #137827 (Add timestamp to unstable feature usage metrics)
 - #138041 (bootstrap and compiletest: Use `size_of_val` from the prelude instead of imported)
 - #138046 (trim channel value in `get_closest_merge_commit`)
 - #138053 (Increase the max. custom try jobs requested to `20`)
 - #138061 (triagebot: add a `compiler_leads` ad-hoc group)
 - #138064 (Remove - from xtensa targets cpu names)
 - #138075 (Use final path segment for diagnostic)
 - #138078 (Reduce the noise of bootstrap changelog warnings in --dry-run mode)
 - #138081 (Move `yield` expressions behind their own feature gate)
 - #138090 (`librustdoc`: flatten nested ifs)
 - #138092 (Re-add `DynSend` and `DynSync` impls for `TyCtxt`)
 - #138094 (a small borrowck cleanup)
 - #138098 (Stabilize feature `const_copy_from_slice`)
 - #138103 (Git ignore citool's target directory)
 - #138105 (Fix broken link to Miri intrinsics in documentation)
 - #138108 (Mention me (WaffleLapkin) when changes to `rustc_codegen_ssa` occur)
 - #138117 ([llvm/PassWrapper] use `size_t` when building arg strings)

r? `@ghost`
`@rustbot` modify labels: rollup
bors 2025-03-07 02:56:46 +00:00
commit 91a0e1604f
51 changed files with 377 additions and 335 deletions

.gitignore vendored
View File

@ -53,6 +53,7 @@ no_llvm_build
/target
/library/target
/src/bootstrap/target
/src/ci/citool/target
/src/tools/x/target
# Created by `x vendor`
/vendor

View File

@ -1690,6 +1690,19 @@ impl<'hir> LoweringContext<'_, 'hir> {
let yielded =
opt_expr.as_ref().map(|x| self.lower_expr(x)).unwrap_or_else(|| self.expr_unit(span));
if !self.tcx.features().yield_expr()
&& !self.tcx.features().coroutines()
&& !self.tcx.features().gen_blocks()
{
rustc_session::parse::feature_err(
&self.tcx.sess,
sym::yield_expr,
span,
fluent_generated::ast_lowering_yield,
)
.emit();
}
let is_async_gen = match self.coroutine_kind {
Some(hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _)) => false,
Some(hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _)) => true,
@ -1714,28 +1727,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
None,
);
}
Some(hir::CoroutineKind::Coroutine(_)) => {
if !self.tcx.features().coroutines() {
rustc_session::parse::feature_err(
&self.tcx.sess,
sym::coroutines,
span,
fluent_generated::ast_lowering_yield,
)
.emit();
}
false
}
Some(hir::CoroutineKind::Coroutine(_)) => false,
None => {
if !self.tcx.features().coroutines() {
rustc_session::parse::feature_err(
&self.tcx.sess,
sym::coroutines,
span,
fluent_generated::ast_lowering_yield,
)
.emit();
}
let suggestion = self.current_item.map(|s| s.shrink_to_lo());
self.dcx().emit_err(YieldInClosure { span, suggestion });
self.coroutine_kind = Some(hir::CoroutineKind::Coroutine(Movability::Movable));
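A minimal nightly-only sketch (not part of this diff) of how the gates interact after this change — the `yield` keyword itself is accepted under `yield_expr` (or `coroutines`/`gen_blocks`), while the coroutine closure still needs `coroutines` and `stmt_expr_attributes`:

#![feature(coroutines, stmt_expr_attributes, yield_expr)]

fn main() {
    // `yield_expr` (or `coroutines`/`gen_blocks`) unlocks the `yield` syntax;
    // the `#[coroutine]` closure needs the other two gates.
    let _ = #[coroutine]
    || {
        yield 1;
        yield 2;
    };
}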

View File

@ -1,4 +1,4 @@
use rustc_hir::def_id::DefId;
use rustc_hir::def_id::LocalDefId;
use rustc_infer::infer::canonical::QueryRegionConstraints;
use rustc_infer::infer::outlives::env::RegionBoundPairs;
use rustc_infer::infer::outlives::obligations::{TypeOutlives, TypeOutlivesDelegate};
@ -88,7 +88,7 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> {
pub(crate) fn apply_closure_requirements(
&mut self,
closure_requirements: &ClosureRegionRequirements<'tcx>,
closure_def_id: DefId,
closure_def_id: LocalDefId,
closure_args: ty::GenericArgsRef<'tcx>,
) {
// Extract the values of the free regions in `closure_args`
@ -98,7 +98,7 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> {
self.tcx,
closure_args,
closure_requirements.num_external_vids,
closure_def_id.expect_local(),
closure_def_id,
);
debug!(?closure_mapping);

View File

@ -328,9 +328,8 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
}
}
#[instrument(level = "debug", skip(self))]
fn visit_const_operand(&mut self, constant: &ConstOperand<'tcx>, location: Location) {
debug!(?constant, ?location, "visit_const_operand");
self.super_const_operand(constant, location);
let ty = constant.const_.ty();
@ -339,14 +338,7 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
self.typeck.constraints.liveness_constraints.add_location(live_region_vid, location);
});
// HACK(compiler-errors): Constants that are gathered into Body.required_consts
// have their locations erased...
let locations = if location != Location::START {
location.to_locations()
} else {
Locations::All(constant.span)
};
let locations = location.to_locations();
if let Some(annotation_index) = constant.user_ty {
if let Err(terr) = self.typeck.relate_type_and_user_type(
constant.const_.ty(),
@ -491,9 +483,28 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> {
}
}
#[instrument(level = "debug", skip(self))]
fn visit_body(&mut self, body: &Body<'tcx>) {
// The types of local_decls are checked above which is called in super_body.
self.super_body(body);
// We intentionally do not recurse into `body.required_consts` or
// `body.mentioned_items` here as the MIR at this phase should still
// refer to all items and we don't want to check them multiple times.
for (local, local_decl) in body.local_decls.iter_enumerated() {
self.visit_local_decl(local, local_decl);
}
for (block, block_data) in body.basic_blocks.iter_enumerated() {
let mut location = Location { block, statement_index: 0 };
for stmt in &block_data.statements {
if !stmt.source_info.span.is_dummy() {
self.last_span = stmt.source_info.span;
}
self.visit_statement(stmt, location);
location.statement_index += 1;
}
self.visit_terminator(block_data.terminator(), location);
}
}
}
@ -2582,7 +2593,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
ConstraintCategory::Boring, // same as above.
self.constraints,
)
.apply_closure_requirements(closure_requirements, def_id.to_def_id(), args);
.apply_closure_requirements(closure_requirements, def_id, args);
}
// Now equate closure args to regions inherited from `typeck_root_def_id`. Fixes #98589.

View File

@ -1,6 +1,7 @@
//! List of the unstable feature gates.
use std::path::PathBuf;
use std::time::{SystemTime, UNIX_EPOCH};
use rustc_data_structures::fx::FxHashSet;
use rustc_span::{Span, Symbol, sym};
@ -669,6 +670,7 @@ declare_features! (
(unstable, xop_target_feature, "1.81.0", Some(127208)),
/// Allows `do yeet` expressions
(unstable, yeet_expr, "1.62.0", Some(96373)),
(unstable, yield_expr, "CURRENT_RUSTC_VERSION", Some(43122)),
// !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!!
// Features are listed in alphabetical order. Tidy will fail if you don't keep it this way.
// !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!! !!!!
@ -685,11 +687,13 @@ impl Features {
) -> Result<(), Box<dyn std::error::Error>> {
#[derive(serde::Serialize)]
struct LibFeature {
timestamp: u128,
symbol: String,
}
#[derive(serde::Serialize)]
struct LangFeature {
timestamp: u128,
symbol: String,
since: Option<String>,
}
@ -703,10 +707,20 @@ impl Features {
let metrics_file = std::fs::File::create(metrics_path)?;
let metrics_file = std::io::BufWriter::new(metrics_file);
let now = || {
SystemTime::now()
.duration_since(UNIX_EPOCH)
.expect("system time should always be greater than the unix epoch")
.as_nanos()
};
let lib_features = self
.enabled_lib_features
.iter()
.map(|EnabledLibFeature { gate_name, .. }| LibFeature { symbol: gate_name.to_string() })
.map(|EnabledLibFeature { gate_name, .. }| LibFeature {
symbol: gate_name.to_string(),
timestamp: now(),
})
.collect();
let lang_features = self
@ -715,6 +729,7 @@ impl Features {
.map(|EnabledLangFeature { gate_name, stable_since, .. }| LangFeature {
symbol: gate_name.to_string(),
since: stable_since.map(|since| since.to_string()),
timestamp: now(),
})
.collect();
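A small sketch of the timestamp each feature record now carries (same scheme as the `now` closure above; the JSON shape is illustrative, not copied from a real dump):

use std::time::{SystemTime, UNIX_EPOCH};

fn main() {
    // Nanoseconds since the Unix epoch, as stored in each LibFeature/LangFeature.
    let timestamp = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .expect("system time should always be greater than the unix epoch")
        .as_nanos();
    // Each record now looks roughly like:
    // {"symbol": "ascii_char", "timestamp": <nanoseconds>}
    println!("{timestamp}");
}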

View File

@ -1265,7 +1265,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
} else {
CallableKind::Function
};
maybe_emit_help(def_id, path.segments[0].ident, args, callable_kind);
maybe_emit_help(def_id, path.segments.last().unwrap().ident, args, callable_kind);
}
hir::ExprKind::MethodCall(method, _receiver, args, _span) => {
let Some(def_id) =

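A hypothetical illustration of the diagnostic fix (names made up for the example): for a wrong-arity call through a multi-segment path, the help text should now name the function itself, i.e. the final path segment, not segment 0.

mod my_mod {
    pub fn helper(_a: u32) {}
}

fn main() {
    // For a wrong-arity call such as `my_mod::helper(1, 2)`, the help now
    // points at `helper` (the last segment) instead of `my_mod` (segment 0).
    my_mod::helper(1);
}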
View File

@ -484,7 +484,7 @@ extern "C" LLVMTargetMachineRef LLVMRustCreateTargetMachine(
if (ArgsCstrBuff != nullptr) {
#if LLVM_VERSION_GE(20, 0)
int buffer_offset = 0;
size_t buffer_offset = 0;
assert(ArgsCstrBuff[ArgsCstrBuffLen - 1] == '\0');
auto Arg0 = std::string(ArgsCstrBuff);
buffer_offset = Arg0.size() + 1;
@ -502,7 +502,7 @@ extern "C" LLVMTargetMachineRef LLVMRustCreateTargetMachine(
Options.MCOptions.Argv0 = Arg0;
Options.MCOptions.CommandlineArgs = CommandlineArgs;
#else
int buffer_offset = 0;
size_t buffer_offset = 0;
assert(ArgsCstrBuff[ArgsCstrBuffLen - 1] == '\0');
const size_t arg0_len = std::strlen(ArgsCstrBuff);
@ -511,13 +511,13 @@ extern "C" LLVMTargetMachineRef LLVMRustCreateTargetMachine(
arg0[arg0_len] = '\0';
buffer_offset += arg0_len + 1;
const int num_cmd_arg_strings = std::count(
const size_t num_cmd_arg_strings = std::count(
&ArgsCstrBuff[buffer_offset], &ArgsCstrBuff[ArgsCstrBuffLen], '\0');
std::string *cmd_arg_strings = new std::string[num_cmd_arg_strings];
for (int i = 0; i < num_cmd_arg_strings; ++i) {
for (size_t i = 0; i < num_cmd_arg_strings; ++i) {
assert(buffer_offset < ArgsCstrBuffLen);
const int len = std::strlen(ArgsCstrBuff + buffer_offset);
const size_t len = std::strlen(ArgsCstrBuff + buffer_offset);
cmd_arg_strings[i] = std::string(&ArgsCstrBuff[buffer_offset], len);
buffer_offset += len + 1;
}

View File

@ -1327,6 +1327,11 @@ pub struct TyCtxt<'tcx> {
gcx: &'tcx GlobalCtxt<'tcx>,
}
// Explicitly implement `DynSync` and `DynSend` for `TyCtxt` to short circuit trait resolution. Its
// fields are asserted to implement these traits below, so this is trivially safe, and it greatly
// speeds up compilation of this crate and its dependents.
unsafe impl DynSend for TyCtxt<'_> {}
unsafe impl DynSync for TyCtxt<'_> {}
fn _assert_tcx_fields() {
sync::assert_dyn_sync::<&'_ GlobalCtxt<'_>>();
sync::assert_dyn_send::<&'_ GlobalCtxt<'_>>();
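The same idea in miniature, using std's `Send`/`Sync` instead of rustc's internal `DynSend`/`DynSync` (a sketch with made-up types, not the rustc_data_structures API): spell out the marker impls so trait resolution doesn't re-derive them field by field, and keep a never-called function asserting the field still satisfies them.

#[allow(dead_code)]
struct Ctxt<'a> {
    gcx: &'a u64,
}

// Explicit impls short-circuit the auto-trait derivation for the wrapper...
unsafe impl Send for Ctxt<'_> {}
unsafe impl Sync for Ctxt<'_> {}

fn assert_send<T: Send>() {}
fn assert_sync<T: Sync>() {}

// ...and the assertion keeps the manual impls trivially sound.
fn _assert_ctxt_fields() {
    assert_send::<&u64>();
    assert_sync::<&u64>();
}

fn main() {}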

View File

@ -20,7 +20,7 @@ pub(crate) fn target() -> Target {
vendor: "espressif".into(),
executables: true,
cpu: "esp32-s2".into(),
cpu: "esp32s2".into(),
linker: Some("xtensa-esp32s2-elf-gcc".into()),
// See https://github.com/espressif/rust-esp32-example/issues/3#issuecomment-861054477

View File

@ -16,7 +16,7 @@ pub(crate) fn target() -> Target {
options: TargetOptions {
vendor: "espressif".into(),
cpu: "esp32-s2".into(),
cpu: "esp32s2".into(),
linker: Some("xtensa-esp32s2-elf-gcc".into()),
max_atomic_width: Some(32),
features: "+forced-atomics".into(),

View File

@ -20,7 +20,7 @@ pub(crate) fn target() -> Target {
vendor: "espressif".into(),
executables: true,
cpu: "esp32-s3".into(),
cpu: "esp32s3".into(),
linker: Some("xtensa-esp32s3-elf-gcc".into()),
// The esp32s3 only supports native 32bit atomics.

View File

@ -16,7 +16,7 @@ pub(crate) fn target() -> Target {
options: TargetOptions {
vendor: "espressif".into(),
cpu: "esp32-s3".into(),
cpu: "esp32s3".into(),
linker: Some("xtensa-esp32s3-elf-gcc".into()),
max_atomic_width: Some(32),
atomic_cas: true,

View File

@ -10,7 +10,7 @@
//!
//! In order to make an intrinsic usable at compile-time, it needs to be declared in the "new"
//! style, i.e. as a `#[rustc_intrinsic]` function, not inside an `extern` block. Then copy the
//! implementation from <https://github.com/rust-lang/miri/blob/master/src/shims/intrinsics> to
//! implementation from <https://github.com/rust-lang/miri/blob/master/src/intrinsics> to
//! <https://github.com/rust-lang/rust/blob/master/compiler/rustc_const_eval/src/interpret/intrinsics.rs>
//! and make the intrinsic declaration a `const fn`.
//!

View File

@ -3732,8 +3732,7 @@ impl<T> [T] {
#[doc(alias = "memcpy")]
#[inline]
#[stable(feature = "copy_from_slice", since = "1.9.0")]
#[rustc_const_unstable(feature = "const_copy_from_slice", issue = "131415")]
#[rustc_const_stable_indirect]
#[rustc_const_stable(feature = "const_copy_from_slice", since = "CURRENT_RUSTC_VERSION")]
#[track_caller]
pub const fn copy_from_slice(&mut self, src: &[T])
where

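A minimal sketch of what this stabilization allows, assuming a toolchain that includes the change: `copy_from_slice` can now be called in const context on stable, with no feature gate.

const BUF: [u8; 4] = {
    let mut buf = [0u8; 4];
    // Now const-stable; previously required #![feature(const_copy_from_slice)].
    buf.copy_from_slice(&[1, 2, 3, 4]);
    buf
};

fn main() {
    assert_eq!(BUF, [1, 2, 3, 4]);
}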
View File

@ -70,11 +70,12 @@ fn main() {
}
// check_version warnings are not printed during setup, or during CI
let changelog_suggestion = if matches!(config.cmd, Subcommand::Setup { .. }) || CiEnv::is_ci() {
None
} else {
check_version(&config)
};
let changelog_suggestion =
if matches!(config.cmd, Subcommand::Setup { .. }) || CiEnv::is_ci() || config.dry_run() {
None
} else {
check_version(&config)
};
// NOTE: Since `./configure` generates a `config.toml`, distro maintainers will see the
// changelog warning, not the `x.py setup` message.
@ -187,7 +188,7 @@ fn check_version(config: &Config) -> Option<String> {
"update `config.toml` to use `change-id = {latest_change_id}` instead"
));
if io::stdout().is_terminal() && !config.dry_run() {
if io::stdout().is_terminal() {
t!(fs::write(warned_id_path, latest_change_id.to_string()));
}
} else {

View File

@ -349,7 +349,7 @@ fn format_rusage_data(child: Child) -> Option<String> {
let mut kernel_filetime = Default::default();
let mut kernel_time = Default::default();
let mut memory_counters = PROCESS_MEMORY_COUNTERS::default();
let memory_counters_size = std::mem::size_of_val(&memory_counters);
let memory_counters_size = size_of_val(&memory_counters);
unsafe {
GetProcessTimes(

View File

@ -42,7 +42,7 @@ pub unsafe fn setup(build: &mut crate::Build) {
#[cfg(windows)]
mod for_windows {
use std::ffi::c_void;
use std::{io, mem};
use std::io;
use windows::Win32::Foundation::CloseHandle;
use windows::Win32::System::Diagnostics::Debug::{
@ -82,7 +82,7 @@ mod for_windows {
job,
JobObjectExtendedLimitInformation,
&info as *const _ as *const c_void,
mem::size_of_val(&info) as u32,
size_of_val(&info) as u32,
);
assert!(r.is_ok(), "{}", io::Error::last_os_error());
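Context for the two hunks above, as a standalone sketch: `size_of_val` (along with `size_of`, `align_of`, `align_of_val`) has been re-exported by the standard prelude since Rust 1.80, so the explicit `std::mem` import or path can simply be dropped.

fn main() {
    let counters = [0u64; 4];
    // No `use std::mem::size_of_val;` needed — it comes from the prelude.
    let size = size_of_val(&counters) as u32;
    assert_eq!(size, 32);
}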

View File

@ -129,7 +129,7 @@ pub fn get_closest_merge_commit(
git.current_dir(git_dir);
}
let channel = include_str!("../../ci/channel");
let channel = include_str!("../../ci/channel").trim();
let merge_base = {
if CiEnv::is_ci() &&

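Why the `.trim()` matters, as a sketch rather than the actual bootstrap code: `include_str!` keeps the channel file's trailing newline, so the untrimmed string never compares equal to `"nightly"`/`"beta"`/`"stable"`.

fn main() {
    // What include_str!("../../ci/channel") effectively yields.
    let channel = "nightly\n";
    assert_ne!(channel, "nightly");        // untrimmed value never matches
    assert_eq!(channel.trim(), "nightly"); // trimmed value compares as intended
}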
View File

@ -182,6 +182,10 @@ fn yaml_map_to_json(map: &BTreeMap<String, Value>) -> BTreeMap<String, serde_jso
.collect()
}
/// Maximum number of custom try jobs that can be requested in a single
/// `@bors try` request.
const MAX_TRY_JOBS_COUNT: usize = 20;
fn calculate_jobs(
run_type: &RunType,
db: &JobDatabase,
@ -191,9 +195,9 @@ fn calculate_jobs(
RunType::PullRequest => (db.pr_jobs.clone(), "PR", &db.envs.pr_env),
RunType::TryJob { custom_jobs } => {
let jobs = if let Some(custom_jobs) = custom_jobs {
if custom_jobs.len() > 10 {
if custom_jobs.len() > MAX_TRY_JOBS_COUNT {
return Err(anyhow::anyhow!(
"It is only possible to schedule up to 10 custom jobs, received {} custom jobs",
"It is only possible to schedule up to {MAX_TRY_JOBS_COUNT} custom jobs, received {} custom jobs",
custom_jobs.len()
));
}

View File

@ -563,11 +563,13 @@ pub(crate) fn build_impl(
// Return if the trait itself or any types of the generic parameters are doc(hidden).
let mut stack: Vec<&Type> = vec![&for_];
if let Some(did) = trait_.as_ref().map(|t| t.def_id()) {
if !document_hidden && tcx.is_doc_hidden(did) {
return;
}
if let Some(did) = trait_.as_ref().map(|t| t.def_id())
&& !document_hidden
&& tcx.is_doc_hidden(did)
{
return;
}
if let Some(generics) = trait_.as_ref().and_then(|t| t.generics()) {
stack.extend(generics);
}
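The flattening pattern applied throughout the librustdoc hunks below, shown on made-up types (assuming edition 2024 or the `let_chains` feature): a nested `if let` plus inner `if` collapses into a single let-chain with one level of nesting.

fn classify(value: Option<i32>, verbose: bool) -> &'static str {
    // Before: `if let Some(v) = value { if verbose && v > 0 { ... } }`
    if let Some(v) = value
        && verbose
        && v > 0
    {
        return "positive";
    }
    "other"
}

fn main() {
    assert_eq!(classify(Some(3), true), "positive");
}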

View File

@ -828,30 +828,26 @@ fn clean_ty_generics<'tcx>(
.iter()
.flat_map(|(pred, _)| {
let mut projection = None;
let param_idx = (|| {
let param_idx = {
let bound_p = pred.kind();
match bound_p.skip_binder() {
ty::ClauseKind::Trait(pred) => {
if let ty::Param(param) = pred.self_ty().kind() {
return Some(param.index);
}
ty::ClauseKind::Trait(pred) if let ty::Param(param) = pred.self_ty().kind() => {
Some(param.index)
}
ty::ClauseKind::TypeOutlives(ty::OutlivesPredicate(ty, _reg)) => {
if let ty::Param(param) = ty.kind() {
return Some(param.index);
}
ty::ClauseKind::TypeOutlives(ty::OutlivesPredicate(ty, _reg))
if let ty::Param(param) = ty.kind() =>
{
Some(param.index)
}
ty::ClauseKind::Projection(p) => {
if let ty::Param(param) = p.projection_term.self_ty().kind() {
projection = Some(bound_p.rebind(p));
return Some(param.index);
}
ty::ClauseKind::Projection(p)
if let ty::Param(param) = p.projection_term.self_ty().kind() =>
{
projection = Some(bound_p.rebind(p));
Some(param.index)
}
_ => (),
_ => None,
}
None
})();
};
if let Some(param_idx) = param_idx
&& let Some(bounds) = impl_trait.get_mut(&param_idx)
@ -1378,12 +1374,12 @@ pub(crate) fn clean_middle_assoc_item(assoc_item: &ty::AssocItem, cx: &mut DocCo
tcx.fn_sig(assoc_item.def_id).instantiate_identity().input(0).skip_binder();
if self_arg_ty == self_ty {
item.decl.inputs.values[0].type_ = SelfTy;
} else if let ty::Ref(_, ty, _) = *self_arg_ty.kind() {
if ty == self_ty {
match item.decl.inputs.values[0].type_ {
BorrowedRef { ref mut type_, .. } => **type_ = SelfTy,
_ => unreachable!(),
}
} else if let ty::Ref(_, ty, _) = *self_arg_ty.kind()
&& ty == self_ty
{
match item.decl.inputs.values[0].type_ {
BorrowedRef { ref mut type_, .. } => **type_ = SelfTy,
_ => unreachable!(),
}
}
}
@ -2331,25 +2327,22 @@ fn clean_middle_opaque_bounds<'tcx>(
let bindings: ThinVec<_> = bounds
.iter()
.filter_map(|(bound, _)| {
if let ty::ClauseKind::Projection(proj) = bound.kind().skip_binder() {
if proj.projection_term.trait_ref(cx.tcx) == trait_ref.skip_binder() {
Some(AssocItemConstraint {
assoc: projection_to_path_segment(
// FIXME: This needs to be made resilient for `AliasTerm`s that
// are associated consts.
bound.kind().rebind(proj.projection_term.expect_ty(cx.tcx)),
cx,
),
kind: AssocItemConstraintKind::Equality {
term: clean_middle_term(bound.kind().rebind(proj.term), cx),
},
})
} else {
None
}
} else {
None
if let ty::ClauseKind::Projection(proj) = bound.kind().skip_binder()
&& proj.projection_term.trait_ref(cx.tcx) == trait_ref.skip_binder()
{
return Some(AssocItemConstraint {
assoc: projection_to_path_segment(
// FIXME: This needs to be made resilient for `AliasTerm`s that
// are associated consts.
bound.kind().rebind(proj.projection_term.expect_ty(cx.tcx)),
cx,
),
kind: AssocItemConstraintKind::Equality {
term: clean_middle_term(bound.kind().rebind(proj.term), cx),
},
});
}
None
})
.collect();
@ -2743,23 +2736,20 @@ fn add_without_unwanted_attributes<'hir>(
}
let mut attr = attr.clone();
match attr {
hir::Attribute::Unparsed(ref mut normal) => {
if let [ident] = &*normal.path.segments {
let ident = ident.name;
if ident == sym::doc {
filter_doc_attr(&mut normal.args, is_inline);
attrs.push((Cow::Owned(attr), import_parent));
} else if is_inline || ident != sym::cfg {
// If it's not a `cfg()` attribute, we keep it.
attrs.push((Cow::Owned(attr), import_parent));
}
}
}
hir::Attribute::Parsed(..) => {
if is_inline {
hir::Attribute::Unparsed(ref mut normal) if let [ident] = &*normal.path.segments => {
let ident = ident.name;
if ident == sym::doc {
filter_doc_attr(&mut normal.args, is_inline);
attrs.push((Cow::Owned(attr), import_parent));
} else if is_inline || ident != sym::cfg {
// If it's not a `cfg()` attribute, we keep it.
attrs.push((Cow::Owned(attr), import_parent));
}
}
hir::Attribute::Parsed(..) if is_inline => {
attrs.push((Cow::Owned(attr), import_parent));
}
_ => {}
}
}
}
@ -2961,16 +2951,16 @@ fn clean_extern_crate<'tcx>(
&& !cx.is_json_output();
let krate_owner_def_id = krate.owner_id.def_id;
if please_inline {
if let Some(items) = inline::try_inline(
if please_inline
&& let Some(items) = inline::try_inline(
cx,
Res::Def(DefKind::Mod, crate_def_id),
name,
Some((attrs, Some(krate_owner_def_id))),
&mut Default::default(),
) {
return items;
}
)
{
return items;
}
vec![Item::from_def_id_and_parts(

View File

@ -208,11 +208,11 @@ impl ExternalCrate {
.get_attrs(def_id, sym::doc)
.flat_map(|attr| attr.meta_item_list().unwrap_or_default());
for meta in meta_items {
if meta.has_name(sym::keyword) {
if let Some(v) = meta.value_str() {
keyword = Some(v);
break;
}
if meta.has_name(sym::keyword)
&& let Some(v) = meta.value_str()
{
keyword = Some(v);
break;
}
}
return keyword.map(|p| (def_id, p));
@ -1071,16 +1071,14 @@ pub(crate) fn extract_cfg_from_attrs<'a, I: Iterator<Item = &'a hir::Attribute>
// treat #[target_feature(enable = "feat")] attributes as if they were
// #[doc(cfg(target_feature = "feat"))] attributes as well
for attr in hir_attr_lists(attrs, sym::target_feature) {
if attr.has_name(sym::enable) {
if attr.value_str().is_some() {
// Clone `enable = "feat"`, change to `target_feature = "feat"`.
// Unwrap is safe because `value_str` succeeded above.
let mut meta = attr.meta_item().unwrap().clone();
meta.path = ast::Path::from_ident(Ident::with_dummy_span(sym::target_feature));
if attr.has_name(sym::enable) && attr.value_str().is_some() {
// Clone `enable = "feat"`, change to `target_feature = "feat"`.
// Unwrap is safe because `value_str` succeeded above.
let mut meta = attr.meta_item().unwrap().clone();
meta.path = ast::Path::from_ident(Ident::with_dummy_span(sym::target_feature));
if let Ok(feat_cfg) = Cfg::parse(&ast::MetaItemInner::MetaItem(meta)) {
cfg &= feat_cfg;
}
if let Ok(feat_cfg) = Cfg::parse(&ast::MetaItemInner::MetaItem(meta)) {
cfg &= feat_cfg;
}
}
}
@ -1160,10 +1158,10 @@ impl Attributes {
continue;
}
if let Some(items) = attr.meta_item_list() {
if items.iter().filter_map(|i| i.meta_item()).any(|it| it.has_name(flag)) {
return true;
}
if let Some(items) = attr.meta_item_list()
&& items.iter().filter_map(|i| i.meta_item()).any(|it| it.has_name(flag))
{
return true;
}
}

View File

@ -645,10 +645,10 @@ impl Options {
let extension_css = matches.opt_str("e").map(|s| PathBuf::from(&s));
if let Some(ref p) = extension_css {
if !p.is_file() {
dcx.fatal("option --extend-css argument must be a file");
}
if let Some(ref p) = extension_css
&& !p.is_file()
{
dcx.fatal("option --extend-css argument must be a file");
}
let mut themes = Vec::new();
@ -720,10 +720,10 @@ impl Options {
}
let index_page = matches.opt_str("index-page").map(|s| PathBuf::from(&s));
if let Some(ref index_page) = index_page {
if !index_page.is_file() {
dcx.fatal("option `--index-page` argument must be a file");
}
if let Some(ref index_page) = index_page
&& !index_page.is_file()
{
dcx.fatal("option `--index-page` argument must be a file");
}
let target = parse_target_triple(early_dcx, matches);

View File

@ -98,10 +98,9 @@ impl HirCollector<'_> {
let ast_attrs = self.tcx.hir().attrs(self.tcx.local_def_id_to_hir_id(def_id));
if let Some(ref cfg) =
extract_cfg_from_attrs(ast_attrs.iter(), self.tcx, &FxHashSet::default())
&& !cfg.matches(&self.tcx.sess.psess, Some(self.tcx.features()))
{
if !cfg.matches(&self.tcx.sess.psess, Some(self.tcx.features())) {
return;
}
return;
}
let has_name = !name.is_empty();

View File

@ -419,7 +419,9 @@ impl DocFolder for CacheBuilder<'_, '_> {
}
}
if let Some(generics) = i.trait_.as_ref().and_then(|t| t.generics()) {
if let Some(trait_) = &i.trait_
&& let Some(generics) = trait_.generics()
{
for bound in generics {
dids.extend(bound.def_id(self.cache));
}

View File

@ -1102,53 +1102,52 @@ fn string_without_closing_tag<T: Display>(
});
}
if let Some(href_context) = href_context {
if let Some(href) =
href_context.context.shared.span_correspondence_map.get(&def_span).and_then(|href| {
let context = href_context.context;
// FIXME: later on, it'd be nice to provide two links (if possible) for all items:
// one to the documentation page and one to the source definition.
// FIXME: currently, external items only generate a link to their documentation,
// a link to their definition can be generated using this:
// https://github.com/rust-lang/rust/blob/60f1a2fc4b535ead9c85ce085fdce49b1b097531/src/librustdoc/html/render/context.rs#L315-L338
match href {
LinkFromSrc::Local(span) => {
context.href_from_span_relative(*span, &href_context.current_href)
}
LinkFromSrc::External(def_id) => {
format::href_with_root_path(*def_id, context, Some(href_context.root_path))
.ok()
.map(|(url, _, _)| url)
}
LinkFromSrc::Primitive(prim) => format::href_with_root_path(
PrimitiveType::primitive_locations(context.tcx())[prim],
context,
Some(href_context.root_path),
)
.ok()
.map(|(url, _, _)| url),
LinkFromSrc::Doc(def_id) => {
format::href_with_root_path(*def_id, context, Some(href_context.root_path))
.ok()
.map(|(doc_link, _, _)| doc_link)
}
if let Some(href_context) = href_context
&& let Some(href) = href_context.context.shared.span_correspondence_map.get(&def_span)
&& let Some(href) = {
let context = href_context.context;
// FIXME: later on, it'd be nice to provide two links (if possible) for all items:
// one to the documentation page and one to the source definition.
// FIXME: currently, external items only generate a link to their documentation,
// a link to their definition can be generated using this:
// https://github.com/rust-lang/rust/blob/60f1a2fc4b535ead9c85ce085fdce49b1b097531/src/librustdoc/html/render/context.rs#L315-L338
match href {
LinkFromSrc::Local(span) => {
context.href_from_span_relative(*span, &href_context.current_href)
}
})
{
if !open_tag {
// We're already inside an element which has the same klass, no need to give it
// again.
write!(out, "<a href=\"{href}\">{text_s}").unwrap();
} else {
let klass_s = klass.as_html();
if klass_s.is_empty() {
write!(out, "<a href=\"{href}\">{text_s}").unwrap();
} else {
write!(out, "<a class=\"{klass_s}\" href=\"{href}\">{text_s}").unwrap();
LinkFromSrc::External(def_id) => {
format::href_with_root_path(*def_id, context, Some(href_context.root_path))
.ok()
.map(|(url, _, _)| url)
}
LinkFromSrc::Primitive(prim) => format::href_with_root_path(
PrimitiveType::primitive_locations(context.tcx())[prim],
context,
Some(href_context.root_path),
)
.ok()
.map(|(url, _, _)| url),
LinkFromSrc::Doc(def_id) => {
format::href_with_root_path(*def_id, context, Some(href_context.root_path))
.ok()
.map(|(doc_link, _, _)| doc_link)
}
}
return Some("</a>");
}
{
if !open_tag {
// We're already inside an element which has the same klass, no need to give it
// again.
write!(out, "<a href=\"{href}\">{text_s}").unwrap();
} else {
let klass_s = klass.as_html();
if klass_s.is_empty() {
write!(out, "<a href=\"{href}\">{text_s}").unwrap();
} else {
write!(out, "<a class=\"{klass_s}\" href=\"{href}\">{text_s}").unwrap();
}
}
return Some("</a>");
}
if !open_tag {
write!(out, "{}", text_s).unwrap();

View File

@ -1308,18 +1308,17 @@ impl LangString {
seen_other_tags = true;
data.unknown.push(x.to_owned());
}
LangStringToken::KeyValueAttribute(key, value) => {
if key == "class" {
data.added_classes.push(value.to_owned());
} else if let Some(extra) = extra {
extra.error_invalid_codeblock_attr(format!(
"unsupported attribute `{key}`"
));
}
LangStringToken::KeyValueAttribute("class", value) => {
data.added_classes.push(value.to_owned());
}
LangStringToken::KeyValueAttribute(key, ..) if let Some(extra) = extra => {
extra
.error_invalid_codeblock_attr(format!("unsupported attribute `{key}`"));
}
LangStringToken::ClassAttribute(class) => {
data.added_classes.push(class.to_owned());
}
_ => {}
}
}
};

View File

@ -95,10 +95,8 @@ impl SpanMapVisitor<'_> {
.unwrap_or(path.span);
self.matches.insert(span, link);
}
Res::Local(_) => {
if let Some(span) = self.tcx.hir().res_span(path.res) {
self.matches.insert(path.span, LinkFromSrc::Local(clean::Span::new(span)));
}
Res::Local(_) if let Some(span) = self.tcx.hir().res_span(path.res) => {
self.matches.insert(path.span, LinkFromSrc::Local(clean::Span::new(span)));
}
Res::PrimTy(p) => {
// FIXME: Doesn't handle "path-like" primitives like arrays or tuples.
@ -111,15 +109,15 @@ impl SpanMapVisitor<'_> {
/// Used to generate links on items' definition to go to their documentation page.
pub(crate) fn extract_info_from_hir_id(&mut self, hir_id: HirId) {
if let Node::Item(item) = self.tcx.hir_node(hir_id) {
if let Some(span) = self.tcx.def_ident_span(item.owner_id) {
let cspan = clean::Span::new(span);
// If the span isn't from the current crate, we ignore it.
if cspan.inner().is_dummy() || cspan.cnum(self.tcx.sess) != LOCAL_CRATE {
return;
}
self.matches.insert(span, LinkFromSrc::Doc(item.owner_id.to_def_id()));
if let Node::Item(item) = self.tcx.hir_node(hir_id)
&& let Some(span) = self.tcx.def_ident_span(item.owner_id)
{
let cspan = clean::Span::new(span);
// If the span isn't from the current crate, we ignore it.
if cspan.inner().is_dummy() || cspan.cnum(self.tcx.sess) != LOCAL_CRATE {
return;
}
self.matches.insert(span, LinkFromSrc::Doc(item.owner_id.to_def_id()));
}
}

View File

@ -1334,14 +1334,12 @@ impl LinkCollector<'_, '_> {
}
// item can be non-local e.g. when using `#[rustc_doc_primitive = "pointer"]`
if let Some((src_id, dst_id)) = id.as_local().and_then(|dst_id| {
diag_info.item.item_id.expect_def_id().as_local().map(|src_id| (src_id, dst_id))
}) {
if self.cx.tcx.effective_visibilities(()).is_exported(src_id)
&& !self.cx.tcx.effective_visibilities(()).is_exported(dst_id)
{
privacy_error(self.cx, diag_info, path_str);
}
if let Some(dst_id) = id.as_local()
&& let Some(src_id) = diag_info.item.item_id.expect_def_id().as_local()
&& self.cx.tcx.effective_visibilities(()).is_exported(src_id)
&& !self.cx.tcx.effective_visibilities(()).is_exported(dst_id)
{
privacy_error(self.cx, diag_info, path_str);
}
Some(())
@ -1405,10 +1403,10 @@ impl LinkCollector<'_, '_> {
// which we want in some cases but not in others.
cache_errors: bool,
) -> Option<Vec<(Res, Option<UrlFragment>)>> {
if let Some(res) = self.visited_links.get(&key) {
if res.is_some() || cache_errors {
return res.clone().map(|r| vec![r]);
}
if let Some(res) = self.visited_links.get(&key)
&& (res.is_some() || cache_errors)
{
return res.clone().map(|r| vec![r]);
}
let mut candidates = self.resolve_with_disambiguator(&key, diag.clone());
@ -1432,10 +1430,10 @@ impl LinkCollector<'_, '_> {
// and after removing duplicated kinds, only one remains, the `ambiguity_error` function
// won't emit an error. So at this point, we can just take the first candidate as it was
// the first retrieved and use it to generate the link.
if let [candidate, _candidate2, ..] = *candidates {
if !ambiguity_error(self.cx, &diag, &key.path_str, &candidates, false) {
candidates = vec![candidate];
}
if let [candidate, _candidate2, ..] = *candidates
&& !ambiguity_error(self.cx, &diag, &key.path_str, &candidates, false)
{
candidates = vec![candidate];
}
let mut out = Vec::with_capacity(candidates.len());
@ -1480,17 +1478,16 @@ impl LinkCollector<'_, '_> {
// See https://github.com/rust-lang/rust/pull/76955#discussion_r493953382 for a good approach.
let mut err = ResolutionFailure::NotResolved(err);
for other_ns in [TypeNS, ValueNS, MacroNS] {
if other_ns != expected_ns {
if let Ok(&[res, ..]) = self
if other_ns != expected_ns
&& let Ok(&[res, ..]) = self
.resolve(path_str, other_ns, None, item_id, module_id)
.as_deref()
{
err = ResolutionFailure::WrongNamespace {
res: full_res(self.cx.tcx, res),
expected_ns,
};
break;
}
{
err = ResolutionFailure::WrongNamespace {
res: full_res(self.cx.tcx, res),
expected_ns,
};
break;
}
}
resolution_failure(self, diag, path_str, disambiguator, smallvec![err]);
@ -1674,11 +1671,11 @@ impl Disambiguator {
Ok(Some((d, &rest[1..], &rest[1..])))
} else {
for (suffix, kind) in suffixes {
if let Some(path_str) = link.strip_suffix(suffix) {
// Avoid turning `!` or `()` into an empty string
if !path_str.is_empty() {
return Ok(Some((Kind(kind), path_str, link)));
}
// Avoid turning `!` or `()` into an empty string
if let Some(path_str) = link.strip_suffix(suffix)
&& !path_str.is_empty()
{
return Ok(Some((Kind(kind), path_str, link)));
}
}
Ok(None)

View File

@ -177,23 +177,22 @@ pub(crate) fn collect_trait_impls(mut krate: Crate, cx: &mut DocContext<'_>) ->
} else if let Some(did) = target.def_id(&cx.cache) {
cleaner.items.insert(did.into());
}
if let Some(for_did) = for_.def_id(&cx.cache) {
if type_did_to_deref_target.insert(for_did, target).is_none() {
// Since only the `DefId` portion of the `Type` instances is known to be same for both the
// `Deref` target type and the impl for type positions, this map of types is keyed by
// `DefId` and for convenience uses a special cleaner that accepts `DefId`s directly.
if cleaner.keep_impl_with_def_id(for_did.into()) {
let mut targets = DefIdSet::default();
targets.insert(for_did);
add_deref_target(
cx,
&type_did_to_deref_target,
&mut cleaner,
&mut targets,
for_did,
);
}
}
if let Some(for_did) = for_.def_id(&cx.cache)
&& type_did_to_deref_target.insert(for_did, target).is_none()
// Since only the `DefId` portion of the `Type` instances is known to be same for both the
// `Deref` target type and the impl for type positions, this map of types is keyed by
// `DefId` and for convenience uses a special cleaner that accepts `DefId`s directly.
&& cleaner.keep_impl_with_def_id(for_did.into())
{
let mut targets = DefIdSet::default();
targets.insert(for_did);
add_deref_target(
cx,
&type_did_to_deref_target,
&mut cleaner,
&mut targets,
for_did,
);
}
}
}

View File

@ -28,9 +28,9 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
// We don't try to detect stuff `<like, this>` because that's not valid HTML,
// and we don't try to detect stuff `<like this>` because that's not valid Rust.
let mut generics_end = range.end;
if let Some(Some(mut generics_start)) = (is_open_tag
&& dox[..generics_end].ends_with('>'))
.then(|| extract_path_backwards(dox, range.start))
if is_open_tag
&& dox[..generics_end].ends_with('>')
&& let Some(mut generics_start) = extract_path_backwards(dox, range.start)
{
while generics_start != 0
&& generics_end < dox.len()

View File

@ -73,15 +73,15 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
}
let parser_old = cmarko::Parser::new_ext(dox, main_body_opts_old()).into_offset_iter();
for (event, span) in parser_old {
if let cmarko::Event::Start(cmarko::Tag::BlockQuote) = event {
if !dox[span.clone()].starts_with("> ") {
spaceless_block_quotes.remove(&span.start);
}
if let cmarko::Event::Start(cmarko::Tag::BlockQuote) = event
&& !dox[span.clone()].starts_with("> ")
{
spaceless_block_quotes.remove(&span.start);
}
if let cmarko::Event::FootnoteReference(_) = event {
if !found_footnote_references.contains(&(span.start + 1)) {
missing_footnote_references.insert(span.start + 1, span);
}
if let cmarko::Event::FootnoteReference(_) = event
&& !found_footnote_references.contains(&(span.start + 1))
{
missing_footnote_references.insert(span.start + 1, span);
}
}
}

View File

@ -57,10 +57,10 @@ impl LibEmbargoVisitor<'_, '_> {
}
for item in self.tcx.module_children(def_id).iter() {
if let Some(def_id) = item.res.opt_def_id() {
if item.vis.is_public() {
self.visit_item(def_id);
}
if let Some(def_id) = item.res.opt_def_id()
&& item.vis.is_public()
{
self.visit_item(def_id);
}
}
}

View File

@ -7,7 +7,6 @@
#[cfg(target_vendor = "apple")]
#[allow(non_camel_case_types)]
pub unsafe fn raise_fd_limit() {
use std::mem::size_of_val;
use std::ptr::null_mut;
use std::{cmp, io};

View File

@ -58,12 +58,17 @@ fn test_metrics_dump() {
);
let message = rfs::read_to_string(json_path);
let parsed: serde_json::Value =
let mut parsed: serde_json::Value =
serde_json::from_str(&message).expect("metrics should be dumped as json");
// remove timestamps
assert!(parsed["lib_features"][0]["timestamp"].is_number());
assert!(parsed["lang_features"][0]["timestamp"].is_number());
parsed["lib_features"][0]["timestamp"] = serde_json::json!(null);
parsed["lang_features"][0]["timestamp"] = serde_json::json!(null);
let expected = serde_json::json!(
{
"lib_features":[{"symbol":"ascii_char"}],
"lang_features":[{"symbol":"box_patterns","since":null}]
"lib_features":[{"symbol":"ascii_char", "timestamp":null}],
"lang_features":[{"symbol":"box_patterns","since":null, "timestamp":null}]
}
);

View File

@ -18,16 +18,6 @@ LL | let _ = #[coroutine] || {};
= help: add `#![feature(coroutines)]` to the crate attributes to enable
= note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
error[E0658]: yield syntax is experimental
--> $DIR/gen_block.rs:16:16
|
LL | let _ = || yield true;
| ^^^^^^^^^^
|
= note: see issue #43122 <https://github.com/rust-lang/rust/issues/43122> for more information
= help: add `#![feature(coroutines)]` to the crate attributes to enable
= note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
error: `yield` can only be used in `#[coroutine]` closures, or `gen` blocks
--> $DIR/gen_block.rs:16:16
|
@ -39,23 +29,13 @@ help: use `#[coroutine]` to make this closure a coroutine
LL | let _ = #[coroutine] || yield true;
| ++++++++++++
error[E0658]: yield syntax is experimental
--> $DIR/gen_block.rs:20:29
|
LL | let _ = #[coroutine] || yield true;
| ^^^^^^^^^^
|
= note: see issue #43122 <https://github.com/rust-lang/rust/issues/43122> for more information
= help: add `#![feature(coroutines)]` to the crate attributes to enable
= note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
error[E0282]: type annotations needed
--> $DIR/gen_block.rs:7:13
|
LL | let x = gen {};
| ^^^^^^ cannot infer type
error: aborting due to 6 previous errors
error: aborting due to 4 previous errors
Some errors have detailed explanations: E0282, E0658.
For more information about an error, try `rustc --explain E0282`.

View File

@ -71,7 +71,7 @@ LL | let _ = || yield true;
| ^^^^^^^^^^
|
= note: see issue #43122 <https://github.com/rust-lang/rust/issues/43122> for more information
= help: add `#![feature(coroutines)]` to the crate attributes to enable
= help: add `#![feature(yield_expr)]` to the crate attributes to enable
= note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
error: `yield` can only be used in `#[coroutine]` closures, or `gen` blocks
@ -92,7 +92,7 @@ LL | let _ = #[coroutine] || yield true;
| ^^^^^^^^^^
|
= note: see issue #43122 <https://github.com/rust-lang/rust/issues/43122> for more information
= help: add `#![feature(coroutines)]` to the crate attributes to enable
= help: add `#![feature(yield_expr)]` to the crate attributes to enable
= note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
error: aborting due to 11 previous errors

View File

@ -14,12 +14,12 @@ fn main() {
//[none]~^ ERROR: cannot find
let _ = || yield true; //[none]~ ERROR yield syntax is experimental
//~^ ERROR yield syntax is experimental
//[none]~^ ERROR yield syntax is experimental
//~^^ ERROR `yield` can only be used in
let _ = #[coroutine] || yield true; //[none]~ ERROR yield syntax is experimental
//~^ ERROR `#[coroutine]` attribute is an experimental feature
//~^^ ERROR yield syntax is experimental
//[none]~^^ ERROR yield syntax is experimental
let _ = #[coroutine] || {};
//~^ ERROR `#[coroutine]` attribute is an experimental feature

View File

@ -14,7 +14,7 @@ error[E0308]: mismatched types
--> $DIR/E0582.rs:22:5
|
LL | bar(mk_unexpected_char_err)
| ^^^ one type is more general than the other
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ one type is more general than the other
|
= note: expected enum `Option<&_>`
found enum `Option<&'a _>`

View File

@ -45,7 +45,7 @@ LL | yield true;
| ^^^^^^^^^^
|
= note: see issue #43122 <https://github.com/rust-lang/rust/issues/43122> for more information
= help: add `#![feature(coroutines)]` to the crate attributes to enable
= help: add `#![feature(yield_expr)]` to the crate attributes to enable
= note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
error: `yield` can only be used in `#[coroutine]` closures, or `gen` blocks
@ -66,7 +66,7 @@ LL | let _ = || yield true;
| ^^^^^^^^^^
|
= note: see issue #43122 <https://github.com/rust-lang/rust/issues/43122> for more information
= help: add `#![feature(coroutines)]` to the crate attributes to enable
= help: add `#![feature(yield_expr)]` to the crate attributes to enable
= note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
error: `yield` can only be used in `#[coroutine]` closures, or `gen` blocks

View File

@ -45,7 +45,7 @@ LL | yield true;
| ^^^^^^^^^^
|
= note: see issue #43122 <https://github.com/rust-lang/rust/issues/43122> for more information
= help: add `#![feature(coroutines)]` to the crate attributes to enable
= help: add `#![feature(yield_expr)]` to the crate attributes to enable
= note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
error: `yield` can only be used in `#[coroutine]` closures, or `gen` blocks
@ -66,7 +66,7 @@ LL | let _ = || yield true;
| ^^^^^^^^^^
|
= note: see issue #43122 <https://github.com/rust-lang/rust/issues/43122> for more information
= help: add `#![feature(coroutines)]` to the crate attributes to enable
= help: add `#![feature(yield_expr)]` to the crate attributes to enable
= note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
error: `yield` can only be used in `#[coroutine]` closures, or `gen` blocks

View File

@ -0,0 +1,9 @@
//@ edition: 2024
#![feature(stmt_expr_attributes)]
fn main() {
yield (); //~ ERROR yield syntax is experimental
//~^ ERROR yield syntax is experimental
//~^^ ERROR `yield` can only be used in `#[coroutine]` closures, or `gen` blocks
//~^^^ ERROR yield expression outside of coroutine literal
}

View File

@ -0,0 +1,41 @@
error[E0658]: yield syntax is experimental
--> $DIR/feature-gate-yield-expr.rs:5:5
|
LL | yield ();
| ^^^^^^^^
|
= note: see issue #43122 <https://github.com/rust-lang/rust/issues/43122> for more information
= help: add `#![feature(coroutines)]` to the crate attributes to enable
= note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
error[E0658]: yield syntax is experimental
--> $DIR/feature-gate-yield-expr.rs:5:5
|
LL | yield ();
| ^^^^^^^^
|
= note: see issue #43122 <https://github.com/rust-lang/rust/issues/43122> for more information
= help: add `#![feature(yield_expr)]` to the crate attributes to enable
= note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
error: `yield` can only be used in `#[coroutine]` closures, or `gen` blocks
--> $DIR/feature-gate-yield-expr.rs:5:5
|
LL | yield ();
| ^^^^^^^^
|
help: use `#[coroutine]` to make this closure a coroutine
|
LL | #[coroutine] fn main() {
| ++++++++++++
error[E0627]: yield expression outside of coroutine literal
--> $DIR/feature-gate-yield-expr.rs:5:5
|
LL | yield ();
| ^^^^^^^^
error: aborting due to 4 previous errors
Some errors have detailed explanations: E0627, E0658.
For more information about an error, try `rustc --explain E0627`.

View File

@ -2,7 +2,7 @@ error: implementation of `Foo` is not general enough
--> $DIR/hrtb-just-for-static.rs:24:5
|
LL | want_hrtb::<StaticInt>()
| ^^^^^^^^^^^^^^^^^^^^^^ implementation of `Foo` is not general enough
| ^^^^^^^^^^^^^^^^^^^^^^^^ implementation of `Foo` is not general enough
|
= note: `StaticInt` must implement `Foo<&'0 isize>`, for any lifetime `'0`...
= note: ...but it actually implements `Foo<&'static isize>`

View File

@ -12,5 +12,4 @@ fn main() {
<fn(&u8) as Foo>::ASSOC;
//~^ ERROR implementation of `Foo` is not general enough
//~| ERROR implementation of `Foo` is not general enough
}

View File

@ -7,15 +7,5 @@ LL | <fn(&u8) as Foo>::ASSOC;
= note: `Foo` would have to be implemented for the type `for<'a> fn(&'a u8)`
= note: ...but `Foo` is actually implemented for the type `fn(&'0 u8)`, for some specific lifetime `'0`
error: implementation of `Foo` is not general enough
--> $DIR/issue-97997.rs:13:5
|
LL | <fn(&u8) as Foo>::ASSOC;
| ^^^^^^^^^^^^^^^^^^^^^^^ implementation of `Foo` is not general enough
|
= note: `Foo` would have to be implemented for the type `for<'a> fn(&'a u8)`
= note: ...but `Foo` is actually implemented for the type `fn(&'0 u8)`, for some specific lifetime `'0`
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
error: aborting due to 2 previous errors
error: aborting due to 1 previous error

View File

@ -28,7 +28,7 @@ error[E0310]: the parameter type `A` may not live long enough
--> $DIR/implied_lifetime_wf_check3.rs:52:5
|
LL | test_type_param::assert_static::<A>()
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| |
| the parameter type `A` must be valid for the static lifetime...
| ...so that the type `A` will meet its required lifetime bounds

View File

@ -21,7 +21,7 @@ error[E0310]: the parameter type `A` may not live long enough
--> $DIR/implied_lifetime_wf_check4_static.rs:17:5
|
LL | assert_static::<A>()
| ^^^^^^^^^^^^^^^^^^
| ^^^^^^^^^^^^^^^^^^^^
| |
| the parameter type `A` must be valid for the static lifetime...
| ...so that the type `A` will meet its required lifetime bounds

View File

@ -20,7 +20,7 @@ help: the return type of this call is `{integer}` due to the type of the argumen
LL | <F as FnOnce(&mut u8)>::call_once(f, 1)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^-^
| |
| this argument influences the return type of `FnOnce`
| this argument influences the return type of `call_once`
note: method defined here
--> $SRC_DIR/core/src/ops/function.rs:LL:COL

View File

@ -25,7 +25,7 @@ help: the return type of this call is `u32` due to the type of the argument pass
LL | <i32 as Add<i32>>::add(1u32, 2);
| ^^^^^^^^^^^^^^^^^^^^^^^----^^^^
| |
| this argument influences the return type of `Add`
| this argument influences the return type of `add`
note: method defined here
--> $SRC_DIR/core/src/ops/arith.rs:LL:COL
help: change the type of the numeric literal from `u32` to `i32`
@ -48,7 +48,7 @@ help: the return type of this call is `u32` due to the type of the argument pass
LL | <i32 as Add<i32>>::add(1, 2u32);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^----^
| |
| this argument influences the return type of `Add`
| this argument influences the return type of `add`
note: method defined here
--> $SRC_DIR/core/src/ops/arith.rs:LL:COL
help: change the type of the numeric literal from `u32` to `i32`

View File

@ -701,6 +701,9 @@ cc = ["@davidtwco", "@wesleywiser"]
[mentions."compiler/rustc_codegen_cranelift"]
cc = ["@bjorn3"]
[mentions."compiler/rustc_codegen_ssa"]
cc = ["@WaffleLapkin"]
[mentions."compiler/rustc_codegen_gcc"]
cc = ["@antoyo", "@GuillaumeGomez"]
@ -1090,6 +1093,10 @@ title = "[stable"
branch = "stable"
[assign.adhoc_groups]
compiler_leads = [
"@davidtwco",
"@wesleywiser",
]
compiler = [
"@BoxyUwU",
"@cjgillot",