Auto merge of #3029 - rust-lang:rustup-2023-08-16, r=RalfJung

Automatic sync from rustc
bors 2023-08-16 06:24:00 +00:00
commit e08baec579
353 changed files with 5156 additions and 2502 deletions

View File

@ -50,10 +50,10 @@ jobs:
matrix:
include:
- name: mingw-check
os: ubuntu-20.04-16core-64gb
os: ubuntu-20.04-4core-16gb
env: {}
- name: mingw-check-tidy
os: ubuntu-20.04-16core-64gb
os: ubuntu-20.04-4core-16gb
env: {}
- name: x86_64-gnu-llvm-15
os: ubuntu-20.04-16core-64gb

View File

@ -8,8 +8,17 @@ version = "0.20.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4fa78e18c64fce05e902adecd7a5eed15a5e0a3439f7b0e169f0252214865e3"
dependencies = [
"compiler_builtins",
"gimli 0.27.3",
]
[[package]]
name = "addr2line"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb"
dependencies = [
"compiler_builtins",
"gimli 0.28.0",
"rustc-std-workspace-alloc",
"rustc-std-workspace-core",
]
@ -24,17 +33,6 @@ dependencies = [
"rustc-std-workspace-core",
]
[[package]]
name = "ahash"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47"
dependencies = [
"getrandom",
"once_cell",
"version_check",
]
[[package]]
name = "ahash"
version = "0.8.3"
@ -178,11 +176,11 @@ dependencies = [
[[package]]
name = "ar_archive_writer"
version = "0.1.4"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74cfb39880a59e122232cb5fb06b20b4382d58c12fa9747d16f846d38a7b094c"
checksum = "9792d37ca5173d7e7f4fe453739a0671d0557915a030a383d6b866476bbc3e71"
dependencies = [
"object",
"object 0.32.0",
]
[[package]]
@ -252,12 +250,12 @@ version = "0.3.68"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4319208da049c43661739c5fade2ba182f09d1dc2299b32298d3a31692b17e12"
dependencies = [
"addr2line",
"addr2line 0.20.0",
"cc",
"cfg-if",
"libc",
"miniz_oxide",
"object",
"object 0.31.1",
"rustc-demangle",
]
@ -1159,9 +1157,9 @@ dependencies = [
[[package]]
name = "fallible-iterator"
version = "0.2.0"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7"
checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649"
[[package]]
name = "fastrand"
@ -1437,26 +1435,24 @@ dependencies = [
"wasi",
]
[[package]]
name = "gimli"
version = "0.26.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22030e2c5a68ec659fde1e949a745124b48e6fa8b045b7ed5bd1fe4ccc5c4e5d"
dependencies = [
"fallible-iterator",
"indexmap 1.9.3",
"stable_deref_trait",
]
[[package]]
name = "gimli"
version = "0.27.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e"
[[package]]
name = "gimli"
version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0"
dependencies = [
"compiler_builtins",
"fallible-iterator",
"indexmap 2.0.0",
"rustc-std-workspace-alloc",
"rustc-std-workspace-core",
"stable_deref_trait",
]
[[package]]
@ -1525,18 +1521,6 @@ name = "hashbrown"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
dependencies = [
"ahash 0.7.6",
]
[[package]]
name = "hashbrown"
version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e"
dependencies = [
"ahash 0.8.3",
]
[[package]]
name = "hashbrown"
@ -1544,6 +1528,7 @@ version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a"
dependencies = [
"ahash",
"allocator-api2",
"compiler_builtins",
"rustc-std-workspace-alloc",
@ -2449,12 +2434,21 @@ name = "object"
version = "0.31.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8bda667d9f2b5051b8833f59f3bf748b28ef54f850f4fcb389a252aa383866d1"
dependencies = [
"memchr",
]
[[package]]
name = "object"
version = "0.32.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77ac5bbd07aea88c60a577a1ce218075ffd59208b2d7ca97adf9bfc5aeb21ebe"
dependencies = [
"compiler_builtins",
"crc32fast",
"flate2",
"hashbrown 0.13.2",
"indexmap 1.9.3",
"hashbrown 0.14.0",
"indexmap 2.0.0",
"memchr",
"rustc-std-workspace-alloc",
"rustc-std-workspace-core",
@ -3362,7 +3356,7 @@ dependencies = [
"cstr",
"libc",
"measureme",
"object",
"object 0.32.0",
"rustc-demangle",
"rustc_ast",
"rustc_attr",
@ -3398,7 +3392,7 @@ dependencies = [
"itertools",
"jobserver",
"libc",
"object",
"object 0.32.0",
"pathdiff",
"regex",
"rustc_arena",
@ -3805,6 +3799,7 @@ dependencies = [
"rustc_data_structures",
"rustc_errors",
"rustc_expand",
"rustc_feature",
"rustc_fluent_macro",
"rustc_fs_util",
"rustc_hir",
@ -4331,7 +4326,7 @@ name = "rustc_target"
version = "0.0.0"
dependencies = [
"bitflags 1.3.2",
"object",
"object 0.32.0",
"rustc_abi",
"rustc_data_structures",
"rustc_feature",
@ -4586,12 +4581,12 @@ checksum = "4f3208ce4d8448b3f3e7d168a73f5e0c43a61e32930de3bceeccedb388b6bf06"
[[package]]
name = "ruzstd"
version = "0.3.1"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a15e661f0f9dac21f3494fe5d23a6338c0ac116a2d22c2b63010acd89467ffe"
checksum = "ac3ffab8f9715a0d455df4bbb9d21e91135aab3cd3ca187af0cd0c3c3f868fdc"
dependencies = [
"byteorder",
"thiserror",
"thiserror-core",
"twox-hash",
]
@ -4860,7 +4855,7 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
name = "std"
version = "0.0.0"
dependencies = [
"addr2line",
"addr2line 0.21.0",
"alloc",
"cfg-if",
"compiler_builtins",
@ -4871,7 +4866,7 @@ dependencies = [
"hermit-abi 0.3.2",
"libc",
"miniz_oxide",
"object",
"object 0.32.0",
"panic_abort",
"panic_unwind",
"profiler_builtins",
@ -5138,6 +5133,26 @@ dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-core"
version = "1.0.38"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0d97345f6437bb2004cd58819d8a9ef8e36cdd7661c2abc4bbde0a7c40d9f497"
dependencies = [
"thiserror-core-impl",
]
[[package]]
name = "thiserror-core-impl"
version = "1.0.38"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "10ac1c5050e43014d16b2f94d0d2ce79e65ffdd8b38d8048f9c8f6a8a6da62ac"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "thiserror-impl"
version = "1.0.40"
@ -5151,13 +5166,13 @@ dependencies = [
[[package]]
name = "thorin-dwp"
version = "0.6.0"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "98c040e1340b889d4180c64e1d787efa9c32cb1617757e101480b61238b0d927"
checksum = "4db52ee8fec06e119b692ef3dd2c4cf621a99204c1b8c47407870ed050305b9b"
dependencies = [
"gimli 0.26.2",
"hashbrown 0.12.3",
"object",
"gimli 0.28.0",
"hashbrown 0.14.0",
"object 0.32.0",
"tracing",
]

View File

@ -1648,7 +1648,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
hir::ExprKind::Match(
scrutinee,
arena_vec![self; break_arm, continue_arm],
hir::MatchSource::TryDesugar,
hir::MatchSource::TryDesugar(scrutinee.hir_id),
)
}
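For context, a hedged sketch of the surface-level shape that `expr?` lowers to (the real lowering goes through the `Try` lang items and builds HIR directly, so this is only an approximation): the scrutinee of the generated match is the expression whose `HirId` the `TryDesugar` variant now records, which later diagnostics can point back to.

use std::num::ParseIntError;

// Roughly what `s.parse::<i32>()?` behaves like after the `?` desugaring:
fn parse_plus_one(s: &str) -> Result<i32, ParseIntError> {
    let n = match s.parse::<i32>() {          // <- the recorded scrutinee
        Ok(v) => v,                           // "continue" arm
        Err(e) => return Err(From::from(e)),  // "break" arm
    };
    Ok(n + 1)
}

fn main() {
    assert_eq!(parse_plus_one("41"), Ok(42));
    assert!(parse_plus_one("x").is_err());
}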

View File

@ -2495,35 +2495,31 @@ impl<'hir> GenericArgsCtor<'hir> {
let id = lcx.next_node_id();
let hir_id = lcx.next_id();
let Some(host_param_id) = lcx.host_param_id else {
lcx.tcx
.sess
.delay_span_bug(span, "no host param id for call in const yet no errors reported");
return;
};
let body = lcx.lower_body(|lcx| {
(
&[],
match constness {
ast::Const::Yes(_) => {
let hir_id = lcx.next_id();
let res =
Res::Def(DefKind::ConstParam, lcx.host_param_id.unwrap().to_def_id());
let expr_kind = hir::ExprKind::Path(hir::QPath::Resolved(
None,
lcx.arena.alloc(hir::Path {
span,
res,
segments: arena_vec![lcx; hir::PathSegment::new(Ident {
name: sym::host,
span,
}, hir_id, res)],
}),
));
lcx.expr(span, expr_kind)
}
ast::Const::No => lcx.expr(
span,
hir::ExprKind::Lit(
lcx.arena.alloc(hir::Lit { span, node: ast::LitKind::Bool(true) }),
),
),
},
)
(&[], {
let hir_id = lcx.next_id();
let res = Res::Def(DefKind::ConstParam, host_param_id.to_def_id());
let expr_kind = hir::ExprKind::Path(hir::QPath::Resolved(
None,
lcx.arena.alloc(hir::Path {
span,
res,
segments: arena_vec![lcx; hir::PathSegment::new(Ident {
name: sym::host,
span,
}, hir_id, res)],
}),
));
lcx.expr(span, expr_kind)
})
});
let attr_id = lcx.tcx.sess.parse_sess.attr_id_generator.mk_attr_id();

View File

@ -1,7 +1,7 @@
use super::ResolverAstLoweringExt;
use rustc_ast::visit::{self, BoundKind, LifetimeCtxt, Visitor};
use rustc_ast::{GenericBounds, Lifetime, NodeId, PathSegment, PolyTraitRef, Ty, TyKind};
use rustc_hir::def::LifetimeRes;
use rustc_hir::def::{DefKind, LifetimeRes, Res};
use rustc_middle::span_bug;
use rustc_middle::ty::ResolverAstLowering;
use rustc_span::symbol::{kw, Ident};
@ -77,7 +77,20 @@ impl<'ast> Visitor<'ast> for LifetimeCollectVisitor<'ast> {
}
fn visit_ty(&mut self, t: &'ast Ty) {
match t.kind {
match &t.kind {
TyKind::Path(None, _) => {
// We can sometimes encounter bare trait objects
// which are represented in AST as paths.
if let Some(partial_res) = self.resolver.get_partial_res(t.id)
&& let Some(Res::Def(DefKind::Trait | DefKind::TraitAlias, _)) = partial_res.full_res()
{
self.current_binders.push(t.id);
visit::walk_ty(self, t);
self.current_binders.pop();
} else {
visit::walk_ty(self, t);
}
}
TyKind::BareFn(_) => {
self.current_binders.push(t.id);
visit::walk_ty(self, t);
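A hedged illustration of the bare-trait-object case the new `TyKind::Path` arm above handles: on editions before 2021 a trait object may still be written without `dyn`, so it reaches the AST as a plain path, and lifetimes appearing inside it are bound by that implicit trait-object binder rather than by the enclosing item. The function below is invented for the example and compiles on editions 2015/2018 only.

// A bare trait object written as a path; the `&str` lifetime inside it is
// bound by the trait object, which is why the visitor pushes a binder here.
#![allow(bare_trait_objects)]

fn call_it(f: &Fn(&str) -> usize) -> usize {
    f("hello")
}

fn main() {
    assert_eq!(call_it(&|s: &str| s.len()), 5);
}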

View File

@ -13,6 +13,7 @@ use rustc_ast::*;
use rustc_ast::{walk_list, StaticItem};
use rustc_ast_pretty::pprust::{self, State};
use rustc_data_structures::fx::FxIndexMap;
use rustc_feature::Features;
use rustc_macros::Subdiagnostic;
use rustc_parse::validate_attr;
use rustc_session::lint::builtin::{
@ -45,6 +46,7 @@ enum DisallowTildeConstContext<'a> {
struct AstValidator<'a> {
session: &'a Session,
features: &'a Features,
/// The span of the `extern` in an `extern { ... }` block, if any.
extern_mod: Option<&'a Item>,
@ -1023,7 +1025,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
}
self.check_type_no_bounds(bounds, "this context");
if self.session.features_untracked().lazy_type_alias {
if self.features.lazy_type_alias {
if let Err(err) = self.check_type_alias_where_clause_location(ty_alias) {
self.err_handler().emit_err(err);
}
@ -1500,9 +1502,15 @@ fn deny_equality_constraints(
this.err_handler().emit_err(err);
}
pub fn check_crate(session: &Session, krate: &Crate, lints: &mut LintBuffer) -> bool {
pub fn check_crate(
session: &Session,
features: &Features,
krate: &Crate,
lints: &mut LintBuffer,
) -> bool {
let mut validator = AstValidator {
session,
features,
extern_mod: None,
in_trait_impl: false,
in_const_trait_impl: false,

View File

@ -514,10 +514,10 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
}
}
pub fn check_crate(krate: &ast::Crate, sess: &Session) {
maybe_stage_features(sess, krate);
check_incompatible_features(sess);
let mut visitor = PostExpansionVisitor { sess, features: &sess.features_untracked() };
pub fn check_crate(krate: &ast::Crate, sess: &Session, features: &Features) {
maybe_stage_features(sess, features, krate);
check_incompatible_features(sess, features);
let mut visitor = PostExpansionVisitor { sess, features };
let spans = sess.parse_sess.gated_spans.spans.borrow();
macro_rules! gate_all {
@ -600,12 +600,12 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session) {
visit::walk_crate(&mut visitor, krate);
}
fn maybe_stage_features(sess: &Session, krate: &ast::Crate) {
fn maybe_stage_features(sess: &Session, features: &Features, krate: &ast::Crate) {
// checks if `#![feature]` has been used to enable any lang feature
// does not check the same for lib features unless there's at least one
// declared lang feature
if !sess.opts.unstable_features.is_nightly_build() {
let lang_features = &sess.features_untracked().declared_lang_features;
let lang_features = &features.declared_lang_features;
if lang_features.len() == 0 {
return;
}
@ -640,9 +640,7 @@ fn maybe_stage_features(sess: &Session, krate: &ast::Crate) {
}
}
fn check_incompatible_features(sess: &Session) {
let features = sess.features_untracked();
fn check_incompatible_features(sess: &Session, features: &Features) {
let declared_features = features
.declared_lang_features
.iter()

View File

@ -800,18 +800,15 @@ pub struct Deprecation {
}
/// Finds the deprecation attribute. `None` if none exists.
pub fn find_deprecation(sess: &Session, attrs: &[Attribute]) -> Option<(Deprecation, Span)> {
find_deprecation_generic(sess, attrs.iter())
}
fn find_deprecation_generic<'a, I>(sess: &Session, attrs_iter: I) -> Option<(Deprecation, Span)>
where
I: Iterator<Item = &'a Attribute>,
{
pub fn find_deprecation(
sess: &Session,
features: &Features,
attrs: &[Attribute],
) -> Option<(Deprecation, Span)> {
let mut depr: Option<(Deprecation, Span)> = None;
let is_rustc = sess.features_untracked().staged_api;
let is_rustc = features.staged_api;
'outer: for attr in attrs_iter {
'outer: for attr in attrs {
if !attr.has_name(sym::deprecated) {
continue;
}
@ -872,7 +869,7 @@ where
}
}
sym::suggestion => {
if !sess.features_untracked().deprecated_suggestion {
if !features.deprecated_suggestion {
sess.emit_err(session_diagnostics::DeprecatedItemSuggestion {
span: mi.span,
is_nightly: sess.is_nightly_build().then_some(()),
@ -890,7 +887,7 @@ where
meta.span(),
AttrError::UnknownMetaItem(
pprust::path_to_string(&mi.path),
if sess.features_untracked().deprecated_suggestion {
if features.deprecated_suggestion {
&["since", "note", "suggestion"]
} else {
&["since", "note"]
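For orientation, a hedged sketch of the attribute surface this parsing covers; the `suggestion` key is only accepted when the `deprecated_suggestion` feature (now read from the `Features` passed into `find_deprecation`) is enabled on a nightly compiler, and the item names below are invented for the example.

#![feature(deprecated_suggestion)]

#[deprecated(since = "1.2.0", note = "use `renamed` instead", suggestion = "renamed")]
pub fn original() {}

pub fn renamed() {}

fn main() {
    renamed();
}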

View File

@ -371,40 +371,27 @@ fn check_opaque_type_parameter_valid(
span: Span,
) -> Result<(), ErrorGuaranteed> {
let opaque_ty_hir = tcx.hir().expect_item(opaque_type_key.def_id);
match opaque_ty_hir.expect_opaque_ty().origin {
// No need to check return position impl trait (RPIT)
// because for type and const parameters they are correct
// by construction: we convert
//
// fn foo<P0..Pn>() -> impl Trait
//
// into
//
// type Foo<P0...Pn>
// fn foo<P0..Pn>() -> Foo<P0...Pn>.
//
// For lifetime parameters we convert
//
// fn foo<'l0..'ln>() -> impl Trait<'l0..'lm>
//
// into
//
// type foo::<'p0..'pn>::Foo<'q0..'qm>
// fn foo<l0..'ln>() -> foo::<'static..'static>::Foo<'l0..'lm>.
//
// which would error here on all of the `'static` args.
OpaqueTyOrigin::FnReturn(..) | OpaqueTyOrigin::AsyncFn(..) => return Ok(()),
// Check these
OpaqueTyOrigin::TyAlias { .. } => {}
}
let is_ty_alias = match opaque_ty_hir.expect_opaque_ty().origin {
OpaqueTyOrigin::TyAlias { .. } => true,
OpaqueTyOrigin::AsyncFn(..) | OpaqueTyOrigin::FnReturn(..) => false,
};
let opaque_generics = tcx.generics_of(opaque_type_key.def_id);
let mut seen_params: FxIndexMap<_, Vec<_>> = FxIndexMap::default();
for (i, arg) in opaque_type_key.args.iter().enumerate() {
if let Err(guar) = arg.error_reported() {
return Err(guar);
}
let arg_is_param = match arg.unpack() {
GenericArgKind::Type(ty) => matches!(ty.kind(), ty::Param(_)),
GenericArgKind::Lifetime(lt) => {
GenericArgKind::Lifetime(lt) if is_ty_alias => {
matches!(*lt, ty::ReEarlyBound(_) | ty::ReFree(_))
}
// FIXME(#113916): we can't currently check for unique lifetime params,
// see that issue for more. We will also have to ignore unused lifetime
// params for RPIT, but that's comparatively trivial ✨
GenericArgKind::Lifetime(_) => continue,
GenericArgKind::Const(ct) => matches!(ct.kind(), ty::ConstKind::Param(_)),
};

View File

@ -69,7 +69,7 @@ pub fn expand_assert<'cx>(
// If `generic_assert` is enabled, generates rich captured outputs
//
// FIXME(c410-f3r) See https://github.com/rust-lang/rust/issues/96949
else if let Some(features) = cx.ecfg.features && features.generic_assert {
else if cx.ecfg.features.generic_assert {
context::Context::new(cx, call_site_span).build(cond_expr, panic_path())
}
// If `generic_assert` is not enabled, only outputs a literal "assertion failed: ..."

View File

@ -24,7 +24,7 @@ pub fn expand_cfg(
&cfg,
&cx.sess.parse_sess,
cx.current_expansion.lint_node_id,
cx.ecfg.features,
Some(cx.ecfg.features),
);
MacEager::expr(cx.expr_bool(sp, matches_cfg))
}

View File

@ -31,10 +31,11 @@ pub(crate) fn expand(
pub(crate) fn cfg_eval(
sess: &Session,
features: Option<&Features>,
features: &Features,
annotatable: Annotatable,
lint_node_id: NodeId,
) -> Annotatable {
let features = Some(features);
CfgEval { cfg: &mut StripUnconfigured { sess, features, config_tokens: true, lint_node_id } }
.configure_annotatable(annotatable)
// Since the item itself has already been configured by the `InvocationCollector`,

View File

@ -5,6 +5,7 @@ use rustc_ast::{self as ast, attr, NodeId};
use rustc_ast_pretty::pprust;
use rustc_expand::base::{parse_macro_name_and_helper_attrs, ExtCtxt, ResolverExpand};
use rustc_expand::expand::{AstFragment, ExpansionConfig};
use rustc_feature::Features;
use rustc_session::Session;
use rustc_span::hygiene::AstPass;
use rustc_span::source_map::SourceMap;
@ -46,13 +47,14 @@ struct CollectProcMacros<'a> {
pub fn inject(
krate: &mut ast::Crate,
sess: &Session,
features: &Features,
resolver: &mut dyn ResolverExpand,
is_proc_macro_crate: bool,
has_proc_macro_decls: bool,
is_test_crate: bool,
handler: &rustc_errors::Handler,
) {
let ecfg = ExpansionConfig::default("proc_macro".to_string());
let ecfg = ExpansionConfig::default("proc_macro".to_string(), features);
let mut cx = ExtCtxt::new(sess, ecfg, resolver, None);
let mut collect = CollectProcMacros {

View File

@ -1,6 +1,7 @@
use rustc_ast::{self as ast, attr};
use rustc_expand::base::{ExtCtxt, ResolverExpand};
use rustc_expand::expand::ExpansionConfig;
use rustc_feature::Features;
use rustc_session::Session;
use rustc_span::edition::Edition::*;
use rustc_span::hygiene::AstPass;
@ -13,6 +14,7 @@ pub fn inject(
pre_configured_attrs: &[ast::Attribute],
resolver: &mut dyn ResolverExpand,
sess: &Session,
features: &Features,
) -> usize {
let orig_num_items = krate.items.len();
let edition = sess.parse_sess.edition;
@ -39,7 +41,7 @@ pub fn inject(
let span = DUMMY_SP.with_def_site_ctxt(expn_id.to_expn_id());
let call_site = DUMMY_SP.with_call_site_ctxt(expn_id.to_expn_id());
let ecfg = ExpansionConfig::default("std_lib_injection".to_string());
let ecfg = ExpansionConfig::default("std_lib_injection".to_string(), features);
let cx = ExtCtxt::new(sess, ecfg, resolver, None);
// .rev() to preserve ordering above in combination with insert(0, ...)

View File

@ -41,7 +41,12 @@ struct TestCtxt<'a> {
/// Traverse the crate, collecting all the test functions, eliding any
/// existing main functions, and synthesizing a main test harness
pub fn inject(krate: &mut ast::Crate, sess: &Session, resolver: &mut dyn ResolverExpand) {
pub fn inject(
krate: &mut ast::Crate,
sess: &Session,
features: &Features,
resolver: &mut dyn ResolverExpand,
) {
let span_diagnostic = sess.diagnostic();
let panic_strategy = sess.panic_strategy();
let platform_panic_strategy = sess.target.panic_strategy;
@ -76,7 +81,7 @@ pub fn inject(krate: &mut ast::Crate, sess: &Session, resolver: &mut dyn Resolve
resolver,
reexport_test_harness_main,
krate,
&sess.features_untracked(),
features,
panic_strategy,
test_runner,
)
@ -243,9 +248,7 @@ fn generate_test_harness(
panic_strategy: PanicStrategy,
test_runner: Option<ast::Path>,
) {
let mut econfig = ExpansionConfig::default("test".to_string());
econfig.features = Some(features);
let econfig = ExpansionConfig::default("test".to_string(), features);
let ext_cx = ExtCtxt::new(sess, econfig, resolver, None);
let expn_id = ext_cx.resolver.expansion_for_ast_pass(

View File

@ -12,6 +12,7 @@
target_arch = "mips",
target_arch = "mips32r6",
target_arch = "powerpc",
target_arch = "csky",
target_arch = "powerpc64"))]
const MIN_ALIGN: usize = 8;
#[cfg(any(target_arch = "x86_64",

View File

@ -107,7 +107,7 @@ enum ConstraintOrRegister {
impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
fn codegen_inline_asm(&mut self, template: &[InlineAsmTemplatePiece], rust_operands: &[InlineAsmOperandRef<'tcx, Self>], options: InlineAsmOptions, span: &[Span], _instance: Instance<'_>, _dest_catch_funclet: Option<(Self::BasicBlock, Self::BasicBlock, Option<&Self::Funclet>)>) {
fn codegen_inline_asm(&mut self, template: &[InlineAsmTemplatePiece], rust_operands: &[InlineAsmOperandRef<'tcx, Self>], options: InlineAsmOptions, span: &[Span], instance: Instance<'_>, _dest_catch_funclet: Option<(Self::BasicBlock, Self::BasicBlock, Option<&Self::Funclet>)>) {
if options.contains(InlineAsmOptions::MAY_UNWIND) {
self.sess()
.create_err(UnwindingInlineAsm { span: span[0] })
@ -173,7 +173,7 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
let is_target_supported = reg.reg_class().supported_types(asm_arch).iter()
.any(|&(_, feature)| {
if let Some(feature) = feature {
self.tcx.sess.target_features.contains(&feature)
self.tcx.asm_target_features(instance.def_id()).contains(&feature)
} else {
true // Register class is unconditionally supported
}
@ -597,6 +597,8 @@ fn reg_to_gcc(reg: InlineAsmRegOrRegClass) -> ConstraintOrRegister {
InlineAsmRegClass::M68k(M68kInlineAsmRegClass::reg) => "r",
InlineAsmRegClass::M68k(M68kInlineAsmRegClass::reg_addr) => "a",
InlineAsmRegClass::M68k(M68kInlineAsmRegClass::reg_data) => "d",
InlineAsmRegClass::CSKY(CSKYInlineAsmRegClass::reg) => "r",
InlineAsmRegClass::CSKY(CSKYInlineAsmRegClass::freg) => "f",
InlineAsmRegClass::Mips(MipsInlineAsmRegClass::reg) => "d", // more specific than "r"
InlineAsmRegClass::Mips(MipsInlineAsmRegClass::freg) => "f",
InlineAsmRegClass::Msp430(Msp430InlineAsmRegClass::reg) => "r",
@ -673,6 +675,8 @@ fn dummy_output_type<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, reg: InlineAsmRegCl
InlineAsmRegClass::M68k(M68kInlineAsmRegClass::reg) => cx.type_i32(),
InlineAsmRegClass::M68k(M68kInlineAsmRegClass::reg_addr) => cx.type_i32(),
InlineAsmRegClass::M68k(M68kInlineAsmRegClass::reg_data) => cx.type_i32(),
InlineAsmRegClass::CSKY(CSKYInlineAsmRegClass::reg) => cx.type_i32(),
InlineAsmRegClass::CSKY(CSKYInlineAsmRegClass::freg) => cx.type_f32(),
InlineAsmRegClass::Mips(MipsInlineAsmRegClass::reg) => cx.type_i32(),
InlineAsmRegClass::Mips(MipsInlineAsmRegClass::freg) => cx.type_f32(),
InlineAsmRegClass::Msp430(_) => unimplemented!(),
@ -860,6 +864,7 @@ fn modifier_to_gcc(arch: InlineAsmArch, reg: InlineAsmRegClass, modifier: Option
InlineAsmRegClass::S390x(_) => None,
InlineAsmRegClass::Msp430(_) => None,
InlineAsmRegClass::M68k(_) => None,
InlineAsmRegClass::CSKY(_) => None,
InlineAsmRegClass::SpirV(SpirVInlineAsmRegClass::reg) => {
bug!("LLVM backend does not support SPIR-V")
}

View File

@ -11,7 +11,7 @@ bitflags = "1.0"
cstr = "0.2"
libc = "0.2"
measureme = "10.0.0"
object = { version = "0.31.1", default-features = false, features = [
object = { version = "0.32.0", default-features = false, features = [
"std",
"read",
] }

View File

@ -9,7 +9,7 @@ use rustc_middle::ty::TyCtxt;
use rustc_session::config::{DebugInfo, OomStrategy};
use crate::debuginfo;
use crate::llvm::{self, False, True};
use crate::llvm::{self, Context, False, Module, True, Type};
use crate::ModuleLlvm;
pub(crate) unsafe fn codegen(
@ -29,7 +29,6 @@ pub(crate) unsafe fn codegen(
};
let i8 = llvm::LLVMInt8TypeInContext(llcx);
let i8p = llvm::LLVMPointerTypeInContext(llcx, 0);
let void = llvm::LLVMVoidTypeInContext(llcx);
if kind == AllocatorKind::Default {
for method in ALLOCATOR_METHODS {
@ -54,102 +53,25 @@ pub(crate) unsafe fn codegen(
panic!("invalid allocator output")
}
};
let ty = llvm::LLVMFunctionType(
output.unwrap_or(void),
args.as_ptr(),
args.len() as c_uint,
False,
);
let name = global_fn_name(method.name);
let llfn =
llvm::LLVMRustGetOrInsertFunction(llmod, name.as_ptr().cast(), name.len(), ty);
if tcx.sess.target.default_hidden_visibility {
llvm::LLVMRustSetVisibility(llfn, llvm::Visibility::Hidden);
}
if tcx.sess.must_emit_unwind_tables() {
let uwtable = attributes::uwtable_attr(llcx);
attributes::apply_to_llfn(llfn, llvm::AttributePlace::Function, &[uwtable]);
}
let from_name = global_fn_name(method.name);
let to_name = default_fn_name(method.name);
let callee = default_fn_name(method.name);
let callee =
llvm::LLVMRustGetOrInsertFunction(llmod, callee.as_ptr().cast(), callee.len(), ty);
llvm::LLVMRustSetVisibility(callee, llvm::Visibility::Hidden);
let llbb = llvm::LLVMAppendBasicBlockInContext(llcx, llfn, "entry\0".as_ptr().cast());
let llbuilder = llvm::LLVMCreateBuilderInContext(llcx);
llvm::LLVMPositionBuilderAtEnd(llbuilder, llbb);
let args = args
.iter()
.enumerate()
.map(|(i, _)| llvm::LLVMGetParam(llfn, i as c_uint))
.collect::<Vec<_>>();
let ret = llvm::LLVMRustBuildCall(
llbuilder,
ty,
callee,
args.as_ptr(),
args.len() as c_uint,
[].as_ptr(),
0 as c_uint,
);
llvm::LLVMSetTailCall(ret, True);
if output.is_some() {
llvm::LLVMBuildRet(llbuilder, ret);
} else {
llvm::LLVMBuildRetVoid(llbuilder);
}
llvm::LLVMDisposeBuilder(llbuilder);
create_wrapper_function(tcx, llcx, llmod, &from_name, &to_name, &args, output, false);
}
}
// rust alloc error handler
let args = [usize, usize]; // size, align
let ty = llvm::LLVMFunctionType(void, args.as_ptr(), args.len() as c_uint, False);
let name = "__rust_alloc_error_handler";
let llfn = llvm::LLVMRustGetOrInsertFunction(llmod, name.as_ptr().cast(), name.len(), ty);
// -> ! DIFlagNoReturn
let no_return = llvm::AttributeKind::NoReturn.create_attr(llcx);
attributes::apply_to_llfn(llfn, llvm::AttributePlace::Function, &[no_return]);
if tcx.sess.target.default_hidden_visibility {
llvm::LLVMRustSetVisibility(llfn, llvm::Visibility::Hidden);
}
if tcx.sess.must_emit_unwind_tables() {
let uwtable = attributes::uwtable_attr(llcx);
attributes::apply_to_llfn(llfn, llvm::AttributePlace::Function, &[uwtable]);
}
let callee = alloc_error_handler_name(alloc_error_handler_kind);
let callee = llvm::LLVMRustGetOrInsertFunction(llmod, callee.as_ptr().cast(), callee.len(), ty);
// -> ! DIFlagNoReturn
attributes::apply_to_llfn(callee, llvm::AttributePlace::Function, &[no_return]);
llvm::LLVMRustSetVisibility(callee, llvm::Visibility::Hidden);
let llbb = llvm::LLVMAppendBasicBlockInContext(llcx, llfn, "entry\0".as_ptr().cast());
let llbuilder = llvm::LLVMCreateBuilderInContext(llcx);
llvm::LLVMPositionBuilderAtEnd(llbuilder, llbb);
let args = args
.iter()
.enumerate()
.map(|(i, _)| llvm::LLVMGetParam(llfn, i as c_uint))
.collect::<Vec<_>>();
let ret = llvm::LLVMRustBuildCall(
llbuilder,
ty,
callee,
args.as_ptr(),
args.len() as c_uint,
[].as_ptr(),
0 as c_uint,
);
llvm::LLVMSetTailCall(ret, True);
llvm::LLVMBuildRetVoid(llbuilder);
llvm::LLVMDisposeBuilder(llbuilder);
create_wrapper_function(
tcx,
llcx,
llmod,
"__rust_alloc_error_handler",
&alloc_error_handler_name(alloc_error_handler_kind),
&[usize, usize], // size, align
None,
true,
);
// __rust_alloc_error_handler_should_panic
let name = OomStrategy::SYMBOL;
@ -175,3 +97,79 @@ pub(crate) unsafe fn codegen(
dbg_cx.finalize(tcx.sess);
}
}
fn create_wrapper_function(
tcx: TyCtxt<'_>,
llcx: &Context,
llmod: &Module,
from_name: &str,
to_name: &str,
args: &[&Type],
output: Option<&Type>,
no_return: bool,
) {
unsafe {
let ty = llvm::LLVMFunctionType(
output.unwrap_or_else(|| llvm::LLVMVoidTypeInContext(llcx)),
args.as_ptr(),
args.len() as c_uint,
False,
);
let llfn = llvm::LLVMRustGetOrInsertFunction(
llmod,
from_name.as_ptr().cast(),
from_name.len(),
ty,
);
let no_return = if no_return {
// -> ! DIFlagNoReturn
let no_return = llvm::AttributeKind::NoReturn.create_attr(llcx);
attributes::apply_to_llfn(llfn, llvm::AttributePlace::Function, &[no_return]);
Some(no_return)
} else {
None
};
if tcx.sess.target.default_hidden_visibility {
llvm::LLVMRustSetVisibility(llfn, llvm::Visibility::Hidden);
}
if tcx.sess.must_emit_unwind_tables() {
let uwtable = attributes::uwtable_attr(llcx);
attributes::apply_to_llfn(llfn, llvm::AttributePlace::Function, &[uwtable]);
}
let callee =
llvm::LLVMRustGetOrInsertFunction(llmod, to_name.as_ptr().cast(), to_name.len(), ty);
if let Some(no_return) = no_return {
// -> ! DIFlagNoReturn
attributes::apply_to_llfn(callee, llvm::AttributePlace::Function, &[no_return]);
}
llvm::LLVMRustSetVisibility(callee, llvm::Visibility::Hidden);
let llbb = llvm::LLVMAppendBasicBlockInContext(llcx, llfn, "entry\0".as_ptr().cast());
let llbuilder = llvm::LLVMCreateBuilderInContext(llcx);
llvm::LLVMPositionBuilderAtEnd(llbuilder, llbb);
let args = args
.iter()
.enumerate()
.map(|(i, _)| llvm::LLVMGetParam(llfn, i as c_uint))
.collect::<Vec<_>>();
let ret = llvm::LLVMRustBuildCall(
llbuilder,
ty,
callee,
args.as_ptr(),
args.len() as c_uint,
[].as_ptr(),
0 as c_uint,
);
llvm::LLVMSetTailCall(ret, True);
if output.is_some() {
llvm::LLVMBuildRet(llbuilder, ret);
} else {
llvm::LLVMBuildRetVoid(llbuilder);
}
llvm::LLVMDisposeBuilder(llbuilder);
}
}
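For intuition, a hedged Rust-level analogue of what the emitted IR amounts to (symbol names simplified, nothing here is the code rustc actually generates): the `from_name` symbol becomes a thin shim that forwards all of its arguments to the `to_name` callee, mirroring the forwarding call built with `LLVMRustBuildCall` above.

// Hypothetical stand-ins for the generated symbols; the real ones come from
// `global_fn_name` / `default_fn_name` and are emitted as LLVM IR, not Rust.
unsafe fn default_alloc_impl(size: usize, align: usize) -> *mut u8 {
    std::alloc::alloc(std::alloc::Layout::from_size_align_unchecked(size, align))
}

unsafe fn alloc_shim(size: usize, align: usize) -> *mut u8 {
    // The wrapper only forwards its parameters, like the (tail) call above.
    default_alloc_impl(size, align)
}

fn main() {
    unsafe {
        let layout = std::alloc::Layout::from_size_align_unchecked(16, 8);
        let p = alloc_shim(16, 8);
        if !p.is_null() {
            std::alloc::dealloc(p, layout);
        }
    }
}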

View File

@ -44,9 +44,10 @@ impl<'ll, 'tcx> AsmBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
let is_target_supported = |reg_class: InlineAsmRegClass| {
for &(_, feature) in reg_class.supported_types(asm_arch) {
if let Some(feature) = feature {
let codegen_fn_attrs = self.tcx.codegen_fn_attrs(instance.def_id());
if self.tcx.sess.target_features.contains(&feature)
|| codegen_fn_attrs.target_features.contains(&feature)
if self
.tcx
.asm_target_features(instance.def_id())
.contains(&feature)
{
return true;
}
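A hedged, user-level illustration of why the per-instance `asm_target_features` query matters (the x86_64 target, `avx` feature, and register class below are illustrative, not part of this change): a register class gated on a feature should be usable inside a function that enables that feature via `#[target_feature]`, even when the session-wide target features do not include it.

#[cfg(target_arch = "x86_64")]
#[allow(dead_code)]
mod demo {
    use std::arch::x86_64::__m256;

    // Enabling `avx` on this function alone is what the per-instance query
    // takes into account when validating the `ymm_reg` operand below.
    #[target_feature(enable = "avx")]
    pub unsafe fn zeroed_ymm() -> __m256 {
        let out: __m256;
        std::arch::asm!("vxorps {0}, {0}, {0}", out(ymm_reg) out);
        out
    }
}

fn main() {}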
@ -261,6 +262,7 @@ impl<'ll, 'tcx> AsmBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
InlineAsmArch::M68k => {
constraints.push("~{ccr}".to_string());
}
InlineAsmArch::CSKY => {}
}
}
if !options.contains(InlineAsmOptions::NOMEM) {
@ -693,6 +695,8 @@ fn reg_to_llvm(reg: InlineAsmRegOrRegClass, layout: Option<&TyAndLayout<'_>>) ->
InlineAsmRegClass::M68k(M68kInlineAsmRegClass::reg) => "r",
InlineAsmRegClass::M68k(M68kInlineAsmRegClass::reg_addr) => "a",
InlineAsmRegClass::M68k(M68kInlineAsmRegClass::reg_data) => "d",
InlineAsmRegClass::CSKY(CSKYInlineAsmRegClass::reg) => "r",
InlineAsmRegClass::CSKY(CSKYInlineAsmRegClass::freg) => "f",
InlineAsmRegClass::SpirV(SpirVInlineAsmRegClass::reg) => {
bug!("LLVM backend does not support SPIR-V")
}
@ -792,6 +796,7 @@ fn modifier_to_llvm(
bug!("LLVM backend does not support SPIR-V")
}
InlineAsmRegClass::M68k(_) => None,
InlineAsmRegClass::CSKY(_) => None,
InlineAsmRegClass::Err => unreachable!(),
}
}
@ -868,6 +873,8 @@ fn dummy_output_type<'ll>(cx: &CodegenCx<'ll, '_>, reg: InlineAsmRegClass) -> &'
InlineAsmRegClass::M68k(M68kInlineAsmRegClass::reg) => cx.type_i32(),
InlineAsmRegClass::M68k(M68kInlineAsmRegClass::reg_addr) => cx.type_i32(),
InlineAsmRegClass::M68k(M68kInlineAsmRegClass::reg_data) => cx.type_i32(),
InlineAsmRegClass::CSKY(CSKYInlineAsmRegClass::reg) => cx.type_i32(),
InlineAsmRegClass::CSKY(CSKYInlineAsmRegClass::freg) => cx.type_f32(),
InlineAsmRegClass::SpirV(SpirVInlineAsmRegClass::reg) => {
bug!("LLVM backend does not support SPIR-V")
}

View File

@ -4,14 +4,14 @@ version = "0.0.0"
edition = "2021"
[dependencies]
ar_archive_writer = "0.1.3"
ar_archive_writer = "0.1.5"
bitflags = "1.2.1"
cc = "1.0.69"
itertools = "0.10.1"
tracing = "0.1"
jobserver = "0.1.22"
tempfile = "3.2"
thorin-dwp = "0.6"
thorin-dwp = "0.7"
pathdiff = "0.2.0"
serde_json = "1.0.59"
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
@ -42,7 +42,7 @@ rustc_session = { path = "../rustc_session" }
libc = "0.2.50"
[dependencies.object]
version = "0.31.1"
version = "0.32.0"
default-features = false
features = ["read_core", "elf", "macho", "pe", "xcoff", "unaligned", "archive", "write"]

View File

@ -2991,25 +2991,10 @@ fn add_lld_args(cmd: &mut dyn Linker, sess: &Session, flavor: LinkerFlavor) {
return;
}
let self_contained_linker = sess.opts.cg.link_self_contained.linker();
// FIXME: some targets default to using `lld`, but users can only override the linker on the CLI
// and cannot yet select the precise linker flavor to opt out of that. See for example issue
// #113597 for the `thumbv6m-none-eabi` target: a driver is used, and its default linker
// conflicts with the target's flavor, causing unexpected arguments being passed.
//
// Until the new `LinkerFlavor`-like CLI options are stabilized, we only adopt MCP510's behavior
// if its dedicated unstable CLI flags are used, to keep the current sub-optimal stable
// behavior.
let using_mcp510 =
self_contained_linker || sess.opts.cg.linker_flavor.is_some_and(|f| f.is_unstable());
if !using_mcp510 && !unstable_use_lld {
return;
}
// 1. Implement the "self-contained" part of this feature by adding rustc distribution
// directories to the tool's search path.
if self_contained_linker || unstable_use_lld {
let self_contained_linker = sess.opts.cg.link_self_contained.linker() || unstable_use_lld;
if self_contained_linker {
for path in sess.get_tools_search_paths(false) {
cmd.arg({
let mut arg = OsString::from("-B");

View File

@ -209,6 +209,7 @@ pub(crate) fn create_object_file(sess: &Session) -> Option<write::Object<'static
"hexagon" => Architecture::Hexagon,
"bpf" => Architecture::Bpf,
"loongarch64" => Architecture::LoongArch64,
"csky" => Architecture::Csky,
// Unsupported architecture.
_ => return None,
};
@ -307,6 +308,13 @@ pub(crate) fn create_object_file(sess: &Session) -> Option<write::Object<'static
// the appropriate EF_AVR_ARCH flag.
ef_avr_arch(&sess.target.options.cpu)
}
Architecture::Csky => {
let e_flags = match sess.target.options.abi.as_ref() {
"abiv2" => elf::EF_CSKY_ABIV2,
_ => elf::EF_CSKY_ABIV1,
};
e_flags
}
_ => 0,
};
// adapted from LLVM's `MCELFObjectTargetWriter::getOSABI`

View File

@ -2,7 +2,6 @@
#![feature(associated_type_bounds)]
#![feature(box_patterns)]
#![feature(if_let_guard)]
#![feature(int_roundings)]
#![feature(let_chains)]
#![feature(negative_impls)]
#![feature(never_type)]

View File

@ -296,6 +296,52 @@ const WASM_ALLOWED_FEATURES: &[(&str, Option<Symbol>)] = &[
const BPF_ALLOWED_FEATURES: &[(&str, Option<Symbol>)] = &[("alu32", Some(sym::bpf_target_feature))];
const CSKY_ALLOWED_FEATURES: &[(&str, Option<Symbol>)] = &[
// tidy-alphabetical-start
("10e60", Some(sym::csky_target_feature)),
("2e3", Some(sym::csky_target_feature)),
("3e3r1", Some(sym::csky_target_feature)),
("3e3r2", Some(sym::csky_target_feature)),
("3e3r3", Some(sym::csky_target_feature)),
("3e7", Some(sym::csky_target_feature)),
("7e10", Some(sym::csky_target_feature)),
("cache", Some(sym::csky_target_feature)),
("doloop", Some(sym::csky_target_feature)),
("dsp1e2", Some(sym::csky_target_feature)),
("dspe60", Some(sym::csky_target_feature)),
("e1", Some(sym::csky_target_feature)),
("e2", Some(sym::csky_target_feature)),
("edsp", Some(sym::csky_target_feature)),
("elrw", Some(sym::csky_target_feature)),
("float1e2", Some(sym::csky_target_feature)),
("float1e3", Some(sym::csky_target_feature)),
("float3e4", Some(sym::csky_target_feature)),
("float7e60", Some(sym::csky_target_feature)),
("floate1", Some(sym::csky_target_feature)),
("hard-tp", Some(sym::csky_target_feature)),
("high-registers", Some(sym::csky_target_feature)),
("hwdiv", Some(sym::csky_target_feature)),
("mp", Some(sym::csky_target_feature)),
("mp1e2", Some(sym::csky_target_feature)),
("nvic", Some(sym::csky_target_feature)),
("trust", Some(sym::csky_target_feature)),
("vdsp2e60f", Some(sym::csky_target_feature)),
("vdspv1", Some(sym::csky_target_feature)),
("vdspv2", Some(sym::csky_target_feature)),
// tidy-alphabetical-end
//fpu
// tidy-alphabetical-start
("fdivdu", Some(sym::csky_target_feature)),
("fpuv2_df", Some(sym::csky_target_feature)),
("fpuv2_sf", Some(sym::csky_target_feature)),
("fpuv3_df", Some(sym::csky_target_feature)),
("fpuv3_hf", Some(sym::csky_target_feature)),
("fpuv3_hi", Some(sym::csky_target_feature)),
("fpuv3_sf", Some(sym::csky_target_feature)),
("hard-float", Some(sym::csky_target_feature)),
("hard-float-abi", Some(sym::csky_target_feature)),
// tidy-alphabetical-end
];
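As a hedged sketch of how an allow-listed feature like these is consumed, the snippet below gates a function on the new `csky_target_feature` gate; the chosen feature (`hwdiv`) and function body are illustrative only, and the attribute only takes effect when actually compiling for a csky target on a recent nightly.

#![feature(csky_target_feature)]

// On non-csky targets the item is simply compiled out.
#[cfg(target_arch = "csky")]
#[allow(dead_code)]
#[target_feature(enable = "hwdiv")]
unsafe fn div_with_hw(a: u32, b: u32) -> u32 {
    a / b
}

fn main() {}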
/// When rustdoc is running, provide a list of all known features so that all their respective
/// primitives may be documented.
///
@ -311,6 +357,7 @@ pub fn all_known_features() -> impl Iterator<Item = (&'static str, Option<Symbol
.chain(RISCV_ALLOWED_FEATURES.iter())
.chain(WASM_ALLOWED_FEATURES.iter())
.chain(BPF_ALLOWED_FEATURES.iter())
.chain(CSKY_ALLOWED_FEATURES)
.cloned()
}
@ -325,6 +372,7 @@ pub fn supported_target_features(sess: &Session) -> &'static [(&'static str, Opt
"riscv32" | "riscv64" => RISCV_ALLOWED_FEATURES,
"wasm32" | "wasm64" => WASM_ALLOWED_FEATURES,
"bpf" => BPF_ALLOWED_FEATURES,
"csky" => CSKY_ALLOWED_FEATURES,
_ => &[],
}
}
@ -396,6 +444,7 @@ pub fn from_target_feature(
Some(sym::ermsb_target_feature) => rust_features.ermsb_target_feature,
Some(sym::bpf_target_feature) => rust_features.bpf_target_feature,
Some(sym::aarch64_ver_target_feature) => rust_features.aarch64_ver_target_feature,
Some(sym::csky_target_feature) => rust_features.csky_target_feature,
Some(name) => bug!("unknown target feature gate {}", name),
None => true,
};

View File

@ -772,7 +772,7 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
};
match implsrc {
Ok(Some(ImplSource::Param(ty::BoundConstness::ConstIfConst, _))) => {
Ok(Some(ImplSource::Param(_))) if tcx.features().effects => {
debug!(
"const_trait_impl: provided {:?} via where-clause in {:?}",
trait_ref, param_env

View File

@ -174,8 +174,7 @@ impl Qualif for NeedsNonConstDrop {
if !matches!(
impl_src,
ImplSource::Builtin(BuiltinImplSource::Misc, _)
| ImplSource::Param(ty::BoundConstness::ConstIfConst, _)
ImplSource::Builtin(BuiltinImplSource::Misc, _) | ImplSource::Param(_)
) {
// If our const destruct candidate is not ConstDestruct or implied by the param env,
// then it's bad

View File

@ -18,6 +18,7 @@ use rustc_errors::{
Applicability, DiagnosticBuilder, DiagnosticMessage, ErrorGuaranteed, IntoDiagnostic,
MultiSpan, PResult,
};
use rustc_feature::Features;
use rustc_lint_defs::builtin::PROC_MACRO_BACK_COMPAT;
use rustc_lint_defs::{BufferedEarlyLint, BuiltinLintDiagnostics, RegisteredTools};
use rustc_parse::{self, parser, MACRO_ARGUMENTS};
@ -767,6 +768,7 @@ impl SyntaxExtension {
/// and other properties converted from attributes.
pub fn new(
sess: &Session,
features: &Features,
kind: SyntaxExtensionKind,
span: Span,
helper_attrs: Vec<Symbol>,
@ -816,7 +818,7 @@ impl SyntaxExtension {
allow_internal_unstable: (!allow_internal_unstable.is_empty())
.then(|| allow_internal_unstable.into()),
stability: stability.map(|(s, _)| s),
deprecation: attr::find_deprecation(&sess, attrs).map(|(d, _)| d),
deprecation: attr::find_deprecation(&sess, features, attrs).map(|(d, _)| d),
helper_attrs,
edition,
builtin_name,
@ -957,6 +959,7 @@ pub trait LintStoreExpand {
fn pre_expansion_lint(
&self,
sess: &Session,
features: &Features,
registered_tools: &RegisteredTools,
node_id: NodeId,
attrs: &[Attribute],

View File

@ -796,7 +796,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
| Annotatable::FieldDef(..)
| Annotatable::Variant(..) => panic!("unexpected annotatable"),
};
if self.cx.ecfg.proc_macro_hygiene() {
if self.cx.ecfg.features.proc_macro_hygiene {
return;
}
feature_err(
@ -834,7 +834,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
}
}
if !self.cx.ecfg.proc_macro_hygiene() {
if !self.cx.ecfg.features.proc_macro_hygiene {
annotatable
.visit_with(&mut GateProcMacroInput { parse_sess: &self.cx.sess.parse_sess });
}
@ -1122,6 +1122,7 @@ impl InvocationCollectorNode for P<ast::Item> {
if let Some(lint_store) = ecx.lint_store {
lint_store.pre_expansion_lint(
ecx.sess,
ecx.ecfg.features,
ecx.resolver.registered_tools(),
ecx.current_expansion.lint_node_id,
&attrs,
@ -1580,7 +1581,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
fn cfg(&self) -> StripUnconfigured<'_> {
StripUnconfigured {
sess: &self.cx.sess,
features: self.cx.ecfg.features,
features: Some(self.cx.ecfg.features),
config_tokens: false,
lint_node_id: self.cx.current_expansion.lint_node_id,
}
@ -1676,7 +1677,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
// Detect use of feature-gated or invalid attributes on macro invocations
// since they will not be detected after macro expansion.
fn check_attributes(&self, attrs: &[ast::Attribute], call: &ast::MacCall) {
let features = self.cx.ecfg.features.unwrap();
let features = self.cx.ecfg.features;
let mut attrs = attrs.iter().peekable();
let mut span: Option<Span> = None;
while let Some(attr) = attrs.next() {
@ -1976,7 +1977,7 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> {
pub struct ExpansionConfig<'feat> {
pub crate_name: String,
pub features: Option<&'feat Features>,
pub features: &'feat Features,
pub recursion_limit: Limit,
pub trace_mac: bool,
/// If false, strip `#[test]` nodes
@ -1987,11 +1988,11 @@ pub struct ExpansionConfig<'feat> {
pub proc_macro_backtrace: bool,
}
impl<'feat> ExpansionConfig<'feat> {
pub fn default(crate_name: String) -> ExpansionConfig<'static> {
impl ExpansionConfig<'_> {
pub fn default(crate_name: String, features: &Features) -> ExpansionConfig<'_> {
ExpansionConfig {
crate_name,
features: None,
features,
recursion_limit: Limit::new(1024),
trace_mac: false,
should_test: false,
@ -1999,8 +2000,4 @@ impl<'feat> ExpansionConfig<'feat> {
proc_macro_backtrace: false,
}
}
fn proc_macro_hygiene(&self) -> bool {
self.features.is_some_and(|features| features.proc_macro_hygiene)
}
}

View File

@ -16,6 +16,7 @@ use rustc_ast_pretty::pprust;
use rustc_attr::{self as attr, TransparencyError};
use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
use rustc_errors::{Applicability, ErrorGuaranteed};
use rustc_feature::Features;
use rustc_lint_defs::builtin::{
RUST_2021_INCOMPATIBLE_OR_PATTERNS, SEMICOLON_IN_EXPRESSIONS_FROM_MACROS,
};
@ -375,6 +376,7 @@ pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
/// Converts a macro item into a syntax extension.
pub fn compile_declarative_macro(
sess: &Session,
features: &Features,
def: &ast::Item,
edition: Edition,
) -> (SyntaxExtension, Vec<(usize, Span)>) {
@ -382,6 +384,7 @@ pub fn compile_declarative_macro(
let mk_syn_ext = |expander| {
SyntaxExtension::new(
sess,
features,
SyntaxExtensionKind::LegacyBang(expander),
def.span,
Vec::new(),
@ -503,7 +506,7 @@ pub fn compile_declarative_macro(
true,
&sess.parse_sess,
def.id,
sess.features_untracked(),
features,
edition,
)
.pop()
@ -527,7 +530,7 @@ pub fn compile_declarative_macro(
false,
&sess.parse_sess,
def.id,
sess.features_untracked(),
features,
edition,
)
.pop()

View File

@ -282,6 +282,7 @@ declare_features! (
(active, arm_target_feature, "1.27.0", Some(44839), None),
(active, avx512_target_feature, "1.27.0", Some(44839), None),
(active, bpf_target_feature, "1.54.0", Some(44839), None),
(active, csky_target_feature, "CURRENT_RUSTC_VERSION", Some(44839), None),
(active, ermsb_target_feature, "1.49.0", Some(44839), None),
(active, hexagon_target_feature, "1.27.0", Some(44839), None),
(active, mips_target_feature, "1.27.0", Some(44839), None),

View File

@ -2148,7 +2148,7 @@ pub enum MatchSource {
/// A desugared `for _ in _ { .. }` loop.
ForLoopDesugar,
/// A desugared `?` operator.
TryDesugar,
TryDesugar(HirId),
/// A desugared `<expr>.await`.
AwaitDesugar,
/// A desugared `format_args!()`.
@ -2162,7 +2162,7 @@ impl MatchSource {
match self {
Normal => "match",
ForLoopDesugar => "for",
TryDesugar => "?",
TryDesugar(_) => "?",
AwaitDesugar => ".await",
FormatArgs => "format_args!()",
}

View File

@ -8,7 +8,7 @@ use rustc_attr as attr;
use rustc_errors::{Applicability, ErrorGuaranteed, MultiSpan};
use rustc_hir as hir;
use rustc_hir::def::{CtorKind, DefKind, Res};
use rustc_hir::def_id::{DefId, LocalDefId, CRATE_DEF_ID};
use rustc_hir::def_id::{DefId, LocalDefId, LocalModDefId};
use rustc_hir::intravisit::Visitor;
use rustc_hir::{ItemKind, Node, PathSegment};
use rustc_infer::infer::opaque_types::ConstrainOpaqueTypeRegionVisitor;
@ -1443,12 +1443,12 @@ pub(super) fn check_type_params_are_used<'tcx>(
}
}
pub(super) fn check_mod_item_types(tcx: TyCtxt<'_>, module_def_id: LocalDefId) {
pub(super) fn check_mod_item_types(tcx: TyCtxt<'_>, module_def_id: LocalModDefId) {
let module = tcx.hir_module_items(module_def_id);
for id in module.items() {
check_item_type(tcx, id);
}
if module_def_id == CRATE_DEF_ID {
if module_def_id == LocalModDefId::CRATE_DEF_ID {
super::entry::check_for_entry_fn(tcx);
}
}

View File

@ -308,6 +308,15 @@ fn compare_method_predicate_entailment<'tcx>(
}
if check_implied_wf == CheckImpliedWfMode::Check && !(impl_sig, trait_sig).references_error() {
// Select obligations to make progress on inference before processing
// the wf obligation below.
// FIXME(-Ztrait-solver=next): Not needed when the hack below is removed.
let errors = ocx.select_where_possible();
if !errors.is_empty() {
let reported = infcx.err_ctxt().report_fulfillment_errors(&errors);
return Err(reported);
}
// See #108544. Annoying, we can end up in cases where, because of winnowing,
// we pick param env candidates over a more general impl, leading to more
// stricter lifetime requirements than we would otherwise need. This can
@ -378,7 +387,7 @@ fn compare_method_predicate_entailment<'tcx>(
// lifetime parameters.
let outlives_env = OutlivesEnvironment::with_bounds(
param_env,
infcx.implied_bounds_tys(param_env, impl_m_def_id, wf_tys.clone()),
infcx.implied_bounds_tys(param_env, impl_m_def_id, wf_tys),
);
let errors = infcx.resolve_regions(&outlives_env);
if !errors.is_empty() {

View File

@ -68,7 +68,7 @@ impl<'a, 'tcx> InlineAsmCtxt<'a, 'tcx> {
let asm_ty = match *ty.kind() {
// `!` is allowed for input but not for output (issue #87802)
ty::Never if is_input => return None,
ty::Error(_) => return None,
_ if ty.references_error() => return None,
ty::Int(IntTy::I8) | ty::Uint(UintTy::U8) => Some(InlineAsmType::I8),
ty::Int(IntTy::I16) | ty::Uint(UintTy::U16) => Some(InlineAsmType::I16),
ty::Int(IntTy::I32) | ty::Uint(UintTy::U32) => Some(InlineAsmType::I32),

View File

@ -5,7 +5,7 @@ use rustc_ast as ast;
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet};
use rustc_errors::{pluralize, struct_span_err, Applicability, DiagnosticBuilder, ErrorGuaranteed};
use rustc_hir as hir;
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::def_id::{DefId, LocalDefId, LocalModDefId};
use rustc_hir::lang_items::LangItem;
use rustc_hir::ItemKind;
use rustc_infer::infer::outlives::env::{OutlivesEnvironment, RegionBoundPairs};
@ -1854,7 +1854,7 @@ impl<'tcx> WfCheckingCtxt<'_, 'tcx> {
}
}
fn check_mod_type_wf(tcx: TyCtxt<'_>, module: LocalDefId) {
fn check_mod_type_wf(tcx: TyCtxt<'_>, module: LocalModDefId) {
let items = tcx.hir_module_items(module);
items.par_items(|item| tcx.ensure().check_well_formed(item.owner_id));
items.par_impl_items(|item| tcx.ensure().check_well_formed(item.owner_id));

View File

@ -22,7 +22,7 @@ use rustc_data_structures::captures::Captures;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed, StashKey};
use rustc_hir as hir;
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::def_id::{DefId, LocalDefId, LocalModDefId};
use rustc_hir::intravisit::{self, Visitor};
use rustc_hir::{GenericParamKind, Node};
use rustc_infer::infer::{InferCtxt, TyCtxtInferExt};
@ -48,7 +48,7 @@ mod type_of;
///////////////////////////////////////////////////////////////////////////
// Main entry point
fn collect_mod_item_types(tcx: TyCtxt<'_>, module_def_id: LocalDefId) {
fn collect_mod_item_types(tcx: TyCtxt<'_>, module_def_id: LocalModDefId) {
tcx.hir().visit_item_likes_in_module(module_def_id, &mut CollectItemTypesVisitor { tcx });
}

View File

@ -59,7 +59,7 @@ struct ParameterCollector {
impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for ParameterCollector {
fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
match *t.kind() {
ty::Alias(ty::Projection | ty::Inherent, ..) if !self.include_nonconstraining => {
ty::Alias(..) if !self.include_nonconstraining => {
// projections are not injective
return ControlFlow::Continue(());
}
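A hedged, self-contained illustration of the "projections are not injective" point from the comment: two different types can project to the same associated type, so a projection appearing in an impl header cannot pin down the impl's type parameter on its own.

trait Project {
    type Out;
}

#[allow(dead_code)]
struct A;
#[allow(dead_code)]
struct B;

impl Project for A { type Out = u32; }
impl Project for B { type Out = u32; } // distinct Self types, identical Out

// Given only `<T as Project>::Out = u32`, `T` could be `A` or `B`, which is
// why a parameter used solely inside a projection counts as unconstrained.
fn main() {}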

View File

@ -14,7 +14,7 @@ use min_specialization::check_min_specialization;
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::struct_span_err;
use rustc_hir::def::DefKind;
use rustc_hir::def_id::LocalDefId;
use rustc_hir::def_id::{LocalDefId, LocalModDefId};
use rustc_middle::query::Providers;
use rustc_middle::ty::{self, TyCtxt, TypeVisitableExt};
use rustc_span::{Span, Symbol};
@ -51,7 +51,7 @@ mod min_specialization;
/// impl<'a> Trait<Foo> for Bar { type X = &'a i32; }
/// // ^ 'a is unused and appears in assoc type, error
/// ```
fn check_mod_impl_wf(tcx: TyCtxt<'_>, module_def_id: LocalDefId) {
fn check_mod_impl_wf(tcx: TyCtxt<'_>, module_def_id: LocalModDefId) {
let min_specialization = tcx.features().min_specialization;
let module = tcx.hir_module_items(module_def_id);
for id in module.items() {

View File

@ -107,7 +107,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let (span, code) = match prior_arm {
// The reason for the first arm to fail is not that the match arms diverge,
// but rather that there's a prior obligation that doesn't hold.
None => (arm_span, ObligationCauseCode::BlockTailExpression(arm.body.hir_id)),
None => {
(arm_span, ObligationCauseCode::BlockTailExpression(arm.body.hir_id, match_src))
}
Some((prior_arm_block_id, prior_arm_ty, prior_arm_span)) => (
expr.span,
ObligationCauseCode::MatchExpressionArm(Box::new(MatchExpressionArmCause {
@ -120,7 +122,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
scrut_span: scrut.span,
source: match_src,
prior_arms: other_arms.clone(),
scrut_hir_id: scrut.hir_id,
opt_suggest_box_span,
})),
),
@ -145,7 +146,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
other_arms.remove(0);
}
prior_arm = Some((arm_block_id, arm_ty, arm_span));
if !arm_ty.is_never() {
// When a match arm has type `!`, then it doesn't influence the expected type for
// the following arm. If all of the prior arms are `!`, then the influence comes
// from elsewhere and we shouldn't point to any previous arm.
prior_arm = Some((arm_block_id, arm_ty, arm_span));
}
}
// If all of the arms in the `match` diverge,
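A hedged, minimal example of the behavior the added `is_never` check implements: the first arm below has type `!`, so it coerces to whatever the other arms produce and should not become the "prior arm" that later arms are compared (and diagnosed) against.

fn describe(n: u32) -> &'static str {
    match n {
        0 => panic!("zero unsupported"), // diverges: type `!`, coerces freely
        1 => "one",
        _ => "many",
    }
}

fn main() {
    assert_eq!(describe(2), "many");
}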

View File

@ -1603,7 +1603,7 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> {
);
err.span_label(cause.span, "return type is not `()`");
}
ObligationCauseCode::BlockTailExpression(blk_id) => {
ObligationCauseCode::BlockTailExpression(blk_id, ..) => {
let parent_id = fcx.tcx.hir().parent_id(blk_id);
err = self.report_return_mismatched_types(
cause,
@ -1650,10 +1650,7 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> {
augment_error(&mut err);
let is_insufficiently_polymorphic =
matches!(coercion_error, TypeError::RegionsInsufficientlyPolymorphic(..));
if !is_insufficiently_polymorphic && let Some(expr) = expression {
if let Some(expr) = expression {
fcx.emit_coerce_suggestions(
&mut err,
expr,
@ -1751,7 +1748,7 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> {
) && !in_external_macro(fcx.tcx.sess, cond_expr.span)
&& !matches!(
cond_expr.kind,
hir::ExprKind::Match(.., hir::MatchSource::TryDesugar)
hir::ExprKind::Match(.., hir::MatchSource::TryDesugar(_))
)
{
err.span_label(cond_expr.span, "expected this to be `()`");

View File

@ -84,6 +84,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
self.annotate_expected_due_to_let_ty(err, expr, error);
// FIXME(#73154): For now, we do leak check when coercing function
// pointers in typeck, instead of only during borrowck. This can lead
// to these `RegionsInsufficientlyPolymorphic` errors that aren't helpful.
if matches!(error, Some(TypeError::RegionsInsufficientlyPolymorphic(..))) {
return;
}
if self.is_destruct_assignment_desugaring(expr) {
return;
}
@ -263,22 +270,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let expr_ty = self.resolve_vars_if_possible(checked_ty);
let mut err = self.err_ctxt().report_mismatched_types(&cause, expected, expr_ty, e);
let is_insufficiently_polymorphic =
matches!(e, TypeError::RegionsInsufficientlyPolymorphic(..));
// FIXME(#73154): For now, we do leak check when coercing function
// pointers in typeck, instead of only during borrowck. This can lead
// to these `RegionsInsufficientlyPolymorphic` errors that aren't helpful.
if !is_insufficiently_polymorphic {
self.emit_coerce_suggestions(
&mut err,
expr,
expr_ty,
expected,
expected_ty_expr,
Some(e),
);
}
self.emit_coerce_suggestions(&mut err, expr, expr_ty, expected, expected_ty_expr, Some(e));
(expected, Some(err))
}

View File

@ -1580,7 +1580,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let coerce = ctxt.coerce.as_mut().unwrap();
if let Some((tail_expr, tail_expr_ty)) = tail_expr_ty {
let span = self.get_expr_coercion_span(tail_expr);
let cause = self.cause(span, ObligationCauseCode::BlockTailExpression(blk.hir_id));
let cause = self.cause(
span,
ObligationCauseCode::BlockTailExpression(blk.hir_id, hir::MatchSource::Normal),
);
let ty_for_diagnostic = coerce.merged_ty();
// We use coerce_inner here because we want to augment the error
// suggesting to wrap the block in square brackets if it might've

View File

@ -1681,7 +1681,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|| found_assoc(tcx.types.u64)
|| found_assoc(tcx.types.u128)
|| found_assoc(tcx.types.f32)
|| found_assoc(tcx.types.f32);
|| found_assoc(tcx.types.f64);
if found_candidate
&& actual.is_numeric()
&& !actual.has_concrete_skeleton()

View File

@ -3,7 +3,7 @@
use crate::errors;
use rustc_data_structures::memmap::Mmap;
use rustc_data_structures::unord::UnordMap;
use rustc_middle::dep_graph::{SerializedDepGraph, WorkProduct, WorkProductId};
use rustc_middle::dep_graph::{SerializedDepGraph, WorkProductMap};
use rustc_middle::query::on_disk_cache::OnDiskCache;
use rustc_serialize::opaque::MemDecoder;
use rustc_serialize::Decodable;
@ -16,8 +16,6 @@ use super::file_format;
use super::fs::*;
use super::work_product;
type WorkProductMap = UnordMap<WorkProductId, WorkProduct>;
#[derive(Debug)]
/// Represents the result of an attempt to load incremental compilation data.
pub enum LoadResult<T> {

View File

@ -1,7 +1,9 @@
use crate::errors;
use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::sync::join;
use rustc_middle::dep_graph::{DepGraph, SerializedDepGraph, WorkProduct, WorkProductId};
use rustc_middle::dep_graph::{
DepGraph, SerializedDepGraph, WorkProduct, WorkProductId, WorkProductMap,
};
use rustc_middle::ty::TyCtxt;
use rustc_serialize::opaque::{FileEncodeResult, FileEncoder};
use rustc_serialize::Encodable as RustcEncodable;
@ -101,7 +103,7 @@ pub fn save_work_product_index(
// deleted during invalidation. Some object files don't change their
// content, they are just not needed anymore.
let previous_work_products = dep_graph.previous_work_products();
for (id, wp) in previous_work_products.iter() {
for (id, wp) in previous_work_products.to_sorted_stable_ord().iter() {
if !new_work_products.contains_key(id) {
work_product::delete_workproduct_files(sess, wp);
debug_assert!(
@ -146,7 +148,7 @@ fn encode_query_cache(tcx: TyCtxt<'_>, encoder: FileEncoder) -> FileEncodeResult
pub fn build_dep_graph(
sess: &Session,
prev_graph: SerializedDepGraph,
prev_work_products: FxIndexMap<WorkProductId, WorkProduct>,
prev_work_products: WorkProductMap,
) -> Option<DepGraph> {
if sess.opts.incremental.is_none() {
// No incremental compilation.
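The switch to `to_sorted_stable_ord()` above is a determinism fix: iterating an unordered map makes the deletion (and any logging) order depend on hashing. A hedged, generic sketch of the same pattern, unrelated to the compiler's own types:

use std::collections::HashMap;

// Iterate keys in a fixed order so observable side effects are reproducible
// across runs, independent of hash seeds or insertion order.
fn keys_in_stable_order(map: &HashMap<String, u32>) -> Vec<&String> {
    let mut keys: Vec<&String> = map.keys().collect();
    keys.sort();
    keys
}

fn main() {
    let mut m = HashMap::new();
    m.insert("b".to_string(), 2);
    m.insert("a".to_string(), 1);
    let keys: Vec<&str> = keys_in_stable_order(&m).into_iter().map(|s| s.as_str()).collect();
    assert_eq!(keys, ["a", "b"]);
}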

View File

@ -743,6 +743,35 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
ObligationCauseCode::Pattern { origin_expr: false, span: Some(span), .. } => {
err.span_label(span, "expected due to this");
}
ObligationCauseCode::BlockTailExpression(
_,
hir::MatchSource::TryDesugar(scrut_hir_id),
) => {
if let Some(ty::error::ExpectedFound { expected, .. }) = exp_found {
let scrut_expr = self.tcx.hir().expect_expr(scrut_hir_id);
let scrut_ty = if let hir::ExprKind::Call(_, args) = &scrut_expr.kind {
let arg_expr = args.first().expect("try desugaring call w/out arg");
self.typeck_results.as_ref().and_then(|typeck_results| {
typeck_results.expr_ty_opt(arg_expr)
})
} else {
bug!("try desugaring w/out call expr as scrutinee");
};
match scrut_ty {
Some(ty) if expected == ty => {
let source_map = self.tcx.sess.source_map();
err.span_suggestion(
source_map.end_point(cause.span()),
"try removing this `?`",
"",
Applicability::MachineApplicable,
);
}
_ => {}
}
}
},
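A hedged example of the user code this new arm targets; it intentionally fails to compile, because the block's tail expression applies `?` to a value whose type already matches the expected `Result`, which is exactly when the added "try removing this `?`" suggestion fires.

fn get() -> Result<i32, ()> {
    Ok(1)
}

fn caller() -> Result<i32, ()> {
    // ERROR: `?` unwraps the `Result` to `i32`, but the tail expression must
    // itself be `Result<i32, ()>`; the suggested fix is to drop the `?`.
    get()?
}

fn main() {
    let _ = caller();
}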
ObligationCauseCode::MatchExpressionArm(box MatchExpressionArmCause {
arm_block_id,
arm_span,
@ -752,12 +781,11 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
prior_arm_ty,
source,
ref prior_arms,
scrut_hir_id,
opt_suggest_box_span,
scrut_span,
..
}) => match source {
hir::MatchSource::TryDesugar => {
hir::MatchSource::TryDesugar(scrut_hir_id) => {
if let Some(ty::error::ExpectedFound { expected, .. }) = exp_found {
let scrut_expr = self.tcx.hir().expect_expr(scrut_hir_id);
let scrut_ty = if let hir::ExprKind::Call(_, args) = &scrut_expr.kind {
@ -1927,7 +1955,12 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
true
};
if should_suggest_fixes {
// FIXME(#73154): For now, we do leak check when coercing function
// pointers in typeck, instead of only during borrowck. This can lead
// to these `RegionsInsufficientlyPolymorphic` errors that aren't helpful.
if should_suggest_fixes
&& !matches!(terr, TypeError::RegionsInsufficientlyPolymorphic(..))
{
self.suggest_tuple_pattern(cause, &exp_found, diag);
self.suggest_accessing_field_where_appropriate(cause, &exp_found, diag);
self.suggest_await_on_expect_found(cause, span, &exp_found, diag);
@ -1973,7 +2006,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
trace: &TypeTrace<'tcx>,
terr: TypeError<'tcx>,
) -> Vec<TypeErrorAdditionalDiags> {
use crate::traits::ObligationCauseCode::MatchExpressionArm;
use crate::traits::ObligationCauseCode::{BlockTailExpression, MatchExpressionArm};
let mut suggestions = Vec::new();
let span = trace.cause.span();
let values = self.resolve_vars_if_possible(trace.values);
@ -1991,11 +2024,17 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
// specify a byte literal
(ty::Uint(ty::UintTy::U8), ty::Char) => {
if let Ok(code) = self.tcx.sess().source_map().span_to_snippet(span)
&& let Some(code) = code.strip_prefix('\'').and_then(|s| s.strip_suffix('\''))
&& !code.starts_with("\\u") // forbid all Unicode escapes
&& code.chars().next().is_some_and(|c| c.is_ascii()) // forbids literal Unicode characters beyond ASCII
&& let Some(code) =
code.strip_prefix('\'').and_then(|s| s.strip_suffix('\''))
// forbid all Unicode escapes
&& !code.starts_with("\\u")
// forbids literal Unicode characters beyond ASCII
&& code.chars().next().is_some_and(|c| c.is_ascii())
{
suggestions.push(TypeErrorAdditionalDiags::MeantByteLiteral { span, code: escape_literal(code) })
suggestions.push(TypeErrorAdditionalDiags::MeantByteLiteral {
span,
code: escape_literal(code),
})
}
}
// If a character was expected and the found expression is a string literal
@ -2006,7 +2045,10 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
&& let Some(code) = code.strip_prefix('"').and_then(|s| s.strip_suffix('"'))
&& code.chars().count() == 1
{
suggestions.push(TypeErrorAdditionalDiags::MeantCharLiteral { span, code: escape_literal(code) })
suggestions.push(TypeErrorAdditionalDiags::MeantCharLiteral {
span,
code: escape_literal(code),
})
}
}
// If a string was expected and the found expression is a character literal,
@ -2016,7 +2058,10 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
if let Some(code) =
code.strip_prefix('\'').and_then(|s| s.strip_suffix('\''))
{
suggestions.push(TypeErrorAdditionalDiags::MeantStrLiteral { span, code: escape_literal(code) })
suggestions.push(TypeErrorAdditionalDiags::MeantStrLiteral {
span,
code: escape_literal(code),
})
}
}
}
@ -2025,17 +2070,24 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
(ty::Bool, ty::Tuple(list)) => if list.len() == 0 {
suggestions.extend(self.suggest_let_for_letchains(&trace.cause, span));
}
(ty::Array(_, _), ty::Array(_, _)) => suggestions.extend(self.suggest_specify_actual_length(terr, trace, span)),
(ty::Array(_, _), ty::Array(_, _)) => {
suggestions.extend(self.suggest_specify_actual_length(terr, trace, span))
}
_ => {}
}
}
let code = trace.cause.code();
if let &MatchExpressionArm(box MatchExpressionArmCause { source, .. }) = code
&& let hir::MatchSource::TryDesugar = source
&& let Some((expected_ty, found_ty, _, _)) = self.values_str(trace.values)
{
suggestions.push(TypeErrorAdditionalDiags::TryCannotConvert { found: found_ty.content(), expected: expected_ty.content() });
}
if let &(MatchExpressionArm(box MatchExpressionArmCause { source, .. })
| BlockTailExpression(.., source)
) = code
&& let hir::MatchSource::TryDesugar(_) = source
&& let Some((expected_ty, found_ty, _, _)) = self.values_str(trace.values)
{
suggestions.push(TypeErrorAdditionalDiags::TryCannotConvert {
found: found_ty.content(),
expected: expected_ty.content(),
});
}
suggestions
}
@ -2905,8 +2957,11 @@ impl<'tcx> ObligationCauseExt<'tcx> for ObligationCause<'tcx> {
CompareImplItemObligation { kind: ty::AssocKind::Const, .. } => {
ObligationCauseFailureCode::ConstCompat { span, subdiags }
}
BlockTailExpression(.., hir::MatchSource::TryDesugar(_)) => {
ObligationCauseFailureCode::TryCompat { span, subdiags }
}
MatchExpressionArm(box MatchExpressionArmCause { source, .. }) => match source {
hir::MatchSource::TryDesugar => {
hir::MatchSource::TryDesugar(_) => {
ObligationCauseFailureCode::TryCompat { span, subdiags }
}
_ => ObligationCauseFailureCode::MatchCompat { span, subdiags },
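The new `BlockTailExpression(.., TryDesugar)` arms above fire when the operand of a `?` already has the type the surrounding block expects. A user-level illustration (not part of the diff) of the situation the suggestion targets:

```rust
use std::num::ParseIntError;

fn parse(input: &str) -> Result<i32, ParseIntError> {
    // Writing `input.parse::<i32>()?` here would be a type error: the call
    // already returns `Result<i32, ParseIntError>`, and `?` unwraps it to
    // `i32`, which no longer matches the return type. With this change the
    // compiler points at the `?` and suggests "try removing this `?`".
    input.parse::<i32>()
}

fn main() {
    assert_eq!(parse("42"), Ok(42));
}
```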

View File

@ -29,25 +29,15 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
// version new_ty of its type where the anonymous region is replaced
// with the named one.
let (named, anon, anon_param_info, region_info) = if sub.has_name()
&& self.tcx().is_suitable_region(sup).is_some()
&& self.find_param_with_region(sup, sub).is_some()
&& let Some(region_info) = self.tcx().is_suitable_region(sup)
&& let Some(anon_param_info) = self.find_param_with_region(sup, sub)
{
(
sub,
sup,
self.find_param_with_region(sup, sub).unwrap(),
self.tcx().is_suitable_region(sup).unwrap(),
)
(sub, sup, anon_param_info, region_info)
} else if sup.has_name()
&& self.tcx().is_suitable_region(sub).is_some()
&& self.find_param_with_region(sub, sup).is_some()
&& let Some(region_info) = self.tcx().is_suitable_region(sub)
&& let Some(anon_param_info) = self.find_param_with_region(sub, sup)
{
(
sup,
sub,
self.find_param_with_region(sub, sup).unwrap(),
self.tcx().is_suitable_region(sub).unwrap(),
)
(sup, sub, anon_param_info, region_info)
} else {
return None; // inapplicable
};
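The rewrite above binds `region_info` and `anon_param_info` once instead of testing with `is_some()` and then calling `unwrap()`. A stable-Rust sketch of the same bind-once idea with illustrative helper names (the real code uses rustc's let-chain syntax and its own lookup functions):

```rust
struct RegionInfo(&'static str);
struct ParamInfo(&'static str);

fn suitable_region(name: &str) -> Option<RegionInfo> {
    (name == "sup").then(|| RegionInfo("'a"))
}

fn param_with_region(name: &str) -> Option<ParamInfo> {
    (name == "sup").then(|| ParamInfo("arg"))
}

fn describe(name: &str) -> Option<(RegionInfo, ParamInfo)> {
    // Before the change: call each helper twice, once through `is_some()` and
    // once through `unwrap()`. After: bind the results exactly once, which is
    // what the let-chain in the hunk does.
    if let (Some(region_info), Some(param_info)) =
        (suitable_region(name), param_with_region(name))
    {
        Some((region_info, param_info))
    } else {
        None
    }
}

fn main() {
    if let Some((RegionInfo(region), ParamInfo(param))) = describe("sup") {
        println!("named region {region} on parameter `{param}`");
    }
    assert!(describe("sub").is_none());
}
```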

View File

@ -146,7 +146,7 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
if let SubregionOrigin::Subtype(box TypeTrace { cause, .. }) = sub_origin {
if let ObligationCauseCode::ReturnValue(hir_id)
| ObligationCauseCode::BlockTailExpression(hir_id) = cause.code()
| ObligationCauseCode::BlockTailExpression(hir_id, ..) = cause.code()
{
let parent_id = tcx.hir().get_parent_item(*hir_id);
if let Some(fn_decl) = tcx.hir().fn_decl_by_hir_id(parent_id.into()) {

View File

@ -64,7 +64,7 @@ pub fn find_param_with_region<'tcx>(
let body_id = hir.maybe_body_owned_by(def_id)?;
let owner_id = hir.body_owner(body_id);
let fn_decl = hir.fn_decl_by_hir_id(owner_id).unwrap();
let fn_decl = hir.fn_decl_by_hir_id(owner_id)?;
let poly_fn_sig = tcx.fn_sig(id).instantiate_identity();
let fn_sig = tcx.liberate_late_bound_regions(id, poly_fn_sig);

View File

@ -15,6 +15,7 @@ rustc_attr = { path = "../rustc_attr" }
rustc_borrowck = { path = "../rustc_borrowck" }
rustc_builtin_macros = { path = "../rustc_builtin_macros" }
rustc_expand = { path = "../rustc_expand" }
rustc_feature = { path = "../rustc_feature" }
rustc_fluent_macro = { path = "../rustc_fluent_macro" }
rustc_fs_util = { path = "../rustc_fs_util" }
rustc_macros = { path = "../rustc_macros" }

View File

@ -11,6 +11,7 @@ use rustc_data_structures::steal::Steal;
use rustc_data_structures::sync::{Lrc, OnceCell, WorkerLocal};
use rustc_errors::PResult;
use rustc_expand::base::{ExtCtxt, LintStoreExpand};
use rustc_feature::Features;
use rustc_fs_util::try_canonicalize;
use rustc_hir::def_id::{StableCrateId, LOCAL_CRATE};
use rustc_lint::{unerased_lint_store, BufferedEarlyLint, EarlyCheckNode, LintStore};
@ -98,6 +99,7 @@ pub(crate) fn create_lint_store(
fn pre_expansion_lint<'a>(
sess: &Session,
features: &Features,
lint_store: &LintStore,
registered_tools: &RegisteredTools,
check_node: impl EarlyCheckNode<'a>,
@ -107,6 +109,7 @@ fn pre_expansion_lint<'a>(
|| {
rustc_lint::check_ast_node(
sess,
features,
true,
lint_store,
registered_tools,
@ -125,13 +128,14 @@ impl LintStoreExpand for LintStoreExpandImpl<'_> {
fn pre_expansion_lint(
&self,
sess: &Session,
features: &Features,
registered_tools: &RegisteredTools,
node_id: ast::NodeId,
attrs: &[ast::Attribute],
items: &[rustc_ast::ptr::P<ast::Item>],
name: Symbol,
) {
pre_expansion_lint(sess, self.0, registered_tools, (node_id, attrs, items), name);
pre_expansion_lint(sess, features, self.0, registered_tools, (node_id, attrs, items), name);
}
}
@ -147,10 +151,18 @@ fn configure_and_expand(
) -> ast::Crate {
let tcx = resolver.tcx();
let sess = tcx.sess;
let features = tcx.features();
let lint_store = unerased_lint_store(tcx);
let crate_name = tcx.crate_name(LOCAL_CRATE);
let lint_check_node = (&krate, pre_configured_attrs);
pre_expansion_lint(sess, lint_store, tcx.registered_tools(()), lint_check_node, crate_name);
pre_expansion_lint(
sess,
features,
lint_store,
tcx.registered_tools(()),
lint_check_node,
crate_name,
);
rustc_builtin_macros::register_builtin_macros(resolver);
let num_standard_library_imports = sess.time("crate_injection", || {
@ -159,6 +171,7 @@ fn configure_and_expand(
pre_configured_attrs,
resolver,
sess,
features,
)
});
@ -198,16 +211,15 @@ fn configure_and_expand(
}
// Create the config for macro expansion
let features = sess.features_untracked();
let recursion_limit = get_recursion_limit(pre_configured_attrs, sess);
let cfg = rustc_expand::expand::ExpansionConfig {
features: Some(features),
crate_name: crate_name.to_string(),
features,
recursion_limit,
trace_mac: sess.opts.unstable_opts.trace_macros,
should_test: sess.is_test_crate(),
span_debug: sess.opts.unstable_opts.span_debug,
proc_macro_backtrace: sess.opts.unstable_opts.proc_macro_backtrace,
..rustc_expand::expand::ExpansionConfig::default(crate_name.to_string())
};
let lint_store = LintStoreExpandImpl(lint_store);
@ -241,11 +253,16 @@ fn configure_and_expand(
});
sess.time("maybe_building_test_harness", || {
rustc_builtin_macros::test_harness::inject(&mut krate, sess, resolver)
rustc_builtin_macros::test_harness::inject(&mut krate, sess, features, resolver)
});
let has_proc_macro_decls = sess.time("AST_validation", || {
rustc_ast_passes::ast_validation::check_crate(sess, &krate, resolver.lint_buffer())
rustc_ast_passes::ast_validation::check_crate(
sess,
features,
&krate,
resolver.lint_buffer(),
)
});
let crate_types = tcx.crate_types();
@ -270,6 +287,7 @@ fn configure_and_expand(
rustc_builtin_macros::proc_macro_harness::inject(
&mut krate,
sess,
features,
resolver,
is_proc_macro_crate,
has_proc_macro_decls,
@ -300,7 +318,7 @@ fn early_lint_checks(tcx: TyCtxt<'_>, (): ()) {
// Needs to go *after* expansion to be able to check the results of macro expansion.
sess.time("complete_gated_feature_checking", || {
rustc_ast_passes::feature_gate::check_crate(&krate, sess);
rustc_ast_passes::feature_gate::check_crate(&krate, sess, tcx.features());
});
// Add all buffered lints from the `ParseSess` to the `Session`.
@ -329,6 +347,7 @@ fn early_lint_checks(tcx: TyCtxt<'_>, (): ()) {
let lint_store = unerased_lint_store(tcx);
rustc_lint::check_ast_node(
sess,
tcx.features(),
false,
lint_store,
tcx.registered_tools(()),

View File

@ -5,7 +5,6 @@ use crate::{passes, util};
use rustc_ast as ast;
use rustc_codegen_ssa::traits::CodegenBackend;
use rustc_codegen_ssa::CodegenResults;
use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::steal::Steal;
use rustc_data_structures::svh::Svh;
use rustc_data_structures::sync::{AppendOnlyIndexVec, Lrc, OnceCell, RwLock, WorkerLocal};
@ -86,9 +85,6 @@ pub struct Queries<'tcx> {
parse: Query<ast::Crate>,
pre_configure: Query<(ast::Crate, ast::AttrVec)>,
crate_name: Query<Symbol>,
crate_types: Query<Vec<CrateType>>,
stable_crate_id: Query<StableCrateId>,
// This just points to what's in `gcx_cell`.
gcx: Query<&'tcx GlobalCtxt<'tcx>>,
}
@ -102,9 +98,6 @@ impl<'tcx> Queries<'tcx> {
hir_arena: WorkerLocal::new(|_| rustc_hir::Arena::default()),
parse: Default::default(),
pre_configure: Default::default(),
crate_name: Default::default(),
crate_types: Default::default(),
stable_crate_id: Default::default(),
gcx: Default::default(),
}
}
@ -138,39 +131,12 @@ impl<'tcx> Queries<'tcx> {
})
}
fn crate_name(&self) -> Result<QueryResult<'_, Symbol>> {
self.crate_name.compute(|| {
let pre_configure_result = self.pre_configure()?;
let (_, pre_configured_attrs) = &*pre_configure_result.borrow();
// parse `#[crate_name]` even if `--crate-name` was passed, to make sure it matches.
Ok(find_crate_name(self.session(), pre_configured_attrs))
})
}
fn crate_types(&self) -> Result<QueryResult<'_, Vec<CrateType>>> {
self.crate_types.compute(|| {
let pre_configure_result = self.pre_configure()?;
let (_, pre_configured_attrs) = &*pre_configure_result.borrow();
Ok(util::collect_crate_types(&self.session(), &pre_configured_attrs))
})
}
fn stable_crate_id(&self) -> Result<QueryResult<'_, StableCrateId>> {
self.stable_crate_id.compute(|| {
let sess = self.session();
Ok(StableCrateId::new(
*self.crate_name()?.borrow(),
self.crate_types()?.borrow().contains(&CrateType::Executable),
sess.opts.cg.metadata.clone(),
sess.cfg_version,
))
})
}
fn dep_graph_future(&self) -> Result<Option<DepGraphFuture>> {
fn dep_graph_future(
&self,
crate_name: Symbol,
stable_crate_id: StableCrateId,
) -> Result<Option<DepGraphFuture>> {
let sess = self.session();
let crate_name = *self.crate_name()?.borrow();
let stable_crate_id = *self.stable_crate_id()?.borrow();
// `load_dep_graph` can only be called after `prepare_session_directory`.
rustc_incremental::prepare_session_directory(sess, crate_name, stable_crate_id)?;
@ -195,15 +161,8 @@ impl<'tcx> Queries<'tcx> {
dep_graph_future
.and_then(|future| {
let sess = self.session();
let (prev_graph, mut prev_work_products) =
let (prev_graph, prev_work_products) =
sess.time("blocked_on_dep_graph_loading", || future.open().open(sess));
// Convert from UnordMap to FxIndexMap by sorting
let prev_work_product_ids =
prev_work_products.items().map(|x| *x.0).into_sorted_stable_ord();
let prev_work_products = prev_work_product_ids
.into_iter()
.map(|x| (x, prev_work_products.remove(&x).unwrap()))
.collect::<FxIndexMap<_, _>>();
rustc_incremental::build_dep_graph(sess, prev_graph, prev_work_products)
})
.unwrap_or_else(DepGraph::new_disabled)
@ -211,32 +170,39 @@ impl<'tcx> Queries<'tcx> {
pub fn global_ctxt(&'tcx self) -> Result<QueryResult<'_, &'tcx GlobalCtxt<'tcx>>> {
self.gcx.compute(|| {
// Compute the dependency graph (in the background). We want to do this as early as
// possible, to give the DepGraph maximum time to load before `dep_graph` is called.
let dep_graph_future = self.dep_graph_future()?;
let crate_name = self.crate_name()?.steal();
let crate_types = self.crate_types()?.steal();
let stable_crate_id = self.stable_crate_id()?.steal();
let sess = self.session();
let (krate, pre_configured_attrs) = self.pre_configure()?.steal();
let sess = self.session();
// parse `#[crate_name]` even if `--crate-name` was passed, to make sure it matches.
let crate_name = find_crate_name(sess, &pre_configured_attrs);
let crate_types = util::collect_crate_types(sess, &pre_configured_attrs);
let stable_crate_id = StableCrateId::new(
crate_name,
crate_types.contains(&CrateType::Executable),
sess.opts.cg.metadata.clone(),
sess.cfg_version,
);
// Compute the dependency graph (in the background). We want to do this as early as
// possible, to give the DepGraph maximum time to load before `dep_graph` is called.
let dep_graph_future = self.dep_graph_future(crate_name, stable_crate_id)?;
let lint_store = Lrc::new(passes::create_lint_store(
sess,
&*self.codegen_backend().metadata_loader(),
self.compiler.register_lints.as_deref(),
&pre_configured_attrs,
));
let cstore = RwLock::new(Box::new(CStore::new(stable_crate_id)) as _);
let cstore = RwLock::new(Box::new(CStore::new(
self.codegen_backend().metadata_loader(),
stable_crate_id,
)) as _);
let definitions = RwLock::new(Definitions::new(stable_crate_id));
let source_span = AppendOnlyIndexVec::new();
let _id = source_span.push(krate.spans.inner_span);
debug_assert_eq!(_id, CRATE_DEF_ID);
let untracked = Untracked { cstore, source_span, definitions };
// FIXME: Move features from session to tcx and make them immutable.
sess.init_features(rustc_expand::config::features(sess, &pre_configured_attrs));
let qcx = passes::create_global_ctxt(
self.compiler,
crate_types,
@ -254,11 +220,10 @@ impl<'tcx> Queries<'tcx> {
feed.crate_name(crate_name);
let feed = tcx.feed_unit_query();
feed.crate_for_resolver(tcx.arena.alloc(Steal::new((krate, pre_configured_attrs))));
feed.metadata_loader(
tcx.arena.alloc(Steal::new(self.codegen_backend().metadata_loader())),
feed.features_query(
tcx.arena.alloc(rustc_expand::config::features(sess, &pre_configured_attrs)),
);
feed.features_query(tcx.sess.features_untracked());
feed.crate_for_resolver(tcx.arena.alloc(Steal::new((krate, pre_configured_attrs))));
});
Ok(qcx)
})
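The comment above spells out why the dependency-graph future is started before the rest of crate setup: the load should overlap with other work and only be blocked on when the graph is actually needed. A toy version of that pattern with plain threads (the real code uses rustc's own future and query machinery):

```rust
use std::thread;
use std::time::Duration;

fn main() {
    // Kick off the slow load in the background as early as possible
    // (stand-in for `dep_graph_future` / `future.open().open(sess)`).
    let dep_graph_future = thread::spawn(|| {
        thread::sleep(Duration::from_millis(50));
        vec![1u32, 2, 3]
    });

    // Meanwhile, keep doing setup work on the main thread (crate name,
    // crate types, stable crate id in the real code).
    let crate_name = "example";
    println!("configured crate `{crate_name}` while the graph loads");

    // Only block on the background load once the result is needed.
    let prev_graph = dep_graph_future.join().expect("loader thread panicked");
    println!("loaded {} previous dep-graph nodes", prev_graph.len());
}
```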

View File

@ -2215,7 +2215,7 @@ declare_lint! {
///
/// ### Example
///
/// ```rust,compile_fail
/// ```rust
/// #![feature(rustc_attrs)]
/// ```
///
@ -2226,7 +2226,7 @@ declare_lint! {
/// These features are an implementation detail of the compiler and standard
/// library and are not supposed to be used in user code.
pub INTERNAL_FEATURES,
Deny,
Warn,
"internal features are not supposed to be used"
}
@ -2237,7 +2237,7 @@ declare_lint_pass!(
impl EarlyLintPass for IncompleteInternalFeatures {
fn check_crate(&mut self, cx: &EarlyContext<'_>, _: &ast::Crate) {
let features = cx.sess().features_untracked();
let features = cx.builder.features();
features
.declared_lang_features
.iter()

View File

@ -27,6 +27,7 @@ use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync;
use rustc_errors::{add_elided_lifetime_in_path_suggestion, DiagnosticBuilder, DiagnosticMessage};
use rustc_errors::{Applicability, DecorateLint, MultiSpan, SuggestionStyle};
use rustc_feature::Features;
use rustc_hir as hir;
use rustc_hir::def::Res;
use rustc_hir::def_id::{CrateNum, DefId};
@ -1071,6 +1072,7 @@ pub trait LintContext: Sized {
impl<'a> EarlyContext<'a> {
pub(crate) fn new(
sess: &'a Session,
features: &'a Features,
warn_about_weird_lints: bool,
lint_store: &'a LintStore,
registered_tools: &'a RegisteredTools,
@ -1079,6 +1081,7 @@ impl<'a> EarlyContext<'a> {
EarlyContext {
builder: LintLevelsBuilder::new(
sess,
features,
warn_about_weird_lints,
lint_store,
registered_tools,

View File

@ -20,6 +20,7 @@ use rustc_ast::ptr::P;
use rustc_ast::visit::{self as ast_visit, Visitor};
use rustc_ast::{self as ast, walk_list, HasAttrs};
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_feature::Features;
use rustc_middle::ty::RegisteredTools;
use rustc_session::lint::{BufferedEarlyLint, LintBuffer, LintPass};
use rustc_session::Session;
@ -381,6 +382,7 @@ impl<'a> EarlyCheckNode<'a> for (ast::NodeId, &'a [ast::Attribute], &'a [P<ast::
pub fn check_ast_node<'a>(
sess: &Session,
features: &Features,
pre_expansion: bool,
lint_store: &LintStore,
registered_tools: &RegisteredTools,
@ -390,6 +392,7 @@ pub fn check_ast_node<'a>(
) {
let context = EarlyContext::new(
sess,
features,
!pre_expansion,
lint_store,
registered_tools,

View File

@ -19,7 +19,7 @@ use rustc_ast as ast;
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_data_structures::sync::join;
use rustc_hir as hir;
use rustc_hir::def_id::LocalDefId;
use rustc_hir::def_id::{LocalDefId, LocalModDefId};
use rustc_hir::intravisit as hir_visit;
use rustc_hir::intravisit::Visitor;
use rustc_middle::hir::nested_filter;
@ -338,7 +338,7 @@ crate::late_lint_methods!(impl_late_lint_pass, []);
pub fn late_lint_mod<'tcx, T: LateLintPass<'tcx> + 'tcx>(
tcx: TyCtxt<'tcx>,
module_def_id: LocalDefId,
module_def_id: LocalModDefId,
builtin_lints: T,
) {
let context = LateContext {
@ -369,7 +369,7 @@ pub fn late_lint_mod<'tcx, T: LateLintPass<'tcx> + 'tcx>(
fn late_lint_mod_inner<'tcx, T: LateLintPass<'tcx>>(
tcx: TyCtxt<'tcx>,
module_def_id: LocalDefId,
module_def_id: LocalModDefId,
context: LateContext<'tcx>,
pass: T,
) {

View File

@ -12,6 +12,7 @@ use rustc_ast as ast;
use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::{DecorateLint, DiagnosticBuilder, DiagnosticMessage, MultiSpan};
use rustc_feature::Features;
use rustc_hir as hir;
use rustc_hir::intravisit::{self, Visitor};
use rustc_hir::HirId;
@ -119,6 +120,7 @@ fn lint_expectations(tcx: TyCtxt<'_>, (): ()) -> Vec<(LintExpectationId, LintExp
let mut builder = LintLevelsBuilder {
sess: tcx.sess,
features: tcx.features(),
provider: QueryMapExpectationsWrapper {
tcx,
cur: hir::CRATE_HIR_ID,
@ -148,6 +150,7 @@ fn shallow_lint_levels_on(tcx: TyCtxt<'_>, owner: hir::OwnerId) -> ShallowLintLe
let mut levels = LintLevelsBuilder {
sess: tcx.sess,
features: tcx.features(),
provider: LintLevelQueryMap {
tcx,
cur: owner.into(),
@ -435,6 +438,7 @@ impl<'tcx> Visitor<'tcx> for LintLevelsBuilder<'_, QueryMapExpectationsWrapper<'
pub struct LintLevelsBuilder<'s, P> {
sess: &'s Session,
features: &'s Features,
provider: P,
warn_about_weird_lints: bool,
store: &'s LintStore,
@ -448,12 +452,14 @@ pub(crate) struct BuilderPush {
impl<'s> LintLevelsBuilder<'s, TopDown> {
pub(crate) fn new(
sess: &'s Session,
features: &'s Features,
warn_about_weird_lints: bool,
store: &'s LintStore,
registered_tools: &'s RegisteredTools,
) -> Self {
let mut builder = LintLevelsBuilder {
sess,
features,
provider: TopDown { sets: LintLevelSets::new(), cur: COMMAND_LINE },
warn_about_weird_lints,
store,
@ -526,6 +532,10 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> {
self.sess
}
pub(crate) fn features(&self) -> &Features {
self.features
}
pub(crate) fn lint_store(&self) -> &LintStore {
self.store
}
@ -716,7 +726,7 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> {
ast::MetaItemKind::NameValue(ref name_value) => {
if item.path == sym::reason {
if let ast::LitKind::Str(rationale, _) = name_value.kind {
if !self.sess.features_untracked().lint_reasons {
if !self.features.lint_reasons {
feature_err(
&self.sess.parse_sess,
sym::lint_reasons,
@ -992,7 +1002,7 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> {
#[track_caller]
fn check_gated_lint(&self, lint_id: LintId, span: Span) -> bool {
if let Some(feature) = lint_id.lint.feature_gate {
if !self.sess.features_untracked().enabled(feature) {
if !self.features.enabled(feature) {
let lint = builtin::UNKNOWN_LINTS;
let (level, src) = self.lint_level(builtin::UNKNOWN_LINTS);
struct_lint_level(

View File

@ -90,7 +90,7 @@ use rustc_ast as ast;
use rustc_errors::{DiagnosticMessage, SubdiagnosticMessage};
use rustc_fluent_macro::fluent_messages;
use rustc_hir as hir;
use rustc_hir::def_id::LocalDefId;
use rustc_hir::def_id::{LocalDefId, LocalModDefId};
use rustc_middle::query::Providers;
use rustc_middle::ty::TyCtxt;
use rustc_session::lint::builtin::{
@ -145,7 +145,7 @@ pub fn provide(providers: &mut Providers) {
*providers = Providers { lint_mod, ..*providers };
}
fn lint_mod(tcx: TyCtxt<'_>, module_def_id: LocalDefId) {
fn lint_mod(tcx: TyCtxt<'_>, module_def_id: LocalModDefId) {
late_lint_mod(tcx, module_def_id, BuiltinCombinedModuleLateLintPass::new());
}

View File

@ -98,32 +98,56 @@ impl<'tcx> LateLintPass<'tcx> for InvalidReferenceCasting {
fn is_cast_from_const_to_mut<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'tcx>) -> bool {
let e = e.peel_blocks();
// <expr> as *mut ...
let e = if let ExprKind::Cast(e, t) = e.kind
&& let ty::RawPtr(TypeAndMut { mutbl: Mutability::Mut, .. }) = cx.typeck_results().node_type(t.hir_id).kind() {
e
// <expr>.cast_mut()
} else if let ExprKind::MethodCall(_, expr, [], _) = e.kind
&& let Some(def_id) = cx.typeck_results().type_dependent_def_id(e.hir_id)
&& cx.tcx.is_diagnostic_item(sym::ptr_cast_mut, def_id) {
expr
} else {
return false;
};
fn from_casts<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'tcx>) -> Option<&'tcx Expr<'tcx>> {
// <expr> as *mut ...
let e = if let ExprKind::Cast(e, t) = e.kind
&& let ty::RawPtr(TypeAndMut { mutbl: Mutability::Mut, .. }) = cx.typeck_results().node_type(t.hir_id).kind() {
e
// <expr>.cast_mut()
} else if let ExprKind::MethodCall(_, expr, [], _) = e.kind
&& let Some(def_id) = cx.typeck_results().type_dependent_def_id(e.hir_id)
&& cx.tcx.is_diagnostic_item(sym::ptr_cast_mut, def_id) {
expr
} else {
return None;
};
let e = e.peel_blocks();
let e = e.peel_blocks();
// <expr> as *const ...
let e = if let ExprKind::Cast(e, t) = e.kind
&& let ty::RawPtr(TypeAndMut { mutbl: Mutability::Not, .. }) = cx.typeck_results().node_type(t.hir_id).kind() {
e
// ptr::from_ref(<expr>)
} else if let ExprKind::Call(path, [arg]) = e.kind
&& let ExprKind::Path(ref qpath) = path.kind
&& let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id()
&& cx.tcx.is_diagnostic_item(sym::ptr_from_ref, def_id) {
arg
} else {
// <expr> as *const ...
let e = if let ExprKind::Cast(e, t) = e.kind
&& let ty::RawPtr(TypeAndMut { mutbl: Mutability::Not, .. }) = cx.typeck_results().node_type(t.hir_id).kind() {
e
// ptr::from_ref(<expr>)
} else if let ExprKind::Call(path, [arg]) = e.kind
&& let ExprKind::Path(ref qpath) = path.kind
&& let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id()
&& cx.tcx.is_diagnostic_item(sym::ptr_from_ref, def_id) {
arg
} else {
return None;
};
Some(e)
}
fn from_transmute<'tcx>(
cx: &LateContext<'tcx>,
e: &'tcx Expr<'tcx>,
) -> Option<&'tcx Expr<'tcx>> {
// mem::transmute::<_, *mut _>(<expr>)
if let ExprKind::Call(path, [arg]) = e.kind
&& let ExprKind::Path(ref qpath) = path.kind
&& let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id()
&& cx.tcx.is_diagnostic_item(sym::transmute, def_id)
&& let ty::RawPtr(TypeAndMut { mutbl: Mutability::Mut, .. }) = cx.typeck_results().node_type(e.hir_id).kind() {
Some(arg)
} else {
None
}
}
let Some(e) = from_casts(cx, e).or_else(|| from_transmute(cx, e)) else {
return false;
};
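For orientation, these are the expression shapes the refactored helpers recognize: `from_casts` matches `as *mut` / `.cast_mut()` chains, and the new `from_transmute` matches `mem::transmute` to a `*mut` pointer. The standalone sketch below only constructs the pointers (the lint itself fires when such a pointer is later written through):

```rust
fn main() {
    let x = 0i32;
    let r = &x;

    // `<expr> as *const _ as *mut _` and `.cast_mut()`: the `from_casts` shapes.
    let _p1 = r as *const i32 as *mut i32;
    let _p2 = (r as *const i32).cast_mut();

    // `mem::transmute::<_, *mut _>(<expr>)`: the new `from_transmute` shape.
    let _p3: *mut i32 = unsafe { std::mem::transmute(r) };

    // Writing through any of these pointers would be undefined behaviour and
    // is what `invalid_reference_casting` reports.
    println!("constructed raw pointers without writing through them");
}
```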

View File

@ -666,6 +666,24 @@ trait UnusedDelimLint {
if !followed_by_block {
return false;
}
// Check if we need parens for `match &( Struct { field: }) {}`.
{
let mut innermost = inner;
loop {
innermost = match &innermost.kind {
ExprKind::AddrOf(_, _, expr) => expr,
_ => {
if parser::contains_exterior_struct_lit(&innermost) {
return true;
} else {
break;
}
}
}
}
}
let mut innermost = inner;
loop {
innermost = match &innermost.kind {
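The new check above keeps `unused_parens` from firing on parentheses that are actually required: a struct literal behind `&` in a match scrutinee must stay parenthesized. A small example of the code being protected:

```rust
struct Point {
    x: i32,
}

fn main() {
    // `match &Point { x: 1 } { ... }` is rejected because struct literals are
    // not allowed in scrutinee position, so the parentheses are required here
    // and the lint must not suggest removing them.
    match &(Point { x: 1 }) {
        Point { x } => println!("x = {x}"),
    }
}
```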

View File

@ -3366,6 +3366,7 @@ declare_lint_pass! {
BYTE_SLICE_IN_PACKED_STRUCT_WITH_DERIVE,
CENUM_IMPL_DROP_CAST,
COHERENCE_LEAK_CHECK,
COINDUCTIVE_OVERLAP_IN_COHERENCE,
CONFLICTING_REPR_HINTS,
CONST_EVALUATABLE_UNCHECKED,
CONST_ITEM_MUTATION,
@ -4422,6 +4423,44 @@ declare_lint! {
@feature_gate = sym::type_privacy_lints;
}
declare_lint! {
/// The `coinductive_overlap_in_coherence` lint detects impls which are currently
/// considered not overlapping, but may be considered to overlap if support for
/// coinduction is added to the trait solver.
///
/// ### Example
///
/// ```rust,compile_fail
/// #![deny(coinductive_overlap_in_coherence)]
///
/// trait CyclicTrait {}
/// impl<T: CyclicTrait> CyclicTrait for T {}
///
/// trait Trait {}
/// impl<T: CyclicTrait> Trait for T {}
/// // conflicting impl with the above
/// impl Trait for u8 {}
/// ```
///
/// {{produces}}
///
/// ### Explanation
///
/// We have two candidate impls that satisfy `u8: Trait`: the blanket impl
/// for generic `T`, and the direct impl for `u8`. These two impls nominally
/// overlap, since we can infer `T = u8` in the former impl, but since the where
/// clause `u8: CyclicTrait` would end up resulting in a cycle (since it depends
/// on itself), the blanket impl is not considered to hold for `u8`. This will
/// change in a future release.
pub COINDUCTIVE_OVERLAP_IN_COHERENCE,
Warn,
"impls that are not considered to overlap may be considered to \
overlap in the future",
@future_incompatible = FutureIncompatibleInfo {
reference: "issue #114040 <https://github.com/rust-lang/rust/issues/114040>",
};
}
declare_lint! {
/// The `unknown_diagnostic_attributes` lint detects unrecognized diagnostic attributes.
///

View File

@ -12,6 +12,7 @@ const OPTIONAL_COMPONENTS: &[&str] = &[
"avr",
"loongarch",
"m68k",
"csky",
"mips",
"powerpc",
"systemz",

View File

@ -105,6 +105,12 @@ extern "C" void LLVMTimeTraceProfilerFinish(const char* FileName) {
#define SUBTARGET_M68K
#endif
#ifdef LLVM_COMPONENT_CSKY
#define SUBTARGET_CSKY SUBTARGET(CSKY)
#else
#define SUBTARGET_CSKY
#endif
#ifdef LLVM_COMPONENT_MIPS
#define SUBTARGET_MIPS SUBTARGET(Mips)
#else
@ -159,6 +165,7 @@ extern "C" void LLVMTimeTraceProfilerFinish(const char* FileName) {
SUBTARGET_AARCH64 \
SUBTARGET_AVR \
SUBTARGET_M68K \
SUBTARGET_CSKY \
SUBTARGET_MIPS \
SUBTARGET_PPC \
SUBTARGET_SYSTEMZ \

View File

@ -102,6 +102,14 @@ pub fn initialize_available_targets() {
LLVMInitializeM68kAsmPrinter,
LLVMInitializeM68kAsmParser
);
init_target!(
llvm_component = "csky",
LLVMInitializeCSKYTargetInfo,
LLVMInitializeCSKYTarget,
LLVMInitializeCSKYTargetMC,
LLVMInitializeCSKYAsmPrinter,
LLVMInitializeCSKYAsmParser
);
init_target!(
llvm_component = "loongarch",
LLVMInitializeLoongArchTargetInfo,

View File

@ -15,8 +15,9 @@ use rustc_hir::definitions::Definitions;
use rustc_index::IndexVec;
use rustc_middle::ty::TyCtxt;
use rustc_session::config::{self, CrateType, ExternLocation};
use rustc_session::cstore::ExternCrateSource;
use rustc_session::cstore::{CrateDepKind, CrateSource, ExternCrate};
use rustc_session::cstore::{
CrateDepKind, CrateSource, ExternCrate, ExternCrateSource, MetadataLoaderDyn,
};
use rustc_session::lint;
use rustc_session::output::validate_crate_name;
use rustc_session::search_paths::PathKind;
@ -33,6 +34,8 @@ use std::time::Duration;
use std::{cmp, env, iter};
pub struct CStore {
metadata_loader: Box<MetadataLoaderDyn>,
metas: IndexVec<CrateNum, Option<Box<CrateMetadata>>>,
injected_panic_runtime: Option<CrateNum>,
/// This crate needs an allocator and either provides it itself, or finds it in a dependency.
@ -261,10 +264,14 @@ impl CStore {
}
}
pub fn new(local_stable_crate_id: StableCrateId) -> CStore {
pub fn new(
metadata_loader: Box<MetadataLoaderDyn>,
local_stable_crate_id: StableCrateId,
) -> CStore {
let mut stable_crate_ids = StableCrateIdMap::default();
stable_crate_ids.insert(local_stable_crate_id, LOCAL_CRATE);
CStore {
metadata_loader,
// We add an empty entry for LOCAL_CRATE (which maps to zero) in
// order to make array indices in `metas` match with the
// corresponding `CrateNum`. This first entry will always remain
@ -538,10 +545,9 @@ impl<'a, 'tcx> CrateLoader<'a, 'tcx> {
(LoadResult::Previous(cnum), None)
} else {
info!("falling back to a load");
let metadata_loader = self.tcx.metadata_loader(()).borrow();
let mut locator = CrateLocator::new(
self.sess,
&**metadata_loader,
&*self.cstore.metadata_loader,
name,
// The all loop is because `--crate-type=rlib --crate-type=rlib` is
// legal and produces both inside this type.

View File

@ -842,7 +842,7 @@ impl<'a, 'tcx> CrateMetadataRef<'a> {
.decode((self, sess))
}
fn load_proc_macro(self, id: DefIndex, sess: &Session) -> SyntaxExtension {
fn load_proc_macro(self, id: DefIndex, tcx: TyCtxt<'tcx>) -> SyntaxExtension {
let (name, kind, helper_attrs) = match *self.raw_proc_macro(id) {
ProcMacro::CustomDerive { trait_name, attributes, client } => {
let helper_attrs =
@ -861,9 +861,11 @@ impl<'a, 'tcx> CrateMetadataRef<'a> {
}
};
let sess = tcx.sess;
let attrs: Vec<_> = self.get_item_attrs(id, sess).collect();
SyntaxExtension::new(
sess,
tcx.features(),
kind,
self.get_span(id, sess),
helper_attrs,

View File

@ -6,6 +6,7 @@ use crate::rmeta::AttrFlags;
use rustc_ast as ast;
use rustc_attr::Deprecation;
use rustc_data_structures::sync::Lrc;
use rustc_hir::def::{CtorKind, DefKind, Res};
use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, LOCAL_CRATE};
use rustc_hir::definitions::{DefKey, DefPath, DefPathHash};
@ -23,7 +24,6 @@ use rustc_span::hygiene::{ExpnHash, ExpnId};
use rustc_span::symbol::{kw, Symbol};
use rustc_span::Span;
use rustc_data_structures::sync::Lrc;
use std::any::Any;
use super::{Decodable, DecodeContext, DecodeIterator};
@ -522,12 +522,13 @@ impl CStore {
self.get_crate_data(def.krate).get_ctor(def.index)
}
pub fn load_macro_untracked(&self, id: DefId, sess: &Session) -> LoadedMacro {
pub fn load_macro_untracked(&self, id: DefId, tcx: TyCtxt<'_>) -> LoadedMacro {
let sess = tcx.sess;
let _prof_timer = sess.prof.generic_activity("metadata_load_macro");
let data = self.get_crate_data(id.krate);
if data.root.is_proc_macro_crate() {
return LoadedMacro::ProcMacro(data.load_proc_macro(id.index, sess));
return LoadedMacro::ProcMacro(data.load_proc_macro(id.index, tcx));
}
let span = data.get_span(id.index, sess);

View File

@ -40,7 +40,6 @@ macro_rules! arena_types {
rustc_data_structures::sync::Lrc<rustc_ast::Crate>,
)>,
[] output_filenames: std::sync::Arc<rustc_session::config::OutputFilenames>,
[] metadata_loader: rustc_data_structures::steal::Steal<Box<rustc_session::cstore::MetadataLoaderDyn>>,
[] crate_for_resolver: rustc_data_structures::steal::Steal<(rustc_ast::Crate, rustc_ast::AttrVec)>,
[] resolutions: rustc_middle::ty::ResolverGlobalCtxt,
[decode] unsafety_check_result: rustc_middle::mir::UnsafetyCheckResult,
@ -131,6 +130,7 @@ macro_rules! arena_types {
[] closure_kind_origin: (rustc_span::Span, rustc_middle::hir::place::Place<'tcx>),
[] stripped_cfg_items: rustc_ast::expand::StrippedCfgItem,
[] mod_child: rustc_middle::metadata::ModChild,
[] features: rustc_feature::Features,
]);
)
}

View File

@ -60,7 +60,7 @@ use crate::mir::mono::MonoItem;
use crate::ty::TyCtxt;
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_hir::def_id::{CrateNum, DefId, LocalDefId, LOCAL_CRATE};
use rustc_hir::def_id::{CrateNum, DefId, LocalDefId, LocalModDefId, ModDefId, LOCAL_CRATE};
use rustc_hir::definitions::DefPathHash;
use rustc_hir::{HirId, ItemLocalId, OwnerId};
use rustc_query_system::dep_graph::FingerprintStyle;
@ -387,3 +387,53 @@ impl<'tcx> DepNodeParams<TyCtxt<'tcx>> for HirId {
}
}
}
macro_rules! impl_for_typed_def_id {
($Name:ident, $LocalName:ident) => {
impl<'tcx> DepNodeParams<TyCtxt<'tcx>> for $Name {
#[inline(always)]
fn fingerprint_style() -> FingerprintStyle {
FingerprintStyle::DefPathHash
}
#[inline(always)]
fn to_fingerprint(&self, tcx: TyCtxt<'tcx>) -> Fingerprint {
self.to_def_id().to_fingerprint(tcx)
}
#[inline(always)]
fn to_debug_str(&self, tcx: TyCtxt<'tcx>) -> String {
self.to_def_id().to_debug_str(tcx)
}
#[inline(always)]
fn recover(tcx: TyCtxt<'tcx>, dep_node: &DepNode) -> Option<Self> {
DefId::recover(tcx, dep_node).map($Name::new_unchecked)
}
}
impl<'tcx> DepNodeParams<TyCtxt<'tcx>> for $LocalName {
#[inline(always)]
fn fingerprint_style() -> FingerprintStyle {
FingerprintStyle::DefPathHash
}
#[inline(always)]
fn to_fingerprint(&self, tcx: TyCtxt<'tcx>) -> Fingerprint {
self.to_def_id().to_fingerprint(tcx)
}
#[inline(always)]
fn to_debug_str(&self, tcx: TyCtxt<'tcx>) -> String {
self.to_def_id().to_debug_str(tcx)
}
#[inline(always)]
fn recover(tcx: TyCtxt<'tcx>, dep_node: &DepNode) -> Option<Self> {
LocalDefId::recover(tcx, dep_node).map($LocalName::new_unchecked)
}
}
};
}
impl_for_typed_def_id! { ModDefId, LocalModDefId }

View File

@ -8,7 +8,7 @@ mod dep_node;
pub use rustc_query_system::dep_graph::{
debug::DepNodeFilter, hash_result, DepContext, DepNodeColor, DepNodeIndex,
SerializedDepNodeIndex, WorkProduct, WorkProductId,
SerializedDepNodeIndex, WorkProduct, WorkProductId, WorkProductMap,
};
pub use dep_node::{label_strs, DepKind, DepNode, DepNodeExt};

View File

@ -8,7 +8,7 @@ use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::svh::Svh;
use rustc_data_structures::sync::{par_for_each_in, DynSend, DynSync};
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::{DefId, LocalDefId, CRATE_DEF_ID, LOCAL_CRATE};
use rustc_hir::def_id::{DefId, LocalDefId, LocalModDefId, LOCAL_CRATE};
use rustc_hir::definitions::{DefKey, DefPath, DefPathData, DefPathHash};
use rustc_hir::intravisit::{self, Visitor};
use rustc_hir::*;
@ -148,7 +148,7 @@ impl<'hir> Map<'hir> {
}
#[inline]
pub fn module_items(self, module: LocalDefId) -> impl Iterator<Item = ItemId> + 'hir {
pub fn module_items(self, module: LocalModDefId) -> impl Iterator<Item = ItemId> + 'hir {
self.tcx.hir_module_items(module).items()
}
@ -169,8 +169,8 @@ impl<'hir> Map<'hir> {
}
#[inline]
pub fn local_def_id_to_hir_id(self, def_id: LocalDefId) -> HirId {
self.tcx.local_def_id_to_hir_id(def_id)
pub fn local_def_id_to_hir_id(self, def_id: impl Into<LocalDefId>) -> HirId {
self.tcx.local_def_id_to_hir_id(def_id.into())
}
/// Do not call this function directly. The query should be called.
@ -529,8 +529,8 @@ impl<'hir> Map<'hir> {
self.krate_attrs().iter().any(|attr| attr.has_name(sym::rustc_coherence_is_core))
}
pub fn get_module(self, module: LocalDefId) -> (&'hir Mod<'hir>, Span, HirId) {
let hir_id = HirId::make_owner(module);
pub fn get_module(self, module: LocalModDefId) -> (&'hir Mod<'hir>, Span, HirId) {
let hir_id = HirId::make_owner(module.to_local_def_id());
match self.tcx.hir_owner(hir_id.owner).map(|o| o.node) {
Some(OwnerNode::Item(&Item { span, kind: ItemKind::Mod(ref m), .. })) => {
(m, span, hir_id)
@ -542,7 +542,7 @@ impl<'hir> Map<'hir> {
/// Walks the contents of the local crate. See also `visit_all_item_likes_in_crate`.
pub fn walk_toplevel_module(self, visitor: &mut impl Visitor<'hir>) {
let (top_mod, span, hir_id) = self.get_module(CRATE_DEF_ID);
let (top_mod, span, hir_id) = self.get_module(LocalModDefId::CRATE_DEF_ID);
visitor.visit_mod(top_mod, span, hir_id);
}
@ -595,7 +595,7 @@ impl<'hir> Map<'hir> {
/// This method is the equivalent of `visit_all_item_likes_in_crate` but restricted to
/// item-likes in a single module.
pub fn visit_item_likes_in_module<V>(self, module: LocalDefId, visitor: &mut V)
pub fn visit_item_likes_in_module<V>(self, module: LocalModDefId, visitor: &mut V)
where
V: Visitor<'hir>,
{
@ -618,17 +618,19 @@ impl<'hir> Map<'hir> {
}
}
pub fn for_each_module(self, mut f: impl FnMut(LocalDefId)) {
pub fn for_each_module(self, mut f: impl FnMut(LocalModDefId)) {
let crate_items = self.tcx.hir_crate_items(());
for module in crate_items.submodules.iter() {
f(module.def_id)
f(LocalModDefId::new_unchecked(module.def_id))
}
}
#[inline]
pub fn par_for_each_module(self, f: impl Fn(LocalDefId) + DynSend + DynSync) {
pub fn par_for_each_module(self, f: impl Fn(LocalModDefId) + DynSend + DynSync) {
let crate_items = self.tcx.hir_crate_items(());
par_for_each_in(&crate_items.submodules[..], |module| f(module.def_id))
par_for_each_in(&crate_items.submodules[..], |module| {
f(LocalModDefId::new_unchecked(module.def_id))
})
}
/// Returns an iterator for the nodes in the ancestor tree of the `current_id`
@ -1324,7 +1326,7 @@ fn hir_id_to_string(map: Map<'_>, id: HirId) -> String {
}
}
pub(super) fn hir_module_items(tcx: TyCtxt<'_>, module_id: LocalDefId) -> ModuleItems {
pub(super) fn hir_module_items(tcx: TyCtxt<'_>, module_id: LocalModDefId) -> ModuleItems {
let mut collector = ItemCollector::new(tcx, false);
let (hir_mod, span, hir_id) = tcx.hir().get_module(module_id);

View File

@ -11,7 +11,7 @@ use crate::ty::{EarlyBinder, ImplSubject, TyCtxt};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::{par_for_each_in, DynSend, DynSync};
use rustc_hir::def::DefKind;
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::def_id::{DefId, LocalDefId, LocalModDefId};
use rustc_hir::*;
use rustc_query_system::ich::StableHashingContext;
use rustc_span::{ExpnId, DUMMY_SP};
@ -101,22 +101,22 @@ impl<'tcx> TyCtxt<'tcx> {
map::Map { tcx: self }
}
pub fn parent_module(self, id: HirId) -> LocalDefId {
pub fn parent_module(self, id: HirId) -> LocalModDefId {
if !id.is_owner() && self.def_kind(id.owner) == DefKind::Mod {
id.owner.def_id
LocalModDefId::new_unchecked(id.owner.def_id)
} else {
self.parent_module_from_def_id(id.owner.def_id)
}
}
pub fn parent_module_from_def_id(self, mut id: LocalDefId) -> LocalDefId {
pub fn parent_module_from_def_id(self, mut id: LocalDefId) -> LocalModDefId {
while let Some(parent) = self.opt_local_parent(id) {
id = parent;
if self.def_kind(id) == DefKind::Mod {
break;
}
}
id
LocalModDefId::new_unchecked(id)
}
pub fn impl_subject(self, def_id: DefId) -> EarlyBinder<ImplSubject<'tcx>> {

View File

@ -35,7 +35,6 @@
#![feature(if_let_guard)]
#![feature(inline_const)]
#![feature(iter_from_generator)]
#![feature(local_key_cell_methods)]
#![feature(negative_impls)]
#![feature(never_type)]
#![feature(extern_types)]

View File

@ -235,6 +235,7 @@ trivial! {
rustc_hir::def_id::DefId,
rustc_hir::def_id::DefIndex,
rustc_hir::def_id::LocalDefId,
rustc_hir::def_id::LocalModDefId,
rustc_hir::def::DefKind,
rustc_hir::Defaultness,
rustc_hir::definitions::DefKey,

View File

@ -8,7 +8,7 @@ use crate::ty::fast_reject::SimplifiedType;
use crate::ty::layout::{TyAndLayout, ValidityRequirement};
use crate::ty::{self, Ty, TyCtxt};
use crate::ty::{GenericArg, GenericArgsRef};
use rustc_hir::def_id::{CrateNum, DefId, LocalDefId, LOCAL_CRATE};
use rustc_hir::def_id::{CrateNum, DefId, LocalDefId, LocalModDefId, ModDefId, LOCAL_CRATE};
use rustc_hir::hir_id::{HirId, OwnerId};
use rustc_query_system::query::{DefaultCacheSelector, SingleCacheSelector, VecCacheSelector};
use rustc_span::symbol::{Ident, Symbol};
@ -175,6 +175,41 @@ impl AsLocalKey for DefId {
}
}
impl Key for LocalModDefId {
type CacheSelector = DefaultCacheSelector<Self>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.def_span(*self)
}
#[inline(always)]
fn key_as_def_id(&self) -> Option<DefId> {
Some(self.to_def_id())
}
}
impl Key for ModDefId {
type CacheSelector = DefaultCacheSelector<Self>;
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
tcx.def_span(*self)
}
#[inline(always)]
fn key_as_def_id(&self) -> Option<DefId> {
Some(self.to_def_id())
}
}
impl AsLocalKey for ModDefId {
type LocalKey = LocalModDefId;
#[inline(always)]
fn as_local_key(&self) -> Option<Self::LocalKey> {
self.as_local()
}
}
impl Key for SimplifiedType {
type CacheSelector = DefaultCacheSelector<Self>;

View File

@ -67,7 +67,7 @@ use rustc_errors::ErrorGuaranteed;
use rustc_hir as hir;
use rustc_hir::def::{DefKind, DocLinkResMap};
use rustc_hir::def_id::{
CrateNum, DefId, DefIdMap, DefIdSet, LocalDefId, LocalDefIdMap, LocalDefIdSet,
CrateNum, DefId, DefIdMap, DefIdSet, LocalDefId, LocalDefIdMap, LocalDefIdSet, LocalModDefId,
};
use rustc_hir::lang_items::{LangItem, LanguageItems};
use rustc_hir::{Crate, ItemLocalId, TraitCandidate};
@ -167,7 +167,7 @@ rustc_queries! {
///
/// This can be conveniently accessed by `tcx.hir().visit_item_likes_in_module`.
/// Avoid calling this query directly.
query hir_module_items(key: LocalDefId) -> &'tcx rustc_middle::hir::ModuleItems {
query hir_module_items(key: LocalModDefId) -> &'tcx rustc_middle::hir::ModuleItems {
arena_cache
desc { |tcx| "getting HIR module items in `{}`", tcx.def_path_str(key) }
cache_on_disk_if { true }
@ -896,7 +896,7 @@ rustc_queries! {
}
/// Performs lint checking for the module.
query lint_mod(key: LocalDefId) -> () {
query lint_mod(key: LocalModDefId) -> () {
desc { |tcx| "linting {}", describe_as_module(key, tcx) }
}
@ -905,35 +905,35 @@ rustc_queries! {
}
/// Checks the attributes in the module.
query check_mod_attrs(key: LocalDefId) -> () {
query check_mod_attrs(key: LocalModDefId) -> () {
desc { |tcx| "checking attributes in {}", describe_as_module(key, tcx) }
}
/// Checks for uses of unstable APIs in the module.
query check_mod_unstable_api_usage(key: LocalDefId) -> () {
query check_mod_unstable_api_usage(key: LocalModDefId) -> () {
desc { |tcx| "checking for unstable API usage in {}", describe_as_module(key, tcx) }
}
/// Checks the const bodies in the module for illegal operations (e.g. `if` or `loop`).
query check_mod_const_bodies(key: LocalDefId) -> () {
query check_mod_const_bodies(key: LocalModDefId) -> () {
desc { |tcx| "checking consts in {}", describe_as_module(key, tcx) }
}
/// Checks the loops in the module.
query check_mod_loops(key: LocalDefId) -> () {
query check_mod_loops(key: LocalModDefId) -> () {
desc { |tcx| "checking loops in {}", describe_as_module(key, tcx) }
}
query check_mod_naked_functions(key: LocalDefId) -> () {
query check_mod_naked_functions(key: LocalModDefId) -> () {
desc { |tcx| "checking naked functions in {}", describe_as_module(key, tcx) }
}
query check_mod_item_types(key: LocalDefId) -> () {
query check_mod_item_types(key: LocalModDefId) -> () {
desc { |tcx| "checking item types in {}", describe_as_module(key, tcx) }
}
query check_mod_privacy(key: LocalDefId) -> () {
desc { |tcx| "checking privacy in {}", describe_as_module(key, tcx) }
query check_mod_privacy(key: LocalModDefId) -> () {
desc { |tcx| "checking privacy in {}", describe_as_module(key.to_local_def_id(), tcx) }
}
query check_liveness(key: LocalDefId) {
@ -952,19 +952,19 @@ rustc_queries! {
desc { "finding live symbols in crate" }
}
query check_mod_deathness(key: LocalDefId) -> () {
query check_mod_deathness(key: LocalModDefId) -> () {
desc { |tcx| "checking deathness of variables in {}", describe_as_module(key, tcx) }
}
query check_mod_impl_wf(key: LocalDefId) -> () {
query check_mod_impl_wf(key: LocalModDefId) -> () {
desc { |tcx| "checking that impls are well-formed in {}", describe_as_module(key, tcx) }
}
query check_mod_type_wf(key: LocalDefId) -> () {
query check_mod_type_wf(key: LocalModDefId) -> () {
desc { |tcx| "checking that types are well-formed in {}", describe_as_module(key, tcx) }
}
query collect_mod_item_types(key: LocalDefId) -> () {
query collect_mod_item_types(key: LocalModDefId) -> () {
desc { |tcx| "collecting item types in {}", describe_as_module(key, tcx) }
}
@ -2096,12 +2096,6 @@ rustc_queries! {
desc { "looking up enabled feature gates" }
}
query metadata_loader((): ()) -> &'tcx Steal<Box<rustc_session::cstore::MetadataLoaderDyn>> {
feedable
no_hash
desc { "raw operations for metadata file access" }
}
query crate_for_resolver((): ()) -> &'tcx Steal<(rustc_ast::Crate, rustc_ast::AttrVec)> {
feedable
no_hash

View File

@ -545,6 +545,7 @@ macro_rules! define_feedable {
mod sealed {
use super::{DefId, LocalDefId, OwnerId};
use rustc_hir::def_id::{LocalModDefId, ModDefId};
/// An analogue of the `Into` trait that's intended only for query parameters.
///
@ -588,6 +589,27 @@ mod sealed {
self.to_def_id()
}
}
impl IntoQueryParam<DefId> for ModDefId {
#[inline(always)]
fn into_query_param(self) -> DefId {
self.to_def_id()
}
}
impl IntoQueryParam<DefId> for LocalModDefId {
#[inline(always)]
fn into_query_param(self) -> DefId {
self.to_def_id()
}
}
impl IntoQueryParam<LocalDefId> for LocalModDefId {
#[inline(always)]
fn into_query_param(self) -> LocalDefId {
self.into()
}
}
}
pub use sealed::IntoQueryParam;
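Several files in this sync thread the new typed module ids (`ModDefId`, `LocalModDefId`) through `Into`/`IntoQueryParam` conversions like the ones above. A self-contained sketch of the newtype pattern with illustrative names (the real constructors and invariants live in `rustc_hir::def_id`):

```rust
// Untyped identifier, standing in for `LocalDefId`.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct RawId(u32);

// Typed wrapper, standing in for `LocalModDefId`: same representation, but the
// type records that the id is known to refer to a module.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct ModId(RawId);

impl ModId {
    // Mirrors `new_unchecked` in the diff: the caller promises this is a module.
    fn new_unchecked(raw: RawId) -> Self {
        ModId(raw)
    }
    fn to_raw(self) -> RawId {
        self.0
    }
}

// Mirrors the `impl Into<LocalDefId>` parameters: callers can pass either the
// typed or the untyped id without manual conversions.
impl From<ModId> for RawId {
    fn from(id: ModId) -> RawId {
        id.to_raw()
    }
}

fn lookup(id: impl Into<RawId>) -> u32 {
    id.into().0
}

fn main() {
    let raw = RawId(7);
    let module = ModId::new_unchecked(raw);
    assert_eq!(lookup(raw), lookup(module));
    println!("both forms accepted: {}", lookup(module));
}
```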

View File

@ -346,6 +346,7 @@ pub enum ExprKind<'tcx> {
/// A `match` expression.
Match {
scrutinee: ExprId,
scrutinee_hir_id: hir::HirId,
arms: Box<[ArmId]>,
},
/// A block.

View File

@ -70,7 +70,7 @@ pub fn walk_expr<'a, 'tcx: 'a, V: Visitor<'a, 'tcx>>(visitor: &mut V, expr: &Exp
visitor.visit_expr(&visitor.thir()[expr]);
}
Loop { body } => visitor.visit_expr(&visitor.thir()[body]),
Match { scrutinee, ref arms } => {
Match { scrutinee, ref arms, .. } => {
visitor.visit_expr(&visitor.thir()[scrutinee]);
for &arm in &**arms {
visitor.visit_arm(&visitor.thir()[arm]);

View File

@ -402,7 +402,7 @@ pub enum ObligationCauseCode<'tcx> {
OpaqueReturnType(Option<(Ty<'tcx>, Span)>),
/// Block implicit return
BlockTailExpression(hir::HirId),
BlockTailExpression(hir::HirId, hir::MatchSource),
/// #[feature(trivial_bounds)] is not enabled
TrivialBound,
@ -543,7 +543,6 @@ pub struct MatchExpressionArmCause<'tcx> {
pub scrut_span: Span,
pub source: hir::MatchSource,
pub prior_arms: Vec<Span>,
pub scrut_hir_id: hir::HirId,
pub opt_suggest_box_span: Option<Span>,
}
@ -649,7 +648,7 @@ pub enum ImplSource<'tcx, N> {
/// for some type parameter. The `Vec<N>` represents the
/// obligations incurred from normalizing the where-clause (if
/// any).
Param(ty::BoundConstness, Vec<N>),
Param(Vec<N>),
/// Successful resolution for a builtin impl.
Builtin(BuiltinImplSource, Vec<N>),
@ -659,21 +658,21 @@ impl<'tcx, N> ImplSource<'tcx, N> {
pub fn nested_obligations(self) -> Vec<N> {
match self {
ImplSource::UserDefined(i) => i.nested,
ImplSource::Param(_, n) | ImplSource::Builtin(_, n) => n,
ImplSource::Param(n) | ImplSource::Builtin(_, n) => n,
}
}
pub fn borrow_nested_obligations(&self) -> &[N] {
match self {
ImplSource::UserDefined(i) => &i.nested,
ImplSource::Param(_, n) | ImplSource::Builtin(_, n) => &n,
ImplSource::Param(n) | ImplSource::Builtin(_, n) => &n,
}
}
pub fn borrow_nested_obligations_mut(&mut self) -> &mut [N] {
match self {
ImplSource::UserDefined(i) => &mut i.nested,
ImplSource::Param(_, n) | ImplSource::Builtin(_, n) => n,
ImplSource::Param(n) | ImplSource::Builtin(_, n) => n,
}
}
@ -687,7 +686,7 @@ impl<'tcx, N> ImplSource<'tcx, N> {
args: i.args,
nested: i.nested.into_iter().map(f).collect(),
}),
ImplSource::Param(ct, n) => ImplSource::Param(ct, n.into_iter().map(f).collect()),
ImplSource::Param(n) => ImplSource::Param(n.into_iter().map(f).collect()),
ImplSource::Builtin(source, n) => {
ImplSource::Builtin(source, n.into_iter().map(f).collect())
}

View File

@ -127,6 +127,7 @@ pub enum SelectionCandidate<'tcx> {
/// an applicable bound in the trait definition. The `usize` is an index
/// into the list returned by `tcx.item_bounds`. The constness is the
/// constness of the bound in the trait.
// FIXME(effects) do we need this constness
ProjectionCandidate(usize, ty::BoundConstness),
/// Implementation of a `Fn`-family trait by one of the anonymous types

View File

@ -13,8 +13,8 @@ impl<'tcx, N: fmt::Debug> fmt::Debug for traits::ImplSource<'tcx, N> {
write!(f, "Builtin({source:?}, {d:?})")
}
super::ImplSource::Param(ct, n) => {
write!(f, "ImplSourceParamData({n:?}, {ct:?})")
super::ImplSource::Param(n) => {
write!(f, "ImplSourceParamData({n:?})")
}
}
}

View File

@ -238,7 +238,7 @@ pub struct ImplHeader<'tcx> {
pub impl_def_id: DefId,
pub self_ty: Ty<'tcx>,
pub trait_ref: Option<TraitRef<'tcx>>,
pub predicates: Vec<Predicate<'tcx>>,
pub predicates: Vec<(Predicate<'tcx>, Span)>,
}
#[derive(Copy, Clone, PartialEq, Eq, Debug, TypeFoldable, TypeVisitable)]
@ -355,8 +355,8 @@ impl TyCtxt<'_> {
#[inline]
#[track_caller]
pub fn local_parent(self, id: LocalDefId) -> LocalDefId {
self.parent(id.to_def_id()).expect_local()
pub fn local_parent(self, id: impl Into<LocalDefId>) -> LocalDefId {
self.parent(id.into().to_def_id()).expect_local()
}
pub fn is_descendant_of(self, mut descendant: DefId, ancestor: DefId) -> bool {

View File

@ -329,7 +329,8 @@ impl<'tcx, P: Printer<'tcx>> Print<'tcx, P> for ty::Const<'tcx> {
}
// This is only used by query descriptions
pub fn describe_as_module(def_id: LocalDefId, tcx: TyCtxt<'_>) -> String {
pub fn describe_as_module(def_id: impl Into<LocalDefId>, tcx: TyCtxt<'_>) -> String {
let def_id = def_id.into();
if def_id.is_top_level_module() {
"top-level module".to_string()
} else {

View File

@ -11,7 +11,7 @@ use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
use rustc_data_structures::sso::SsoHashSet;
use rustc_hir as hir;
use rustc_hir::def::{self, CtorKind, DefKind, Namespace};
use rustc_hir::def_id::{DefId, DefIdSet, CRATE_DEF_ID, LOCAL_CRATE};
use rustc_hir::def_id::{DefId, DefIdSet, ModDefId, CRATE_DEF_ID, LOCAL_CRATE};
use rustc_hir::definitions::{DefKey, DefPathData, DefPathDataName, DisambiguatedDefPathData};
use rustc_hir::LangItem;
use rustc_session::config::TrimmedDefPaths;
@ -326,7 +326,8 @@ pub trait PrettyPrinter<'tcx>:
{
this
.tcx()
.module_children(visible_parent)
// FIXME(typed_def_id): Further propagate ModDefId
.module_children(ModDefId::new_unchecked(*visible_parent))
.iter()
.filter(|child| child.res.opt_def_id() == Some(def_id))
.find(|child| child.vis.is_public() && child.ident.name != kw::Underscore)
@ -551,7 +552,8 @@ pub trait PrettyPrinter<'tcx>:
// that's public and whose identifier isn't `_`.
let reexport = self
.tcx()
.module_children(visible_parent)
// FIXME(typed_def_id): Further propagate ModDefId
.module_children(ModDefId::new_unchecked(visible_parent))
.iter()
.filter(|child| child.res.opt_def_id() == Some(def_id))
.find(|child| child.vis.is_public() && child.ident.name != kw::Underscore)

View File

@ -65,7 +65,7 @@ impl<'tcx, 'body> ParseCtxt<'tcx, 'body> {
let target = self.parse_block(args[1])?;
self.parse_call(args[2], destination, target)
},
ExprKind::Match { scrutinee, arms } => {
ExprKind::Match { scrutinee, arms, .. } => {
let discr = self.parse_operand(*scrutinee)?;
self.parse_match(arms, expr.span).map(|t| TerminatorKind::SwitchInt { discr, targets: t })
},

View File

@ -47,7 +47,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
ExprKind::Block { block: ast_block } => {
this.ast_block(destination, block, ast_block, source_info)
}
ExprKind::Match { scrutinee, ref arms } => {
ExprKind::Match { scrutinee, ref arms, .. } => {
this.match_expr(destination, expr_span, block, &this.thir[scrutinee], arms)
}
ExprKind::If { cond, then, else_opt, if_then_scope } => {

View File

@ -445,7 +445,6 @@ impl<'tcx> Cx<'tcx> {
let rhs = self.mirror_expr(rhs);
self.overloaded_operator(expr, Box::new([lhs, rhs]))
} else {
// FIXME overflow
match op.node {
hir::BinOpKind::And => ExprKind::LogicalOp {
op: LogicalOp::And,
@ -733,6 +732,7 @@ impl<'tcx> Cx<'tcx> {
},
hir::ExprKind::Match(ref discr, ref arms, _) => ExprKind::Match {
scrutinee: self.mirror_expr(discr),
scrutinee_hir_id: discr.hir_id,
arms: arms.iter().map(|a| self.convert_arm(a)).collect(),
},
hir::ExprKind::Loop(ref body, ..) => {

View File

@ -135,10 +135,12 @@ impl<'a, 'tcx> Visitor<'a, 'tcx> for MatchVisitor<'a, '_, 'tcx> {
});
return;
}
ExprKind::Match { scrutinee, box ref arms } => {
ExprKind::Match { scrutinee, scrutinee_hir_id, box ref arms } => {
let source = match ex.span.desugaring_kind() {
Some(DesugaringKind::ForLoop) => hir::MatchSource::ForLoopDesugar,
Some(DesugaringKind::QuestionMark) => hir::MatchSource::TryDesugar,
Some(DesugaringKind::QuestionMark) => {
hir::MatchSource::TryDesugar(scrutinee_hir_id)
}
Some(DesugaringKind::Await) => hir::MatchSource::AwaitDesugar,
_ => hir::MatchSource::Normal,
};
@ -277,7 +279,7 @@ impl<'p, 'tcx> MatchVisitor<'_, 'p, 'tcx> {
| hir::MatchSource::FormatArgs => report_arm_reachability(&cx, &report),
// Unreachable patterns in try and await expressions occur when one of
// the arms are an uninhabited type. Which is OK.
hir::MatchSource::AwaitDesugar | hir::MatchSource::TryDesugar => {}
hir::MatchSource::AwaitDesugar | hir::MatchSource::TryDesugar(_) => {}
}
// Check if the match is exhaustive.

View File

@ -325,6 +325,11 @@ impl<'tcx> ConstToPat<'tcx> {
// `PartialEq::eq` on it.
return Err(FallbackToConstRef);
}
ty::FnDef(..) => {
self.saw_const_match_error.set(true);
tcx.sess.emit_err(InvalidPattern { span, non_sm_ty: ty });
PatKind::Wild
}
ty::Adt(adt_def, _) if !self.type_marked_structural(ty) => {
debug!("adt_def {:?} has !type_marked_structural for cv.ty: {:?}", adt_def, ty,);
self.saw_const_match_error.set(true);
@ -440,7 +445,7 @@ impl<'tcx> ConstToPat<'tcx> {
}
}
},
ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::FnDef(..) => PatKind::Constant {
ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) => PatKind::Constant {
value: mir::ConstantKind::Ty(ty::Const::new_value(tcx, cv, ty)),
},
ty::FnPtr(..) | ty::RawPtr(..) => unreachable!(),

View File

@ -321,7 +321,7 @@ impl<'a, 'tcx> ThirPrinter<'a, 'tcx> {
print_indented!(self, format!("pat: {:?}", pat), depth_lvl + 1);
print_indented!(self, "}", depth_lvl);
}
Match { scrutinee, arms } => {
Match { scrutinee, arms, .. } => {
print_indented!(self, "Match {", depth_lvl);
print_indented!(self, "scrutinee:", depth_lvl + 1);
self.print_expr(*scrutinee, depth_lvl + 2);

View File

@ -8,25 +8,50 @@ use debug::{DebugCounters, NESTED_INDENT};
use graph::{BasicCoverageBlock, BcbBranch, CoverageGraph, TraverseCoverageGraphWithLoops};
use spans::CoverageSpan;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::graph::WithNumNodes;
use rustc_index::bit_set::BitSet;
use rustc_index::IndexVec;
use rustc_middle::mir::coverage::*;
/// Manages the counter and expression indexes/IDs to generate `CoverageKind` components for MIR
/// `Coverage` statements.
/// Generates and stores coverage counter and coverage expression information
/// associated with nodes/edges in the BCB graph.
pub(super) struct CoverageCounters {
function_source_hash: u64,
next_counter_id: CounterId,
next_expression_id: ExpressionId,
/// Coverage counters/expressions that are associated with individual BCBs.
bcb_counters: IndexVec<BasicCoverageBlock, Option<CoverageKind>>,
/// Coverage counters/expressions that are associated with the control-flow
/// edge between two BCBs.
bcb_edge_counters: FxHashMap<(BasicCoverageBlock, BasicCoverageBlock), CoverageKind>,
/// Tracks which BCBs have a counter associated with some incoming edge.
/// Only used by debug assertions, to verify that BCBs with incoming edge
/// counters do not have their own physical counters (expressions are allowed).
bcb_has_incoming_edge_counters: BitSet<BasicCoverageBlock>,
/// Expression nodes that are not directly associated with any particular
/// BCB/edge, but are needed as operands to more complex expressions.
/// These are always `CoverageKind::Expression`.
pub(super) intermediate_expressions: Vec<CoverageKind>,
pub debug_counters: DebugCounters,
}
impl CoverageCounters {
pub fn new(function_source_hash: u64) -> Self {
pub(super) fn new(function_source_hash: u64, basic_coverage_blocks: &CoverageGraph) -> Self {
let num_bcbs = basic_coverage_blocks.num_nodes();
Self {
function_source_hash,
next_counter_id: CounterId::START,
next_expression_id: ExpressionId::START,
bcb_counters: IndexVec::from_elem_n(None, num_bcbs),
bcb_edge_counters: FxHashMap::default(),
bcb_has_incoming_edge_counters: BitSet::new_empty(num_bcbs),
intermediate_expressions: Vec::new(),
debug_counters: DebugCounters::new(),
}
}
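The new fields above move counter storage onto `CoverageCounters` itself: one optional counter per BCB node (an `IndexVec`), plus a map of counters keyed by `(from, to)` edges. Below is a minimal stand-alone sketch of that storage shape and its set-once checks, using plain std containers and invented names (`CounterStore`, `Counter`, `NodeId` are not rustc types):

use std::collections::HashMap;

type NodeId = usize;

#[derive(Debug, Clone)]
enum Counter {
    Physical(u32),
    Expression(u32),
}

struct CounterStore {
    // One optional counter per BCB node (mirrors `bcb_counters: IndexVec<_, Option<_>>`).
    node_counters: Vec<Option<Counter>>,
    // Counters attached to a specific control-flow edge (mirrors `bcb_edge_counters`).
    edge_counters: HashMap<(NodeId, NodeId), Counter>,
}

impl CounterStore {
    fn new(num_nodes: usize) -> Self {
        Self { node_counters: vec![None; num_nodes], edge_counters: HashMap::new() }
    }

    fn set_node_counter(&mut self, node: NodeId, c: Counter) -> Result<(), String> {
        match self.node_counters[node].replace(c) {
            Some(old) => Err(format!("node {node} already had counter {old:?}")),
            None => Ok(()),
        }
    }

    fn set_edge_counter(&mut self, from: NodeId, to: NodeId, c: Counter) -> Result<(), String> {
        match self.edge_counters.insert((from, to), c) {
            Some(old) => Err(format!("edge {from}->{to} already had counter {old:?}")),
            None => Ok(()),
        }
    }
}

fn main() {
    let mut store = CounterStore::new(3);
    store.set_node_counter(0, Counter::Physical(0)).unwrap();
    store.set_edge_counter(0, 2, Counter::Expression(0)).unwrap();
    // Setting a node counter twice is an error, loosely mirroring `set_bcb_counter`.
    assert!(store.set_node_counter(0, Counter::Physical(1)).is_err());
}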
@ -38,15 +63,14 @@ impl CoverageCounters {
}
/// Makes `CoverageKind` `Counter`s and `Expressions` for the `BasicCoverageBlock`s directly or
/// indirectly associated with `CoverageSpans`, and returns additional `Expression`s
/// indirectly associated with `CoverageSpans`, and accumulates additional `Expression`s
/// representing intermediate values.
pub fn make_bcb_counters(
&mut self,
basic_coverage_blocks: &mut CoverageGraph,
basic_coverage_blocks: &CoverageGraph,
coverage_spans: &[CoverageSpan],
) -> Result<Vec<CoverageKind>, Error> {
let mut bcb_counters = BcbCounters::new(self, basic_coverage_blocks);
bcb_counters.make_bcb_counters(coverage_spans)
) -> Result<(), Error> {
MakeBcbCounters::new(self, basic_coverage_blocks).make_bcb_counters(coverage_spans)
}
fn make_counter<F>(&mut self, debug_block_label_fn: F) -> CoverageKind
@ -106,21 +130,95 @@ impl CoverageCounters {
self.next_expression_id = next.next_id();
next
}
fn set_bcb_counter(
&mut self,
bcb: BasicCoverageBlock,
counter_kind: CoverageKind,
) -> Result<Operand, Error> {
debug_assert!(
// If the BCB has an edge counter (to be injected into a new `BasicBlock`), it can also
// have an expression (to be injected into an existing `BasicBlock` represented by this
// `BasicCoverageBlock`).
counter_kind.is_expression() || !self.bcb_has_incoming_edge_counters.contains(bcb),
"attempt to add a `Counter` to a BCB target with existing incoming edge counters"
);
let operand = counter_kind.as_operand();
if let Some(replaced) = self.bcb_counters[bcb].replace(counter_kind) {
Error::from_string(format!(
"attempt to set a BasicCoverageBlock coverage counter more than once; \
{bcb:?} already had counter {replaced:?}",
))
} else {
Ok(operand)
}
}
fn set_bcb_edge_counter(
&mut self,
from_bcb: BasicCoverageBlock,
to_bcb: BasicCoverageBlock,
counter_kind: CoverageKind,
) -> Result<Operand, Error> {
if level_enabled!(tracing::Level::DEBUG) {
// If the BCB has an edge counter (to be injected into a new `BasicBlock`), it can also
// have an expression (to be injected into an existing `BasicBlock` represented by this
// `BasicCoverageBlock`).
if self.bcb_counter(to_bcb).is_some_and(|c| !c.is_expression()) {
return Error::from_string(format!(
"attempt to add an incoming edge counter from {from_bcb:?} when the target BCB already \
has a `Counter`"
));
}
}
self.bcb_has_incoming_edge_counters.insert(to_bcb);
let operand = counter_kind.as_operand();
if let Some(replaced) = self.bcb_edge_counters.insert((from_bcb, to_bcb), counter_kind) {
Error::from_string(format!(
"attempt to set an edge counter more than once; from_bcb: \
{from_bcb:?} already had counter {replaced:?}",
))
} else {
Ok(operand)
}
}
pub(super) fn bcb_counter(&self, bcb: BasicCoverageBlock) -> Option<&CoverageKind> {
self.bcb_counters[bcb].as_ref()
}
pub(super) fn take_bcb_counter(&mut self, bcb: BasicCoverageBlock) -> Option<CoverageKind> {
self.bcb_counters[bcb].take()
}
pub(super) fn drain_bcb_counters(
&mut self,
) -> impl Iterator<Item = (BasicCoverageBlock, CoverageKind)> + '_ {
self.bcb_counters
.iter_enumerated_mut()
.filter_map(|(bcb, counter)| Some((bcb, counter.take()?)))
}
pub(super) fn drain_bcb_edge_counters(
&mut self,
) -> impl Iterator<Item = ((BasicCoverageBlock, BasicCoverageBlock), CoverageKind)> + '_ {
self.bcb_edge_counters.drain()
}
}
/// Traverse the `CoverageGraph` and add either a `Counter` or `Expression` to every BCB, to be
/// injected with `CoverageSpan`s. `Expressions` have no runtime overhead, so if a viable expression
/// (adding or subtracting two other counters or expressions) can compute the same result as an
/// embedded counter, an `Expression` should be used.
struct BcbCounters<'a> {
struct MakeBcbCounters<'a> {
coverage_counters: &'a mut CoverageCounters,
basic_coverage_blocks: &'a mut CoverageGraph,
basic_coverage_blocks: &'a CoverageGraph,
}
impl<'a> BcbCounters<'a> {
impl<'a> MakeBcbCounters<'a> {
fn new(
coverage_counters: &'a mut CoverageCounters,
basic_coverage_blocks: &'a mut CoverageGraph,
basic_coverage_blocks: &'a CoverageGraph,
) -> Self {
Self { coverage_counters, basic_coverage_blocks }
}
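The doc comment above captures the core trade-off: physical counters cost runtime, expressions do not. For a two-way branch, the branching block's count equals the sum of its successors' counts, so one successor can be derived at report time as `parent - other` instead of getting its own counter. A small illustrative sketch (invented names, not rustc's representation):

// Sketch of the counter-vs-expression idea: at a two-way branch, one arm's
// count can be derived as an expression instead of paying for a second
// physical counter.
#[derive(Debug, Clone, Copy)]
enum Count {
    Counter(u64),  // value observed by an injected physical counter
    Sub(u64, u64), // expression: lhs - rhs, evaluated when the report is built
}

fn eval(c: Count) -> u64 {
    match c {
        Count::Counter(n) => n,
        Count::Sub(lhs, rhs) => lhs - rhs,
    }
}

fn main() {
    // Suppose the branching block executed 10 times and its `then` successor 7 times.
    let branching = Count::Counter(10);
    let then_branch = Count::Counter(7);
    // The `else` successor needs no physical counter: its count is parent - then.
    let else_branch = Count::Sub(eval(branching), eval(then_branch));
    assert_eq!(eval(else_branch), 3);
}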
@ -135,13 +233,9 @@ impl<'a> BcbCounters<'a> {
/// Returns any non-code-span expressions created to represent intermediate values (such as to
/// add two counters so the result can be subtracted from another counter), or an Error with
/// message for subsequent debugging.
fn make_bcb_counters(
&mut self,
coverage_spans: &[CoverageSpan],
) -> Result<Vec<CoverageKind>, Error> {
fn make_bcb_counters(&mut self, coverage_spans: &[CoverageSpan]) -> Result<(), Error> {
debug!("make_bcb_counters(): adding a counter or expression to each BasicCoverageBlock");
let num_bcbs = self.basic_coverage_blocks.num_nodes();
let mut collect_intermediate_expressions = Vec::with_capacity(num_bcbs);
let mut bcbs_with_coverage = BitSet::new_empty(num_bcbs);
for covspan in coverage_spans {
@ -162,16 +256,10 @@ impl<'a> BcbCounters<'a> {
while let Some(bcb) = traversal.next(self.basic_coverage_blocks) {
if bcbs_with_coverage.contains(bcb) {
debug!("{:?} has at least one `CoverageSpan`. Get or make its counter", bcb);
let branching_counter_operand =
self.get_or_make_counter_operand(bcb, &mut collect_intermediate_expressions)?;
let branching_counter_operand = self.get_or_make_counter_operand(bcb)?;
if self.bcb_needs_branch_counters(bcb) {
self.make_branch_counters(
&mut traversal,
bcb,
branching_counter_operand,
&mut collect_intermediate_expressions,
)?;
self.make_branch_counters(&mut traversal, bcb, branching_counter_operand)?;
}
} else {
debug!(
@ -183,7 +271,7 @@ impl<'a> BcbCounters<'a> {
}
if traversal.is_complete() {
Ok(collect_intermediate_expressions)
Ok(())
} else {
Error::from_string(format!(
"`TraverseCoverageGraphWithLoops` missed some `BasicCoverageBlock`s: {:?}",
@ -197,7 +285,6 @@ impl<'a> BcbCounters<'a> {
traversal: &mut TraverseCoverageGraphWithLoops,
branching_bcb: BasicCoverageBlock,
branching_counter_operand: Operand,
collect_intermediate_expressions: &mut Vec<CoverageKind>,
) -> Result<(), Error> {
let branches = self.bcb_branches(branching_bcb);
debug!(
@ -205,9 +292,7 @@ impl<'a> BcbCounters<'a> {
branching_bcb,
branches
.iter()
.map(|branch| {
format!("{:?}: {:?}", branch, branch.counter(&self.basic_coverage_blocks))
})
.map(|branch| { format!("{:?}: {:?}", branch, self.branch_counter(branch)) })
.collect::<Vec<_>>()
.join("\n "),
);
@ -233,17 +318,10 @@ impl<'a> BcbCounters<'a> {
counter",
branch, branching_bcb
);
self.get_or_make_counter_operand(
branch.target_bcb,
collect_intermediate_expressions,
)?
self.get_or_make_counter_operand(branch.target_bcb)?
} else {
debug!(" {:?} has multiple incoming edges, so adding an edge counter", branch);
self.get_or_make_edge_counter_operand(
branching_bcb,
branch.target_bcb,
collect_intermediate_expressions,
)?
self.get_or_make_edge_counter_operand(branching_bcb, branch.target_bcb)?
};
if let Some(sumup_counter_operand) =
some_sumup_counter_operand.replace(branch_counter_operand)
@ -259,7 +337,7 @@ impl<'a> BcbCounters<'a> {
self.format_counter(&intermediate_expression)
);
let intermediate_expression_operand = intermediate_expression.as_operand();
collect_intermediate_expressions.push(intermediate_expression);
self.coverage_counters.intermediate_expressions.push(intermediate_expression);
some_sumup_counter_operand.replace(intermediate_expression_operand);
}
}
@ -284,29 +362,24 @@ impl<'a> BcbCounters<'a> {
debug!("{:?} gets an expression: {}", expression_branch, self.format_counter(&expression));
let bcb = expression_branch.target_bcb;
if expression_branch.is_only_path_to_target() {
self.basic_coverage_blocks[bcb].set_counter(expression)?;
self.coverage_counters.set_bcb_counter(bcb, expression)?;
} else {
self.basic_coverage_blocks[bcb].set_edge_counter_from(branching_bcb, expression)?;
self.coverage_counters.set_bcb_edge_counter(branching_bcb, bcb, expression)?;
}
Ok(())
}
fn get_or_make_counter_operand(
&mut self,
bcb: BasicCoverageBlock,
collect_intermediate_expressions: &mut Vec<CoverageKind>,
) -> Result<Operand, Error> {
self.recursive_get_or_make_counter_operand(bcb, collect_intermediate_expressions, 1)
fn get_or_make_counter_operand(&mut self, bcb: BasicCoverageBlock) -> Result<Operand, Error> {
self.recursive_get_or_make_counter_operand(bcb, 1)
}
fn recursive_get_or_make_counter_operand(
&mut self,
bcb: BasicCoverageBlock,
collect_intermediate_expressions: &mut Vec<CoverageKind>,
debug_indent_level: usize,
) -> Result<Operand, Error> {
// If the BCB already has a counter, return it.
if let Some(counter_kind) = self.basic_coverage_blocks[bcb].counter() {
if let Some(counter_kind) = &self.coverage_counters.bcb_counters[bcb] {
debug!(
"{}{:?} already has a counter: {}",
NESTED_INDENT.repeat(debug_indent_level),
@ -339,7 +412,7 @@ impl<'a> BcbCounters<'a> {
self.format_counter(&counter_kind),
);
}
return self.basic_coverage_blocks[bcb].set_counter(counter_kind);
return self.coverage_counters.set_bcb_counter(bcb, counter_kind);
}
// A BCB with multiple incoming edges can compute its count by `Expression`, summing up the
@ -355,7 +428,6 @@ impl<'a> BcbCounters<'a> {
let first_edge_counter_operand = self.recursive_get_or_make_edge_counter_operand(
predecessors.next().unwrap(),
bcb,
collect_intermediate_expressions,
debug_indent_level + 1,
)?;
let mut some_sumup_edge_counter_operand = None;
@ -363,7 +435,6 @@ impl<'a> BcbCounters<'a> {
let edge_counter_operand = self.recursive_get_or_make_edge_counter_operand(
predecessor,
bcb,
collect_intermediate_expressions,
debug_indent_level + 1,
)?;
if let Some(sumup_edge_counter_operand) =
@ -381,7 +452,7 @@ impl<'a> BcbCounters<'a> {
self.format_counter(&intermediate_expression)
);
let intermediate_expression_operand = intermediate_expression.as_operand();
collect_intermediate_expressions.push(intermediate_expression);
self.coverage_counters.intermediate_expressions.push(intermediate_expression);
some_sumup_edge_counter_operand.replace(intermediate_expression_operand);
}
}
@ -397,43 +468,34 @@ impl<'a> BcbCounters<'a> {
bcb,
self.format_counter(&counter_kind)
);
self.basic_coverage_blocks[bcb].set_counter(counter_kind)
self.coverage_counters.set_bcb_counter(bcb, counter_kind)
}
fn get_or_make_edge_counter_operand(
&mut self,
from_bcb: BasicCoverageBlock,
to_bcb: BasicCoverageBlock,
collect_intermediate_expressions: &mut Vec<CoverageKind>,
) -> Result<Operand, Error> {
self.recursive_get_or_make_edge_counter_operand(
from_bcb,
to_bcb,
collect_intermediate_expressions,
1,
)
self.recursive_get_or_make_edge_counter_operand(from_bcb, to_bcb, 1)
}
fn recursive_get_or_make_edge_counter_operand(
&mut self,
from_bcb: BasicCoverageBlock,
to_bcb: BasicCoverageBlock,
collect_intermediate_expressions: &mut Vec<CoverageKind>,
debug_indent_level: usize,
) -> Result<Operand, Error> {
// If the source BCB has only one successor (assumed to be the given target), an edge
// counter is unnecessary. Just get or make a counter for the source BCB.
let successors = self.bcb_successors(from_bcb).iter();
if successors.len() == 1 {
return self.recursive_get_or_make_counter_operand(
from_bcb,
collect_intermediate_expressions,
debug_indent_level + 1,
);
return self.recursive_get_or_make_counter_operand(from_bcb, debug_indent_level + 1);
}
// If the edge already has a counter, return it.
if let Some(counter_kind) = self.basic_coverage_blocks[to_bcb].edge_counter_from(from_bcb) {
if let Some(counter_kind) =
self.coverage_counters.bcb_edge_counters.get(&(from_bcb, to_bcb))
{
debug!(
"{}Edge {:?}->{:?} already has a counter: {}",
NESTED_INDENT.repeat(debug_indent_level),
@ -454,7 +516,7 @@ impl<'a> BcbCounters<'a> {
to_bcb,
self.format_counter(&counter_kind)
);
self.basic_coverage_blocks[to_bcb].set_edge_counter_from(from_bcb, counter_kind)
self.coverage_counters.set_bcb_edge_counter(from_bcb, to_bcb, counter_kind)
}
/// Select a branch for the expression, either the recommended `reloop_branch`, or if none was
@ -464,8 +526,7 @@ impl<'a> BcbCounters<'a> {
traversal: &TraverseCoverageGraphWithLoops,
branches: &[BcbBranch],
) -> BcbBranch {
let branch_needs_a_counter =
|branch: &BcbBranch| branch.counter(&self.basic_coverage_blocks).is_none();
let branch_needs_a_counter = |branch: &BcbBranch| self.branch_has_no_counter(branch);
let some_reloop_branch = self.find_some_reloop_branch(traversal, &branches);
if let Some(reloop_branch_without_counter) =
@ -478,10 +539,8 @@ impl<'a> BcbCounters<'a> {
);
reloop_branch_without_counter
} else {
let &branch_without_counter = branches
.iter()
.find(|&&branch| branch.counter(&self.basic_coverage_blocks).is_none())
.expect(
let &branch_without_counter =
branches.iter().find(|&branch| self.branch_has_no_counter(branch)).expect(
"needs_branch_counters was `true` so there should be at least one \
branch",
);
@ -508,8 +567,7 @@ impl<'a> BcbCounters<'a> {
traversal: &TraverseCoverageGraphWithLoops,
branches: &[BcbBranch],
) -> Option<BcbBranch> {
let branch_needs_a_counter =
|branch: &BcbBranch| branch.counter(&self.basic_coverage_blocks).is_none();
let branch_needs_a_counter = |branch: &BcbBranch| self.branch_has_no_counter(branch);
let mut some_reloop_branch: Option<BcbBranch> = None;
for context in traversal.context_stack.iter().rev() {
@ -520,7 +578,7 @@ impl<'a> BcbCounters<'a> {
self.bcb_dominates(branch.target_bcb, backedge_from_bcb)
}) {
if let Some(reloop_branch) = some_reloop_branch {
if reloop_branch.counter(&self.basic_coverage_blocks).is_none() {
if self.branch_has_no_counter(&reloop_branch) {
// we already found a candidate reloop_branch that still
// needs a counter
continue;
@ -586,12 +644,24 @@ impl<'a> BcbCounters<'a> {
}
fn bcb_needs_branch_counters(&self, bcb: BasicCoverageBlock) -> bool {
let branch_needs_a_counter =
|branch: &BcbBranch| branch.counter(&self.basic_coverage_blocks).is_none();
let branch_needs_a_counter = |branch: &BcbBranch| self.branch_has_no_counter(branch);
let branches = self.bcb_branches(bcb);
branches.len() > 1 && branches.iter().any(branch_needs_a_counter)
}
fn branch_has_no_counter(&self, branch: &BcbBranch) -> bool {
self.branch_counter(branch).is_none()
}
fn branch_counter(&self, branch: &BcbBranch) -> Option<&CoverageKind> {
let to_bcb = branch.target_bcb;
if let Some(from_bcb) = branch.edge_from_bcb {
self.coverage_counters.bcb_edge_counters.get(&(from_bcb, to_bcb))
} else {
self.coverage_counters.bcb_counters[to_bcb].as_ref()
}
}
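The `branch_counter` helper above picks between the two stores: the edge counter when the branch is reached through a dedicated edge, otherwise the target node's own counter. A hedged sketch of that lookup using placeholder types (`Branch`, plain `Vec`/`HashMap`, and `String` labels standing in for `CoverageKind`):

use std::collections::HashMap;

struct Branch {
    edge_from: Option<usize>,
    target: usize,
}

fn branch_counter<'a>(
    node_counters: &'a [Option<String>],
    edge_counters: &'a HashMap<(usize, usize), String>,
    branch: &Branch,
) -> Option<&'a String> {
    match branch.edge_from {
        // Branch reached through a dedicated edge: use the edge counter.
        Some(from) => edge_counters.get(&(from, branch.target)),
        // Otherwise fall back to the counter stored on the target node.
        None => node_counters[branch.target].as_ref(),
    }
}

fn main() {
    let node_counters = vec![Some("c0".to_string()), None];
    let mut edge_counters = HashMap::new();
    edge_counters.insert((0usize, 1usize), "e01".to_string());

    let via_edge = Branch { edge_from: Some(0), target: 1 };
    let direct = Branch { edge_from: None, target: 0 };
    assert_eq!(branch_counter(&node_counters, &edge_counters, &via_edge).unwrap(), "e01");
    assert_eq!(branch_counter(&node_counters, &edge_counters, &direct).unwrap(), "c0");
}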
/// Returns true if the BasicCoverageBlock has zero or one incoming edge. (If zero, it should be
/// the entry point for the function.)
#[inline]

View File

@ -108,6 +108,7 @@
//! recursively, generating labels with nested operations, enclosed in parentheses
//! (for example: `bcb2 + (bcb0 - bcb1)`).
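As a rough illustration of the label format this module comment describes (operands rendered recursively, nested operations parenthesized, e.g. `bcb2 + (bcb0 - bcb1)`), here is a stand-alone sketch; `DebugExpr` and `label` are invented names, not the actual `DebugCounters` machinery:

enum DebugExpr {
    Block(usize),
    Add(Box<DebugExpr>, Box<DebugExpr>),
    Sub(Box<DebugExpr>, Box<DebugExpr>),
}

fn label(e: &DebugExpr, nested: bool) -> String {
    let s = match e {
        // Plain blocks are never parenthesized.
        DebugExpr::Block(i) => return format!("bcb{i}"),
        DebugExpr::Add(l, r) => format!("{} + {}", label(l, true), label(r, true)),
        DebugExpr::Sub(l, r) => format!("{} - {}", label(l, true), label(r, true)),
    };
    // Nested operations get enclosing parentheses.
    if nested { format!("({s})") } else { s }
}

fn main() {
    use DebugExpr::*;
    let e = Add(Box::new(Block(2)), Box::new(Sub(Box::new(Block(0)), Box::new(Block(1)))));
    assert_eq!(label(&e, false), "bcb2 + (bcb0 - bcb1)");
}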
use super::counters::CoverageCounters;
use super::graph::{BasicCoverageBlock, BasicCoverageBlockData, CoverageGraph};
use super::spans::CoverageSpan;
@ -659,18 +660,21 @@ pub(super) fn dump_coverage_graphviz<'tcx>(
mir_body: &mir::Body<'tcx>,
pass_name: &str,
basic_coverage_blocks: &CoverageGraph,
debug_counters: &DebugCounters,
coverage_counters: &CoverageCounters,
graphviz_data: &GraphvizData,
intermediate_expressions: &[CoverageKind],
debug_used_expressions: &UsedExpressions,
) {
let debug_counters = &coverage_counters.debug_counters;
let mir_source = mir_body.source;
let def_id = mir_source.def_id();
let node_content = |bcb| {
bcb_to_string_sections(
tcx,
mir_body,
debug_counters,
coverage_counters,
bcb,
&basic_coverage_blocks[bcb],
graphviz_data.get_bcb_coverage_spans_with_counters(bcb),
graphviz_data.get_bcb_dependency_counters(bcb),
@ -736,12 +740,15 @@ pub(super) fn dump_coverage_graphviz<'tcx>(
fn bcb_to_string_sections<'tcx>(
tcx: TyCtxt<'tcx>,
mir_body: &mir::Body<'tcx>,
debug_counters: &DebugCounters,
coverage_counters: &CoverageCounters,
bcb: BasicCoverageBlock,
bcb_data: &BasicCoverageBlockData,
some_coverage_spans_with_counters: Option<&[(CoverageSpan, CoverageKind)]>,
some_dependency_counters: Option<&[CoverageKind]>,
some_intermediate_expressions: Option<&[CoverageKind]>,
) -> Vec<String> {
let debug_counters = &coverage_counters.debug_counters;
let len = bcb_data.basic_blocks.len();
let mut sections = Vec::new();
if let Some(collect_intermediate_expressions) = some_intermediate_expressions {
@ -777,7 +784,7 @@ fn bcb_to_string_sections<'tcx>(
.join(" \n"),
));
}
if let Some(counter_kind) = &bcb_data.counter_kind {
if let Some(counter_kind) = coverage_counters.bcb_counter(bcb) {
sections.push(format!("{counter_kind:?}"));
}
let non_term_blocks = bcb_data.basic_blocks[0..len - 1]

View File

@ -1,12 +1,8 @@
use super::Error;
use itertools::Itertools;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::graph::dominators::{self, Dominators};
use rustc_data_structures::graph::{self, GraphSuccessors, WithNumNodes, WithStartNode};
use rustc_index::bit_set::BitSet;
use rustc_index::{IndexSlice, IndexVec};
use rustc_middle::mir::coverage::*;
use rustc_middle::mir::{self, BasicBlock, BasicBlockData, Terminator, TerminatorKind};
use std::cmp::Ordering;
@ -15,10 +11,7 @@ use std::ops::{Index, IndexMut};
const ID_SEPARATOR: &str = ",";
/// A coverage-specific simplification of the MIR control flow graph (CFG). The `CoverageGraph`s
/// nodes are `BasicCoverageBlock`s, which encompass one or more MIR `BasicBlock`s, plus a
/// `CoverageKind` counter (to be added by `CoverageCounters::make_bcb_counters`), and an optional
/// set of additional counters--if needed--to count incoming edges, if there are more than one.
/// (These "edge counters" are eventually converted into new MIR `BasicBlock`s.)
/// nodes are `BasicCoverageBlock`s, which encompass one or more MIR `BasicBlock`s.
#[derive(Debug)]
pub(super) struct CoverageGraph {
bcbs: IndexVec<BasicCoverageBlock, BasicCoverageBlockData>,
@ -195,13 +188,6 @@ impl CoverageGraph {
self.bcbs.iter_enumerated()
}
#[inline(always)]
pub fn iter_enumerated_mut(
&mut self,
) -> impl Iterator<Item = (BasicCoverageBlock, &mut BasicCoverageBlockData)> {
self.bcbs.iter_enumerated_mut()
}
#[inline(always)]
pub fn bcb_from_bb(&self, bb: BasicBlock) -> Option<BasicCoverageBlock> {
if bb.index() < self.bb_to_bcb.len() { self.bb_to_bcb[bb] } else { None }
@ -320,14 +306,12 @@ rustc_index::newtype_index! {
#[derive(Debug, Clone)]
pub(super) struct BasicCoverageBlockData {
pub basic_blocks: Vec<BasicBlock>,
pub counter_kind: Option<CoverageKind>,
edge_from_bcbs: Option<FxHashMap<BasicCoverageBlock, CoverageKind>>,
}
impl BasicCoverageBlockData {
pub fn from(basic_blocks: Vec<BasicBlock>) -> Self {
assert!(basic_blocks.len() > 0);
Self { basic_blocks, counter_kind: None, edge_from_bcbs: None }
Self { basic_blocks }
}
#[inline(always)]
@ -345,80 +329,6 @@ impl BasicCoverageBlockData {
&mir_body[self.last_bb()].terminator()
}
pub fn set_counter(&mut self, counter_kind: CoverageKind) -> Result<Operand, Error> {
debug_assert!(
// If the BCB has an edge counter (to be injected into a new `BasicBlock`), it can also
// have an expression (to be injected into an existing `BasicBlock` represented by this
// `BasicCoverageBlock`).
self.edge_from_bcbs.is_none() || counter_kind.is_expression(),
"attempt to add a `Counter` to a BCB target with existing incoming edge counters"
);
let operand = counter_kind.as_operand();
if let Some(replaced) = self.counter_kind.replace(counter_kind) {
Error::from_string(format!(
"attempt to set a BasicCoverageBlock coverage counter more than once; \
{self:?} already had counter {replaced:?}",
))
} else {
Ok(operand)
}
}
#[inline(always)]
pub fn counter(&self) -> Option<&CoverageKind> {
self.counter_kind.as_ref()
}
#[inline(always)]
pub fn take_counter(&mut self) -> Option<CoverageKind> {
self.counter_kind.take()
}
pub fn set_edge_counter_from(
&mut self,
from_bcb: BasicCoverageBlock,
counter_kind: CoverageKind,
) -> Result<Operand, Error> {
if level_enabled!(tracing::Level::DEBUG) {
// If the BCB has an edge counter (to be injected into a new `BasicBlock`), it can also
// have an expression (to be injected into an existing `BasicBlock` represented by this
// `BasicCoverageBlock`).
if self.counter_kind.as_ref().is_some_and(|c| !c.is_expression()) {
return Error::from_string(format!(
"attempt to add an incoming edge counter from {from_bcb:?} when the target BCB already \
has a `Counter`"
));
}
}
let operand = counter_kind.as_operand();
if let Some(replaced) =
self.edge_from_bcbs.get_or_insert_default().insert(from_bcb, counter_kind)
{
Error::from_string(format!(
"attempt to set an edge counter more than once; from_bcb: \
{from_bcb:?} already had counter {replaced:?}",
))
} else {
Ok(operand)
}
}
#[inline]
pub fn edge_counter_from(&self, from_bcb: BasicCoverageBlock) -> Option<&CoverageKind> {
if let Some(edge_from_bcbs) = &self.edge_from_bcbs {
edge_from_bcbs.get(&from_bcb)
} else {
None
}
}
#[inline]
pub fn take_edge_counters(
&mut self,
) -> Option<impl Iterator<Item = (BasicCoverageBlock, CoverageKind)>> {
self.edge_from_bcbs.take().map(|m| m.into_iter())
}
pub fn id(&self) -> String {
format!("@{}", self.basic_blocks.iter().map(|bb| bb.index().to_string()).join(ID_SEPARATOR))
}
@ -448,17 +358,6 @@ impl BcbBranch {
Self { edge_from_bcb, target_bcb: to_bcb }
}
pub fn counter<'a>(
&self,
basic_coverage_blocks: &'a CoverageGraph,
) -> Option<&'a CoverageKind> {
if let Some(from_bcb) = self.edge_from_bcb {
basic_coverage_blocks[self.target_bcb].edge_counter_from(from_bcb)
} else {
basic_coverage_blocks[self.target_bcb].counter()
}
}
pub fn is_only_path_to_target(&self) -> bool {
self.edge_from_bcb.is_none()
}

View File

@ -137,6 +137,8 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
let function_source_hash = hash_mir_source(tcx, hir_body);
let basic_coverage_blocks = CoverageGraph::from_mir(mir_body);
let coverage_counters = CoverageCounters::new(function_source_hash, &basic_coverage_blocks);
Self {
pass_name,
tcx,
@ -145,7 +147,7 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
fn_sig_span,
body_span,
basic_coverage_blocks,
coverage_counters: CoverageCounters::new(function_source_hash),
coverage_counters,
}
}
@ -199,52 +201,47 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
// `BasicCoverageBlock`s not already associated with a `CoverageSpan`.
//
// Intermediate expressions (used to compute other `Expression` values), which have no
// direct associate to any `BasicCoverageBlock`, are returned in the method `Result`.
let intermediate_expressions_or_error = self
// direct association with any `BasicCoverageBlock`, are accumulated inside `coverage_counters`.
let result = self
.coverage_counters
.make_bcb_counters(&mut self.basic_coverage_blocks, &coverage_spans);
let (result, intermediate_expressions) = match intermediate_expressions_or_error {
Ok(intermediate_expressions) => {
// If debugging, add any intermediate expressions (which are not associated with any
// BCB) to the `debug_used_expressions` map.
if debug_used_expressions.is_enabled() {
for intermediate_expression in &intermediate_expressions {
debug_used_expressions.add_expression_operands(intermediate_expression);
}
if let Ok(()) = result {
// If debugging, add any intermediate expressions (which are not associated with any
// BCB) to the `debug_used_expressions` map.
if debug_used_expressions.is_enabled() {
for intermediate_expression in &self.coverage_counters.intermediate_expressions {
debug_used_expressions.add_expression_operands(intermediate_expression);
}
////////////////////////////////////////////////////
// Remove the counter or edge counter from each `CoverageSpan`'s associated
// `BasicCoverageBlock`, and inject a `Coverage` statement into the MIR.
//
// `Coverage` statements injected from `CoverageSpan`s will include the code regions
// (source code start and end positions) to be counted by the associated counter.
//
// These `CoverageSpan`-associated counters are removed from their associated
// `BasicCoverageBlock`s so that the only remaining counters in the `CoverageGraph`
// are indirect counters (to be injected next, without associated code regions).
self.inject_coverage_span_counters(
coverage_spans,
&mut graphviz_data,
&mut debug_used_expressions,
);
////////////////////////////////////////////////////
// For any remaining `BasicCoverageBlock` counters (that were not associated with
// any `CoverageSpan`), inject `Coverage` statements (_without_ code region `Span`s)
// to ensure `BasicCoverageBlock` counters that other `Expression`s may depend on
// are in fact counted, even though they don't directly contribute to counting
// their own independent code region's coverage.
self.inject_indirect_counters(&mut graphviz_data, &mut debug_used_expressions);
// Intermediate expressions will be injected as the final step, after generating
// debug output, if any.
////////////////////////////////////////////////////
(Ok(()), intermediate_expressions)
}
Err(e) => (Err(e), Vec::new()),
////////////////////////////////////////////////////
// Remove the counter or edge counter from each `CoverageSpan`'s associated
// `BasicCoverageBlock`, and inject a `Coverage` statement into the MIR.
//
// `Coverage` statements injected from `CoverageSpan`s will include the code regions
// (source code start and end positions) to be counted by the associated counter.
//
// These `CoverageSpan`-associated counters are removed from their associated
// `BasicCoverageBlock`s so that the only remaining counters in the `CoverageGraph`
// are indirect counters (to be injected next, without associated code regions).
self.inject_coverage_span_counters(
coverage_spans,
&mut graphviz_data,
&mut debug_used_expressions,
);
////////////////////////////////////////////////////
// For any remaining `BasicCoverageBlock` counters (that were not associated with
// any `CoverageSpan`), inject `Coverage` statements (_without_ code region `Span`s)
// to ensure `BasicCoverageBlock` counters that other `Expression`s may depend on
// are in fact counted, even though they don't directly contribute to counting
// their own independent code region's coverage.
self.inject_indirect_counters(&mut graphviz_data, &mut debug_used_expressions);
// Intermediate expressions will be injected as the final step, after generating
// debug output, if any.
////////////////////////////////////////////////////
};
if graphviz_data.is_enabled() {
@ -255,9 +252,9 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
self.mir_body,
self.pass_name,
&self.basic_coverage_blocks,
&self.coverage_counters.debug_counters,
&self.coverage_counters,
&graphviz_data,
&intermediate_expressions,
&self.coverage_counters.intermediate_expressions,
&debug_used_expressions,
);
}
@ -273,7 +270,7 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
////////////////////////////////////////////////////
// Finally, inject the intermediate expressions collected along the way.
for intermediate_expression in intermediate_expressions {
for intermediate_expression in self.coverage_counters.intermediate_expressions.drain(..) {
inject_intermediate_expression(self.mir_body, intermediate_expression);
}
}
@ -303,7 +300,7 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
let span = covspan.span;
let counter_kind = if let Some(&counter_operand) = bcb_counters[bcb].as_ref() {
self.coverage_counters.make_identity_counter(counter_operand)
} else if let Some(counter_kind) = self.bcb_data_mut(bcb).take_counter() {
} else if let Some(counter_kind) = self.coverage_counters.take_bcb_counter(bcb) {
bcb_counters[bcb] = Some(counter_kind.as_operand());
debug_used_expressions.add_expression_operands(&counter_kind);
counter_kind
@ -343,19 +340,17 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
debug_used_expressions: &mut debug::UsedExpressions,
) {
let mut bcb_counters_without_direct_coverage_spans = Vec::new();
for (target_bcb, target_bcb_data) in self.basic_coverage_blocks.iter_enumerated_mut() {
if let Some(counter_kind) = target_bcb_data.take_counter() {
bcb_counters_without_direct_coverage_spans.push((None, target_bcb, counter_kind));
}
if let Some(edge_counters) = target_bcb_data.take_edge_counters() {
for (from_bcb, counter_kind) in edge_counters {
bcb_counters_without_direct_coverage_spans.push((
Some(from_bcb),
target_bcb,
counter_kind,
));
}
}
for (target_bcb, counter_kind) in self.coverage_counters.drain_bcb_counters() {
bcb_counters_without_direct_coverage_spans.push((None, target_bcb, counter_kind));
}
for ((from_bcb, target_bcb), counter_kind) in
self.coverage_counters.drain_bcb_edge_counters()
{
bcb_counters_without_direct_coverage_spans.push((
Some(from_bcb),
target_bcb,
counter_kind,
));
}
// If debug is enabled, validate that every BCB or edge counter not directly associated
@ -430,11 +425,6 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
&self.basic_coverage_blocks[bcb]
}
#[inline]
fn bcb_data_mut(&mut self, bcb: BasicCoverageBlock) -> &mut BasicCoverageBlockData {
&mut self.basic_coverage_blocks[bcb]
}
#[inline]
fn format_counter(&self, counter_kind: &CoverageKind) -> String {
self.coverage_counters.debug_counters.format_counter(counter_kind)

View File

@ -675,16 +675,16 @@ fn test_make_bcb_counters() {
));
}
}
let mut coverage_counters = counters::CoverageCounters::new(0);
let intermediate_expressions = coverage_counters
let mut coverage_counters = counters::CoverageCounters::new(0, &basic_coverage_blocks);
let () = coverage_counters
.make_bcb_counters(&mut basic_coverage_blocks, &coverage_spans)
.expect("should be Ok");
assert_eq!(intermediate_expressions.len(), 0);
assert_eq!(coverage_counters.intermediate_expressions.len(), 0);
let_bcb!(1);
assert_eq!(
0, // bcb1 has a `Counter` with id = 0
match basic_coverage_blocks[bcb1].counter().expect("should have a counter") {
match coverage_counters.bcb_counter(bcb1).expect("should have a counter") {
CoverageKind::Counter { id, .. } => id,
_ => panic!("expected a Counter"),
}
@ -694,7 +694,7 @@ fn test_make_bcb_counters() {
let_bcb!(2);
assert_eq!(
1, // bcb2 has a `Counter` with id = 1
match basic_coverage_blocks[bcb2].counter().expect("should have a counter") {
match coverage_counters.bcb_counter(bcb2).expect("should have a counter") {
CoverageKind::Counter { id, .. } => id,
_ => panic!("expected a Counter"),
}

View File

@ -74,7 +74,6 @@ pub(crate) fn parse_token_trees<'a>(
// because the delimiter mismatch is more likely to be the root cause of the error
let mut buffer = Vec::with_capacity(1);
// Not using `emit_unclosed_delims` to use `db.buffer`
for unmatched in unmatched_delims {
if let Some(err) = make_unclosed_delims_error(unmatched, &sess) {
err.buffer(&mut buffer);

View File

@ -1717,13 +1717,7 @@ impl<'a> Parser<'a> {
self.recover_await_prefix(await_sp)?
};
let sp = self.error_on_incorrect_await(lo, hi, &expr, is_question);
let kind = match expr.kind {
// Avoid knock-down errors as we don't know whether to interpret this as `foo().await?`
// or `foo()?.await` (the very reason we went with postfix syntax 😅).
ExprKind::Try(_) => ExprKind::Err,
_ => ExprKind::Await(expr, await_sp),
};
let expr = self.mk_expr(lo.to(sp), kind);
let expr = self.mk_expr(lo.to(sp), ExprKind::Err);
self.maybe_recover_from_bad_qpath(expr)
}
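For context on the recovery path simplified above: `await` is postfix in Rust, and the prefix form being recovered is ambiguous once `?` is involved (`expr.await?` vs `expr?.await`), which is why the recovered expression now always becomes `ExprKind::Err`. An illustrative snippet (not taken from the test suite) contrasting the rejected prefix form with the unambiguous postfix form:

// Illustrative only: the parser rejects the prefix form with an error and a
// suggestion to use postfix `.await`; exact diagnostics are not reproduced here.
//
// async fn bad() -> Result<(), ()> {
//     await some_future()?;   // prefix form: ambiguous between
//                             // `some_future().await?` and `some_future()?.await`
//     Ok(())
// }

async fn good(fut: impl std::future::Future<Output = Result<(), ()>>) -> Result<(), ()> {
    fut.await?; // postfix `.await` makes the ordering with `?` explicit
    Ok(())
}

fn main() {}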

View File

@ -29,7 +29,6 @@ use rustc_ast::{Async, AttrArgs, AttrArgsEq, Expr, ExprKind, Mutability, StrLit}
use rustc_ast::{HasAttrs, HasTokens, Unsafe, Visibility, VisibilityKind};
use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Ordering;
use rustc_errors::PResult;
use rustc_errors::{
Applicability, DiagnosticBuilder, ErrorGuaranteed, FatalError, IntoDiagnostic, MultiSpan,
@ -1455,18 +1454,6 @@ pub(crate) fn make_unclosed_delims_error(
Some(err)
}
pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedDelim>, sess: &ParseSess) {
let _ = sess.reached_eof.fetch_or(
unclosed_delims.iter().any(|unmatched_delim| unmatched_delim.found_delim.is_none()),
Ordering::Relaxed,
);
for unmatched in unclosed_delims.drain(..) {
if let Some(mut e) = make_unclosed_delims_error(unmatched, sess) {
e.emit();
}
}
}
/// A helper struct used when building an `AttrTokenStream` from
/// a `LazyAttrTokenStream`. Both delimiter and non-delimited tokens
/// are stored as `FlatToken::Token`. A vector of `FlatToken`s

Some files were not shown because too many files have changed in this diff.