Auto merge of #132171 - matthiaskrgr:rollup-tp75ge7, r=matthiaskrgr
Rollup of 3 pull requests

Successful merges:

 - #132114 (Use `Enabled{Lang,Lib}Feature` instead of n-tuples)
 - #132163 (Update Fuchsia CI script for package serving)
 - #132168 (Effects cleanup)

r? `@ghost`
`@rustbot` modify labels: rollup
commit 80d0d927d5
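The first rollup item, #132114, replaces the `(Symbol, Span, Option<Symbol>)` and `(Symbol, Span)` tuples that tracked enabled features with the named structs `EnabledLangFeature` and `EnabledLibFeature` (see the `rustc_feature` hunks below). A minimal, self-contained sketch of the same pattern — plain `String` and `u32` stand in for rustc's `Symbol` and `Span` here, purely for illustration:

```rust
/// Mirrors the shape of `EnabledLangFeature` from the diff below,
/// with ordinary types substituted for compiler-internal ones.
#[derive(Debug, Clone)]
struct EnabledLangFeature {
    gate_name: String,
    attr_sp: u32,
    stable_since: Option<String>,
}

#[derive(Default)]
struct Features {
    enabled_lang_features: Vec<EnabledLangFeature>,
}

impl Features {
    fn set_enabled_lang_feature(&mut self, feat: EnabledLangFeature) {
        self.enabled_lang_features.push(feat);
    }

    fn enabled_lang_features(&self) -> &Vec<EnabledLangFeature> {
        &self.enabled_lang_features
    }
}

fn main() {
    let mut features = Features::default();
    features.set_enabled_lang_feature(EnabledLangFeature {
        gate_name: "generic_const_exprs".to_owned(),
        attr_sp: 0,
        stable_since: None,
    });
    // Callers name fields instead of destructuring positional tuples,
    // which is what most of the call-site churn in the diff amounts to.
    for feat in features.enabled_lang_features() {
        println!("{} (stable since {:?})", feat.gate_name, feat.stable_since);
    }
}
```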
@@ -623,8 +623,9 @@ fn maybe_stage_features(sess: &Session, features: &Features, krate: &ast::Crate)
 let stable_since = features
 .enabled_lang_features()
 .iter()
-.flat_map(|&(feature, _, since)| if feature == name { since } else { None })
-.next();
+.find(|feat| feat.gate_name == name)
+.map(|feat| feat.stable_since)
+.flatten();
 if let Some(since) = stable_since {
 err.stable_features.push(errors::StableFeature { name, since });
 } else {
@@ -642,16 +643,15 @@ fn maybe_stage_features(sess: &Session, features: &Features, krate: &ast::Crate)
 }
 
 fn check_incompatible_features(sess: &Session, features: &Features) {
-let enabled_features = features
-.enabled_lang_features()
-.iter()
-.copied()
-.map(|(name, span, _)| (name, span))
-.chain(features.enabled_lib_features().iter().copied());
+let enabled_lang_features =
+features.enabled_lang_features().iter().map(|feat| (feat.gate_name, feat.attr_sp));
+let enabled_lib_features =
+features.enabled_lib_features().iter().map(|feat| (feat.gate_name, feat.attr_sp));
+let enabled_features = enabled_lang_features.chain(enabled_lib_features);
 
 for (f1, f2) in rustc_feature::INCOMPATIBLE_FEATURES
 .iter()
-.filter(|&&(f1, f2)| features.enabled(f1) && features.enabled(f2))
+.filter(|(f1, f2)| features.enabled(*f1) && features.enabled(*f2))
 {
 if let Some((f1_name, f1_span)) = enabled_features.clone().find(|(name, _)| name == f1) {
 if let Some((f2_name, f2_span)) = enabled_features.clone().find(|(name, _)| name == f2)
@@ -673,10 +673,11 @@ fn check_new_solver_banned_features(sess: &Session, features: &Features) {
 }
 
 // Ban GCE with the new solver, because it does not implement GCE correctly.
-if let Some(&(_, gce_span, _)) = features
+if let Some(gce_span) = features
 .enabled_lang_features()
 .iter()
-.find(|&&(feat, _, _)| feat == sym::generic_const_exprs)
+.find(|feat| feat.gate_name == sym::generic_const_exprs)
+.map(|feat| feat.attr_sp)
 {
 sess.dcx().emit_err(errors::IncompatibleFeatures {
 spans: vec![gce_span],
@@ -210,7 +210,6 @@ impl DebugContext {
 type_names::push_generic_params(
 tcx,
 tcx.normalize_erasing_regions(ty::ParamEnv::reveal_all(), args),
-enclosing_fn_def_id,
 &mut name,
 );
 
@@ -98,8 +98,7 @@ pub fn get_fn<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, instance: Instance<'tcx>)
 // whether we are sharing generics or not. The important thing here is
 // that the visibility we apply to the declaration is the same one that
 // has been applied to the definition (wherever that definition may be).
-let is_generic =
-instance.args.non_erasable_generics(tcx, instance.def_id()).next().is_some();
+let is_generic = instance.args.non_erasable_generics().next().is_some();
 
 if is_generic {
 // This is a monomorphization. Its expected visibility depends
@@ -98,8 +98,7 @@ pub(crate) fn get_fn<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>, instance: Instance<'t
 unsafe {
 llvm::LLVMRustSetLinkage(llfn, llvm::Linkage::ExternalLinkage);
 
-let is_generic =
-instance.args.non_erasable_generics(tcx, instance.def_id()).next().is_some();
+let is_generic = instance.args.non_erasable_generics().next().is_some();
 
 let is_hidden = if is_generic {
 // This is a monomorphization of a generic function.
@@ -350,7 +350,6 @@ impl<'ll, 'tcx> DebugInfoCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> {
 type_names::push_generic_params(
 tcx,
 tcx.normalize_erasing_regions(ty::ParamEnv::reveal_all(), args),
-enclosing_fn_def_id,
 &mut name,
 );
 
@@ -312,7 +312,7 @@ fn exported_symbols_provider_local(
 
 match *mono_item {
 MonoItem::Fn(Instance { def: InstanceKind::Item(def), args }) => {
-if args.non_erasable_generics(tcx, def).next().is_some() {
+if args.non_erasable_generics().next().is_some() {
 let symbol = ExportedSymbol::Generic(def, args);
 symbols.push((symbol, SymbolExportInfo {
 level: SymbolExportLevel::Rust,
@@ -321,12 +321,9 @@ fn exported_symbols_provider_local(
 }));
 }
 }
-MonoItem::Fn(Instance { def: InstanceKind::DropGlue(def_id, Some(ty)), args }) => {
+MonoItem::Fn(Instance { def: InstanceKind::DropGlue(_, Some(ty)), args }) => {
 // A little sanity-check
-assert_eq!(
-args.non_erasable_generics(tcx, def_id).next(),
-Some(GenericArgKind::Type(ty))
-);
+assert_eq!(args.non_erasable_generics().next(), Some(GenericArgKind::Type(ty)));
 symbols.push((ExportedSymbol::DropGlue(ty), SymbolExportInfo {
 level: SymbolExportLevel::Rust,
 kind: SymbolExportKind::Text,
@@ -334,14 +331,11 @@ fn exported_symbols_provider_local(
 }));
 }
 MonoItem::Fn(Instance {
-def: InstanceKind::AsyncDropGlueCtorShim(def_id, Some(ty)),
+def: InstanceKind::AsyncDropGlueCtorShim(_, Some(ty)),
 args,
 }) => {
 // A little sanity-check
-assert_eq!(
-args.non_erasable_generics(tcx, def_id).next(),
-Some(GenericArgKind::Type(ty))
-);
+assert_eq!(args.non_erasable_generics().next(), Some(GenericArgKind::Type(ty)));
 symbols.push((ExportedSymbol::AsyncDropGlueCtorShim(ty), SymbolExportInfo {
 level: SymbolExportLevel::Rust,
 kind: SymbolExportKind::Text,
@@ -110,14 +110,14 @@ fn push_debuginfo_type_name<'tcx>(
 ty_and_layout,
 &|output, visited| {
 push_item_name(tcx, def.did(), true, output);
-push_generic_params_internal(tcx, args, def.did(), output, visited);
+push_generic_params_internal(tcx, args, output, visited);
 },
 output,
 visited,
 );
 } else {
 push_item_name(tcx, def.did(), qualified, output);
-push_generic_params_internal(tcx, args, def.did(), output, visited);
+push_generic_params_internal(tcx, args, output, visited);
 }
 }
 ty::Tuple(component_types) => {
@@ -251,13 +251,8 @@ fn push_debuginfo_type_name<'tcx>(
 let principal =
 tcx.normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), principal);
 push_item_name(tcx, principal.def_id, qualified, output);
-let principal_has_generic_params = push_generic_params_internal(
-tcx,
-principal.args,
-principal.def_id,
-output,
-visited,
-);
+let principal_has_generic_params =
+push_generic_params_internal(tcx, principal.args, output, visited);
 
 let projection_bounds: SmallVec<[_; 4]> = trait_data
 .projection_bounds()
@@ -538,13 +533,7 @@ pub fn compute_debuginfo_vtable_name<'tcx>(
 tcx.normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), trait_ref);
 push_item_name(tcx, trait_ref.def_id, true, &mut vtable_name);
 visited.clear();
-push_generic_params_internal(
-tcx,
-trait_ref.args,
-trait_ref.def_id,
-&mut vtable_name,
-&mut visited,
-);
+push_generic_params_internal(tcx, trait_ref.args, &mut vtable_name, &mut visited);
 } else {
 vtable_name.push('_');
 }
@@ -647,12 +636,11 @@ fn push_unqualified_item_name(
 fn push_generic_params_internal<'tcx>(
 tcx: TyCtxt<'tcx>,
 args: GenericArgsRef<'tcx>,
-def_id: DefId,
 output: &mut String,
 visited: &mut FxHashSet<Ty<'tcx>>,
 ) -> bool {
 assert_eq!(args, tcx.normalize_erasing_regions(ty::ParamEnv::reveal_all(), args));
-let mut args = args.non_erasable_generics(tcx, def_id).peekable();
+let mut args = args.non_erasable_generics().peekable();
 if args.peek().is_none() {
 return false;
 }
@@ -736,12 +724,11 @@ fn push_const_param<'tcx>(tcx: TyCtxt<'tcx>, ct: ty::Const<'tcx>, output: &mut S
 pub fn push_generic_params<'tcx>(
 tcx: TyCtxt<'tcx>,
 args: GenericArgsRef<'tcx>,
-def_id: DefId,
 output: &mut String,
 ) {
 let _prof = tcx.prof.generic_activity("compute_debuginfo_type_name");
 let mut visited = FxHashSet::default();
-push_generic_params_internal(tcx, args, def_id, output, &mut visited);
+push_generic_params_internal(tcx, args, output, &mut visited);
 }
 
 fn push_closure_or_coroutine_name<'tcx>(
@@ -786,7 +773,7 @@ fn push_closure_or_coroutine_name<'tcx>(
 // FIXME(async_closures): This is probably not going to be correct w.r.t.
 // multiple coroutine flavors. Maybe truncate to (parent + 1)?
 let args = args.truncate_to(tcx, generics);
-push_generic_params_internal(tcx, args, enclosing_fn_def_id, output, visited);
+push_generic_params_internal(tcx, args, output, visited);
 }
 
 fn push_close_angle_bracket(cpp_like_debuginfo: bool, output: &mut String) {
@@ -11,8 +11,8 @@ use rustc_ast::{
 use rustc_attr as attr;
 use rustc_data_structures::flat_map_in_place::FlatMapInPlace;
 use rustc_feature::{
-ACCEPTED_LANG_FEATURES, AttributeSafety, Features, REMOVED_LANG_FEATURES,
-UNSTABLE_LANG_FEATURES,
+ACCEPTED_LANG_FEATURES, AttributeSafety, EnabledLangFeature, EnabledLibFeature, Features,
+REMOVED_LANG_FEATURES, UNSTABLE_LANG_FEATURES,
 };
 use rustc_lint_defs::BuiltinLintDiag;
 use rustc_parse::validate_attr;
@@ -88,8 +88,11 @@ pub fn features(sess: &Session, krate_attrs: &[Attribute], crate_name: Symbol) -
 
 // If the enabled feature is stable, record it.
 if let Some(f) = ACCEPTED_LANG_FEATURES.iter().find(|f| name == f.name) {
-let since = Some(Symbol::intern(f.since));
-features.set_enabled_lang_feature(name, mi.span(), since);
+features.set_enabled_lang_feature(EnabledLangFeature {
+gate_name: name,
+attr_sp: mi.span(),
+stable_since: Some(Symbol::intern(f.since)),
+});
 continue;
 }
 
@@ -115,13 +118,19 @@ pub fn features(sess: &Session, krate_attrs: &[Attribute], crate_name: Symbol) -
 {
 sess.using_internal_features.store(true, std::sync::atomic::Ordering::Relaxed);
 }
-features.set_enabled_lang_feature(name, mi.span(), None);
+
+features.set_enabled_lang_feature(EnabledLangFeature {
+gate_name: name,
+attr_sp: mi.span(),
+stable_since: None,
+});
 continue;
 }
 
 // Otherwise, the feature is unknown. Enable it as a lib feature.
 // It will be checked later whether the feature really exists.
-features.set_enabled_lib_feature(name, mi.span());
+features
+.set_enabled_lib_feature(EnabledLibFeature { gate_name: name, attr_sp: mi.span() });
 
 // Similar to above, detect internal lib features to suppress
 // the ICE message that asks for a report.
@@ -135,4 +135,6 @@ pub use builtin_attrs::{
 is_valid_for_get_attr,
 };
 pub use removed::REMOVED_LANG_FEATURES;
-pub use unstable::{Features, INCOMPATIBLE_FEATURES, UNSTABLE_LANG_FEATURES};
+pub use unstable::{
+EnabledLangFeature, EnabledLibFeature, Features, INCOMPATIBLE_FEATURES, UNSTABLE_LANG_FEATURES,
+};
@@ -36,35 +36,54 @@ macro_rules! status_to_enum {
 #[derive(Clone, Default, Debug)]
 pub struct Features {
 /// `#![feature]` attrs for language features, for error reporting.
-enabled_lang_features: Vec<(Symbol, Span, Option<Symbol>)>,
+enabled_lang_features: Vec<EnabledLangFeature>,
 /// `#![feature]` attrs for non-language (library) features.
-enabled_lib_features: Vec<(Symbol, Span)>,
+enabled_lib_features: Vec<EnabledLibFeature>,
 /// `enabled_lang_features` + `enabled_lib_features`.
 enabled_features: FxHashSet<Symbol>,
 }
 
+/// Information about an enabled language feature.
+#[derive(Debug, Copy, Clone)]
+pub struct EnabledLangFeature {
+/// Name of the feature gate guarding the language feature.
+pub gate_name: Symbol,
+/// Span of the `#[feature(...)]` attribute.
+pub attr_sp: Span,
+/// If the lang feature is stable, the version number when it was stabilized.
+pub stable_since: Option<Symbol>,
+}
+
+/// Information about an enabled library feature.
+#[derive(Debug, Copy, Clone)]
+pub struct EnabledLibFeature {
+pub gate_name: Symbol,
+pub attr_sp: Span,
+}
+
 impl Features {
-/// `since` should be set for stable features that are nevertheless enabled with a `#[feature]`
-/// attribute, indicating since when they are stable.
-pub fn set_enabled_lang_feature(&mut self, name: Symbol, span: Span, since: Option<Symbol>) {
-self.enabled_lang_features.push((name, span, since));
-self.enabled_features.insert(name);
+pub fn set_enabled_lang_feature(&mut self, lang_feat: EnabledLangFeature) {
+self.enabled_lang_features.push(lang_feat);
+self.enabled_features.insert(lang_feat.gate_name);
 }
 
-pub fn set_enabled_lib_feature(&mut self, name: Symbol, span: Span) {
-self.enabled_lib_features.push((name, span));
-self.enabled_features.insert(name);
+pub fn set_enabled_lib_feature(&mut self, lib_feat: EnabledLibFeature) {
+self.enabled_lib_features.push(lib_feat);
+self.enabled_features.insert(lib_feat.gate_name);
 }
 
-/// Returns a list of triples with:
-/// - feature gate name
-/// - the span of the `#[feature]` attribute
-/// - (for already stable features) the version since which it is stable
-pub fn enabled_lang_features(&self) -> &Vec<(Symbol, Span, Option<Symbol>)> {
+/// Returns a list of [`EnabledLangFeature`] with info about:
+///
+/// - Feature gate name.
+/// - The span of the `#[feature]` attribute.
+/// - For stable language features, version info for when it was stabilized.
+pub fn enabled_lang_features(&self) -> &Vec<EnabledLangFeature> {
 &self.enabled_lang_features
 }
 
-pub fn enabled_lib_features(&self) -> &Vec<(Symbol, Span)> {
+pub fn enabled_lib_features(&self) -> &Vec<EnabledLibFeature> {
 &self.enabled_lib_features
 }
 
@@ -1,7 +1,6 @@
 //! Bounds are restrictions applied to some types after they've been lowered from the HIR to the
 //! [`rustc_middle::ty`] form.
 
-use rustc_data_structures::fx::FxIndexMap;
 use rustc_hir::LangItem;
 use rustc_middle::ty::{self, Ty, TyCtxt, Upcast};
 use rustc_span::Span;
@@ -25,7 +24,6 @@ use rustc_span::Span;
 #[derive(Default, PartialEq, Eq, Clone, Debug)]
 pub(crate) struct Bounds<'tcx> {
 clauses: Vec<(ty::Clause<'tcx>, Span)>,
-effects_min_tys: FxIndexMap<Ty<'tcx>, Span>,
 }
 
 impl<'tcx> Bounds<'tcx> {
@@ -96,15 +94,7 @@ impl<'tcx> Bounds<'tcx> {
 }
 }
 
-pub(crate) fn clauses(
-&self,
-// FIXME(effects): remove tcx
-_tcx: TyCtxt<'tcx>,
-) -> impl Iterator<Item = (ty::Clause<'tcx>, Span)> + '_ {
+pub(crate) fn clauses(&self) -> impl Iterator<Item = (ty::Clause<'tcx>, Span)> + '_ {
 self.clauses.iter().cloned()
 }
-
-pub(crate) fn effects_min_tys(&self) -> impl Iterator<Item = Ty<'tcx>> + '_ {
-self.effects_min_tys.keys().copied()
-}
 }
@@ -67,7 +67,7 @@ fn associated_type_bounds<'tcx>(
 )
 });
 
-let all_bounds = tcx.arena.alloc_from_iter(bounds.clauses(tcx).chain(bounds_from_parent));
+let all_bounds = tcx.arena.alloc_from_iter(bounds.clauses().chain(bounds_from_parent));
 debug!(
 "associated_type_bounds({}) = {:?}",
 tcx.def_path_str(assoc_item_def_id.to_def_id()),
@@ -339,7 +339,7 @@ fn opaque_type_bounds<'tcx>(
 }
 debug!(?bounds);
 
-tcx.arena.alloc_from_iter(bounds.clauses(tcx))
+tcx.arena.alloc_from_iter(bounds.clauses())
 })
 }
 
@@ -106,7 +106,6 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen
 return ty::GenericPredicates {
 parent: Some(tcx.parent(def_id.to_def_id())),
 predicates: tcx.arena.alloc_from_iter(predicates),
-effects_min_tys: ty::List::empty(),
 };
 }
 
@@ -128,7 +127,6 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen
 return ty::GenericPredicates {
 parent: Some(impl_def_id),
 predicates: tcx.arena.alloc_from_iter(impl_predicates),
-effects_min_tys: ty::List::empty(),
 };
 }
 
@@ -154,7 +152,6 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen
 // We use an `IndexSet` to preserve order of insertion.
 // Preserving the order of insertion is important here so as not to break UI tests.
 let mut predicates: FxIndexSet<(ty::Clause<'_>, Span)> = FxIndexSet::default();
-let mut effects_min_tys = Vec::new();
 
 let hir_generics = node.generics().unwrap_or(NO_GENERICS);
 if let Node::Item(item) = node {
@@ -189,8 +186,7 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen
 ty::List::empty(),
 PredicateFilter::All,
 );
-predicates.extend(bounds.clauses(tcx));
-effects_min_tys.extend(bounds.effects_min_tys());
+predicates.extend(bounds.clauses());
 }
 
 // In default impls, we can assume that the self type implements
@@ -223,7 +219,7 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen
 param.span,
 );
 trace!(?bounds);
-predicates.extend(bounds.clauses(tcx));
+predicates.extend(bounds.clauses());
 trace!(?predicates);
 }
 hir::GenericParamKind::Const { .. } => {
@@ -275,8 +271,7 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen
 bound_vars,
 PredicateFilter::All,
 );
-predicates.extend(bounds.clauses(tcx));
-effects_min_tys.extend(bounds.effects_min_tys());
+predicates.extend(bounds.clauses());
 }
 
 hir::WherePredicate::RegionPredicate(region_pred) => {
@@ -348,7 +343,6 @@ fn gather_explicit_predicates_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Gen
 ty::GenericPredicates {
 parent: generics.parent,
 predicates: tcx.arena.alloc_from_iter(predicates),
-effects_min_tys: tcx.mk_type_list(&effects_min_tys),
 }
 }
 
@@ -499,7 +493,6 @@ pub(super) fn explicit_predicates_of<'tcx>(
 ty::GenericPredicates {
 parent: predicates_and_bounds.parent,
 predicates: tcx.arena.alloc_slice(&predicates),
-effects_min_tys: predicates_and_bounds.effects_min_tys,
 }
 }
 } else {
@@ -551,7 +544,6 @@ pub(super) fn explicit_predicates_of<'tcx>(
 return GenericPredicates {
 parent: parent_preds.parent,
 predicates: { tcx.arena.alloc_from_iter(filtered_predicates) },
-effects_min_tys: parent_preds.effects_min_tys,
 };
 }
 gather_explicit_predicates_of(tcx, def_id)
@@ -630,7 +622,7 @@ pub(super) fn implied_predicates_with_filter<'tcx>(
 
 // Combine the two lists to form the complete set of superbounds:
 let implied_bounds =
-&*tcx.arena.alloc_from_iter(bounds.clauses(tcx).chain(where_bounds_that_match));
+&*tcx.arena.alloc_from_iter(bounds.clauses().chain(where_bounds_that_match));
 debug!(?implied_bounds);
 
 // Now require that immediate supertraits are lowered, which will, in
@@ -874,7 +866,7 @@ impl<'tcx> ItemCtxt<'tcx> {
 );
 }
 
-bounds.clauses(self.tcx).collect()
+bounds.clauses().collect()
 }
 }
 
@@ -966,7 +958,7 @@ pub(super) fn const_conditions<'tcx>(
 
 ty::ConstConditions {
 parent: has_parent.then(|| tcx.local_parent(def_id).to_def_id()),
-predicates: tcx.arena.alloc_from_iter(bounds.clauses(tcx).map(|(clause, span)| {
+predicates: tcx.arena.alloc_from_iter(bounds.clauses().map(|(clause, span)| {
 (
 clause.kind().map_bound(|clause| match clause {
 ty::ClauseKind::HostEffect(ty::HostEffectPredicate {
@@ -278,8 +278,6 @@ impl<'tcx> PredicatesBuilder<'tcx> {
 ty::GenericPredicates {
 parent: self.parent,
 predicates: self.tcx.arena.alloc_from_iter(preds),
-// FIXME(fn_delegation): Support effects.
-effects_min_tys: ty::List::empty(),
 }
 }
 }
@@ -62,7 +62,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
 
 let mut trait_bounds = vec![];
 let mut projection_bounds = vec![];
-for (pred, span) in bounds.clauses(tcx) {
+for (pred, span) in bounds.clauses() {
 let bound_pred = pred.kind();
 match bound_pred.skip_binder() {
 ty::ClauseKind::Trait(trait_pred) => {
@@ -2289,13 +2289,15 @@ declare_lint_pass!(
 impl EarlyLintPass for IncompleteInternalFeatures {
 fn check_crate(&mut self, cx: &EarlyContext<'_>, _: &ast::Crate) {
 let features = cx.builder.features();
-features
-.enabled_lang_features()
-.iter()
-.map(|(name, span, _)| (name, span))
-.chain(features.enabled_lib_features().iter().map(|(name, span)| (name, span)))
-.filter(|(&name, _)| features.incomplete(name) || features.internal(name))
-.for_each(|(&name, &span)| {
+let lang_features =
+features.enabled_lang_features().iter().map(|feat| (feat.gate_name, feat.attr_sp));
+let lib_features =
+features.enabled_lib_features().iter().map(|feat| (feat.gate_name, feat.attr_sp));
+
+lang_features
+.chain(lib_features)
+.filter(|(name, _)| features.incomplete(*name) || features.internal(*name))
+.for_each(|(name, span)| {
 if features.incomplete(name) {
 let note = rustc_feature::find_feature_issue(name, GateIssue::Language)
 .map(|n| BuiltinFeatureIssueNote { n });
@@ -86,11 +86,9 @@ impl<'tcx> MonoItem<'tcx> {
 }
 }
 
-pub fn is_generic_fn(&self, tcx: TyCtxt<'tcx>) -> bool {
+pub fn is_generic_fn(&self) -> bool {
 match self {
-MonoItem::Fn(instance) => {
-instance.args.non_erasable_generics(tcx, instance.def_id()).next().is_some()
-}
+MonoItem::Fn(instance) => instance.args.non_erasable_generics().next().is_some(),
 MonoItem::Static(..) | MonoItem::GlobalAsm(..) => false,
 }
 }
@@ -70,7 +70,7 @@ impl<'tcx> Ty<'tcx> {
 /// ADTs with no type arguments.
 pub fn is_simple_text(self, tcx: TyCtxt<'tcx>) -> bool {
 match self.kind() {
-Adt(def, args) => args.non_erasable_generics(tcx, def.did()).next().is_none(),
+Adt(_, args) => args.non_erasable_generics().next().is_none(),
 Ref(_, ty, _) => ty.is_simple_text(tcx),
 _ => self.is_simple_ty(),
 }
@@ -501,9 +501,6 @@ impl<'tcx> GenericArgs<'tcx> {
 #[inline]
 pub fn non_erasable_generics(
 &'tcx self,
-// FIXME(effects): Remove these
-_tcx: TyCtxt<'tcx>,
-_def_id: DefId,
 ) -> impl DoubleEndedIterator<Item = GenericArgKind<'tcx>> + 'tcx {
 self.iter().filter_map(|k| match k.unpack() {
 ty::GenericArgKind::Lifetime(_) => None,
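The hunk above (part of the #132168 effects cleanup) drops the `_tcx`/`_def_id` parameters that `non_erasable_generics` only kept around for the now-removed effects plumbing; the many `…non_erasable_generics().next().is_some()` call sites elsewhere in this diff shrink accordingly. A standalone sketch of that shape, using simplified stand-in types rather than rustc's real `GenericArgs`/`GenericArgKind`:

```rust
/// Simplified stand-ins for `GenericArgKind`/`GenericArgs`: lifetimes are the
/// only erasable argument kind, mirroring the filter in the hunk above.
#[derive(Debug, Clone)]
enum GenericArgKind {
    Lifetime,
    Type(&'static str),
    Const(u64),
}

struct GenericArgs(Vec<GenericArgKind>);

impl GenericArgs {
    // After the cleanup this takes no `tcx`/`def_id`: it only filters the list.
    fn non_erasable_generics(&self) -> impl Iterator<Item = &GenericArgKind> {
        self.0.iter().filter(|arg| !matches!(arg, GenericArgKind::Lifetime))
    }
}

fn main() {
    let args = GenericArgs(vec![
        GenericArgKind::Lifetime,
        GenericArgKind::Type("T"),
        GenericArgKind::Const(3),
    ]);
    // The question asked at most call sites in this diff:
    // "is anything left once erasable (lifetime) arguments are dropped?"
    let is_generic = args.non_erasable_generics().next().is_some();
    println!("is_generic = {is_generic}");
}
```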
@@ -360,7 +360,6 @@ impl<'tcx> Generics {
 pub struct GenericPredicates<'tcx> {
 pub parent: Option<DefId>,
 pub predicates: &'tcx [(Clause<'tcx>, Span)],
-pub effects_min_tys: &'tcx ty::List<Ty<'tcx>>,
 }
 
 impl<'tcx> GenericPredicates<'tcx> {
@@ -204,7 +204,7 @@ impl<'tcx> Instance<'tcx> {
 }
 
 // If this a non-generic instance, it cannot be a shared monomorphization.
-self.args.non_erasable_generics(tcx, self.def_id()).next()?;
+self.args.non_erasable_generics().next()?;
 
 // compiler_builtins cannot use upstream monomorphizations.
 if tcx.is_compiler_builtins(LOCAL_CRATE) {
@@ -439,12 +439,7 @@ impl<'tcx> Inliner<'tcx> {
 
 // Reachability pass defines which functions are eligible for inlining. Generally inlining
 // other functions is incorrect because they could reference symbols that aren't exported.
-let is_generic = callsite
-.callee
-.args
-.non_erasable_generics(self.tcx, callsite.callee.def_id())
-.next()
-.is_some();
+let is_generic = callsite.callee.args.non_erasable_generics().next().is_some();
 if !is_generic && !cross_crate_inlinable {
 return Err("not exported");
 }
@@ -504,7 +504,7 @@ fn collect_items_rec<'tcx>(
 // Check for PMEs and emit a diagnostic if one happened. To try to show relevant edges of the
 // mono item graph.
 if tcx.dcx().err_count() > error_count
-&& starting_item.node.is_generic_fn(tcx)
+&& starting_item.node.is_generic_fn()
 && starting_item.node.is_user_defined()
 {
 let formatted_item = with_no_trimmed_paths!(starting_item.node.to_string());
@@ -229,7 +229,7 @@ where
 }
 
 let characteristic_def_id = characteristic_def_id_of_mono_item(cx.tcx, mono_item);
-let is_volatile = is_incremental_build && mono_item.is_generic_fn(cx.tcx);
+let is_volatile = is_incremental_build && mono_item.is_generic_fn();
 
 let cgu_name = match characteristic_def_id {
 Some(def_id) => compute_codegen_unit_name(
@@ -822,7 +822,7 @@ fn mono_item_visibility<'tcx>(
 return Visibility::Hidden;
 }
 
-let is_generic = instance.args.non_erasable_generics(tcx, def_id).next().is_some();
+let is_generic = instance.args.non_erasable_generics().next().is_some();
 
 // Upstream `DefId` instances get different handling than local ones.
 let Some(def_id) = def_id.as_local() else {
@@ -10,7 +10,7 @@ use rustc_attr::{
 };
 use rustc_data_structures::fx::FxIndexMap;
 use rustc_data_structures::unord::{ExtendUnord, UnordMap, UnordSet};
-use rustc_feature::ACCEPTED_LANG_FEATURES;
+use rustc_feature::{ACCEPTED_LANG_FEATURES, EnabledLangFeature, EnabledLibFeature};
 use rustc_hir as hir;
 use rustc_hir::def::{DefKind, Res};
 use rustc_hir::def_id::{CRATE_DEF_ID, LOCAL_CRATE, LocalDefId, LocalModDefId};
@@ -994,25 +994,25 @@ pub fn check_unused_or_stable_features(tcx: TyCtxt<'_>) {
 
 let enabled_lang_features = tcx.features().enabled_lang_features();
 let mut lang_features = UnordSet::default();
-for &(feature, span, since) in enabled_lang_features {
-if let Some(since) = since {
+for EnabledLangFeature { gate_name, attr_sp, stable_since } in enabled_lang_features {
+if let Some(version) = stable_since {
 // Warn if the user has enabled an already-stable lang feature.
-unnecessary_stable_feature_lint(tcx, span, feature, since);
+unnecessary_stable_feature_lint(tcx, *attr_sp, *gate_name, *version);
 }
-if !lang_features.insert(feature) {
+if !lang_features.insert(gate_name) {
 // Warn if the user enables a lang feature multiple times.
-tcx.dcx().emit_err(errors::DuplicateFeatureErr { span, feature });
+tcx.dcx().emit_err(errors::DuplicateFeatureErr { span: *attr_sp, feature: *gate_name });
 }
 }
 
 let enabled_lib_features = tcx.features().enabled_lib_features();
 let mut remaining_lib_features = FxIndexMap::default();
-for (feature, span) in enabled_lib_features {
-if remaining_lib_features.contains_key(&feature) {
+for EnabledLibFeature { gate_name, attr_sp } in enabled_lib_features {
+if remaining_lib_features.contains_key(gate_name) {
 // Warn if the user enables a lib feature multiple times.
-tcx.dcx().emit_err(errors::DuplicateFeatureErr { span: *span, feature: *feature });
+tcx.dcx().emit_err(errors::DuplicateFeatureErr { span: *attr_sp, feature: *gate_name });
 }
-remaining_lib_features.insert(feature, *span);
+remaining_lib_features.insert(*gate_name, *attr_sp);
 }
 // `stdbuild` has special handling for `libc`, so we need to
 // recognise the feature when building std.
@@ -1044,7 +1044,7 @@ pub fn check_unused_or_stable_features(tcx: TyCtxt<'_>) {
 /// time, less loading from metadata is performed and thus compiler performance is improved.
 fn check_features<'tcx>(
 tcx: TyCtxt<'tcx>,
-remaining_lib_features: &mut FxIndexMap<&Symbol, Span>,
+remaining_lib_features: &mut FxIndexMap<Symbol, Span>,
 remaining_implications: &mut UnordMap<Symbol, Symbol>,
 defined_features: &LibFeatures,
 all_implications: &UnordMap<Symbol, Symbol>,
@@ -1114,7 +1114,7 @@ pub fn check_unused_or_stable_features(tcx: TyCtxt<'_>) {
 }
 
 for (feature, span) in remaining_lib_features {
-tcx.dcx().emit_err(errors::UnknownFeature { span, feature: *feature });
+tcx.dcx().emit_err(errors::UnknownFeature { span, feature });
 }
 
 for (&implied_by, &feature) in remaining_implications.to_sorted_stable_ord() {
@@ -116,3 +116,20 @@ impl<'tcx> HashStable<StableHashingContext<'tcx>> for rustc_feature::Features {
 self.enabled_lib_features().hash_stable(hcx, hasher);
 }
 }
+
+impl<'tcx> HashStable<StableHashingContext<'tcx>> for rustc_feature::EnabledLangFeature {
+fn hash_stable(&self, hcx: &mut StableHashingContext<'tcx>, hasher: &mut StableHasher) {
+let rustc_feature::EnabledLangFeature { gate_name, attr_sp, stable_since } = self;
+gate_name.hash_stable(hcx, hasher);
+attr_sp.hash_stable(hcx, hasher);
+stable_since.hash_stable(hcx, hasher);
+}
+}
+
+impl<'tcx> HashStable<StableHashingContext<'tcx>> for rustc_feature::EnabledLibFeature {
+fn hash_stable(&self, hcx: &mut StableHashingContext<'tcx>, hasher: &mut StableHasher) {
+let rustc_feature::EnabledLibFeature { gate_name, attr_sp } = self;
+gate_name.hash_stable(hcx, hasher);
+attr_sp.hash_stable(hcx, hasher);
+}
+}
@@ -161,8 +161,7 @@ impl<'tcx> Context for TablesWrapper<'tcx> {
 fn predicates_of(&self, def_id: stable_mir::DefId) -> stable_mir::ty::GenericPredicates {
 let mut tables = self.0.borrow_mut();
 let def_id = tables[def_id];
-let GenericPredicates { parent, predicates, effects_min_tys: _ } =
-tables.tcx.predicates_of(def_id);
+let GenericPredicates { parent, predicates } = tables.tcx.predicates_of(def_id);
 stable_mir::ty::GenericPredicates {
 parent: parent.map(|did| tables.trait_def(did)),
 predicates: predicates
@@ -183,8 +182,7 @@ impl<'tcx> Context for TablesWrapper<'tcx> {
 ) -> stable_mir::ty::GenericPredicates {
 let mut tables = self.0.borrow_mut();
 let def_id = tables[def_id];
-let GenericPredicates { parent, predicates, effects_min_tys: _ } =
-tables.tcx.explicit_predicates_of(def_id);
+let GenericPredicates { parent, predicates } = tables.tcx.explicit_predicates_of(def_id);
 stable_mir::ty::GenericPredicates {
 parent: parent.map(|did| tables.trait_def(did)),
 predicates: predicates
@@ -135,7 +135,7 @@ fn symbol_name_provider<'tcx>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> ty
 // This closure determines the instantiating crate for instances that
 // need an instantiating-crate-suffix for their symbol name, in order
 // to differentiate between local copies.
-if is_generic(instance, tcx) {
+if is_generic(instance) {
 // For generics we might find re-usable upstream instances. If there
 // is one, we rely on the symbol being instantiated locally.
 instance.upstream_monomorphization(tcx).unwrap_or(LOCAL_CRATE)
@@ -241,7 +241,7 @@ fn compute_symbol_name<'tcx>(
 // the ID of the instantiating crate. This avoids symbol conflicts
 // in case the same instances is emitted in two crates of the same
 // project.
-let avoid_cross_crate_conflicts = is_generic(instance, tcx) || is_globally_shared_function;
+let avoid_cross_crate_conflicts = is_generic(instance) || is_globally_shared_function;
 
 let instantiating_crate = avoid_cross_crate_conflicts.then(compute_instantiating_crate);
 
@@ -276,6 +276,6 @@ fn compute_symbol_name<'tcx>(
 symbol
 }
 
-fn is_generic<'tcx>(instance: Instance<'tcx>, tcx: TyCtxt<'tcx>) -> bool {
-instance.args.non_erasable_generics(tcx, instance.def_id()).next().is_some()
+fn is_generic<'tcx>(instance: Instance<'tcx>) -> bool {
+instance.args.non_erasable_generics().next().is_some()
 }
@@ -287,7 +287,7 @@ class TestEnvironment:
 
 @property
 def package_server_log_path(self) -> Path:
-return self.tmp_dir().joinpath("package_server_log")
+return self.tmp_dir().joinpath(f"repo_{self.TEST_REPO_NAME}.log")
 
 @property
 def emulator_log_path(self) -> Path:
@@ -401,6 +401,7 @@ class TestEnvironment:
 # Set configs
 configs = {
 "log.enabled": "true",
+"log.dir": self.tmp_dir(),
 "test.is_isolated": "true",
 "test.experimental_structured_output": "true",
 }
@@ -575,43 +576,19 @@ class TestEnvironment:
 stderr_handler=self.subprocess_logger.debug,
 )
 
-# Add repository
-check_call_with_logging(
-[
-ffx_path,
-"repository",
-"add-from-pm",
-"--repository",
-self.TEST_REPO_NAME,
-self.repo_dir(),
-],
-env=ffx_env,
-stdout_handler=self.subprocess_logger.debug,
-stderr_handler=self.subprocess_logger.debug,
-)
-
 # Start repository server
-# Note that we must first enable the repository server daemon.
-check_call_with_logging(
-[
-ffx_path,
-"config",
-"set",
-"repository.server.enabled",
-"true",
-],
-env=ffx_env,
-stdout_handler=self.subprocess_logger.debug,
-stderr_handler=self.subprocess_logger.debug,
-)
 check_call_with_logging(
 [
 ffx_path,
 "repository",
 "server",
 "start",
+"--background",
 "--address",
 "[::]:0",
+"--repo-path",
+self.repo_dir(),
+"--repository",
+self.TEST_REPO_NAME
 ],
 env=ffx_env,
 stdout_handler=self.subprocess_logger.debug,
@@ -1009,6 +986,21 @@ class TestEnvironment:
 stderr_handler=self.subprocess_logger.debug,
 )
 
+# Stop the package server
+self.env_logger.info("Stopping package server...")
+check_call_with_logging(
+[
+self.tool_path("ffx"),
+"repository",
+"server",
+"stop",
+self.TEST_REPO_NAME
+],
+env=self.ffx_cmd_env(),
+stdout_handler=self.subprocess_logger.debug,
+stderr_handler=self.subprocess_logger.debug,
+)
+
 # Stop ffx isolation
 self.env_logger.info("Stopping ffx isolation...")
 self.stop_ffx_isolation()
@@ -1400,7 +1400,6 @@ pub(crate) fn clean_middle_assoc_item(assoc_item: &ty::AssocItem, cx: &mut DocCo
 clean_ty_generics(cx, tcx.generics_of(assoc_item.def_id), ty::GenericPredicates {
 parent: None,
 predicates,
-effects_min_tys: ty::List::empty(),
 });
 simplify::move_bounds_to_generic_parameters(&mut generics);
 
@@ -324,7 +324,7 @@ fn check_copy_clone<'tcx>(cx: &LateContext<'tcx>, item: &Item<'_>, trait_ref: &h
 // If the current self type doesn't implement Copy (due to generic constraints), search to see if
 // there's a Copy impl for any instance of the adt.
 if !is_copy(cx, ty) {
-if ty_subs.non_erasable_generics(cx.tcx, ty_adt.did()).next().is_some() {
+if ty_subs.non_erasable_generics().next().is_some() {
 let has_copy_impl = cx.tcx.all_local_trait_impls(()).get(&copy_id).map_or(false, |impls| {
 impls.iter().any(|&id| {
 matches!(cx.tcx.type_of(id).instantiate_identity().kind(), ty::Adt(adt, _)