Auto merge of #127111 - matthiaskrgr:rollup-ybzkuuv, r=matthiaskrgr

Rollup of 9 pull requests

Successful merges:

 - #126822 (Bootstrap command refactoring: port more `Command` usages to `BootstrapCmd` (step 2))
 - #126835 (Simplifications in match lowering)
 - #126953 (std: separate TLS key creation from TLS access)
 - #127045 (Rename `super_predicates_of` and similar queries to `explicit_*` to note that they're not elaborated)
 - #127075 (rustc_data_structures: Explicitly check for 64-bit atomics support)
 - #127101 (remove redundant match statement from dataflow const prop)
 - #127102 (Rename fuchsia builder and bump Fuchsia)
 - #127103 (Move binder and polarity parsing into `parse_generic_ty_bound`)
 - #127108 (unify `dylib` and `bin_helpers` and create `shared_helpers::parse_value_from_args`)

r? `@ghost`
`@rustbot` modify labels: rollup
Commit be99243afc by bors, 2024-06-29 09:29:41 +00:00
73 changed files with 1161 additions and 946 deletions

View File

@ -50,7 +50,7 @@ libc = "0.2"
memmap2 = "0.2.1"
# tidy-alphabetical-end
[target.'cfg(any(target_arch = "mips", target_arch = "powerpc", target_arch = "sparc"))'.dependencies]
[target.'cfg(not(target_has_atomic = "64"))'.dependencies]
portable-atomic = "1.5.1"
[features]

View File

@ -147,14 +147,13 @@ cfg_match! {
[crate::owned_slice::OwnedSlice]
);
// MIPS, PowerPC and SPARC platforms with 32-bit pointers do not
// have AtomicU64 type.
#[cfg(not(any(target_arch = "powerpc", target_arch = "powerpc", target_arch = "sparc")))]
// Use portable AtomicU64 for targets without native 64-bit atomics
#[cfg(target_has_atomic = "64")]
already_sync!(
[std::sync::atomic::AtomicU64]
);
#[cfg(any(target_arch = "mips", target_arch = "powerpc", target_arch = "sparc"))]
#[cfg(not(target_has_atomic = "64"))]
already_sync!(
[portable_atomic::AtomicU64]
);

View File

@ -270,12 +270,11 @@ cfg_match! {
pub use std::sync::atomic::{AtomicBool, AtomicUsize, AtomicU32};
// MIPS, PowerPC and SPARC platforms with 32-bit pointers do not
// have AtomicU64 type.
#[cfg(not(any(target_arch = "mips", target_arch = "powerpc", target_arch = "sparc")))]
// Use portable AtomicU64 for targets without native 64-bit atomics
#[cfg(target_has_atomic = "64")]
pub use std::sync::atomic::AtomicU64;
#[cfg(any(target_arch = "mips", target_arch = "powerpc", target_arch = "sparc"))]
#[cfg(not(target_has_atomic = "64"))]
pub use portable_atomic::AtomicU64;
pub use std::sync::Arc as Lrc;
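For reference, a minimal sketch of the selection pattern these hunks adopt, assuming a crate whose `portable-atomic` fallback is gated exactly as in the Cargo.toml hunk above; the `bump` helper and its name are illustrative, not part of this diff:

```rust
// Export one `AtomicU64` name regardless of what the target natively provides.
#[cfg(target_has_atomic = "64")]
pub use std::sync::atomic::AtomicU64;
#[cfg(not(target_has_atomic = "64"))]
pub use portable_atomic::AtomicU64;

use std::sync::atomic::Ordering;

// Downstream code never needs to know which implementation was selected;
// `portable-atomic` mirrors the `std` atomic API.
pub fn bump(counter: &AtomicU64) -> u64 {
    counter.fetch_add(1, Ordering::Relaxed)
}
```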

View File

@ -70,10 +70,10 @@ pub fn provide(providers: &mut Providers) {
predicates_of: predicates_of::predicates_of,
predicates_defined_on,
explicit_predicates_of: predicates_of::explicit_predicates_of,
super_predicates_of: predicates_of::super_predicates_of,
implied_predicates_of: predicates_of::implied_predicates_of,
super_predicates_that_define_assoc_item:
predicates_of::super_predicates_that_define_assoc_item,
explicit_super_predicates_of: predicates_of::explicit_super_predicates_of,
explicit_implied_predicates_of: predicates_of::explicit_implied_predicates_of,
explicit_supertraits_containing_assoc_item:
predicates_of::explicit_supertraits_containing_assoc_item,
trait_explicit_predicates_and_bounds: predicates_of::trait_explicit_predicates_and_bounds,
type_param_predicates: predicates_of::type_param_predicates,
trait_def,
@ -691,14 +691,14 @@ fn lower_item(tcx: TyCtxt<'_>, item_id: hir::ItemId) {
hir::ItemKind::Trait(..) => {
tcx.ensure().generics_of(def_id);
tcx.ensure().trait_def(def_id);
tcx.at(it.span).super_predicates_of(def_id);
tcx.at(it.span).explicit_super_predicates_of(def_id);
tcx.ensure().predicates_of(def_id);
tcx.ensure().associated_items(def_id);
}
hir::ItemKind::TraitAlias(..) => {
tcx.ensure().generics_of(def_id);
tcx.at(it.span).implied_predicates_of(def_id);
tcx.at(it.span).super_predicates_of(def_id);
tcx.at(it.span).explicit_implied_predicates_of(def_id);
tcx.at(it.span).explicit_super_predicates_of(def_id);
tcx.ensure().predicates_of(def_id);
}
hir::ItemKind::Struct(struct_def, _) | hir::ItemKind::Union(struct_def, _) => {

View File

@ -519,21 +519,21 @@ pub(super) fn explicit_predicates_of<'tcx>(
/// Ensures that the super-predicates of the trait with a `DefId`
/// of `trait_def_id` are lowered and stored. This also ensures that
/// the transitive super-predicates are lowered.
pub(super) fn super_predicates_of(
pub(super) fn explicit_super_predicates_of(
tcx: TyCtxt<'_>,
trait_def_id: LocalDefId,
) -> ty::GenericPredicates<'_> {
implied_predicates_with_filter(tcx, trait_def_id.to_def_id(), PredicateFilter::SelfOnly)
}
pub(super) fn super_predicates_that_define_assoc_item(
pub(super) fn explicit_supertraits_containing_assoc_item(
tcx: TyCtxt<'_>,
(trait_def_id, assoc_name): (DefId, Ident),
) -> ty::GenericPredicates<'_> {
implied_predicates_with_filter(tcx, trait_def_id, PredicateFilter::SelfThatDefines(assoc_name))
}
pub(super) fn implied_predicates_of(
pub(super) fn explicit_implied_predicates_of(
tcx: TyCtxt<'_>,
trait_def_id: LocalDefId,
) -> ty::GenericPredicates<'_> {
@ -560,7 +560,7 @@ pub(super) fn implied_predicates_with_filter(
// if `assoc_name` is None, then the query should've been redirected to an
// external provider
assert!(matches!(filter, PredicateFilter::SelfThatDefines(_)));
return tcx.super_predicates_of(trait_def_id);
return tcx.explicit_super_predicates_of(trait_def_id);
};
let Node::Item(item) = tcx.hir_node_by_def_id(trait_def_id) else {
@ -601,7 +601,7 @@ pub(super) fn implied_predicates_with_filter(
if let ty::ClauseKind::Trait(bound) = pred.kind().skip_binder()
&& bound.polarity == ty::PredicatePolarity::Positive
{
tcx.at(span).super_predicates_of(bound.def_id());
tcx.at(span).explicit_super_predicates_of(bound.def_id());
}
}
}
@ -611,7 +611,7 @@ pub(super) fn implied_predicates_with_filter(
if let ty::ClauseKind::Trait(bound) = pred.kind().skip_binder()
&& bound.polarity == ty::PredicatePolarity::Positive
{
tcx.at(span).implied_predicates_of(bound.def_id());
tcx.at(span).explicit_implied_predicates_of(bound.def_id());
}
}
}
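Each of the renamed queries above stores only the directly written predicates; `implied_predicates_with_filter` then recurses into every positive bound (the `tcx.at(span)` calls in this hunk) so the whole chain is still lowered. A small surface-Rust illustration of that recursion, with illustrative trait names:

```rust
// Lowering the super-predicates of `C` records only `B`, its directly written
// supertrait; recursing into `B`'s own super-predicates then reaches `A`, so the
// full chain ends up lowered even though each individual query is non-transitive.
trait A {}
trait B: A {}
trait C: B {}

fn main() {}
```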

View File

@ -1760,7 +1760,7 @@ impl<'a, 'tcx> BoundVarContext<'a, 'tcx> {
if let Some(assoc_item) = trait_defines_associated_item_named(def_id) {
break Some((bound_vars.into_iter().collect(), assoc_item));
}
let predicates = tcx.super_predicates_that_define_assoc_item((def_id, assoc_name));
let predicates = tcx.explicit_supertraits_containing_assoc_item((def_id, assoc_name));
let obligations = predicates.predicates.iter().filter_map(|&(pred, _)| {
let bound_predicate = pred.kind();
match bound_predicate.skip_binder() {

View File

@ -275,10 +275,10 @@ impl<'tcx, O: Elaboratable<'tcx>> Elaborator<'tcx, O> {
}
// Get predicates implied by the trait, or only super predicates if we only care about self predicates.
let predicates = match self.mode {
Filter::All => tcx.implied_predicates_of(data.def_id()),
Filter::OnlySelf => tcx.super_predicates_of(data.def_id()),
Filter::All => tcx.explicit_implied_predicates_of(data.def_id()),
Filter::OnlySelf => tcx.explicit_super_predicates_of(data.def_id()),
Filter::OnlySelfThatDefines(ident) => {
tcx.super_predicates_that_define_assoc_item((data.def_id(), ident))
tcx.explicit_supertraits_containing_assoc_item((data.def_id(), ident))
}
};
@ -420,7 +420,7 @@ pub fn transitive_bounds<'tcx>(
/// A specialized variant of `elaborate` that only elaborates trait references that may
/// define the given associated item with the name `assoc_name`. It uses the
/// `super_predicates_that_define_assoc_item` query to avoid enumerating super-predicates that
/// `explicit_supertraits_containing_assoc_item` query to avoid enumerating super-predicates that
/// aren't related to `assoc_item`. This is used when resolving types like `Self::Item` or
/// `T::Item` and helps to avoid cycle errors (see e.g. #35237).
pub fn transitive_bounds_that_define_assoc_item<'tcx>(
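The doc comment above explains why elaboration is limited to supertraits that can define `assoc_name`. A small stable-Rust example of the shorthand resolution this serves; the names are illustrative:

```rust
trait Source {
    type Item;
}
trait Pipeline: Source + Send {}

// Resolving the shorthand `T::Item` below only requires the supertraits of
// `Pipeline` that define an `Item` (here, `Source`); the `Send` bound never
// needs to be elaborated for this lookup.
fn first_item<T: Pipeline>(_item: T::Item) {}

struct Numbers;
impl Source for Numbers {
    type Item = u32;
}
impl Pipeline for Numbers {}

fn main() {
    first_item::<Numbers>(42);
}
```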

View File

@ -45,7 +45,7 @@ impl<'tcx> LateLintPass<'tcx> for MultipleSupertraitUpcastable {
{
let direct_super_traits_iter = cx
.tcx
.super_predicates_of(def_id)
.explicit_super_predicates_of(def_id)
.predicates
.into_iter()
.filter_map(|(pred, _)| pred.as_trait_clause());

View File

@ -211,8 +211,8 @@ provide! { tcx, def_id, other, cdata,
explicit_predicates_of => { table }
generics_of => { table }
inferred_outlives_of => { table_defaulted_array }
super_predicates_of => { table }
implied_predicates_of => { table }
explicit_super_predicates_of => { table }
explicit_implied_predicates_of => { table }
type_of => { table }
type_alias_is_lazy => { cdata.root.tables.type_alias_is_lazy.get(cdata, def_id.index) }
variances_of => { table }

View File

@ -1431,8 +1431,8 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
}
if let DefKind::Trait = def_kind {
record!(self.tables.trait_def[def_id] <- self.tcx.trait_def(def_id));
record!(self.tables.super_predicates_of[def_id] <- self.tcx.super_predicates_of(def_id));
record!(self.tables.implied_predicates_of[def_id] <- self.tcx.implied_predicates_of(def_id));
record!(self.tables.explicit_super_predicates_of[def_id] <- self.tcx.explicit_super_predicates_of(def_id));
record!(self.tables.explicit_implied_predicates_of[def_id] <- self.tcx.explicit_implied_predicates_of(def_id));
let module_children = self.tcx.module_children_local(local_id);
record_array!(self.tables.module_children_non_reexports[def_id] <-
@ -1440,8 +1440,8 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
}
if let DefKind::TraitAlias = def_kind {
record!(self.tables.trait_def[def_id] <- self.tcx.trait_def(def_id));
record!(self.tables.super_predicates_of[def_id] <- self.tcx.super_predicates_of(def_id));
record!(self.tables.implied_predicates_of[def_id] <- self.tcx.implied_predicates_of(def_id));
record!(self.tables.explicit_super_predicates_of[def_id] <- self.tcx.explicit_super_predicates_of(def_id));
record!(self.tables.explicit_implied_predicates_of[def_id] <- self.tcx.explicit_implied_predicates_of(def_id));
}
if let DefKind::Trait | DefKind::Impl { .. } = def_kind {
let associated_item_def_ids = self.tcx.associated_item_def_ids(def_id);

View File

@ -416,10 +416,10 @@ define_tables! {
lookup_deprecation_entry: Table<DefIndex, LazyValue<attr::Deprecation>>,
explicit_predicates_of: Table<DefIndex, LazyValue<ty::GenericPredicates<'static>>>,
generics_of: Table<DefIndex, LazyValue<ty::Generics>>,
super_predicates_of: Table<DefIndex, LazyValue<ty::GenericPredicates<'static>>>,
explicit_super_predicates_of: Table<DefIndex, LazyValue<ty::GenericPredicates<'static>>>,
// As an optimization, we only store this for trait aliases,
// since it's identical to super_predicates_of for traits.
implied_predicates_of: Table<DefIndex, LazyValue<ty::GenericPredicates<'static>>>,
// since it's identical to explicit_super_predicates_of for traits.
explicit_implied_predicates_of: Table<DefIndex, LazyValue<ty::GenericPredicates<'static>>>,
type_of: Table<DefIndex, LazyValue<ty::EarlyBinder<'static, Ty<'static>>>>,
variances_of: Table<DefIndex, LazyArray<ty::Variance>>,
fn_sig: Table<DefIndex, LazyValue<ty::EarlyBinder<'static, ty::PolyFnSig<'static>>>>,

View File

@ -646,6 +646,9 @@ rustc_queries! {
}
/// Returns the predicates written explicitly by the user.
///
/// You should probably use `predicates_of` unless you're looking for
/// predicates with explicit spans for diagnostics purposes.
query explicit_predicates_of(key: DefId) -> ty::GenericPredicates<'tcx> {
desc { |tcx| "computing explicit predicates of `{}`", tcx.def_path_str(key) }
cache_on_disk_if { key.is_local() }
@ -662,29 +665,32 @@ rustc_queries! {
feedable
}
/// Maps from the `DefId` of a trait to the list of
/// super-predicates. This is a subset of the full list of
/// predicates. We store these in a separate map because we must
/// evaluate them even during type conversion, often before the
/// full predicates are available (note that supertraits have
/// additional acyclicity requirements).
query super_predicates_of(key: DefId) -> ty::GenericPredicates<'tcx> {
/// Maps from the `DefId` of a trait to the list of super-predicates of the trait,
/// *before* elaboration (so it doesn't contain transitive super-predicates). This
/// is a subset of the full list of predicates. We store these in a separate map
/// because we must evaluate them even during type conversion, often before the full
/// predicates are available (note that super-predicates must not be cyclic).
query explicit_super_predicates_of(key: DefId) -> ty::GenericPredicates<'tcx> {
desc { |tcx| "computing the super predicates of `{}`", tcx.def_path_str(key) }
cache_on_disk_if { key.is_local() }
separate_provide_extern
}
query implied_predicates_of(key: DefId) -> ty::GenericPredicates<'tcx> {
/// The predicates of the trait that are implied during elaboration. This is a
/// superset of the super-predicates of the trait, but a subset of the predicates
/// of the trait. For regular traits, this includes all super-predicates and their
/// associated type bounds. For trait aliases, currently, this includes all of the
/// predicates of the trait alias.
query explicit_implied_predicates_of(key: DefId) -> ty::GenericPredicates<'tcx> {
desc { |tcx| "computing the implied predicates of `{}`", tcx.def_path_str(key) }
cache_on_disk_if { key.is_local() }
separate_provide_extern
}
/// The `Option<Ident>` is the name of an associated type. If it is `None`, then this query
/// returns the full set of predicates. If `Some<Ident>`, then the query returns only the
/// subset of super-predicates that reference traits that define the given associated type.
/// This is used to avoid cycles in resolving types like `T::Item`.
query super_predicates_that_define_assoc_item(key: (DefId, rustc_span::symbol::Ident)) -> ty::GenericPredicates<'tcx> {
/// The `Ident` is the name of an associated type. The query returns only the subset
/// of supertraits that define the given associated type. This is used to avoid
/// cycles in resolving type-dependent associated item paths like `T::Item`.
query explicit_supertraits_containing_assoc_item(key: (DefId, rustc_span::symbol::Ident)) -> ty::GenericPredicates<'tcx> {
desc { |tcx| "computing the super traits of `{}` with associated type name `{}`",
tcx.def_path_str(key.0),
key.1
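Taken together, the renamed queries above all return *explicit* (non-elaborated) predicate lists. A hedged surface-Rust illustration of the distinctions the new doc comments draw; the comments below paraphrase those doc comments rather than exact compiler output, and the trait names are illustrative:

```rust
trait Base {
    type Assoc;
}
trait Extra {}
trait Derived: Base<Assoc: Extra> {}

// For `Derived`, roughly:
// - explicit_super_predicates_of: just the written supertrait `Base<Assoc: Extra>`,
//   with no transitive elaboration.
// - explicit_implied_predicates_of: the supertrait plus its associated type bound
//   `<Self as Base>::Assoc: Extra`.
// - explicit_supertraits_containing_assoc_item with `Assoc`: only the supertraits
//   that define `Assoc`, i.e. `Base`.

fn main() {}
```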

View File

@ -35,7 +35,7 @@ struct Elaborator<'tcx> {
impl<'tcx> Elaborator<'tcx> {
fn elaborate(&mut self, trait_ref: PolyTraitRef<'tcx>) {
let super_predicates =
self.tcx.super_predicates_of(trait_ref.def_id()).predicates.iter().filter_map(
self.tcx.explicit_super_predicates_of(trait_ref.def_id()).predicates.iter().filter_map(
|&(pred, _)| {
let clause = pred.instantiate_supertrait(self.tcx, trait_ref);
self.visited.insert(clause).then_some(clause)

View File

@ -342,12 +342,15 @@ impl<'tcx> Interner for TyCtxt<'tcx> {
)
}
fn super_predicates_of(
fn explicit_super_predicates_of(
self,
def_id: DefId,
) -> ty::EarlyBinder<'tcx, impl IntoIterator<Item = ty::Clause<'tcx>>> {
ty::EarlyBinder::bind(
self.super_predicates_of(def_id).instantiate_identity(self).predicates.into_iter(),
self.explicit_super_predicates_of(def_id)
.instantiate_identity(self)
.predicates
.into_iter(),
)
}
@ -2440,7 +2443,7 @@ impl<'tcx> TyCtxt<'tcx> {
/// Given the def_id of a Trait `trait_def_id` and the name of an associated item `assoc_name`
/// returns true if the `trait_def_id` defines an associated item of name `assoc_name`.
pub fn trait_may_define_assoc_item(self, trait_def_id: DefId, assoc_name: Ident) -> bool {
self.super_traits_of(trait_def_id).any(|trait_did| {
self.supertrait_def_ids(trait_def_id).any(|trait_did| {
self.associated_items(trait_did)
.filter_by_name_unhygienic(assoc_name.name)
.any(|item| self.hygienic_eq(assoc_name, item.ident(self), trait_did))
@ -2463,9 +2466,9 @@ impl<'tcx> TyCtxt<'tcx> {
/// Computes the def-ids of the transitive supertraits of `trait_def_id`. This (intentionally)
/// does not compute the full elaborated super-predicates but just the set of def-ids. It is used
/// to identify which traits may define a given associated type to help avoid cycle errors.
/// Returns a `DefId` iterator.
fn super_traits_of(self, trait_def_id: DefId) -> impl Iterator<Item = DefId> + 'tcx {
/// to identify which traits may define a given associated type to help avoid cycle errors,
/// and to make size estimates for vtable layout computation.
pub fn supertrait_def_ids(self, trait_def_id: DefId) -> impl Iterator<Item = DefId> + 'tcx {
let mut set = FxHashSet::default();
let mut stack = vec![trait_def_id];
@ -2473,7 +2476,7 @@ impl<'tcx> TyCtxt<'tcx> {
iter::from_fn(move || -> Option<DefId> {
let trait_did = stack.pop()?;
let generic_predicates = self.super_predicates_of(trait_did);
let generic_predicates = self.explicit_super_predicates_of(trait_did);
for (predicate, _) in generic_predicates.predicates {
if let ty::ClauseKind::Trait(data) = predicate.kind().skip_binder() {
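`supertrait_def_ids` above replaces the hand-rolled `SupertraitDefIds` iterator (deleted in the next hunk) with `iter::from_fn` driving a stack and a visited set. A self-contained sketch of that pattern on a plain graph; nothing here is compiler API and the names are illustrative:

```rust
use std::collections::{HashMap, HashSet};
use std::iter;

/// Depth-first traversal as an iterator built from `iter::from_fn` instead of a
/// dedicated iterator struct: the closure owns the stack and the visited set.
fn reachable(graph: &HashMap<u32, Vec<u32>>, start: u32) -> impl Iterator<Item = u32> + '_ {
    let mut visited: HashSet<u32> = HashSet::from([start]);
    let mut stack = vec![start];
    iter::from_fn(move || {
        let node = stack.pop()?;
        for &next in graph.get(&node).into_iter().flatten() {
            if visited.insert(next) {
                stack.push(next);
            }
        }
        Some(node)
    })
}

fn main() {
    let graph = HashMap::from([(0, vec![1, 2]), (1, vec![2]), (2, vec![0])]);
    let nodes: Vec<u32> = reachable(&graph, 0).collect();
    assert_eq!(nodes.len(), 3);
}
```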

View File

@ -3,8 +3,6 @@ use std::fmt;
use crate::mir::interpret::{alloc_range, AllocId, Allocation, Pointer, Scalar};
use crate::ty::{self, Instance, PolyTraitRef, Ty, TyCtxt};
use rustc_ast::Mutability;
use rustc_data_structures::fx::FxHashSet;
use rustc_hir::def_id::DefId;
use rustc_macros::HashStable;
#[derive(Clone, Copy, PartialEq, HashStable)]
@ -42,45 +40,12 @@ impl<'tcx> fmt::Debug for VtblEntry<'tcx> {
impl<'tcx> TyCtxt<'tcx> {
pub const COMMON_VTABLE_ENTRIES: &'tcx [VtblEntry<'tcx>] =
&[VtblEntry::MetadataDropInPlace, VtblEntry::MetadataSize, VtblEntry::MetadataAlign];
pub fn supertrait_def_ids(self, trait_def_id: DefId) -> SupertraitDefIds<'tcx> {
SupertraitDefIds {
tcx: self,
stack: vec![trait_def_id],
visited: Some(trait_def_id).into_iter().collect(),
}
}
}
pub const COMMON_VTABLE_ENTRIES_DROPINPLACE: usize = 0;
pub const COMMON_VTABLE_ENTRIES_SIZE: usize = 1;
pub const COMMON_VTABLE_ENTRIES_ALIGN: usize = 2;
pub struct SupertraitDefIds<'tcx> {
tcx: TyCtxt<'tcx>,
stack: Vec<DefId>,
visited: FxHashSet<DefId>,
}
impl Iterator for SupertraitDefIds<'_> {
type Item = DefId;
fn next(&mut self) -> Option<DefId> {
let def_id = self.stack.pop()?;
let predicates = self.tcx.super_predicates_of(def_id);
let visited = &mut self.visited;
self.stack.extend(
predicates
.predicates
.iter()
.filter_map(|(pred, _)| pred.as_trait_clause())
.map(|trait_ref| trait_ref.def_id())
.filter(|&super_def_id| visited.insert(super_def_id)),
);
Some(def_id)
}
}
// Note that we don't have access to a self type here; this has to be purely based on the trait (and
// supertrait) definitions. That means we can't call into the same vtable_entries code since that
// returns a specific instantiation (e.g., with Vacant slots when bounds aren't satisfied). The goal

View File

@ -189,38 +189,37 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let initializer_span = this.thir[*initializer].span;
let scope = (*init_scope, source_info);
let failure = unpack!(
block = this.in_scope(scope, *lint_level, |this| {
this.declare_bindings(
visibility_scope,
remainder_span,
pattern,
None,
Some((Some(&destination), initializer_span)),
);
this.visit_primary_bindings(
pattern,
UserTypeProjections::none(),
&mut |this, _, _, node, span, _, _| {
this.storage_live_binding(
block,
node,
span,
OutsideGuard,
true,
);
},
);
this.ast_let_else(
block,
*initializer,
initializer_span,
*else_block,
&last_remainder_scope,
pattern,
)
})
);
let failure_and_block = this.in_scope(scope, *lint_level, |this| {
this.declare_bindings(
visibility_scope,
remainder_span,
pattern,
None,
Some((Some(&destination), initializer_span)),
);
this.visit_primary_bindings(
pattern,
UserTypeProjections::none(),
&mut |this, _, _, node, span, _, _| {
this.storage_live_binding(block, node, span, OutsideGuard, true);
},
);
let else_block_span = this.thir[*else_block].span;
let (matching, failure) =
this.in_if_then_scope(last_remainder_scope, else_block_span, |this| {
this.lower_let_expr(
block,
*initializer,
pattern,
None,
initializer_span,
false,
true,
)
});
matching.and(failure)
});
let failure = unpack!(block = failure_and_block);
this.cfg.goto(failure, source_info, failure_entry);
if let Some(source_scope) = visibility_scope {

View File

@ -21,6 +21,7 @@ use rustc_span::symbol::Symbol;
use rustc_span::{BytePos, Pos, Span};
use rustc_target::abi::VariantIdx;
use tracing::{debug, instrument};
use util::visit_bindings;
// helper functions, broken out by category:
mod simplify;
@ -146,6 +147,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
Some(args.variable_source_info.scope),
args.variable_source_info.span,
args.declare_let_bindings,
false,
),
_ => {
let mut block = block;
@ -314,13 +316,21 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let match_start_span = span.shrink_to_lo().to(scrutinee_span);
let fake_borrow_temps = self.lower_match_tree(
// The set of places that we are creating fake borrows of. If there are no match guards then
// we don't need any fake borrows, so don't track them.
let fake_borrow_temps: Vec<(Place<'tcx>, Local, FakeBorrowKind)> = if match_has_guard {
util::collect_fake_borrows(self, &candidates, scrutinee_span, scrutinee_place.base())
} else {
Vec::new()
};
self.lower_match_tree(
block,
scrutinee_span,
&scrutinee_place,
match_start_span,
match_has_guard,
&mut candidates,
false,
);
self.lower_match_arms(
@ -375,89 +385,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
.collect()
}
/// Create the decision tree for the match expression, starting from `block`.
///
/// Modifies `candidates` to store the bindings and type ascriptions for
/// that candidate.
///
/// Returns the places that need fake borrows because we bind or test them.
fn lower_match_tree<'pat>(
&mut self,
block: BasicBlock,
scrutinee_span: Span,
scrutinee_place_builder: &PlaceBuilder<'tcx>,
match_start_span: Span,
match_has_guard: bool,
candidates: &mut [&mut Candidate<'pat, 'tcx>],
) -> Vec<(Place<'tcx>, Local, FakeBorrowKind)> {
// The set of places that we are creating fake borrows of. If there are no match guards then
// we don't need any fake borrows, so don't track them.
let fake_borrows: Vec<(Place<'tcx>, Local, FakeBorrowKind)> = if match_has_guard {
util::collect_fake_borrows(
self,
candidates,
scrutinee_span,
scrutinee_place_builder.base(),
)
} else {
Vec::new()
};
// See the doc comment on `match_candidates` for why we have an
// otherwise block. Match checking will ensure this is actually
// unreachable.
let otherwise_block = self.cfg.start_new_block();
// This will generate code to test scrutinee_place and
// branch to the appropriate arm block
self.match_candidates(match_start_span, scrutinee_span, block, otherwise_block, candidates);
let source_info = self.source_info(scrutinee_span);
// Matching on a `scrutinee_place` with an uninhabited type doesn't
// generate any memory reads by itself, and so if the place "expression"
// contains unsafe operations like raw pointer dereferences or union
// field projections, we wouldn't know to require an `unsafe` block
// around a `match` equivalent to `std::intrinsics::unreachable()`.
// See issue #47412 for this hole being discovered in the wild.
//
// HACK(eddyb) Work around the above issue by adding a dummy inspection
// of `scrutinee_place`, specifically by applying `ReadForMatch`.
//
// NOTE: ReadForMatch also checks that the scrutinee is initialized.
// This is currently needed to not allow matching on an uninitialized,
// uninhabited value. If we get never patterns, those will check that
// the place is initialized, and so this read would only be used to
// check safety.
let cause_matched_place = FakeReadCause::ForMatchedPlace(None);
if let Some(scrutinee_place) = scrutinee_place_builder.try_to_place(self) {
self.cfg.push_fake_read(
otherwise_block,
source_info,
cause_matched_place,
scrutinee_place,
);
}
self.cfg.terminate(otherwise_block, source_info, TerminatorKind::Unreachable);
// Link each leaf candidate to the `pre_binding_block` of the next one.
let mut previous_candidate: Option<&mut Candidate<'_, '_>> = None;
for candidate in candidates {
candidate.visit_leaves(|leaf_candidate| {
if let Some(ref mut prev) = previous_candidate {
assert!(leaf_candidate.false_edge_start_block.is_some());
prev.next_candidate_start_block = leaf_candidate.false_edge_start_block;
}
previous_candidate = Some(leaf_candidate);
});
}
fake_borrows
}
/// Lower the bindings, guards and arm bodies of a `match` expression.
///
/// The decision tree should have already been created
@ -728,59 +655,53 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
set_match_place: bool,
) -> BlockAnd<()> {
let mut candidate = Candidate::new(initializer.clone(), irrefutable_pat, false, self);
let fake_borrow_temps = self.lower_match_tree(
block,
irrefutable_pat.span,
&initializer,
irrefutable_pat.span,
false,
&mut [&mut candidate],
);
// For matches and function arguments, the place that is being matched
// can be set when creating the variables. But the place for
// let PATTERN = ... might not even exist until we do the assignment.
// so we set it here instead.
if set_match_place {
let mut next = Some(&candidate);
while let Some(candidate_ref) = next.take() {
for binding in &candidate_ref.extra_data.bindings {
// `try_to_place` may fail if it is unable to resolve the given `PlaceBuilder` inside a
// closure. In this case, we don't want to include a scrutinee place.
// `scrutinee_place_builder` will fail for destructured assignments. This is because a
// closure only captures the precise places that it will read and as a result a closure
// may not capture the entire tuple/struct and rather have individual places that will
// be read in the final MIR.
// Example:
// ```
// let foo = (0, 1);
// let c = || {
// let (v1, v2) = foo;
// };
// ```
if let Some(place) = initializer.try_to_place(self) {
visit_bindings(&[&mut candidate], |binding: &Binding<'_>| {
let local = self.var_local_id(binding.var_id, OutsideGuard);
// `try_to_place` may fail if it is unable to resolve the given
// `PlaceBuilder` inside a closure. In this case, we don't want to include
// a scrutinee place. `scrutinee_place_builder` will fail for destructured
// assignments. This is because a closure only captures the precise places
// that it will read and as a result a closure may not capture the entire
// tuple/struct and rather have individual places that will be read in the
// final MIR.
// Example:
// ```
// let foo = (0, 1);
// let c = || {
// let (v1, v2) = foo;
// };
// ```
if let Some(place) = initializer.try_to_place(self) {
let LocalInfo::User(BindingForm::Var(VarBindingForm {
opt_match_place: Some((ref mut match_place, _)),
..
})) = **self.local_decls[local].local_info.as_mut().assert_crate_local()
else {
bug!("Let binding to non-user variable.")
};
if let LocalInfo::User(BindingForm::Var(VarBindingForm {
opt_match_place: Some((ref mut match_place, _)),
..
})) = **self.local_decls[local].local_info.as_mut().assert_crate_local()
{
*match_place = Some(place);
}
}
// All of the subcandidates should bind the same locals, so we
// only visit the first one.
next = candidate_ref.subcandidates.get(0)
} else {
bug!("Let binding to non-user variable.")
};
});
}
}
self.lower_match_tree(
block,
irrefutable_pat.span,
&initializer,
irrefutable_pat.span,
&mut [&mut candidate],
false,
);
self.bind_pattern(
self.source_info(irrefutable_pat.span),
candidate,
fake_borrow_temps.as_slice(),
&[],
irrefutable_pat.span,
None,
false,
@ -1306,6 +1227,79 @@ pub(crate) struct ArmHasGuard(pub(crate) bool);
// Main matching algorithm
impl<'a, 'tcx> Builder<'a, 'tcx> {
/// The entrypoint of the matching algorithm. Create the decision tree for the match expression,
/// starting from `block`.
///
/// Modifies `candidates` to store the bindings and type ascriptions for
/// that candidate.
///
/// `refutable` indicates whether the candidate list is refutable (for `if let` and `let else`)
/// or not (for `let` and `match`). In the refutable case we return the block to which we branch
/// on failure.
fn lower_match_tree<'pat>(
&mut self,
block: BasicBlock,
scrutinee_span: Span,
scrutinee_place_builder: &PlaceBuilder<'tcx>,
match_start_span: Span,
candidates: &mut [&mut Candidate<'pat, 'tcx>],
refutable: bool,
) -> BasicBlock {
// See the doc comment on `match_candidates` for why we have an otherwise block.
let otherwise_block = self.cfg.start_new_block();
// This will generate code to test scrutinee_place and branch to the appropriate arm block
self.match_candidates(match_start_span, scrutinee_span, block, otherwise_block, candidates);
// Link each leaf candidate to the `false_edge_start_block` of the next one.
let mut previous_candidate: Option<&mut Candidate<'_, '_>> = None;
for candidate in candidates {
candidate.visit_leaves(|leaf_candidate| {
if let Some(ref mut prev) = previous_candidate {
assert!(leaf_candidate.false_edge_start_block.is_some());
prev.next_candidate_start_block = leaf_candidate.false_edge_start_block;
}
previous_candidate = Some(leaf_candidate);
});
}
if refutable {
// In refutable cases there's always at least one candidate, and we want a false edge to
// the failure block.
previous_candidate.as_mut().unwrap().next_candidate_start_block = Some(otherwise_block)
} else {
// Match checking ensures `otherwise_block` is actually unreachable in irrefutable
// cases.
let source_info = self.source_info(scrutinee_span);
// Matching on a scrutinee place of an uninhabited type doesn't generate any memory
// reads by itself, and so if the place is uninitialized we wouldn't know. In order to
// disallow the following:
// ```rust
// let x: !;
// match x {}
// ```
// we add a dummy read on the place.
//
// NOTE: If we require never patterns for empty matches, those will check that the place
// is initialized, and so this read would no longer be needed.
let cause_matched_place = FakeReadCause::ForMatchedPlace(None);
if let Some(scrutinee_place) = scrutinee_place_builder.try_to_place(self) {
self.cfg.push_fake_read(
otherwise_block,
source_info,
cause_matched_place,
scrutinee_place,
);
}
self.cfg.terminate(otherwise_block, source_info, TerminatorKind::Unreachable);
}
otherwise_block
}
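The new `refutable` parameter folds both lowering modes into this single entry point. As a surface-level reminder, and with illustrative names and values, these are the source forms on each side of that flag:

```rust
fn demo(input: Option<i32>) -> i32 {
    // Irrefutable lowering (`refutable == false`): `let` and `match` must cover
    // every case, so the otherwise block is unreachable.
    let (a, b) = (1, 2);
    let doubled = match input {
        Some(n) => n * 2,
        None => 0,
    };

    // Refutable lowering (`refutable == true`): `if let` and `let else` branch to
    // the failure block that `lower_match_tree` now returns.
    if let Some(n) = input {
        return n + a + b;
    }
    let Some(n) = input else { return doubled };
    n
}

fn main() {
    assert_eq!(demo(None), 0);
    assert_eq!(demo(Some(1)), 4);
}
```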
/// The main match algorithm. It begins with a set of candidates
/// `candidates` and has the job of generating code to determine
/// which of these candidates, if any, is the correct one. The
@ -1998,52 +1992,50 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
impl<'a, 'tcx> Builder<'a, 'tcx> {
/// If the bindings have already been declared, set `declare_bindings` to
/// `false` to avoid duplicated bindings declaration. Used for if-let guards.
/// `false` to avoid duplicated bindings declaration; used for if-let guards.
pub(crate) fn lower_let_expr(
&mut self,
mut block: BasicBlock,
expr_id: ExprId,
pat: &Pat<'tcx>,
source_scope: Option<SourceScope>,
span: Span,
scope_span: Span,
declare_bindings: bool,
storages_alive: bool,
) -> BlockAnd<()> {
let expr_span = self.thir[expr_id].span;
let expr_place_builder = unpack!(block = self.lower_scrutinee(block, expr_id, expr_span));
let wildcard = Pat::wildcard_from_ty(pat.ty);
let mut guard_candidate = Candidate::new(expr_place_builder.clone(), pat, false, self);
let mut otherwise_candidate =
Candidate::new(expr_place_builder.clone(), &wildcard, false, self);
let fake_borrow_temps = self.lower_match_tree(
let scrutinee = unpack!(block = self.lower_scrutinee(block, expr_id, expr_span));
let mut candidate = Candidate::new(scrutinee.clone(), pat, false, self);
let otherwise_block = self.lower_match_tree(
block,
expr_span,
&scrutinee,
pat.span,
&expr_place_builder,
pat.span,
false,
&mut [&mut guard_candidate, &mut otherwise_candidate],
&mut [&mut candidate],
true,
);
let expr_place = expr_place_builder.try_to_place(self);
let opt_expr_place = expr_place.as_ref().map(|place| (Some(place), expr_span));
let otherwise_post_guard_block = otherwise_candidate.pre_binding_block.unwrap();
self.break_for_else(otherwise_post_guard_block, self.source_info(expr_span));
self.break_for_else(otherwise_block, self.source_info(expr_span));
if declare_bindings {
self.declare_bindings(source_scope, pat.span.to(span), pat, None, opt_expr_place);
let expr_place = scrutinee.try_to_place(self);
let opt_expr_place = expr_place.as_ref().map(|place| (Some(place), expr_span));
self.declare_bindings(source_scope, pat.span.to(scope_span), pat, None, opt_expr_place);
}
let post_guard_block = self.bind_pattern(
let success = self.bind_pattern(
self.source_info(pat.span),
guard_candidate,
fake_borrow_temps.as_slice(),
candidate,
&[],
expr_span,
None,
false,
storages_alive,
);
// If branch coverage is enabled, record this branch.
self.visit_coverage_conditional_let(pat, post_guard_block, otherwise_post_guard_block);
self.visit_coverage_conditional_let(pat, success, otherwise_block);
post_guard_block.unit()
success.unit()
}
/// Initializes each of the bindings from the candidate by
@ -2091,14 +2083,15 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
return self.cfg.start_new_block();
}
self.ascribe_types(
block,
parent_data
.iter()
.flat_map(|d| &d.ascriptions)
.cloned()
.chain(candidate.extra_data.ascriptions),
);
let ascriptions = parent_data
.iter()
.flat_map(|d| &d.ascriptions)
.cloned()
.chain(candidate.extra_data.ascriptions);
let bindings =
parent_data.iter().flat_map(|d| &d.bindings).chain(&candidate.extra_data.bindings);
self.ascribe_types(block, ascriptions);
// rust-lang/rust#27282: The `autoref` business deserves some
// explanation here.
@ -2185,12 +2178,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
&& let Some(guard) = arm.guard
{
let tcx = self.tcx;
let bindings =
parent_data.iter().flat_map(|d| &d.bindings).chain(&candidate.extra_data.bindings);
self.bind_matched_candidate_for_guard(block, schedule_drops, bindings.clone());
let guard_frame =
GuardFrame { locals: bindings.map(|b| GuardFrameLocal::new(b.var_id)).collect() };
let guard_frame = GuardFrame {
locals: bindings.clone().map(|b| GuardFrameLocal::new(b.var_id)).collect(),
};
debug!("entering guard building context: {:?}", guard_frame);
self.guard_context.push(guard_frame);
@ -2263,11 +2255,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// ```
//
// and that is clearly not correct.
let by_value_bindings = parent_data
.iter()
.flat_map(|d| &d.bindings)
.chain(&candidate.extra_data.bindings)
.filter(|binding| matches!(binding.binding_mode.0, ByRef::No));
let by_value_bindings =
bindings.filter(|binding| matches!(binding.binding_mode.0, ByRef::No));
// Read all of the by reference bindings to ensure that the
// place they refer to can't be modified by the guard.
for binding in by_value_bindings.clone() {
@ -2291,7 +2280,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
self.bind_matched_candidate_for_arm_body(
block,
schedule_drops,
parent_data.iter().flat_map(|d| &d.bindings).chain(&candidate.extra_data.bindings),
bindings,
storages_alive,
);
block
@ -2493,55 +2482,4 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
debug!(?locals);
self.var_indices.insert(var_id, locals);
}
pub(crate) fn ast_let_else(
&mut self,
mut block: BasicBlock,
init_id: ExprId,
initializer_span: Span,
else_block: BlockId,
let_else_scope: &region::Scope,
pattern: &Pat<'tcx>,
) -> BlockAnd<BasicBlock> {
let else_block_span = self.thir[else_block].span;
let (matching, failure) = self.in_if_then_scope(*let_else_scope, else_block_span, |this| {
let scrutinee = unpack!(block = this.lower_scrutinee(block, init_id, initializer_span));
let pat = Pat { ty: pattern.ty, span: else_block_span, kind: PatKind::Wild };
let mut wildcard = Candidate::new(scrutinee.clone(), &pat, false, this);
let mut candidate = Candidate::new(scrutinee.clone(), pattern, false, this);
let fake_borrow_temps = this.lower_match_tree(
block,
initializer_span,
&scrutinee,
pattern.span,
false,
&mut [&mut candidate, &mut wildcard],
);
// This block is for the matching case
let matching = this.bind_pattern(
this.source_info(pattern.span),
candidate,
fake_borrow_temps.as_slice(),
initializer_span,
None,
true,
);
// This block is for the failure case
let failure = this.bind_pattern(
this.source_info(else_block_span),
wildcard,
fake_borrow_temps.as_slice(),
initializer_span,
None,
true,
);
// If branch coverage is enabled, record this branch.
this.visit_coverage_conditional_let(pattern, matching, failure);
this.break_for_else(failure, this.source_info(initializer_span));
matching.unit()
});
matching.and(failure)
}
}

View File

@ -1,3 +1,5 @@
use std::marker::PhantomData;
use crate::build::expr::as_place::{PlaceBase, PlaceBuilder};
use crate::build::matches::{Binding, Candidate, FlatPat, MatchPair, TestCase};
use crate::build::Builder;
@ -269,18 +271,6 @@ impl<'pat, 'tcx> MatchPair<'pat, 'tcx> {
}
}
pub(super) struct FakeBorrowCollector<'a, 'b, 'tcx> {
cx: &'a mut Builder<'b, 'tcx>,
/// Base of the scrutinee place. Used to distinguish bindings inside the scrutinee place from
/// bindings inside deref patterns.
scrutinee_base: PlaceBase,
/// Store for each place the kind of borrow to take. In case of conflicts, we take the strongest
/// borrow (i.e. Deep > Shallow).
/// Invariant: for any place in `fake_borrows`, all the prefixes of this place that are
/// dereferences are also borrowed with the same or stronger borrow kind.
fake_borrows: FxIndexMap<Place<'tcx>, FakeBorrowKind>,
}
/// Determine the set of places that have to be stable across match guards.
///
/// Returns a list of places that need a fake borrow along with a local to store it.
@ -344,6 +334,18 @@ pub(super) fn collect_fake_borrows<'tcx>(
.collect()
}
pub(super) struct FakeBorrowCollector<'a, 'b, 'tcx> {
cx: &'a mut Builder<'b, 'tcx>,
/// Base of the scrutinee place. Used to distinguish bindings inside the scrutinee place from
/// bindings inside deref patterns.
scrutinee_base: PlaceBase,
/// Store for each place the kind of borrow to take. In case of conflicts, we take the strongest
/// borrow (i.e. Deep > Shallow).
/// Invariant: for any place in `fake_borrows`, all the prefixes of this place that are
/// dereferences are also borrowed with the same or stronger borrow kind.
fake_borrows: FxIndexMap<Place<'tcx>, FakeBorrowKind>,
}
impl<'a, 'b, 'tcx> FakeBorrowCollector<'a, 'b, 'tcx> {
// Fake borrow this place and its dereference prefixes.
fn fake_borrow(&mut self, place: Place<'tcx>, kind: FakeBorrowKind) {
@ -457,6 +459,57 @@ impl<'a, 'b, 'tcx> FakeBorrowCollector<'a, 'b, 'tcx> {
}
}
/// Visit all the bindings of these candidates. Because or-alternatives bind the same variables, we
/// only explore the first one of each or-pattern.
pub(super) fn visit_bindings<'tcx>(
candidates: &[&mut Candidate<'_, 'tcx>],
f: impl FnMut(&Binding<'tcx>),
) {
let mut visitor = BindingsVisitor { f, phantom: PhantomData };
for candidate in candidates.iter() {
visitor.visit_candidate(candidate);
}
}
pub(super) struct BindingsVisitor<'tcx, F> {
f: F,
phantom: PhantomData<&'tcx ()>,
}
impl<'tcx, F> BindingsVisitor<'tcx, F>
where
F: FnMut(&Binding<'tcx>),
{
fn visit_candidate(&mut self, candidate: &Candidate<'_, 'tcx>) {
for binding in &candidate.extra_data.bindings {
(self.f)(binding)
}
for match_pair in &candidate.match_pairs {
self.visit_match_pair(match_pair);
}
}
fn visit_flat_pat(&mut self, flat_pat: &FlatPat<'_, 'tcx>) {
for binding in &flat_pat.extra_data.bindings {
(self.f)(binding)
}
for match_pair in &flat_pat.match_pairs {
self.visit_match_pair(match_pair);
}
}
fn visit_match_pair(&mut self, match_pair: &MatchPair<'_, 'tcx>) {
if let TestCase::Or { pats, .. } = &match_pair.test_case {
// All the or-alternatives should bind the same locals, so we only visit the first one.
self.visit_flat_pat(&pats[0])
} else {
for subpair in &match_pair.subpairs {
self.visit_match_pair(subpair);
}
}
}
}
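`visit_bindings` and `BindingsVisitor` rely on an invariant the compiler already enforces: every alternative of an or-pattern binds the same variables, so visiting only the first alternative is sound. A small stable-Rust example of that invariant, with illustrative names:

```rust
fn unwrap_either(r: Result<u32, u32>) -> u32 {
    match r {
        // Both alternatives bind the same variable `x`; that is what lets the
        // binding visitor look at only the first alternative of an or-pattern.
        Ok(x) | Err(x) => x,
    }
}

fn main() {
    assert_eq!(unwrap_either(Ok(1)), 1);
    assert_eq!(unwrap_either(Err(2)), 2);
}
```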
#[must_use]
pub(crate) fn ref_pat_borrow_kind(ref_mutability: Mutability) -> BorrowKind {
match ref_mutability {

View File

@ -181,11 +181,6 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
state.insert_value_idx(value_target, val, self.map());
}
if let Some(overflow_target) = overflow_target {
let overflow = match overflow {
FlatSet::Top => FlatSet::Top,
FlatSet::Elem(overflow) => FlatSet::Elem(overflow),
FlatSet::Bottom => FlatSet::Bottom,
};
// We have flooded `target` earlier.
state.insert_value_idx(overflow_target, overflow, self.map());
}

View File

@ -668,8 +668,9 @@ where
{
let cx = ecx.cx();
let mut requirements = vec![];
requirements
.extend(cx.super_predicates_of(trait_ref.def_id).iter_instantiated(cx, trait_ref.args));
requirements.extend(
cx.explicit_super_predicates_of(trait_ref.def_id).iter_instantiated(cx, trait_ref.args),
);
// FIXME(associated_const_equality): Also add associated consts to
// the requirements here.

View File

@ -2327,7 +2327,7 @@ impl<'a> Parser<'a> {
let before = self.prev_token.clone();
let binder = if self.check_keyword(kw::For) {
let lo = self.token.span;
let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
let (lifetime_defs, _) = self.parse_late_bound_lifetime_defs()?;
let span = lo.to(self.prev_token.span);
self.psess.gated_spans.gate(sym::closure_lifetime_binder, span);

View File

@ -457,7 +457,7 @@ impl<'a> Parser<'a> {
// * `for<'a> Trait1<'a>: Trait2<'a /* ok */>`
// * `(for<'a> Trait1<'a>): Trait2<'a /* not ok */>`
// * `for<'a> for<'b> Trait1<'a, 'b>: Trait2<'a /* ok */, 'b /* not ok */>`
let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
let (lifetime_defs, _) = self.parse_late_bound_lifetime_defs()?;
// Parse type with mandatory colon and (possibly empty) bounds,
// or with mandatory equality sign and the second type.

View File

@ -18,7 +18,7 @@ use rustc_ast::{
};
use rustc_errors::{Applicability, PResult};
use rustc_span::symbol::{kw, sym, Ident};
use rustc_span::{Span, Symbol};
use rustc_span::{ErrorGuaranteed, Span, Symbol};
use thin_vec::{thin_vec, ThinVec};
#[derive(Copy, Clone, PartialEq)]
@ -280,7 +280,7 @@ impl<'a> Parser<'a> {
// Function pointer type or bound list (trait object type) starting with a poly-trait.
// `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T`
// `for<'lt> Trait1<'lt> + Trait2 + 'a`
let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
let (lifetime_defs, _) = self.parse_late_bound_lifetime_defs()?;
if self.check_fn_front_matter(false, Case::Sensitive) {
self.parse_ty_bare_fn(
lo,
@ -833,12 +833,9 @@ impl<'a> Parser<'a> {
let lo = self.token.span;
let leading_token = self.prev_token.clone();
let has_parens = self.eat(&token::OpenDelim(Delimiter::Parenthesis));
let inner_lo = self.token.span;
let modifiers = self.parse_trait_bound_modifiers()?;
let bound = if self.token.is_lifetime() {
self.error_lt_bound_with_modifiers(modifiers);
self.parse_generic_lt_bound(lo, inner_lo, has_parens)?
self.parse_generic_lt_bound(lo, has_parens)?
} else if self.eat_keyword(kw::Use) {
// parse precise captures, if any. This is `use<'lt, 'lt, P, P>`; a list of
// lifetimes and ident params (including SelfUpper). These are validated later
@ -848,7 +845,7 @@ impl<'a> Parser<'a> {
let (args, args_span) = self.parse_precise_capturing_args()?;
GenericBound::Use(args, use_span.to(args_span))
} else {
self.parse_generic_ty_bound(lo, has_parens, modifiers, &leading_token)?
self.parse_generic_ty_bound(lo, has_parens, &leading_token)?
};
Ok(bound)
@ -858,50 +855,64 @@ impl<'a> Parser<'a> {
/// ```ebnf
/// LT_BOUND = LIFETIME
/// ```
fn parse_generic_lt_bound(
&mut self,
lo: Span,
inner_lo: Span,
has_parens: bool,
) -> PResult<'a, GenericBound> {
let bound = GenericBound::Outlives(self.expect_lifetime());
fn parse_generic_lt_bound(&mut self, lo: Span, has_parens: bool) -> PResult<'a, GenericBound> {
let lt = self.expect_lifetime();
let bound = GenericBound::Outlives(lt);
if has_parens {
// FIXME(Centril): Consider not erroring here and accepting `('lt)` instead,
// possibly introducing `GenericBound::Paren(P<GenericBound>)`?
self.recover_paren_lifetime(lo, inner_lo)?;
self.recover_paren_lifetime(lo, lt.ident.span)?;
}
Ok(bound)
}
/// Emits an error if any trait bound modifiers were present.
fn error_lt_bound_with_modifiers(&self, modifiers: TraitBoundModifiers) {
match modifiers.constness {
fn error_lt_bound_with_modifiers(
&self,
modifiers: TraitBoundModifiers,
binder_span: Option<Span>,
) -> ErrorGuaranteed {
let TraitBoundModifiers { constness, asyncness, polarity } = modifiers;
match constness {
BoundConstness::Never => {}
BoundConstness::Always(span) | BoundConstness::Maybe(span) => {
self.dcx().emit_err(errors::ModifierLifetime {
span,
modifier: modifiers.constness.as_str(),
});
return self
.dcx()
.emit_err(errors::ModifierLifetime { span, modifier: constness.as_str() });
}
}
match modifiers.polarity {
match polarity {
BoundPolarity::Positive => {}
BoundPolarity::Negative(span) | BoundPolarity::Maybe(span) => {
self.dcx().emit_err(errors::ModifierLifetime {
span,
modifier: modifiers.polarity.as_str(),
});
return self
.dcx()
.emit_err(errors::ModifierLifetime { span, modifier: polarity.as_str() });
}
}
match asyncness {
BoundAsyncness::Normal => {}
BoundAsyncness::Async(span) => {
return self
.dcx()
.emit_err(errors::ModifierLifetime { span, modifier: asyncness.as_str() });
}
}
if let Some(span) = binder_span {
return self.dcx().emit_err(errors::ModifierLifetime { span, modifier: "for<...>" });
}
unreachable!("lifetime bound intercepted in `parse_generic_ty_bound` but no modifiers?")
}
/// Recover on `('lifetime)` with `(` already eaten.
fn recover_paren_lifetime(&mut self, lo: Span, inner_lo: Span) -> PResult<'a, ()> {
let inner_span = inner_lo.to(self.prev_token.span);
fn recover_paren_lifetime(&mut self, lo: Span, lt_span: Span) -> PResult<'a, ()> {
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
let span = lo.to(self.prev_token.span);
let (sugg, snippet) = if let Ok(snippet) = self.span_to_snippet(inner_span) {
let (sugg, snippet) = if let Ok(snippet) = self.span_to_snippet(lt_span) {
(Some(span), snippet)
} else {
(None, String::new())
@ -916,7 +927,7 @@ impl<'a> Parser<'a> {
/// If no modifiers are present, this does not consume any tokens.
///
/// ```ebnf
/// TRAIT_BOUND_MODIFIERS = [["~"] "const"] ["?" | "!"]
/// TRAIT_BOUND_MODIFIERS = [["~"] "const"] ["async"] ["?" | "!"]
/// ```
fn parse_trait_bound_modifiers(&mut self) -> PResult<'a, TraitBoundModifiers> {
let constness = if self.eat(&token::Tilde) {
@ -970,15 +981,23 @@ impl<'a> Parser<'a> {
/// TY_BOUND_NOPAREN = [TRAIT_BOUND_MODIFIERS] [for<LT_PARAM_DEFS>] SIMPLE_PATH
/// ```
///
/// For example, this grammar accepts `~const ?for<'a: 'b> m::Trait<'a>`.
/// For example, this grammar accepts `for<'a: 'b> ~const ?m::Trait<'a>`.
fn parse_generic_ty_bound(
&mut self,
lo: Span,
has_parens: bool,
modifiers: TraitBoundModifiers,
leading_token: &Token,
) -> PResult<'a, GenericBound> {
let mut lifetime_defs = self.parse_late_bound_lifetime_defs()?;
let modifiers = self.parse_trait_bound_modifiers()?;
let (mut lifetime_defs, binder_span) = self.parse_late_bound_lifetime_defs()?;
// Recover erroneous lifetime bound with modifiers or binder.
// e.g. `T: for<'a> 'a` or `T: ~const 'a`.
if self.token.is_lifetime() {
let _: ErrorGuaranteed = self.error_lt_bound_with_modifiers(modifiers, binder_span);
return self.parse_generic_lt_bound(lo, has_parens);
}
let mut path = if self.token.is_keyword(kw::Fn)
&& self.look_ahead(1, |tok| tok.kind == TokenKind::OpenDelim(Delimiter::Parenthesis))
&& let Some(path) = self.recover_path_from_fn()
@ -1094,16 +1113,19 @@ impl<'a> Parser<'a> {
}
/// Optionally parses `for<$generic_params>`.
pub(super) fn parse_late_bound_lifetime_defs(&mut self) -> PResult<'a, ThinVec<GenericParam>> {
pub(super) fn parse_late_bound_lifetime_defs(
&mut self,
) -> PResult<'a, (ThinVec<GenericParam>, Option<Span>)> {
if self.eat_keyword(kw::For) {
let lo = self.token.span;
self.expect_lt()?;
let params = self.parse_generic_params()?;
self.expect_gt()?;
// We rely on AST validation to rule out invalid cases: There must not be type
// parameters, and the lifetime parameters must not have bounds.
Ok(params)
// We rely on AST validation to rule out invalid cases: There must not be
// type or const parameters, and parameters must not have bounds.
Ok((params, Some(lo.to(self.prev_token.span))))
} else {
Ok(ThinVec::new())
Ok((ThinVec::new(), None))
}
}
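`parse_late_bound_lifetime_defs` now also reports the span of the `for<...>` binder so `parse_generic_ty_bound` can point at it when recovering from bounds like `T: for<'a> 'a`. For orientation, a stable-Rust example of the higher-ranked bounds this binder parsing serves; the names are illustrative:

```rust
// The `for<'a>` binder introduces late-bound lifetimes that apply to the whole
// trait bound that follows it.
fn apply_to_all<F>(f: F) -> usize
where
    F: for<'a> Fn(&'a str) -> usize,
{
    f("late-bound")
}

fn main() {
    assert_eq!(apply_to_all(|s| s.len()), 10);
}
```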

View File

@ -187,7 +187,7 @@ fn predicates_reference_self(
) -> SmallVec<[Span; 1]> {
let trait_ref = ty::Binder::dummy(ty::TraitRef::identity(tcx, trait_def_id));
let predicates = if supertraits_only {
tcx.super_predicates_of(trait_def_id)
tcx.explicit_super_predicates_of(trait_def_id)
} else {
tcx.predicates_of(trait_def_id)
};
@ -256,7 +256,7 @@ fn super_predicates_have_non_lifetime_binders(
if !tcx.features().non_lifetime_binders {
return SmallVec::new();
}
tcx.super_predicates_of(trait_def_id)
tcx.explicit_super_predicates_of(trait_def_id)
.predicates
.iter()
.filter_map(|(pred, span)| pred.has_non_region_bound_vars().then_some(*span))

View File

@ -574,7 +574,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
// Check supertraits hold. This is so that their associated type bounds
// will be checked in the code below.
for super_trait in tcx
.super_predicates_of(trait_predicate.def_id())
.explicit_super_predicates_of(trait_predicate.def_id())
.instantiate(tcx, trait_predicate.trait_ref.args)
.predicates
.into_iter()

View File

@ -128,7 +128,7 @@ impl<'tcx> TraitAliasExpander<'tcx> {
}
// Get components of trait alias.
let predicates = tcx.super_predicates_of(trait_ref.def_id());
let predicates = tcx.explicit_super_predicates_of(trait_ref.def_id());
debug!(?predicates);
let items = predicates.predicates.iter().rev().filter_map(|(pred, span)| {

View File

@ -117,7 +117,7 @@ fn prepare_vtable_segments_inner<'tcx, T>(
let &(inner_most_trait_ref, _, _) = stack.last().unwrap();
let mut direct_super_traits_iter = tcx
.super_predicates_of(inner_most_trait_ref.def_id())
.explicit_super_predicates_of(inner_most_trait_ref.def_id())
.predicates
.into_iter()
.filter_map(move |(pred, _)| {

View File

@ -209,8 +209,7 @@ pub trait Interner:
def_id: Self::DefId,
) -> ty::EarlyBinder<Self, impl IntoIterator<Item = Self::Clause>>;
// FIXME: Rename this so it's obvious it's only *immediate* super predicates.
fn super_predicates_of(
fn explicit_super_predicates_of(
self,
def_id: Self::DefId,
) -> ty::EarlyBinder<Self, impl IntoIterator<Item = Self::Clause>>;

View File

@ -4,15 +4,15 @@
use crate::ptr;
use crate::sys::thread_local::destructors;
use crate::sys::thread_local::key::StaticKey;
use crate::sys::thread_local::key::{set, LazyKey};
pub fn enable() {
static DTORS: StaticKey = StaticKey::new(Some(run));
static DTORS: LazyKey = LazyKey::new(Some(run));
// Setting the key value to something other than NULL will result in the
// destructor being run at thread exit.
unsafe {
DTORS.set(ptr::without_provenance_mut(1));
set(DTORS.force(), ptr::without_provenance_mut(1));
}
unsafe extern "C" fn run(_: *mut u8) {

View File

@ -1,4 +1,4 @@
//! A `StaticKey` implementation using racy initialization.
//! A `LazyKey` implementation using racy initialization.
//!
//! Unfortunately, none of the platforms currently supported by `std` allows
//! creating TLS keys at compile-time. Thus we need a way to lazily create keys.
@ -10,34 +10,12 @@ use crate::sync::atomic::{self, AtomicUsize, Ordering};
/// A type for TLS keys that are statically allocated.
///
/// This type is entirely `unsafe` to use as it does not protect against
/// use-after-deallocation or use-during-deallocation.
///
/// The actual OS-TLS key is lazily allocated when this is used for the first
/// time. The key is also deallocated when the Rust runtime exits or `destroy`
/// is called, whichever comes first.
///
/// # Examples
///
/// ```ignore (cannot-doctest-private-modules)
/// use tls::os::{StaticKey, INIT};
///
/// // Use a regular global static to store the key.
/// static KEY: StaticKey = INIT;
///
/// // The state provided via `get` and `set` is thread-local.
/// unsafe {
/// assert!(KEY.get().is_null());
/// KEY.set(1 as *mut u8);
/// }
/// ```
pub struct StaticKey {
/// This is basically a `LazyLock<Key>`, but avoids blocking and circular
/// dependencies with the rest of `std`.
pub struct LazyKey {
/// Inner static TLS key (internals).
key: AtomicUsize,
/// Destructor for the TLS value.
///
/// See `Key::new` for information about when the destructor runs and how
/// it runs.
dtor: Option<unsafe extern "C" fn(*mut u8)>,
}
@ -51,32 +29,14 @@ const KEY_SENTVAL: usize = 0;
#[cfg(target_os = "nto")]
const KEY_SENTVAL: usize = libc::PTHREAD_KEYS_MAX + 1;
impl StaticKey {
impl LazyKey {
#[rustc_const_unstable(feature = "thread_local_internals", issue = "none")]
pub const fn new(dtor: Option<unsafe extern "C" fn(*mut u8)>) -> StaticKey {
StaticKey { key: atomic::AtomicUsize::new(KEY_SENTVAL), dtor }
}
/// Gets the value associated with this TLS key
///
/// This will lazily allocate a TLS key from the OS if one has not already
/// been allocated.
#[inline]
pub unsafe fn get(&self) -> *mut u8 {
unsafe { super::get(self.key()) }
}
/// Sets this TLS key to a new value.
///
/// This will lazily allocate a TLS key from the OS if one has not already
/// been allocated.
#[inline]
pub unsafe fn set(&self, val: *mut u8) {
unsafe { super::set(self.key(), val) }
pub const fn new(dtor: Option<unsafe extern "C" fn(*mut u8)>) -> LazyKey {
LazyKey { key: atomic::AtomicUsize::new(KEY_SENTVAL), dtor }
}
#[inline]
fn key(&self) -> super::Key {
pub fn force(&self) -> super::Key {
match self.key.load(Ordering::Acquire) {
KEY_SENTVAL => self.lazy_init() as super::Key,
n => n as super::Key,
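`force` above falls back to `lazy_init` while the sentinel is still stored; the module doc calls this racy initialization: several threads may create a key, and every loser destroys its copy and adopts the winner's. A minimal sketch of that pattern only, with hypothetical `create`/`destroy` stand-ins for the OS key calls (the real `lazy_init` has additional handling not shown here):

```rust
use std::sync::atomic::{AtomicUsize, Ordering};

const SENTINEL: usize = 0;

/// Racy lazy init: losers of the compare_exchange destroy their freshly created
/// key and use the winner's value instead.
fn force(
    slot: &AtomicUsize,
    create: impl Fn() -> usize,
    destroy: impl Fn(usize),
) -> usize {
    match slot.load(Ordering::Acquire) {
        SENTINEL => {
            let key = create();
            match slot.compare_exchange(SENTINEL, key, Ordering::Release, Ordering::Acquire) {
                Ok(_) => key,
                Err(winner) => {
                    destroy(key);
                    winner
                }
            }
        }
        key => key,
    }
}

fn main() {
    let slot = AtomicUsize::new(SENTINEL);
    let first = force(&slot, || 7, |_| {});
    let second = force(&slot, || 9, |_| {});
    assert_eq!((first, second), (7, 7));
}
```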

View File

@ -1,18 +1,25 @@
use super::StaticKey;
use super::{get, set, LazyKey};
use crate::ptr;
#[test]
fn smoke() {
static K1: StaticKey = StaticKey::new(None);
static K2: StaticKey = StaticKey::new(None);
static K1: LazyKey = LazyKey::new(None);
static K2: LazyKey = LazyKey::new(None);
let k1 = K1.force();
let k2 = K2.force();
assert_ne!(k1, k2);
assert_eq!(K1.force(), k1);
assert_eq!(K2.force(), k2);
unsafe {
assert!(K1.get().is_null());
assert!(K2.get().is_null());
K1.set(ptr::without_provenance_mut(1));
K2.set(ptr::without_provenance_mut(2));
assert_eq!(K1.get() as usize, 1);
assert_eq!(K2.get() as usize, 2);
assert!(get(k1).is_null());
assert!(get(k2).is_null());
set(k1, ptr::without_provenance_mut(1));
set(k2, ptr::without_provenance_mut(2));
assert_eq!(get(k1) as usize, 1);
assert_eq!(get(k2) as usize, 2);
}
}
@ -26,25 +33,27 @@ fn destructors() {
drop(unsafe { Arc::from_raw(ptr as *const ()) });
}
static KEY: StaticKey = StaticKey::new(Some(destruct));
static KEY: LazyKey = LazyKey::new(Some(destruct));
let shared1 = Arc::new(());
let shared2 = Arc::clone(&shared1);
let key = KEY.force();
unsafe {
assert!(KEY.get().is_null());
KEY.set(Arc::into_raw(shared1) as *mut u8);
assert!(get(key).is_null());
set(key, Arc::into_raw(shared1) as *mut u8);
}
thread::spawn(move || unsafe {
assert!(KEY.get().is_null());
KEY.set(Arc::into_raw(shared2) as *mut u8);
let key = KEY.force();
assert!(get(key).is_null());
set(key, Arc::into_raw(shared2) as *mut u8);
})
.join()
.unwrap();
// Leak the Arc, let the TLS destructor clean it up.
let shared1 = unsafe { ManuallyDrop::new(Arc::from_raw(KEY.get() as *const ())) };
let shared1 = unsafe { ManuallyDrop::new(Arc::from_raw(get(key) as *const ())) };
assert_eq!(
Arc::strong_count(&shared1),
1,

View File

@ -16,6 +16,7 @@ pub unsafe fn set(key: Key, value: *mut u8) {
}
#[inline]
#[cfg(any(not(target_thread_local), test))]
pub unsafe fn get(key: Key) -> *mut u8 {
unsafe { libc::pthread_getspecific(key) as *mut u8 }
}

View File

@ -1,4 +1,4 @@
//! Implementation of `StaticKey` for Windows.
//! Implementation of `LazyKey` for Windows.
//!
//! Windows has no native support for running destructors so we manage our own
//! list of destructors to keep track of how to destroy keys. We then install a
@ -13,9 +13,9 @@
//! don't reach a fixed point after a short while then we just inevitably leak
//! something.
//!
//! The list is implemented as an atomic single-linked list of `StaticKey`s and
//! The list is implemented as an atomic single-linked list of `LazyKey`s and
//! does not support unregistration. Unfortunately, this means that we cannot
//! use racy initialization for creating the keys in `StaticKey`, as that could
//! use racy initialization for creating the keys in `LazyKey`, as that could
//! result in destructors being missed. Hence, we synchronize the creation of
//! keys with destructors through [`INIT_ONCE`](c::INIT_ONCE) (`std`'s
//! [`Once`](crate::sync::Once) cannot be used since it might use TLS itself).
@ -33,26 +33,26 @@ use crate::sync::atomic::{
use crate::sys::c;
use crate::sys::thread_local::guard;
type Key = c::DWORD;
pub type Key = c::DWORD;
type Dtor = unsafe extern "C" fn(*mut u8);
pub struct StaticKey {
pub struct LazyKey {
/// The key value shifted up by one. Since TLS_OUT_OF_INDEXES == DWORD::MAX
/// is not a valid key value, this allows us to use zero as sentinel value
/// without risking overflow.
key: AtomicU32,
dtor: Option<Dtor>,
next: AtomicPtr<StaticKey>,
next: AtomicPtr<LazyKey>,
/// Currently, destructors cannot be unregistered, so we cannot use racy
/// initialization for keys. Instead, we need to synchronize initialization.
/// Use the Windows-provided `Once` since it does not require TLS.
once: UnsafeCell<c::INIT_ONCE>,
}
impl StaticKey {
impl LazyKey {
#[inline]
pub const fn new(dtor: Option<Dtor>) -> StaticKey {
StaticKey {
pub const fn new(dtor: Option<Dtor>) -> LazyKey {
LazyKey {
key: AtomicU32::new(0),
dtor,
next: AtomicPtr::new(ptr::null_mut()),
@ -61,18 +61,7 @@ impl StaticKey {
}
#[inline]
pub unsafe fn set(&'static self, val: *mut u8) {
let r = unsafe { c::TlsSetValue(self.key(), val.cast()) };
debug_assert_eq!(r, c::TRUE);
}
#[inline]
pub unsafe fn get(&'static self) -> *mut u8 {
unsafe { c::TlsGetValue(self.key()).cast() }
}
#[inline]
fn key(&'static self) -> Key {
pub fn force(&'static self) -> Key {
match self.key.load(Acquire) {
0 => unsafe { self.init() },
key => key - 1,
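
A tiny sketch of the `+ 1` encoding documented on the `key` field (hypothetical helper names): valid Win32 TLS indices are never `DWORD::MAX` (`TLS_OUT_OF_INDEXES`), so `key + 1` cannot wrap and `0` stays free as the "not yet created" sentinel.

fn encode(key: u32) -> u32 {
    debug_assert_ne!(key, u32::MAX, "TLS_OUT_OF_INDEXES is never a valid key");
    key + 1
}

fn decode(stored: u32) -> Option<u32> {
    match stored {
        0 => None,        // key not created yet; `force` falls back to `init`
        n => Some(n - 1), // mirrors the `key => key - 1` arm above
    }
}

fn main() {
    assert_eq!(decode(encode(0)), Some(0));
    assert_eq!(decode(0), None);
}
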
@ -141,17 +130,28 @@ impl StaticKey {
}
}
unsafe impl Send for StaticKey {}
unsafe impl Sync for StaticKey {}
unsafe impl Send for LazyKey {}
unsafe impl Sync for LazyKey {}
static DTORS: AtomicPtr<StaticKey> = AtomicPtr::new(ptr::null_mut());
#[inline]
pub unsafe fn set(key: Key, val: *mut u8) {
let r = unsafe { c::TlsSetValue(key, val.cast()) };
debug_assert_eq!(r, c::TRUE);
}
#[inline]
pub unsafe fn get(key: Key) -> *mut u8 {
unsafe { c::TlsGetValue(key).cast() }
}
static DTORS: AtomicPtr<LazyKey> = AtomicPtr::new(ptr::null_mut());
/// Should only be called once per key, otherwise loops or breaks may occur in
/// the linked list.
unsafe fn register_dtor(key: &'static StaticKey) {
unsafe fn register_dtor(key: &'static LazyKey) {
guard::enable();
let this = <*const StaticKey>::cast_mut(key);
let this = <*const LazyKey>::cast_mut(key);
// Use acquire ordering to pass along the changes done by the previously
// registered keys when we store the new head with release ordering.
let mut head = DTORS.load(Acquire);
@ -176,9 +176,9 @@ pub unsafe fn run_dtors() {
let dtor = unsafe { (*cur).dtor.unwrap() };
cur = unsafe { (*cur).next.load(Relaxed) };
// In StaticKey::init, we register the dtor before setting `key`.
// In LazyKey::init, we register the dtor before setting `key`.
// So if one thread's `run_dtors` races with another thread executing `init` on the same
// `StaticKey`, we can encounter a key of 0 here. That means this key was never
// `LazyKey`, we can encounter a key of 0 here. That means this key was never
// initialized in this thread so we can safely skip it.
if pre_key == 0 {
continue;


@ -30,7 +30,7 @@
//! really.
//!
//! Perhaps one day we can fold the `Box` here into a static allocation,
//! expanding the `StaticKey` structure to contain not only a slot for the TLS
//! expanding the `LazyKey` structure to contain not only a slot for the TLS
//! key but also a slot for the destructor queue on windows. An optimization for
//! another day!


@ -36,7 +36,7 @@ cfg_if::cfg_if! {
pub use native::{EagerStorage, LazyStorage, thread_local_inner};
} else {
mod os;
pub use os::{Key, thread_local_inner};
pub use os::{Storage, thread_local_inner};
}
}
@ -126,28 +126,33 @@ pub(crate) mod key {
mod unix;
#[cfg(test)]
mod tests;
pub(super) use racy::StaticKey;
use unix::{Key, create, destroy, get, set};
pub(super) use racy::LazyKey;
pub(super) use unix::{Key, set};
#[cfg(any(not(target_thread_local), test))]
pub(super) use unix::get;
use unix::{create, destroy};
} else if #[cfg(all(not(target_thread_local), target_os = "windows"))] {
#[cfg(test)]
mod tests;
mod windows;
pub(super) use windows::{StaticKey, run_dtors};
pub(super) use windows::{Key, LazyKey, get, run_dtors, set};
} else if #[cfg(all(target_vendor = "fortanix", target_env = "sgx"))] {
mod racy;
mod sgx;
#[cfg(test)]
mod tests;
pub(super) use racy::StaticKey;
use sgx::{Key, create, destroy, get, set};
pub(super) use racy::LazyKey;
pub(super) use sgx::{Key, get, set};
use sgx::{create, destroy};
} else if #[cfg(target_os = "xous")] {
mod racy;
#[cfg(test)]
mod tests;
mod xous;
pub(super) use racy::StaticKey;
pub(super) use racy::LazyKey;
pub(crate) use xous::destroy_tls;
use xous::{Key, create, destroy, get, set};
pub(super) use xous::{Key, get, set};
use xous::{create, destroy};
}
}
}


@ -2,7 +2,7 @@ use super::abort_on_dtor_unwind;
use crate::cell::Cell;
use crate::marker::PhantomData;
use crate::ptr;
use crate::sys::thread_local::key::StaticKey as OsKey;
use crate::sys::thread_local::key::{get, set, Key, LazyKey};
#[doc(hidden)]
#[allow_internal_unstable(thread_local_internals)]
@ -22,12 +22,12 @@ pub macro thread_local_inner {
unsafe {
use $crate::thread::LocalKey;
use $crate::thread::local_impl::Key;
use $crate::thread::local_impl::Storage;
// Inlining does not work on windows-gnu due to linking errors around
// dllimports. See https://github.com/rust-lang/rust/issues/109797.
LocalKey::new(#[cfg_attr(windows, inline(never))] |init| {
static VAL: Key<$t> = Key::new();
static VAL: Storage<$t> = Storage::new();
VAL.get(init, __init)
})
}
@ -41,22 +41,23 @@ pub macro thread_local_inner {
/// Use a regular global static to store this key; the state provided will then be
/// thread-local.
#[allow(missing_debug_implementations)]
pub struct Key<T> {
os: OsKey,
pub struct Storage<T> {
key: LazyKey,
marker: PhantomData<Cell<T>>,
}
unsafe impl<T> Sync for Key<T> {}
unsafe impl<T> Sync for Storage<T> {}
struct Value<T: 'static> {
value: T,
key: &'static Key<T>,
// INVARIANT: if this value is stored under a TLS key, `key` must be that `key`.
key: Key,
}
impl<T: 'static> Key<T> {
impl<T: 'static> Storage<T> {
#[rustc_const_unstable(feature = "thread_local_internals", issue = "none")]
pub const fn new() -> Key<T> {
Key { os: OsKey::new(Some(destroy_value::<T>)), marker: PhantomData }
pub const fn new() -> Storage<T> {
Storage { key: LazyKey::new(Some(destroy_value::<T>)), marker: PhantomData }
}
/// Get a pointer to the TLS value, potentially initializing it with the
@ -66,19 +67,23 @@ impl<T: 'static> Key<T> {
/// The resulting pointer may not be used after reentrant initialization
/// or thread destruction has occurred.
pub fn get(&'static self, i: Option<&mut Option<T>>, f: impl FnOnce() -> T) -> *const T {
// SAFETY: (FIXME: get should actually be safe)
let ptr = unsafe { self.os.get() as *mut Value<T> };
let key = self.key.force();
let ptr = unsafe { get(key) as *mut Value<T> };
if ptr.addr() > 1 {
// SAFETY: the check ensured the pointer is safe (its destructor
// is not running) + it is coming from a trusted source (self).
unsafe { &(*ptr).value }
} else {
self.try_initialize(ptr, i, f)
// SAFETY: trivially correct.
unsafe { Self::try_initialize(key, ptr, i, f) }
}
}
fn try_initialize(
&'static self,
/// # Safety
/// * `key` must be the result of calling `self.key.force()`
/// * `ptr` must be the current value associated with `key`.
unsafe fn try_initialize(
key: Key,
ptr: *mut Value<T>,
i: Option<&mut Option<T>>,
f: impl FnOnce() -> T,
@ -88,14 +93,19 @@ impl<T: 'static> Key<T> {
return ptr::null();
}
let value = i.and_then(Option::take).unwrap_or_else(f);
let ptr = Box::into_raw(Box::new(Value { value, key: self }));
// SAFETY: (FIXME: get should actually be safe)
let old = unsafe { self.os.get() as *mut Value<T> };
// SAFETY: `ptr` is a correct pointer that can be destroyed by the key destructor.
unsafe {
self.os.set(ptr as *mut u8);
}
let value = Box::new(Value { value: i.and_then(Option::take).unwrap_or_else(f), key });
let ptr = Box::into_raw(value);
// SAFETY:
// * key came from a `LazyKey` and is thus correct.
// * `ptr` is a correct pointer that can be destroyed by the key destructor.
// * the value is stored under the key that it contains.
let old = unsafe {
let old = get(key) as *mut Value<T>;
set(key, ptr as *mut u8);
old
};
if !old.is_null() {
// If the variable was recursively initialized, drop the old value.
// SAFETY: We cannot be inside a `LocalKey::with` scope, as the
@ -123,8 +133,10 @@ unsafe extern "C" fn destroy_value<T: 'static>(ptr: *mut u8) {
abort_on_dtor_unwind(|| {
let ptr = unsafe { Box::from_raw(ptr as *mut Value<T>) };
let key = ptr.key;
unsafe { key.os.set(ptr::without_provenance_mut(1)) };
// SAFETY: `key` is the TLS key `ptr` was stored under.
unsafe { set(key, ptr::without_provenance_mut(1)) };
drop(ptr);
unsafe { key.os.set(ptr::null_mut()) };
// SAFETY: `key` is the TLS key `ptr` was stored under.
unsafe { set(key, ptr::null_mut()) };
});
}
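
As a reading aid, the slot stored under the OS key takes one of three states, which is what the `ptr.addr() > 1` check and the `without_provenance_mut(1)` sentinel above encode; a minimal sketch with a hypothetical helper:

use std::ptr;

fn classify(slot: *mut u8) -> &'static str {
    if slot.is_null() {
        "uninitialized: `get` runs the initializer"
    } else if slot.addr() == 1 {
        "destructor running: `get` returns null instead of re-initializing"
    } else {
        "live: points at the heap-allocated `Value<T>` owned by this key"
    }
}

fn main() {
    assert_eq!(classify(ptr::null_mut()), "uninitialized: `get` runs the initializer");
    assert_eq!(
        classify(ptr::without_provenance_mut(1)),
        "destructor running: `get` returns null instead of re-initializing"
    );
}
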


@ -20,26 +20,24 @@ use std::path::{Path, PathBuf};
use std::process::{Child, Command};
use std::time::Instant;
use dylib_util::{dylib_path, dylib_path_var, exe};
use shared_helpers::{
dylib_path, dylib_path_var, exe, maybe_dump, parse_rustc_stage, parse_rustc_verbose,
parse_value_from_args,
};
#[path = "../utils/bin_helpers.rs"]
mod bin_helpers;
#[path = "../utils/dylib.rs"]
mod dylib_util;
#[path = "../utils/shared_helpers.rs"]
mod shared_helpers;
fn main() {
let orig_args = env::args_os().skip(1).collect::<Vec<_>>();
let mut args = orig_args.clone();
let arg =
|name| orig_args.windows(2).find(|args| args[0] == name).and_then(|args| args[1].to_str());
let stage = bin_helpers::parse_rustc_stage();
let verbose = bin_helpers::parse_rustc_verbose();
let stage = parse_rustc_stage();
let verbose = parse_rustc_verbose();
// Detect whether or not we're a build script depending on whether --target
// is passed (a bit janky...)
let target = arg("--target");
let target = parse_value_from_args(&orig_args, "--target");
let version = args.iter().find(|w| &**w == "-vV");
// Use a different compiler for build scripts, since there may not yet be a
@ -102,7 +100,7 @@ fn main() {
cmd.args(&args).env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
// Get the name of the crate we're compiling, if any.
let crate_name = arg("--crate-name");
let crate_name = parse_value_from_args(&orig_args, "--crate-name");
if let Some(crate_name) = crate_name {
if let Some(target) = env::var_os("RUSTC_TIME") {
@ -143,10 +141,11 @@ fn main() {
cmd.arg("-C").arg("panic=abort");
}
let crate_type = parse_value_from_args(&orig_args, "--crate-type");
// `-Ztls-model=initial-exec` must not be applied to proc-macros, see
// issue https://github.com/rust-lang/rust/issues/100530
if env::var("RUSTC_TLS_MODEL_INITIAL_EXEC").is_ok()
&& arg("--crate-type") != Some("proc-macro")
&& crate_type != Some("proc-macro")
&& !matches!(crate_name, Some("proc_macro2" | "quote" | "syn" | "synstructure"))
{
cmd.arg("-Ztls-model=initial-exec");
@ -251,7 +250,7 @@ fn main() {
eprintln!("{prefix} libdir: {libdir:?}");
}
bin_helpers::maybe_dump(format!("stage{stage}-rustc"), &cmd);
maybe_dump(format!("stage{stage}-rustc"), &cmd);
let start = Instant::now();
let (child, status) = {


@ -6,19 +6,19 @@ use std::env;
use std::path::PathBuf;
use std::process::Command;
use dylib_util::{dylib_path, dylib_path_var};
use shared_helpers::{
dylib_path, dylib_path_var, maybe_dump, parse_rustc_stage, parse_rustc_verbose,
parse_value_from_args,
};
#[path = "../utils/bin_helpers.rs"]
mod bin_helpers;
#[path = "../utils/dylib.rs"]
mod dylib_util;
#[path = "../utils/shared_helpers.rs"]
mod shared_helpers;
fn main() {
let args = env::args_os().skip(1).collect::<Vec<_>>();
let stage = bin_helpers::parse_rustc_stage();
let verbose = bin_helpers::parse_rustc_verbose();
let stage = parse_rustc_stage();
let verbose = parse_rustc_verbose();
let rustdoc = env::var_os("RUSTDOC_REAL").expect("RUSTDOC_REAL was not set");
let libdir = env::var_os("RUSTDOC_LIBDIR").expect("RUSTDOC_LIBDIR was not set");
@ -26,7 +26,7 @@ fn main() {
// Detect whether or not we're a build script depending on whether --target
// is passed (a bit janky...)
let target = args.windows(2).find(|w| &*w[0] == "--target").and_then(|w| w[1].to_str());
let target = parse_value_from_args(&args, "--target");
let mut dylib_path = dylib_path();
dylib_path.insert(0, PathBuf::from(libdir.clone()));
@ -62,7 +62,7 @@ fn main() {
cmd.arg("-Zunstable-options");
cmd.arg("--check-cfg=cfg(bootstrap)");
bin_helpers::maybe_dump(format!("stage{stage}-rustdoc"), &cmd);
maybe_dump(format!("stage{stage}-rustdoc"), &cmd);
if verbose > 1 {
eprintln!(


@ -85,7 +85,7 @@ macro_rules! clean_crate_tree {
// NOTE: doesn't use `run_cargo` because we don't want to save a stamp file,
// and doesn't use `stream_cargo` to avoid passing `--message-format` which `clean` doesn't accept.
builder.run(&mut cargo);
builder.run(cargo);
}
}
)+ }


@ -27,6 +27,7 @@ use crate::core::builder::crate_description;
use crate::core::builder::Cargo;
use crate::core::builder::{Builder, Kind, PathSet, RunConfig, ShouldRun, Step, TaskPath};
use crate::core::config::{DebuginfoLevel, LlvmLibunwind, RustcLto, TargetSelection};
use crate::utils::exec::BootstrapCommand;
use crate::utils::helpers::{
exe, get_clang_cl_resource_dir, is_debug_info, is_dylib, output, symlink_dir, t, up_to_date,
};
@ -771,7 +772,7 @@ impl Step for StartupObjects {
let src_file = &src_dir.join(file.to_string() + ".rs");
let dst_file = &dst_dir.join(file.to_string() + ".o");
if !up_to_date(src_file, dst_file) {
let mut cmd = Command::new(&builder.initial_rustc);
let mut cmd = BootstrapCommand::new(&builder.initial_rustc);
cmd.env("RUSTC_BOOTSTRAP", "1");
if !builder.local_rebuild {
// a local_rebuild compiler already has stage1 features
@ -2076,7 +2077,7 @@ pub fn stream_cargo(
tail_args: Vec<String>,
cb: &mut dyn FnMut(CargoMessage<'_>),
) -> bool {
let mut cargo = Command::from(cargo);
let mut cargo = BootstrapCommand::from(cargo).command;
// Instruct Cargo to give us json messages on stdout, critically leaving
// stderr as piped so we can get those pretty colors.
let mut message_format = if builder.config.json_output {


@ -26,6 +26,7 @@ use crate::core::build_steps::tool::{self, Tool};
use crate::core::builder::{Builder, Kind, RunConfig, ShouldRun, Step};
use crate::core::config::TargetSelection;
use crate::utils::channel::{self, Info};
use crate::utils::exec::BootstrapCommand;
use crate::utils::helpers::{
exe, is_dylib, move_file, output, t, target_supports_cranelift_backend, timeit,
};
@ -1599,14 +1600,14 @@ impl Step for Extended {
let _ = fs::remove_dir_all(&pkg);
let pkgbuild = |component: &str| {
let mut cmd = Command::new("pkgbuild");
let mut cmd = BootstrapCommand::new("pkgbuild");
cmd.arg("--identifier")
.arg(format!("org.rust-lang.{}", component))
.arg("--scripts")
.arg(pkg.join(component))
.arg("--nopayload")
.arg(pkg.join(component).with_extension("pkg"));
builder.run(&mut cmd);
builder.run(cmd);
};
let prepare = |name: &str| {
@ -1636,7 +1637,7 @@ impl Step for Extended {
builder.create_dir(&pkg.join("res"));
builder.create(&pkg.join("res/LICENSE.txt"), &license);
builder.install(&etc.join("gfx/rust-logo.png"), &pkg.join("res"), 0o644);
let mut cmd = Command::new("productbuild");
let mut cmd = BootstrapCommand::new("productbuild");
cmd.arg("--distribution")
.arg(xform(&etc.join("pkg/Distribution.xml")))
.arg("--resources")
@ -1649,7 +1650,7 @@ impl Step for Extended {
.arg("--package-path")
.arg(&pkg);
let _time = timeit(builder);
builder.run(&mut cmd);
builder.run(cmd);
}
if target.is_windows() {
@ -1704,7 +1705,7 @@ impl Step for Extended {
let heat_flags = ["-nologo", "-gg", "-sfrag", "-srd", "-sreg"];
builder.run(
Command::new(&heat)
BootstrapCommand::new(&heat)
.current_dir(&exe)
.arg("dir")
.arg("rustc")
@ -1720,7 +1721,7 @@ impl Step for Extended {
);
if built_tools.contains("rust-docs") {
builder.run(
Command::new(&heat)
BootstrapCommand::new(&heat)
.current_dir(&exe)
.arg("dir")
.arg("rust-docs")
@ -1738,7 +1739,7 @@ impl Step for Extended {
);
}
builder.run(
Command::new(&heat)
BootstrapCommand::new(&heat)
.current_dir(&exe)
.arg("dir")
.arg("cargo")
@ -1755,7 +1756,7 @@ impl Step for Extended {
.arg(etc.join("msi/remove-duplicates.xsl")),
);
builder.run(
Command::new(&heat)
BootstrapCommand::new(&heat)
.current_dir(&exe)
.arg("dir")
.arg("rust-std")
@ -1771,7 +1772,7 @@ impl Step for Extended {
);
if built_tools.contains("rust-analyzer") {
builder.run(
Command::new(&heat)
BootstrapCommand::new(&heat)
.current_dir(&exe)
.arg("dir")
.arg("rust-analyzer")
@ -1790,7 +1791,7 @@ impl Step for Extended {
}
if built_tools.contains("clippy") {
builder.run(
Command::new(&heat)
BootstrapCommand::new(&heat)
.current_dir(&exe)
.arg("dir")
.arg("clippy")
@ -1809,7 +1810,7 @@ impl Step for Extended {
}
if built_tools.contains("miri") {
builder.run(
Command::new(&heat)
BootstrapCommand::new(&heat)
.current_dir(&exe)
.arg("dir")
.arg("miri")
@ -1827,7 +1828,7 @@ impl Step for Extended {
);
}
builder.run(
Command::new(&heat)
BootstrapCommand::new(&heat)
.current_dir(&exe)
.arg("dir")
.arg("rust-analysis")
@ -1845,7 +1846,7 @@ impl Step for Extended {
);
if target.ends_with("windows-gnu") {
builder.run(
Command::new(&heat)
BootstrapCommand::new(&heat)
.current_dir(&exe)
.arg("dir")
.arg("rust-mingw")
@ -1864,7 +1865,7 @@ impl Step for Extended {
let candle = |input: &Path| {
let output = exe.join(input.file_stem().unwrap()).with_extension("wixobj");
let arch = if target.contains("x86_64") { "x64" } else { "x86" };
let mut cmd = Command::new(&candle);
let mut cmd = BootstrapCommand::new(&candle);
cmd.current_dir(&exe)
.arg("-nologo")
.arg("-dRustcDir=rustc")
@ -1893,7 +1894,7 @@ impl Step for Extended {
if target.ends_with("windows-gnu") {
cmd.arg("-dGccDir=rust-mingw");
}
builder.run(&mut cmd);
builder.run(cmd);
};
candle(&xform(&etc.join("msi/rust.wxs")));
candle(&etc.join("msi/ui.wxs"));
@ -1925,7 +1926,7 @@ impl Step for Extended {
builder.info(&format!("building `msi` installer with {light:?}"));
let filename = format!("{}-{}.msi", pkgname(builder, "rust"), target.triple);
let mut cmd = Command::new(&light);
let mut cmd = BootstrapCommand::new(&light);
cmd.arg("-nologo")
.arg("-ext")
.arg("WixUIExtension")
@ -1962,7 +1963,7 @@ impl Step for Extended {
cmd.arg("-sice:ICE57");
let _time = timeit(builder);
builder.run(&mut cmd);
builder.run(cmd);
if !builder.config.dry_run() {
t!(move_file(exe.join(&filename), distdir(builder).join(&filename)));
@ -1971,7 +1972,7 @@ impl Step for Extended {
}
}
fn add_env(builder: &Builder<'_>, cmd: &mut Command, target: TargetSelection) {
fn add_env(builder: &Builder<'_>, cmd: &mut BootstrapCommand, target: TargetSelection) {
let mut parts = builder.version.split('.');
cmd.env("CFG_RELEASE_INFO", builder.rust_version())
.env("CFG_RELEASE_NUM", &builder.version)


@ -249,6 +249,7 @@ impl Step for TheBook {
let shared_assets = builder.ensure(SharedAssets { target });
// build the command first so we don't nest GHA groups
// FIXME: this doesn't do anything!
builder.rustdoc_cmd(compiler);
// build the redirect pages
@ -300,7 +301,7 @@ fn invoke_rustdoc(
cmd.arg("-Z").arg("unstable-options").arg("--disable-minification");
}
builder.run(&mut cmd);
builder.run(cmd);
}
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
@ -394,7 +395,7 @@ impl Step for Standalone {
} else {
cmd.arg("--markdown-css").arg("rust.css");
}
builder.run(&mut cmd);
builder.run(cmd);
}
// We open doc/index.html as the default if invoked as `x.py doc --open`
@ -493,7 +494,7 @@ impl Step for Releases {
cmd.arg("--disable-minification");
}
builder.run(&mut cmd);
builder.run(cmd);
}
// We open doc/RELEASES.html as the default if invoked as `x.py doc --open RELEASES.md`
@ -737,7 +738,7 @@ fn doc_std(
format!("library{} in {} format", crate_description(requested_crates), format.as_str());
let _guard = builder.msg_doc(compiler, description, target);
builder.run(&mut cargo.into());
builder.run(cargo);
builder.cp_link_r(&out_dir, out);
}
@ -862,7 +863,7 @@ impl Step for Rustc {
let proc_macro_out_dir = builder.stage_out(compiler, Mode::Rustc).join("doc");
symlink_dir_force(&builder.config, &out, &proc_macro_out_dir);
builder.run(&mut cargo.into());
builder.run(cargo);
if !builder.config.dry_run() {
// Sanity check on linked compiler crates
@ -995,7 +996,7 @@ macro_rules! tool_doc {
symlink_dir_force(&builder.config, &out, &proc_macro_out_dir);
let _guard = builder.msg_doc(compiler, stringify!($tool).to_lowercase(), target);
builder.run(&mut cargo.into());
builder.run(cargo);
if !builder.config.dry_run() {
// Sanity check on linked doc directories
@ -1079,7 +1080,7 @@ impl Step for ErrorIndex {
index.arg(out);
index.arg(&builder.version);
builder.run(&mut index);
builder.run(index);
}
}
@ -1115,7 +1116,7 @@ impl Step for UnstableBookGen {
cmd.arg(builder.src.join("src"));
cmd.arg(out);
builder.run(&mut cmd);
builder.run(cmd);
}
}
@ -1210,7 +1211,7 @@ impl Step for RustcBook {
self.compiler.host,
self.target,
);
builder.run(&mut cmd);
builder.run(cmd);
drop(doc_generator_guard);
// Run rustbook/mdbook to generate the HTML pages.


@ -6,11 +6,11 @@
use std::env;
use std::fs;
use std::path::{Component, Path, PathBuf};
use std::process::Command;
use crate::core::build_steps::dist;
use crate::core::builder::{Builder, RunConfig, ShouldRun, Step};
use crate::core::config::{Config, TargetSelection};
use crate::utils::exec::BootstrapCommand;
use crate::utils::helpers::t;
use crate::utils::tarball::GeneratedTarball;
use crate::{Compiler, Kind};
@ -102,7 +102,7 @@ fn install_sh(
let empty_dir = builder.out.join("tmp/empty_dir");
t!(fs::create_dir_all(&empty_dir));
let mut cmd = Command::new(SHELL);
let mut cmd = BootstrapCommand::new(SHELL);
cmd.current_dir(&empty_dir)
.arg(sanitize_sh(&tarball.decompressed_output().join("install.sh")))
.arg(format!("--prefix={}", prepare_dir(&destdir_env, prefix)))
@ -113,7 +113,7 @@ fn install_sh(
.arg(format!("--libdir={}", prepare_dir(&destdir_env, libdir)))
.arg(format!("--mandir={}", prepare_dir(&destdir_env, mandir)))
.arg("--disable-ldconfig");
builder.run(&mut cmd);
builder.run(cmd);
t!(fs::remove_dir_all(&empty_dir));
}


@ -50,7 +50,7 @@ impl Step for BuildManifest {
cmd.arg(&builder.config.channel);
builder.create_dir(&distdir(builder));
builder.run(&mut cmd);
builder.run(cmd);
}
}
@ -72,7 +72,7 @@ impl Step for BumpStage0 {
fn run(self, builder: &Builder<'_>) -> Self::Output {
let mut cmd = builder.tool_cmd(Tool::BumpStage0);
cmd.args(builder.config.args());
builder.run(&mut cmd);
builder.run(cmd);
}
}
@ -94,7 +94,7 @@ impl Step for ReplaceVersionPlaceholder {
fn run(self, builder: &Builder<'_>) -> Self::Output {
let mut cmd = builder.tool_cmd(Tool::ReplaceVersionPlaceholder);
cmd.arg(&builder.src);
builder.run(&mut cmd);
builder.run(cmd);
}
}
@ -158,8 +158,7 @@ impl Step for Miri {
// after another --, so this must be at the end.
miri.args(builder.config.args());
let mut miri = Command::from(miri);
builder.run(&mut miri);
builder.run(miri);
}
}
@ -189,7 +188,7 @@ impl Step for CollectLicenseMetadata {
let mut cmd = builder.tool_cmd(Tool::CollectLicenseMetadata);
cmd.env("REUSE_EXE", reuse);
cmd.env("DEST", &dest);
builder.run(&mut cmd);
builder.run(cmd);
dest
}
@ -219,7 +218,7 @@ impl Step for GenerateCopyright {
let mut cmd = builder.tool_cmd(Tool::GenerateCopyright);
cmd.env("LICENSE_METADATA", &license_metadata);
cmd.env("DEST", &dest);
builder.run(&mut cmd);
builder.run(cmd);
dest
}
@ -243,7 +242,7 @@ impl Step for GenerateWindowsSys {
fn run(self, builder: &Builder<'_>) {
let mut cmd = builder.tool_cmd(Tool::GenerateWindowsSys);
cmd.arg(&builder.src);
builder.run(&mut cmd);
builder.run(cmd);
}
}


@ -16,6 +16,7 @@ pub fn suggest(builder: &Builder<'_>, run: bool) {
.tool_cmd(Tool::SuggestTests)
.env("SUGGEST_TESTS_GIT_REPOSITORY", git_config.git_repository)
.env("SUGGEST_TESTS_NIGHTLY_BRANCH", git_config.nightly_branch)
.command
.output()
.expect("failed to run `suggest-tests` tool");


@ -156,7 +156,7 @@ You can skip linkcheck with --skip src/tools/linkchecker"
let _guard =
builder.msg(Kind::Test, compiler.stage, "Linkcheck", bootstrap_host, bootstrap_host);
let _time = helpers::timeit(builder);
builder.run_tracked(
builder.run(
BootstrapCommand::from(linkchecker.arg(builder.out.join(host.triple).join("doc")))
.delay_failure(),
);
@ -216,7 +216,7 @@ impl Step for HtmlCheck {
builder,
));
builder.run_tracked(
builder.run(
BootstrapCommand::from(
builder.tool_cmd(Tool::HtmlChecker).arg(builder.doc_out(self.target)),
)
@ -267,7 +267,7 @@ impl Step for Cargotest {
.env("RUSTC", builder.rustc(compiler))
.env("RUSTDOC", builder.rustdoc(compiler));
add_rustdoc_cargo_linker_args(cmd, builder, compiler.host, LldThreads::No);
builder.run_tracked(BootstrapCommand::from(cmd).delay_failure());
builder.run(BootstrapCommand::from(cmd).delay_failure());
}
}
@ -465,7 +465,7 @@ impl Miri {
// Tell it where to put the sysroot.
cargo.env("MIRI_SYSROOT", &miri_sysroot);
let mut cargo = Command::from(cargo);
let mut cargo = BootstrapCommand::from(cargo);
let _guard =
builder.msg(Kind::Build, compiler.stage, "miri sysroot", compiler.host, target);
builder.run(&mut cargo);
@ -482,8 +482,10 @@ impl Miri {
String::new()
} else {
builder.verbose(|| println!("running: {cargo:?}"));
let out =
cargo.output().expect("We already ran `cargo miri setup` before and that worked");
let out = cargo
.command
.output()
.expect("We already ran `cargo miri setup` before and that worked");
assert!(out.status.success(), "`cargo miri setup` returned with non-0 exit code");
// Output is "<sysroot>\n".
let stdout = String::from_utf8(out.stdout)
@ -596,7 +598,7 @@ impl Step for Miri {
target,
);
let _time = helpers::timeit(builder);
builder.run(&mut cargo);
builder.run(cargo);
}
}
}
@ -661,11 +663,11 @@ impl Step for CargoMiri {
// Finally, pass test-args and run everything.
cargo.arg("--").args(builder.config.test_args());
let mut cargo = Command::from(cargo);
let cargo = BootstrapCommand::from(cargo);
{
let _guard = builder.msg_sysroot_tool(Kind::Test, stage, "cargo-miri", host, target);
let _time = helpers::timeit(builder);
builder.run(&mut cargo);
builder.run(cargo);
}
}
}
@ -766,7 +768,7 @@ impl Step for Clippy {
let _guard = builder.msg_sysroot_tool(Kind::Test, compiler.stage, "clippy", host, host);
// Clippy reports errors if it blessed the outputs
if builder.run_cmd(BootstrapCommand::from(&mut cargo).allow_failure()) {
if builder.run(BootstrapCommand::from(&mut cargo).allow_failure()).is_success() {
// The tests succeeded; nothing to do.
return;
}
@ -819,7 +821,7 @@ impl Step for RustdocTheme {
.env("RUSTC_BOOTSTRAP", "1");
cmd.args(linker_args(builder, self.compiler.host, LldThreads::No));
builder.run_tracked(BootstrapCommand::from(&mut cmd).delay_failure());
builder.run(BootstrapCommand::from(&mut cmd).delay_failure());
}
}
@ -845,7 +847,7 @@ impl Step for RustdocJSStd {
fn run(self, builder: &Builder<'_>) {
let nodejs =
builder.config.nodejs.as_ref().expect("need nodejs to run rustdoc-js-std tests");
let mut command = Command::new(nodejs);
let mut command = BootstrapCommand::new(nodejs);
command
.arg(builder.src.join("src/tools/rustdoc-js/tester.js"))
.arg("--crate-name")
@ -879,7 +881,7 @@ impl Step for RustdocJSStd {
builder.config.build,
self.target,
);
builder.run(&mut command);
builder.run(command);
}
}
@ -1097,7 +1099,7 @@ HELP: to skip test's attempt to check tidiness, pass `--skip src/tools/tidy` to
}
builder.info("tidy check");
builder.run_tracked(BootstrapCommand::from(&mut cmd).delay_failure());
builder.run(BootstrapCommand::from(&mut cmd).delay_failure());
builder.info("x.py completions check");
let [bash, zsh, fish, powershell] = ["x.py.sh", "x.py.zsh", "x.py.fish", "x.py.ps1"]
@ -1304,8 +1306,7 @@ impl Step for RunMakeSupport {
&[],
);
let mut cargo = Command::from(cargo);
builder.run(&mut cargo);
builder.run(cargo);
let lib_name = "librun_make_support.rlib";
let lib = builder.tools_dir(self.compiler).join(lib_name);
@ -2066,7 +2067,8 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the
cmd.arg("--git-repository").arg(git_config.git_repository);
cmd.arg("--nightly-branch").arg(git_config.nightly_branch);
builder.ci_env.force_coloring_in_ci(&mut cmd);
// FIXME: Move CiEnv back to bootstrap, it is only used here anyway
builder.ci_env.force_coloring_in_ci(&mut cmd.command);
#[cfg(feature = "build-metrics")]
builder.metrics.begin_test_suite(
@ -2184,11 +2186,8 @@ impl BookTest {
);
let _time = helpers::timeit(builder);
let cmd = BootstrapCommand::from(&mut rustbook_cmd).delay_failure();
let toolstate = if builder.run_tracked(cmd).is_success() {
ToolState::TestPass
} else {
ToolState::TestFail
};
let toolstate =
if builder.run(cmd).is_success() { ToolState::TestPass } else { ToolState::TestFail };
builder.save_toolstate(self.name, toolstate);
}
@ -2317,8 +2316,7 @@ impl Step for ErrorIndex {
let guard =
builder.msg(Kind::Test, compiler.stage, "error-index", compiler.host, compiler.host);
let _time = helpers::timeit(builder);
builder
.run_tracked(BootstrapCommand::from(&mut tool).output_mode(OutputMode::OnlyOnFailure));
builder.run(BootstrapCommand::from(&mut tool).output_mode(OutputMode::OnlyOnFailure));
drop(guard);
// The tests themselves need to link to std, so make sure it is
// available.
@ -2347,11 +2345,11 @@ fn markdown_test(builder: &Builder<'_>, compiler: Compiler, markdown: &Path) ->
let test_args = builder.config.test_args().join(" ");
cmd.arg("--test-args").arg(test_args);
let mut cmd = BootstrapCommand::from(&mut cmd).delay_failure();
cmd = cmd.delay_failure();
if !builder.config.verbose_tests {
cmd = cmd.quiet();
}
builder.run_tracked(cmd).is_success()
builder.run(cmd).is_success()
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@ -2377,11 +2375,8 @@ impl Step for RustcGuide {
let src = builder.src.join(relative_path);
let mut rustbook_cmd = builder.tool_cmd(Tool::Rustbook);
let cmd = BootstrapCommand::from(rustbook_cmd.arg("linkcheck").arg(&src)).delay_failure();
let toolstate = if builder.run_tracked(cmd).is_success() {
ToolState::TestPass
} else {
ToolState::TestFail
};
let toolstate =
if builder.run(cmd).is_success() { ToolState::TestPass } else { ToolState::TestFail };
builder.save_toolstate("rustc-dev-guide", toolstate);
}
}
@ -2432,7 +2427,7 @@ impl Step for CrateLibrustc {
/// Returns whether the test succeeded.
#[allow(clippy::too_many_arguments)] // FIXME: reduce the number of args and remove this.
fn run_cargo_test<'a>(
cargo: impl Into<Command>,
cargo: impl Into<BootstrapCommand>,
libtest_args: &[&str],
crates: &[String],
primary_crate: &str,
@ -2463,14 +2458,14 @@ fn run_cargo_test<'a>(
/// Given a `cargo test` subcommand, pass it the appropriate test flags given a `builder`.
fn prepare_cargo_test(
cargo: impl Into<Command>,
cargo: impl Into<BootstrapCommand>,
libtest_args: &[&str],
crates: &[String],
primary_crate: &str,
compiler: Compiler,
target: TargetSelection,
builder: &Builder<'_>,
) -> Command {
) -> BootstrapCommand {
let mut cargo = cargo.into();
// Propagate `--bless` if it has not already been set/unset
@ -2881,19 +2876,19 @@ impl Step for RemoteCopyLibs {
// Spawn the emulator and wait for it to come online
let tool = builder.tool_exe(Tool::RemoteTestClient);
let mut cmd = Command::new(&tool);
let mut cmd = BootstrapCommand::new(&tool);
cmd.arg("spawn-emulator").arg(target.triple).arg(&server).arg(builder.tempdir());
if let Some(rootfs) = builder.qemu_rootfs(target) {
cmd.arg(rootfs);
}
builder.run(&mut cmd);
builder.run(cmd);
// Push all our dylibs to the emulator
for f in t!(builder.sysroot_libdir(compiler, target).read_dir()) {
let f = t!(f);
let name = f.file_name().into_string().unwrap();
if helpers::is_dylib(&name) {
builder.run(Command::new(&tool).arg("push").arg(f.path()));
builder.run(BootstrapCommand::new(&tool).arg("push").arg(f.path()));
}
}
}
@ -2924,20 +2919,20 @@ impl Step for Distcheck {
builder.ensure(dist::PlainSourceTarball);
builder.ensure(dist::Src);
let mut cmd = Command::new("tar");
let mut cmd = BootstrapCommand::new("tar");
cmd.arg("-xf")
.arg(builder.ensure(dist::PlainSourceTarball).tarball())
.arg("--strip-components=1")
.current_dir(&dir);
builder.run(&mut cmd);
builder.run(cmd);
builder.run(
Command::new("./configure")
BootstrapCommand::new("./configure")
.args(&builder.config.configure_args)
.arg("--enable-vendor")
.current_dir(&dir),
);
builder.run(
Command::new(helpers::make(&builder.config.build.triple))
BootstrapCommand::new(helpers::make(&builder.config.build.triple))
.arg("check")
.current_dir(&dir),
);
@ -2948,16 +2943,16 @@ impl Step for Distcheck {
let _ = fs::remove_dir_all(&dir);
t!(fs::create_dir_all(&dir));
let mut cmd = Command::new("tar");
let mut cmd = BootstrapCommand::new("tar");
cmd.arg("-xf")
.arg(builder.ensure(dist::Src).tarball())
.arg("--strip-components=1")
.current_dir(&dir);
builder.run(&mut cmd);
builder.run(cmd);
let toml = dir.join("rust-src/lib/rustlib/src/rust/library/std/Cargo.toml");
builder.run(
Command::new(&builder.initial_cargo)
BootstrapCommand::new(&builder.initial_cargo)
// Will read the libstd Cargo.toml
// which uses the unstable `public-dependency` feature.
.env("RUSTC_BOOTSTRAP", "1")
@ -2986,7 +2981,7 @@ impl Step for Bootstrap {
// Some tests require cargo submodule to be present.
builder.build.update_submodule(Path::new("src/tools/cargo"));
let mut check_bootstrap = Command::new(builder.python());
let mut check_bootstrap = BootstrapCommand::new(builder.python());
check_bootstrap
.args(["-m", "unittest", "bootstrap_test.py"])
.env("BUILD_DIR", &builder.out)
@ -2994,9 +2989,9 @@ impl Step for Bootstrap {
.current_dir(builder.src.join("src/bootstrap/"));
// NOTE: we intentionally don't pass test_args here because the args for unittest and cargo test are mutually incompatible.
// Use `python -m unittest` manually if you want to pass arguments.
builder.run_tracked(BootstrapCommand::from(&mut check_bootstrap).delay_failure());
builder.run(check_bootstrap.delay_failure());
let mut cmd = Command::new(&builder.initial_cargo);
let mut cmd = BootstrapCommand::new(&builder.initial_cargo);
cmd.arg("test")
.args(["--features", "bootstrap-self-test"])
.current_dir(builder.src.join("src/bootstrap"))
@ -3071,7 +3066,7 @@ impl Step for TierCheck {
self.compiler.host,
self.compiler.host,
);
builder.run_tracked(BootstrapCommand::from(&mut cargo.into()).delay_failure());
builder.run(BootstrapCommand::from(cargo).delay_failure());
}
}
@ -3147,8 +3142,7 @@ impl Step for RustInstaller {
return;
}
let mut cmd =
std::process::Command::new(builder.src.join("src/tools/rust-installer/test.sh"));
let mut cmd = BootstrapCommand::new(builder.src.join("src/tools/rust-installer/test.sh"));
let tmpdir = testdir(builder, compiler.host).join("rust-installer");
let _ = std::fs::remove_dir_all(&tmpdir);
let _ = std::fs::create_dir_all(&tmpdir);
@ -3157,7 +3151,7 @@ impl Step for RustInstaller {
cmd.env("CARGO", &builder.initial_cargo);
cmd.env("RUSTC", &builder.initial_rustc);
cmd.env("TMP_DIR", &tmpdir);
builder.run_tracked(BootstrapCommand::from(&mut cmd).delay_failure());
builder.run(cmd.delay_failure());
}
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
@ -3351,8 +3345,7 @@ impl Step for CodegenCranelift {
.arg("testsuite.extended_sysroot");
cargo.args(builder.config.test_args());
let mut cmd: Command = cargo.into();
builder.run_cmd(BootstrapCommand::from(&mut cmd).fail_fast());
builder.run(cargo);
}
}
@ -3477,7 +3470,6 @@ impl Step for CodegenGCC {
.arg("--std-tests");
cargo.args(builder.config.test_args());
let mut cmd: Command = cargo.into();
builder.run_cmd(BootstrapCommand::from(&mut cmd).fail_fast());
builder.run(cargo);
}
}


@ -9,6 +9,7 @@ use crate::core::builder;
use crate::core::builder::{Builder, Cargo as CargoCommand, RunConfig, ShouldRun, Step};
use crate::core::config::TargetSelection;
use crate::utils::channel::GitInfo;
use crate::utils::exec::BootstrapCommand;
use crate::utils::helpers::output;
use crate::utils::helpers::{add_dylib_path, exe, t};
use crate::Compiler;
@ -432,12 +433,12 @@ pub struct ErrorIndex {
}
impl ErrorIndex {
pub fn command(builder: &Builder<'_>) -> Command {
pub fn command(builder: &Builder<'_>) -> BootstrapCommand {
// Error-index-generator links with the rustdoc library, so we need to add `rustc_lib_paths`
// for rustc_private and libLLVM.so, and `sysroot_lib` for libstd, etc.
let host = builder.config.build;
let compiler = builder.compiler_for(builder.top_stage, host, host);
let mut cmd = Command::new(builder.ensure(ErrorIndex { compiler }));
let mut cmd = BootstrapCommand::new(builder.ensure(ErrorIndex { compiler }));
let mut dylib_paths = builder.rustc_lib_paths(compiler);
dylib_paths.push(PathBuf::from(&builder.sysroot_libdir(compiler, compiler.host)));
add_dylib_path(dylib_paths, &mut cmd);
@ -601,7 +602,7 @@ impl Step for Rustdoc {
&self.compiler.host,
&target,
);
builder.run(&mut cargo.into());
builder.run(cargo);
// Cargo adds a number of paths to the dylib search path on windows, which results in
// the wrong rustdoc being executed. To avoid the conflicting rustdocs, we name the "tool"
@ -856,7 +857,7 @@ impl Step for LlvmBitcodeLinker {
&self.extra_features,
);
builder.run(&mut cargo.into());
builder.run(cargo);
let tool_out = builder
.cargo_out(self.compiler, Mode::ToolRustc, self.target)
@ -911,13 +912,13 @@ impl Step for LibcxxVersionTool {
}
let compiler = builder.cxx(self.target).unwrap();
let mut cmd = Command::new(compiler);
let mut cmd = BootstrapCommand::new(compiler);
cmd.arg("-o")
.arg(&executable)
.arg(builder.src.join("src/tools/libcxx-version/main.cpp"));
builder.run_cmd(&mut cmd);
builder.run(cmd);
if !executable.exists() {
panic!("Something went wrong. {} is not present", executable.display());
@ -1045,10 +1046,10 @@ tool_extended!((self, builder),
);
impl<'a> Builder<'a> {
/// Gets a `Command` which is ready to run `tool` in `stage` built for
/// Gets a `BootstrapCommand` which is ready to run `tool` in `stage` built for
/// `host`.
pub fn tool_cmd(&self, tool: Tool) -> Command {
let mut cmd = Command::new(self.tool_exe(tool));
pub fn tool_cmd(&self, tool: Tool) -> BootstrapCommand {
let mut cmd = BootstrapCommand::new(self.tool_exe(tool));
let compiler = self.compiler(0, self.config.build);
let host = &compiler.host;
// Prepares the `cmd` provided to be able to run the `compiler` provided.


@ -1,6 +1,6 @@
use crate::core::builder::{Builder, RunConfig, ShouldRun, Step};
use crate::utils::exec::BootstrapCommand;
use std::path::{Path, PathBuf};
use std::process::Command;
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub(crate) struct Vendor {
@ -27,7 +27,7 @@ impl Step for Vendor {
}
fn run(self, builder: &Builder<'_>) -> Self::Output {
let mut cmd = Command::new(&builder.initial_cargo);
let mut cmd = BootstrapCommand::new(&builder.initial_cargo);
cmd.arg("vendor");
if self.versioned_dirs {
@ -59,6 +59,6 @@ impl Step for Vendor {
cmd.current_dir(self.root_dir);
builder.run(&mut cmd);
builder.run(cmd);
}
}


@ -24,6 +24,7 @@ use crate::utils::helpers::{check_cfg_arg, libdir, linker_flags, output, t, LldT
use crate::EXTRA_CHECK_CFGS;
use crate::{Build, CLang, Crate, DocTests, GitRepo, Mode};
use crate::utils::exec::BootstrapCommand;
pub use crate::Compiler;
use clap::ValueEnum;
@ -1217,7 +1218,7 @@ impl<'a> Builder<'a> {
/// Adds the compiler's directory of dynamic libraries to `cmd`'s dynamic
/// library lookup path.
pub fn add_rustc_lib_path(&self, compiler: Compiler, cmd: &mut Command) {
pub fn add_rustc_lib_path(&self, compiler: Compiler, cmd: &mut BootstrapCommand) {
// Windows doesn't need dylib path munging because the dlls for the
// compiler live next to the compiler and the system will find them
// automatically.
@ -1250,11 +1251,11 @@ impl<'a> Builder<'a> {
self.ensure(tool::Rustdoc { compiler })
}
pub fn cargo_clippy_cmd(&self, run_compiler: Compiler) -> Command {
pub fn cargo_clippy_cmd(&self, run_compiler: Compiler) -> BootstrapCommand {
if run_compiler.stage == 0 {
// `ensure(Clippy { stage: 0 })` *builds* clippy with stage0, it doesn't use the beta clippy.
let cargo_clippy = self.build.config.download_clippy();
let mut cmd = Command::new(cargo_clippy);
let mut cmd = BootstrapCommand::new(cargo_clippy);
cmd.env("CARGO", &self.initial_cargo);
return cmd;
}
@ -1273,13 +1274,13 @@ impl<'a> Builder<'a> {
let mut dylib_path = helpers::dylib_path();
dylib_path.insert(0, self.sysroot(run_compiler).join("lib"));
let mut cmd = Command::new(cargo_clippy);
let mut cmd = BootstrapCommand::new(cargo_clippy);
cmd.env(helpers::dylib_path_var(), env::join_paths(&dylib_path).unwrap());
cmd.env("CARGO", &self.initial_cargo);
cmd
}
pub fn cargo_miri_cmd(&self, run_compiler: Compiler) -> Command {
pub fn cargo_miri_cmd(&self, run_compiler: Compiler) -> BootstrapCommand {
assert!(run_compiler.stage > 0, "miri can not be invoked at stage 0");
let build_compiler = self.compiler(run_compiler.stage - 1, self.build.build);
@ -1295,7 +1296,7 @@ impl<'a> Builder<'a> {
extra_features: Vec::new(),
});
// Invoke cargo-miri, make sure it can find miri and cargo.
let mut cmd = Command::new(cargo_miri);
let mut cmd = BootstrapCommand::new(cargo_miri);
cmd.env("MIRI", &miri);
cmd.env("CARGO", &self.initial_cargo);
// Need to add the `run_compiler` libs. Those are the libs produces *by* `build_compiler`,
@ -1310,8 +1311,8 @@ impl<'a> Builder<'a> {
cmd
}
pub fn rustdoc_cmd(&self, compiler: Compiler) -> Command {
let mut cmd = Command::new(self.bootstrap_out.join("rustdoc"));
pub fn rustdoc_cmd(&self, compiler: Compiler) -> BootstrapCommand {
let mut cmd = BootstrapCommand::new(self.bootstrap_out.join("rustdoc"));
cmd.env("RUSTC_STAGE", compiler.stage.to_string())
.env("RUSTC_SYSROOT", self.sysroot(compiler))
// Note that this is *not* the sysroot_libdir because rustdoc must be linked
@ -1352,7 +1353,7 @@ impl<'a> Builder<'a> {
mode: Mode,
target: TargetSelection,
cmd: &str, // FIXME make this properly typed
) -> Command {
) -> BootstrapCommand {
let mut cargo;
if cmd == "clippy" {
cargo = self.cargo_clippy_cmd(compiler);
@ -1365,7 +1366,7 @@ impl<'a> Builder<'a> {
cargo = self.cargo_miri_cmd(compiler);
cargo.arg("miri").arg(subcmd);
} else {
cargo = Command::new(&self.initial_cargo);
cargo = BootstrapCommand::new(&self.initial_cargo);
cargo.arg(cmd);
}
@ -2104,7 +2105,7 @@ impl<'a> Builder<'a> {
// Try to use a sysroot-relative bindir, in case it was configured absolutely.
cargo.env("RUSTC_INSTALL_BINDIR", self.config.bindir_relative());
self.ci_env.force_coloring_in_ci(&mut cargo);
self.ci_env.force_coloring_in_ci(&mut cargo.command);
// When we build Rust dylibs they're all intended for intermediate
// usage, so make sure we pass the -Cprefer-dynamic flag instead of
@ -2373,7 +2374,7 @@ impl HostFlags {
#[derive(Debug)]
pub struct Cargo {
command: Command,
command: BootstrapCommand,
compiler: Compiler,
target: TargetSelection,
rustflags: Rustflags,
@ -2598,8 +2599,8 @@ impl Cargo {
}
}
impl From<Cargo> for Command {
fn from(mut cargo: Cargo) -> Command {
impl From<Cargo> for BootstrapCommand {
fn from(mut cargo: Cargo) -> BootstrapCommand {
let rustflags = &cargo.rustflags.0;
if !rustflags.is_empty() {
cargo.command.env("RUSTFLAGS", rustflags);
@ -2618,7 +2619,12 @@ impl From<Cargo> for Command {
if !cargo.allow_features.is_empty() {
cargo.command.env("RUSTC_ALLOW_FEATURES", cargo.allow_features);
}
cargo.command
}
}
impl From<Cargo> for Command {
fn from(cargo: Cargo) -> Command {
BootstrapCommand::from(cargo).command
}
}


@ -11,6 +11,7 @@ use std::{
use build_helper::ci::CiEnv;
use xz2::bufread::XzDecoder;
use crate::utils::exec::BootstrapCommand;
use crate::utils::helpers::hex_encode;
use crate::utils::helpers::{check_run, exe, move_file, program_out_of_date};
use crate::{t, Config};
@ -56,7 +57,7 @@ impl Config {
/// Runs a command, printing out nice contextual information if it fails.
/// Returns false if it was not executed at all, otherwise returns its
/// `status.success()`.
pub(crate) fn check_run(&self, cmd: &mut Command) -> bool {
pub(crate) fn check_run(&self, cmd: &mut BootstrapCommand) -> bool {
if self.dry_run() {
return true;
}
@ -211,7 +212,7 @@ impl Config {
fn download_http_with_retries(&self, tempfile: &Path, url: &str, help_on_error: &str) {
println!("downloading {url}");
// Try curl. If that fails and we are on windows, fallback to PowerShell.
let mut curl = Command::new("curl");
let mut curl = BootstrapCommand::new("curl");
curl.args([
"-y",
"30",


@ -575,19 +575,17 @@ impl Build {
};
// NOTE: doesn't use `try_run` because this shouldn't print an error if it fails.
if !update(true).status().map_or(false, |status| status.success()) {
self.run(&mut update(false));
self.run(update(false));
}
// Save any local changes, but avoid running `git stash pop` if there are none (since it will exit with an error).
// diff-index reports the modifications through the exit status
let has_local_modifications = !self.run_cmd(
BootstrapCommand::from(submodule_git().args(["diff-index", "--quiet", "HEAD"]))
.allow_failure()
.output_mode(match self.is_verbose() {
true => OutputMode::All,
false => OutputMode::OnlyOutput,
}),
);
let has_local_modifications = self
.run(
BootstrapCommand::from(submodule_git().args(["diff-index", "--quiet", "HEAD"]))
.allow_failure(),
)
.is_failure();
if has_local_modifications {
self.run(submodule_git().args(["stash", "push"]));
}
@ -939,7 +937,7 @@ impl Build {
}
/// Adds the `RUST_TEST_THREADS` env var if necessary
fn add_rust_test_threads(&self, cmd: &mut Command) {
fn add_rust_test_threads(&self, cmd: &mut BootstrapCommand) {
if env::var_os("RUST_TEST_THREADS").is_none() {
cmd.env("RUST_TEST_THREADS", self.jobs().to_string());
}
@ -961,11 +959,14 @@ impl Build {
}
/// Execute a command and return its output.
fn run_tracked(&self, command: BootstrapCommand<'_>) -> CommandOutput {
/// This method should be used for all command executions in bootstrap.
fn run<C: Into<BootstrapCommand>>(&self, command: C) -> CommandOutput {
if self.config.dry_run() {
return CommandOutput::default();
}
let mut command = command.into();
self.verbose(|| println!("running: {command:?}"));
let output_mode = command.output_mode.unwrap_or_else(|| match self.is_verbose() {
@ -1024,22 +1025,6 @@ impl Build {
output
}
/// Runs a command, printing out nice contextual information if it fails.
fn run(&self, cmd: &mut Command) {
self.run_cmd(BootstrapCommand::from(cmd).fail_fast().output_mode(
match self.is_verbose() {
true => OutputMode::All,
false => OutputMode::OnlyOutput,
},
));
}
/// A centralized function for running commands that do not return output.
pub(crate) fn run_cmd<'a, C: Into<BootstrapCommand<'a>>>(&self, cmd: C) -> bool {
let command = cmd.into();
self.run_tracked(command).is_success()
}
/// Check if verbosity is greater than the `level`
pub fn is_verbose_than(&self, level: usize) -> bool {
self.verbosity > level
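
For orientation, a minimal, self-contained sketch of the shape of this unified runner, with heavily simplified stand-in types (bootstrap's real `BootstrapCommand` and `CommandOutput` carry more state, and output-mode handling is elided):

use std::process::Command;

struct CommandOutput {
    success: bool,
}

impl CommandOutput {
    fn is_success(&self) -> bool {
        self.success
    }
}

struct BootstrapCommand {
    command: Command,
    fail_fast: bool,
}

impl BootstrapCommand {
    fn new(program: &str) -> Self {
        Command::new(program).into()
    }
    fn allow_failure(mut self) -> Self {
        self.fail_fast = false;
        self
    }
}

impl From<Command> for BootstrapCommand {
    fn from(command: Command) -> Self {
        Self { command, fail_fast: true }
    }
}

// Counterpart of `Build::run`: accepts anything convertible into the wrapper.
fn run<C: Into<BootstrapCommand>>(command: C) -> CommandOutput {
    let mut command = command.into();
    let success = command.command.status().map_or(false, |s| s.success());
    if !success && command.fail_fast {
        panic!("command failed: {:?}", command.command);
    }
    CommandOutput { success }
}

fn main() {
    // Roughly the `builder.run(cmd.allow_failure()).is_success()` pattern from the diff.
    let ok = run(BootstrapCommand::new("git").allow_failure()).is_success();
    println!("`git` with no arguments succeeded: {ok}");
}
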


@ -1,50 +0,0 @@
//! This file is meant to be included directly from bootstrap shims to avoid a
//! dependency on the bootstrap library. This reduces the binary size and
//! improves compilation time by reducing the linking time.
use std::env;
use std::fs::OpenOptions;
use std::io::Write;
use std::process::Command;
use std::str::FromStr;
/// Parses the value of the "RUSTC_VERBOSE" environment variable and returns it as a `usize`.
/// If it was not defined, returns 0 by default.
///
/// Panics if "RUSTC_VERBOSE" is defined with a value that is not an unsigned integer.
pub(crate) fn parse_rustc_verbose() -> usize {
match env::var("RUSTC_VERBOSE") {
Ok(s) => usize::from_str(&s).expect("RUSTC_VERBOSE should be an integer"),
Err(_) => 0,
}
}
/// Parses the value of the "RUSTC_STAGE" environment variable and returns it as a `String`.
///
/// If "RUSTC_STAGE" was not set, the program will be terminated with 101.
pub(crate) fn parse_rustc_stage() -> String {
env::var("RUSTC_STAGE").unwrap_or_else(|_| {
// Don't panic here; it's reasonable to try and run these shims directly. Give a helpful error instead.
eprintln!("rustc shim: FATAL: RUSTC_STAGE was not set");
eprintln!("rustc shim: NOTE: use `x.py build -vvv` to see all environment variables set by bootstrap");
std::process::exit(101);
})
}
/// Writes the command invocation to a file if `DUMP_BOOTSTRAP_SHIMS` is set during bootstrap.
///
/// Before writing it, replaces user-specific values to create generic dumps for cross-environment
/// comparisons.
pub(crate) fn maybe_dump(dump_name: String, cmd: &Command) {
if let Ok(dump_dir) = env::var("DUMP_BOOTSTRAP_SHIMS") {
let dump_file = format!("{dump_dir}/{dump_name}");
let mut file = OpenOptions::new().create(true).append(true).open(dump_file).unwrap();
let cmd_dump = format!("{:?}\n", cmd);
let cmd_dump = cmd_dump.replace(&env::var("BUILD_OUT").unwrap(), "${BUILD_OUT}");
let cmd_dump = cmd_dump.replace(&env::var("CARGO_HOME").unwrap(), "${CARGO_HOME}");
file.write_all(cmd_dump.as_bytes()).expect("Unable to write file");
}
}


@ -1,40 +0,0 @@
//! Various utilities for working with dylib paths.
/// Returns the environment variable which the dynamic library lookup path
/// resides in for this platform.
pub fn dylib_path_var() -> &'static str {
if cfg!(target_os = "windows") {
"PATH"
} else if cfg!(target_vendor = "apple") {
"DYLD_LIBRARY_PATH"
} else if cfg!(target_os = "haiku") {
"LIBRARY_PATH"
} else if cfg!(target_os = "aix") {
"LIBPATH"
} else {
"LD_LIBRARY_PATH"
}
}
/// Parses the `dylib_path_var()` environment variable, returning a list of
/// paths that are members of this lookup path.
pub fn dylib_path() -> Vec<std::path::PathBuf> {
let var = match std::env::var_os(dylib_path_var()) {
Some(v) => v,
None => return vec![],
};
std::env::split_paths(&var).collect()
}
/// Given an executable called `name`, return the filename for the
/// executable for a particular target.
#[allow(dead_code)]
pub fn exe(name: &str, target: &str) -> String {
if target.contains("windows") {
format!("{name}.exe")
} else if target.contains("uefi") {
format!("{name}.efi")
} else {
name.to_string()
}
}


@ -1,4 +1,6 @@
use std::process::{Command, ExitStatus, Output};
use std::ffi::OsStr;
use std::path::Path;
use std::process::{Command, CommandArgs, CommandEnvs, ExitStatus, Output};
/// What should be done when the command fails.
#[derive(Debug, Copy, Clone)]
@ -24,14 +26,71 @@ pub enum OutputMode {
}
/// Wrapper around `std::process::Command`.
///
/// By default, the command will exit bootstrap if it fails.
/// If you want to allow failures, use [allow_failure].
/// If you want to delay failures until the end of bootstrap, use [delay_failure].
///
/// By default, the command will print its stdout/stderr to stdout/stderr of bootstrap
/// ([OutputMode::OnlyOutput]). If bootstrap uses verbose mode, then it will also print the
/// command itself in case of failure ([OutputMode::All]).
/// If you want to handle the output programmatically, use `output_mode(OutputMode::OnlyOnFailure)`.
///
/// [allow_failure]: BootstrapCommand::allow_failure
/// [delay_failure]: BootstrapCommand::delay_failure
#[derive(Debug)]
pub struct BootstrapCommand<'a> {
pub command: &'a mut Command,
pub struct BootstrapCommand {
pub command: Command,
pub failure_behavior: BehaviorOnFailure,
pub output_mode: Option<OutputMode>,
}
impl<'a> BootstrapCommand<'a> {
impl BootstrapCommand {
pub fn new<S: AsRef<OsStr>>(program: S) -> Self {
Command::new(program).into()
}
pub fn arg<S: AsRef<OsStr>>(&mut self, arg: S) -> &mut Self {
self.command.arg(arg.as_ref());
self
}
pub fn args<I, S>(&mut self, args: I) -> &mut Self
where
I: IntoIterator<Item = S>,
S: AsRef<OsStr>,
{
self.command.args(args);
self
}
pub fn env<K, V>(&mut self, key: K, val: V) -> &mut Self
where
K: AsRef<OsStr>,
V: AsRef<OsStr>,
{
self.command.env(key, val);
self
}
pub fn get_envs(&self) -> CommandEnvs<'_> {
self.command.get_envs()
}
pub fn get_args(&self) -> CommandArgs<'_> {
self.command.get_args()
}
pub fn env_remove<K: AsRef<OsStr>>(&mut self, key: K) -> &mut Self {
self.command.env_remove(key);
self
}
pub fn current_dir<P: AsRef<Path>>(&mut self, dir: P) -> &mut Self {
self.command.current_dir(dir);
self
}
pub fn delay_failure(self) -> Self {
Self { failure_behavior: BehaviorOnFailure::DelayFail, ..self }
}
@ -54,8 +113,41 @@ impl<'a> BootstrapCommand<'a> {
}
}
impl<'a> From<&'a mut Command> for BootstrapCommand<'a> {
/// This implementation is temporary, until all `Command` invocations are migrated to
/// `BootstrapCommand`.
impl<'a> From<&'a mut Command> for BootstrapCommand {
fn from(command: &'a mut Command) -> Self {
// This is essentially a manual `Command::clone`
let mut cmd = Command::new(command.get_program());
if let Some(dir) = command.get_current_dir() {
cmd.current_dir(dir);
}
cmd.args(command.get_args());
for (key, value) in command.get_envs() {
match value {
Some(value) => {
cmd.env(key, value);
}
None => {
cmd.env_remove(key);
}
}
}
cmd.into()
}
}
/// This implementation is temporary, until all `Command` invocations are migrated to
/// `BootstrapCommand`.
impl<'a> From<&'a mut BootstrapCommand> for BootstrapCommand {
fn from(command: &'a mut BootstrapCommand) -> Self {
BootstrapCommand::from(&mut command.command)
}
}
impl From<Command> for BootstrapCommand {
fn from(command: Command) -> Self {
Self { command, failure_behavior: BehaviorOnFailure::Exit, output_mode: None }
}
}
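
Since `std::process::Command` does not implement `Clone`, the temporary `From<&mut Command>` impl above copies the command field by field; the same copying, as a standalone, runnable illustration (hypothetical free function):

use std::process::Command;

fn clone_command(command: &Command) -> Command {
    let mut cmd = Command::new(command.get_program());
    if let Some(dir) = command.get_current_dir() {
        cmd.current_dir(dir);
    }
    cmd.args(command.get_args());
    for (key, value) in command.get_envs() {
        match value {
            // `Some` is an explicitly set variable, `None` an explicit removal.
            Some(value) => {
                cmd.env(key, value);
            }
            None => {
                cmd.env_remove(key);
            }
        }
    }
    cmd
}

fn main() {
    let mut original = Command::new("rustc");
    original.arg("--version").env("RUSTC_BOOTSTRAP", "1");
    let copy = clone_command(&original);
    assert_eq!(copy.get_program(), original.get_program());
}
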


@ -18,7 +18,7 @@ use crate::core::builder::Builder;
use crate::core::config::{Config, TargetSelection};
use crate::LldMode;
pub use crate::utils::dylib::{dylib_path, dylib_path_var};
pub use crate::utils::shared_helpers::{dylib_path, dylib_path_var};
#[cfg(test)]
mod tests;
@ -47,10 +47,11 @@ macro_rules! t {
}
};
}
use crate::utils::exec::BootstrapCommand;
pub use t;
pub fn exe(name: &str, target: TargetSelection) -> String {
crate::utils::dylib::exe(name, &target.triple)
crate::utils::shared_helpers::exe(name, &target.triple)
}
/// Returns `true` if the file name given looks like a dynamic library.
@ -72,7 +73,7 @@ pub fn libdir(target: TargetSelection) -> &'static str {
/// Adds a list of lookup paths to `cmd`'s dynamic library lookup path.
/// If the dylib_path_var is already set for this cmd, the old value will be overwritten!
pub fn add_dylib_path(path: Vec<PathBuf>, cmd: &mut Command) {
pub fn add_dylib_path(path: Vec<PathBuf>, cmd: &mut BootstrapCommand) {
let mut list = dylib_path();
for path in path {
list.insert(0, path);
@ -81,7 +82,7 @@ pub fn add_dylib_path(path: Vec<PathBuf>, cmd: &mut Command) {
}
/// Adds a list of lookup paths to `cmd`'s link library lookup path.
pub fn add_link_lib_path(path: Vec<PathBuf>, cmd: &mut Command) {
pub fn add_link_lib_path(path: Vec<PathBuf>, cmd: &mut BootstrapCommand) {
let mut list = link_lib_path();
for path in path {
list.insert(0, path);
@ -241,8 +242,9 @@ pub fn is_valid_test_suite_arg<'a, P: AsRef<Path>>(
}
}
pub fn check_run(cmd: &mut Command, print_cmd_on_fail: bool) -> bool {
let status = match cmd.status() {
// FIXME: get rid of this function
pub fn check_run(cmd: &mut BootstrapCommand, print_cmd_on_fail: bool) -> bool {
let status = match cmd.command.status() {
Ok(status) => status,
Err(e) => {
println!("failed to execute command: {cmd:?}\nERROR: {e}");
@ -437,7 +439,7 @@ pub fn linker_flags(
}
pub fn add_rustdoc_cargo_linker_args(
cmd: &mut Command,
cmd: &mut BootstrapCommand,
builder: &Builder<'_>,
target: TargetSelection,
lld_threads: LldThreads,


@ -6,11 +6,11 @@ pub(crate) mod cache;
pub(crate) mod cc_detect;
pub(crate) mod change_tracker;
pub(crate) mod channel;
pub(crate) mod dylib;
pub(crate) mod exec;
pub(crate) mod helpers;
pub(crate) mod job;
#[cfg(feature = "build-metrics")]
pub(crate) mod metrics;
pub(crate) mod render_tests;
pub(crate) mod shared_helpers;
pub(crate) mod tarball;


@ -7,14 +7,18 @@
//! to reimplement all the rendering logic in this module because of that.
use crate::core::builder::Builder;
use crate::utils::exec::BootstrapCommand;
use std::io::{BufRead, BufReader, Read, Write};
use std::process::{ChildStdout, Command, Stdio};
use std::process::{ChildStdout, Stdio};
use std::time::Duration;
use termcolor::{Color, ColorSpec, WriteColor};
const TERSE_TESTS_PER_LINE: usize = 88;
pub(crate) fn add_flags_and_try_run_tests(builder: &Builder<'_>, cmd: &mut Command) -> bool {
pub(crate) fn add_flags_and_try_run_tests(
builder: &Builder<'_>,
cmd: &mut BootstrapCommand,
) -> bool {
if !cmd.get_args().any(|arg| arg == "--") {
cmd.arg("--");
}
@ -23,7 +27,11 @@ pub(crate) fn add_flags_and_try_run_tests(builder: &Builder<'_>, cmd: &mut Comma
try_run_tests(builder, cmd, false)
}
pub(crate) fn try_run_tests(builder: &Builder<'_>, cmd: &mut Command, stream: bool) -> bool {
pub(crate) fn try_run_tests(
builder: &Builder<'_>,
cmd: &mut BootstrapCommand,
stream: bool,
) -> bool {
if builder.config.dry_run() {
return true;
}
@ -41,7 +49,8 @@ pub(crate) fn try_run_tests(builder: &Builder<'_>, cmd: &mut Command, stream: bo
}
}
fn run_tests(builder: &Builder<'_>, cmd: &mut Command, stream: bool) -> bool {
fn run_tests(builder: &Builder<'_>, cmd: &mut BootstrapCommand, stream: bool) -> bool {
let cmd = &mut cmd.command;
cmd.stdout(Stdio::piped());
builder.verbose(|| println!("running: {cmd:?}"));


@ -0,0 +1,112 @@
//! This module serves two purposes:
//! 1. It is part of the `utils` module and used in other parts of bootstrap.
//! 2. It is embedded inside bootstrap shims to avoid a dependency on the bootstrap library.
//! Therefore, this module should never use any other bootstrap module. This reduces binary
//! size and improves compilation time by minimizing linking time.
#![allow(dead_code)]
use std::env;
use std::ffi::OsString;
use std::fs::OpenOptions;
use std::io::Write;
use std::process::Command;
use std::str::FromStr;
#[cfg(test)]
mod tests;
/// Returns the name of the environment variable that holds the dynamic
/// library lookup path on this platform.
pub fn dylib_path_var() -> &'static str {
if cfg!(target_os = "windows") {
"PATH"
} else if cfg!(target_vendor = "apple") {
"DYLD_LIBRARY_PATH"
} else if cfg!(target_os = "haiku") {
"LIBRARY_PATH"
} else if cfg!(target_os = "aix") {
"LIBPATH"
} else {
"LD_LIBRARY_PATH"
}
}
/// Parses the `dylib_path_var()` environment variable, returning a list of
/// paths that are members of this lookup path.
pub fn dylib_path() -> Vec<std::path::PathBuf> {
let var = match std::env::var_os(dylib_path_var()) {
Some(v) => v,
None => return vec![],
};
std::env::split_paths(&var).collect()
}
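The two helpers above are meant to be composed whenever a caller wants to prepend a directory to a command's dynamic library lookup path, much like `add_dylib_path` in the hunk earlier in this diff; the following is a minimal sketch, with the `prepend_dylib_dir` name and `extra` parameter invented purely for illustration and not part of this change:
// Hypothetical sketch: prepend an extra directory to a std::process::Command's
// dynamic library lookup path by combining dylib_path_var() and dylib_path().
// `prepend_dylib_dir` and `extra` are illustrative names only.
fn prepend_dylib_dir(cmd: &mut std::process::Command, extra: std::path::PathBuf) {
    let mut list = dylib_path();
    list.insert(0, extra);
    cmd.env(dylib_path_var(), std::env::join_paths(list).expect("invalid path entry"));
}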
/// Given an executable called `name`, returns the filename of that
/// executable for a particular `target`.
pub fn exe(name: &str, target: &str) -> String {
if target.contains("windows") {
format!("{name}.exe")
} else if target.contains("uefi") {
format!("{name}.efi")
} else {
name.to_string()
}
}
/// Parses the value of the "RUSTC_VERBOSE" environment variable and returns it as a `usize`.
/// If it is not defined, returns 0 by default.
///
/// Panics if "RUSTC_VERBOSE" is defined with a value that is not an unsigned integer.
pub fn parse_rustc_verbose() -> usize {
match env::var("RUSTC_VERBOSE") {
Ok(s) => usize::from_str(&s).expect("RUSTC_VERBOSE should be an integer"),
Err(_) => 0,
}
}
/// Parses the value of the "RUSTC_STAGE" environment variable and returns it as a `String`.
///
/// If "RUSTC_STAGE" is not set, the program terminates with exit code 101.
pub fn parse_rustc_stage() -> String {
env::var("RUSTC_STAGE").unwrap_or_else(|_| {
// Don't panic here; it's reasonable to try and run these shims directly. Give a helpful error instead.
eprintln!("rustc shim: FATAL: RUSTC_STAGE was not set");
eprintln!("rustc shim: NOTE: use `x.py build -vvv` to see all environment variables set by bootstrap");
std::process::exit(101);
})
}
/// Writes the command invocation to a file if `DUMP_BOOTSTRAP_SHIMS` is set during bootstrap.
///
/// Before writing it, replaces user-specific values to create generic dumps for cross-environment
/// comparisons.
pub fn maybe_dump(dump_name: String, cmd: &Command) {
if let Ok(dump_dir) = env::var("DUMP_BOOTSTRAP_SHIMS") {
let dump_file = format!("{dump_dir}/{dump_name}");
let mut file = OpenOptions::new().create(true).append(true).open(dump_file).unwrap();
let cmd_dump = format!("{:?}\n", cmd);
let cmd_dump = cmd_dump.replace(&env::var("BUILD_OUT").unwrap(), "${BUILD_OUT}");
let cmd_dump = cmd_dump.replace(&env::var("CARGO_HOME").unwrap(), "${CARGO_HOME}");
file.write_all(cmd_dump.as_bytes()).expect("Unable to write file");
}
}
/// Searches the argument list `args` for `key` and returns the associated value,
/// accepting both the `key=value` form and the separate `key value` form.
pub fn parse_value_from_args<'a>(args: &'a [OsString], key: &str) -> Option<&'a str> {
let mut args = args.iter();
while let Some(arg) = args.next() {
let arg = arg.to_str().unwrap();
if let Some(value) = arg.strip_prefix(&format!("{key}=")) {
return Some(value);
} else if arg == key {
return args.next().map(|v| v.to_str().unwrap());
}
}
None
}
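Taken together, these helpers are intended to be self-contained enough for a small shim binary; below is a hedged sketch of how a hypothetical shim entry point might compose them. It is not the real rustc shim: the `rustc` program name, the logging, and the dump name are assumptions made for illustration.
// Hypothetical shim entry point: shows how parse_rustc_stage, parse_rustc_verbose,
// parse_value_from_args and maybe_dump are meant to compose.
// Not the real rustc shim; names and behaviour are illustrative only.
fn shim_main() {
    let args: Vec<std::ffi::OsString> = std::env::args_os().skip(1).collect();

    let stage = parse_rustc_stage();     // exits with code 101 if RUSTC_STAGE is unset
    let verbose = parse_rustc_verbose(); // defaults to 0 when RUSTC_VERBOSE is unset
    let target = parse_value_from_args(&args, "--target");

    // Assumed program name; the real shims resolve the actual compiler path from the environment.
    let mut cmd = std::process::Command::new("rustc");
    cmd.args(&args);

    if verbose > 0 {
        eprintln!("shim: stage={stage}, target={target:?}");
    }

    // Records the invocation only when DUMP_BOOTSTRAP_SHIMS is set during bootstrap.
    maybe_dump(format!("stage{stage}-rustc"), &cmd);
}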

View File

@ -0,0 +1,28 @@
use super::parse_value_from_args;
#[test]
fn test_parse_value_from_args() {
let args = vec![
"--stage".into(),
"1".into(),
"--version".into(),
"2".into(),
"--target".into(),
"x86_64-unknown-linux".into(),
];
assert_eq!(parse_value_from_args(args.as_slice(), "--stage").unwrap(), "1");
assert_eq!(parse_value_from_args(args.as_slice(), "--version").unwrap(), "2");
assert_eq!(parse_value_from_args(args.as_slice(), "--target").unwrap(), "x86_64-unknown-linux");
assert!(parse_value_from_args(args.as_slice(), "random-key").is_none());
let args = vec![
"app-name".into(),
"--key".into(),
"value".into(),
"random-value".into(),
"--sysroot=/x/y/z".into(),
];
assert_eq!(parse_value_from_args(args.as_slice(), "--key").unwrap(), "value");
assert_eq!(parse_value_from_args(args.as_slice(), "--sysroot").unwrap(), "/x/y/z");
}

View File

@ -5,14 +5,12 @@
//! In uplifting, a tarball from Stage N captures the essential components
//! needed to assemble the Stage N + 1 compiler.
use std::{
path::{Path, PathBuf},
process::Command,
};
use std::path::{Path, PathBuf};
use crate::core::builder::Builder;
use crate::core::{build_steps::dist::distdir, builder::Kind};
use crate::utils::channel;
use crate::utils::exec::BootstrapCommand;
use crate::utils::helpers::{move_file, t};
#[derive(Copy, Clone)]
@ -300,7 +298,7 @@ impl<'a> Tarball<'a> {
}
}
fn non_bare_args(&self, cmd: &mut Command) {
fn non_bare_args(&self, cmd: &mut BootstrapCommand) {
cmd.arg("--rel-manifest-dir=rustlib")
.arg("--legacy-manifest-dirs=rustlib,cargo")
.arg(format!("--product-name={}", self.product_name))
@ -312,7 +310,7 @@ impl<'a> Tarball<'a> {
.arg(distdir(self.builder));
}
fn run(self, build_cli: impl FnOnce(&Tarball<'a>, &mut Command)) -> GeneratedTarball {
fn run(self, build_cli: impl FnOnce(&Tarball<'a>, &mut BootstrapCommand)) -> GeneratedTarball {
t!(std::fs::create_dir_all(&self.overlay_dir));
self.builder.create(&self.overlay_dir.join("version"), &self.overlay.version(self.builder));
if let Some(info) = self.builder.rust_info().info() {
@ -353,7 +351,7 @@ impl<'a> Tarball<'a> {
};
cmd.args(["--compression-profile", compression_profile]);
self.builder.run(&mut cmd);
self.builder.run(cmd);
// Ensure there are no symbolic links in the tarball. In particular,
// rustup-toolchain-install-master and most versions of Windows can't handle symbolic links.

View File

@ -1,5 +1,6 @@
# This job builds a toolchain capable of building Fuchsia, and then builds
# Fuchsia. See the build-fuchsia.sh script in this directory for more details.
# Fuchsia as an integration test of the toolchain. See the build-fuchsia.sh
# script in this directory for more details.
FROM ubuntu:22.04
@ -24,7 +25,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
&& rm -rf /var/lib/apt/lists/*
# Duplicated in dist-various-2 Dockerfile.
# FIXME: Move to canonical triple
ENV \
AR_x86_64_unknown_fuchsia=x86_64-unknown-fuchsia-ar \
CC_x86_64_unknown_fuchsia=x86_64-unknown-fuchsia-clang \
@ -48,10 +48,6 @@ ENV CARGO_TARGET_X86_64_UNKNOWN_FUCHSIA_RUSTFLAGS \
ENV TARGETS=x86_64-unknown-fuchsia
ENV TARGETS=$TARGETS,x86_64-unknown-linux-gnu
ENV TARGETS=$TARGETS,wasm32-unknown-unknown
# Fuchsia clang does not have wasm target enabled, use system clang.
ENV CC_wasm32_unknown_unknown=clang-15
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
@ -76,4 +72,4 @@ ENV RUST_CONFIGURE_ARGS \
--set target.x86_64-unknown-fuchsia.linker=/usr/local/bin/ld.lld
ENV SCRIPT \
python3 ../x.py install --target $TARGETS compiler/rustc library/std clippy && \
bash ../src/ci/docker/host-x86_64/x86_64-gnu-integration/build-fuchsia.sh
bash ../src/ci/docker/host-x86_64/x86_64-fuchsia/build-fuchsia.sh

View File

@ -5,14 +5,14 @@
#
# You may run this script locally using Docker with the following command:
#
# $ src/ci/docker/run.sh x86_64-gnu-integration
# $ src/ci/docker/run.sh x86_64-fuchsia
#
# Alternatively, from within the container with --dev, assuming you have made it
# as far as building the toolchain with the above command:
#
# $ src/ci/docker/run.sh --dev x86_64-gnu-integration
# $ src/ci/docker/run.sh --dev x86_64-fuchsia
# docker# git config --global --add safe.directory /checkout/obj/fuchsia
# docker# ../src/ci/docker/host-x86_64/x86_64-gnu-integration/build-fuchsia.sh
# docker# ../src/ci/docker/host-x86_64/x86_64-fuchsia/build-fuchsia.sh
#
# Also see the docs in the rustc-dev-guide for more info:
# https://github.com/rust-lang/rustc-dev-guide/pull/1989
@ -21,7 +21,7 @@ set -euf -o pipefail
# Set this variable to 1 to disable updating the Fuchsia checkout. This is
# useful for making local changes. You can find the Fuchsia checkout in
# `obj/x86_64-gnu-integration/fuchsia` in your local checkout after running this
# `obj/x86_64-fuchsia/fuchsia` in your local checkout after running this
# job for the first time.
KEEP_CHECKOUT=
@ -35,7 +35,7 @@ PICK_REFS=()
# commit hash of fuchsia.git and some other repos in the "monorepo" checkout, in
# addition to versions of prebuilts. It should be bumped regularly by the
# Fuchsia team; we aim for every 1-2 months.
INTEGRATION_SHA=737ebdd83afa47b742ca8325fad0176952fcefbd
INTEGRATION_SHA=d1d2f20efe46e22be179953dd6726c96eced54ab
checkout=fuchsia
jiri=.jiri_root/bin/jiri

View File

@ -208,6 +208,13 @@ auto:
- image: test-various
<<: *job-linux-4c
- image: x86_64-fuchsia
# Only run this job on the nightly channel. Fuchsia requires
# nightly features to compile, and this job would fail if
# executed on beta and stable.
only_on_channel: nightly
<<: *job-linux-8c
- image: x86_64-gnu
<<: *job-linux-4c
@ -229,13 +236,6 @@ auto:
- image: x86_64-gnu-aux
<<: *job-linux-4c
- image: x86_64-gnu-integration
# Only run this job on the nightly channel. Fuchsia requires
# nightly features to compile, and this job would fail if
# executed on beta and stable.
only_on_channel: nightly
<<: *job-linux-8c
- image: x86_64-gnu-debug
<<: *job-linux-4c

View File

@ -113,7 +113,7 @@ fn trait_is_same_or_supertrait(cx: &DocContext<'_>, child: DefId, trait_: DefId)
if child == trait_ {
return true;
}
let predicates = cx.tcx.super_predicates_of(child);
let predicates = cx.tcx.explicit_super_predicates_of(child);
debug_assert!(cx.tcx.generics_of(child).has_self);
let self_ty = cx.tcx.types.self_param;
predicates

View File

@ -246,7 +246,7 @@ fn collect_supertrait_bounds<'tcx>(cx: &LateContext<'tcx>, bounds: GenericBounds
&& let [.., path] = poly_trait.trait_ref.path.segments
&& poly_trait.bound_generic_params.is_empty()
&& let Some(trait_def_id) = path.res.opt_def_id()
&& let predicates = cx.tcx.super_predicates_of(trait_def_id).predicates
&& let predicates = cx.tcx.explicit_super_predicates_of(trait_def_id).predicates
// If the trait has no supertrait, there is no need to collect anything from that bound
&& !predicates.is_empty()
{

View File

@ -24,7 +24,7 @@ fn is_subtrait_of_any(cx: &LateContext<'_>, ty: Ty<'_>) -> bool {
cx.tcx.is_diagnostic_item(sym::Any, tr.def_id)
|| cx
.tcx
.super_predicates_of(tr.def_id)
.explicit_super_predicates_of(tr.def_id)
.predicates
.iter()
.any(|(clause, _)| {

View File

@ -91,7 +91,7 @@ fn path_to_sized_bound(cx: &LateContext<'_>, trait_bound: &PolyTraitRef<'_>) ->
return true;
}
for &(predicate, _) in cx.tcx.super_predicates_of(trait_def_id).predicates {
for &(predicate, _) in cx.tcx.explicit_super_predicates_of(trait_def_id).predicates {
if let ClauseKind::Trait(trait_predicate) = predicate.kind().skip_binder()
&& trait_predicate.polarity == PredicatePolarity::Positive
&& !path.contains(&trait_predicate.def_id())

View File

@ -27,13 +27,13 @@ fn main() -> () {
StorageLive(_5);
PlaceMention(_1);
_6 = discriminant(_1);
switchInt(move _6) -> [1: bb6, otherwise: bb4];
switchInt(move _6) -> [1: bb4, otherwise: bb3];
}
bb1: {
StorageLive(_3);
StorageLive(_4);
_4 = begin_panic::<&str>(const "explicit panic") -> bb10;
_4 = begin_panic::<&str>(const "explicit panic") -> bb8;
}
bb2: {
@ -43,12 +43,11 @@ fn main() -> () {
}
bb3: {
FakeRead(ForMatchedPlace(None), _1);
unreachable;
goto -> bb7;
}
bb4: {
goto -> bb9;
falseEdge -> [real: bb6, imaginary: bb3];
}
bb5: {
@ -56,14 +55,6 @@ fn main() -> () {
}
bb6: {
falseEdge -> [real: bb8, imaginary: bb4];
}
bb7: {
goto -> bb4;
}
bb8: {
_5 = ((_1 as Some).0: u8);
_0 = const ();
StorageDead(_5);
@ -71,12 +62,12 @@ fn main() -> () {
return;
}
bb9: {
bb7: {
StorageDead(_5);
goto -> bb1;
}
bb10 (cleanup): {
bb8 (cleanup): {
resume;
}
}

View File

@ -19,22 +19,21 @@ fn test_complex() -> () {
bb0: {
StorageLive(_1);
StorageLive(_2);
_2 = E::f() -> [return: bb1, unwind: bb38];
_2 = E::f() -> [return: bb1, unwind: bb34];
}
bb1: {
PlaceMention(_2);
_3 = discriminant(_2);
switchInt(move _3) -> [0: bb5, otherwise: bb3];
switchInt(move _3) -> [0: bb3, otherwise: bb2];
}
bb2: {
FakeRead(ForMatchedPlace(None), _2);
unreachable;
goto -> bb21;
}
bb3: {
goto -> bb23;
falseEdge -> [real: bb5, imaginary: bb2];
}
bb4: {
@ -42,175 +41,158 @@ fn test_complex() -> () {
}
bb5: {
falseEdge -> [real: bb7, imaginary: bb3];
StorageLive(_4);
_4 = always_true() -> [return: bb6, unwind: bb34];
}
bb6: {
goto -> bb3;
switchInt(move _4) -> [0: bb8, otherwise: bb7];
}
bb7: {
StorageLive(_4);
_4 = always_true() -> [return: bb8, unwind: bb38];
}
bb8: {
switchInt(move _4) -> [0: bb10, otherwise: bb9];
}
bb9: {
StorageLive(_5);
StorageLive(_6);
StorageLive(_7);
_7 = Droppy(const 0_u8);
_6 = (_7.0: u8);
_5 = Gt(move _6, const 0_u8);
switchInt(move _5) -> [0: bb12, otherwise: bb11];
switchInt(move _5) -> [0: bb10, otherwise: bb9];
}
bb8: {
goto -> bb14;
}
bb9: {
drop(_7) -> [return: bb11, unwind: bb34];
}
bb10: {
goto -> bb16;
goto -> bb12;
}
bb11: {
drop(_7) -> [return: bb13, unwind: bb38];
StorageDead(_7);
StorageDead(_6);
goto -> bb18;
}
bb12: {
goto -> bb14;
drop(_7) -> [return: bb13, unwind: bb34];
}
bb13: {
StorageDead(_7);
StorageDead(_6);
goto -> bb20;
goto -> bb14;
}
bb14: {
drop(_7) -> [return: bb15, unwind: bb38];
}
bb15: {
StorageDead(_7);
StorageDead(_6);
goto -> bb16;
}
bb16: {
StorageLive(_8);
StorageLive(_9);
StorageLive(_10);
_10 = Droppy(const 1_u8);
_9 = (_10.0: u8);
_8 = Gt(move _9, const 1_u8);
switchInt(move _8) -> [0: bb18, otherwise: bb17];
switchInt(move _8) -> [0: bb16, otherwise: bb15];
}
bb15: {
drop(_10) -> [return: bb17, unwind: bb34];
}
bb16: {
goto -> bb19;
}
bb17: {
drop(_10) -> [return: bb19, unwind: bb38];
StorageDead(_10);
StorageDead(_9);
goto -> bb18;
}
bb18: {
goto -> bb21;
_1 = const ();
goto -> bb22;
}
bb19: {
StorageDead(_10);
StorageDead(_9);
goto -> bb20;
drop(_10) -> [return: bb20, unwind: bb34];
}
bb20: {
_1 = const ();
goto -> bb24;
StorageDead(_10);
StorageDead(_9);
goto -> bb21;
}
bb21: {
drop(_10) -> [return: bb22, unwind: bb38];
_1 = const ();
goto -> bb22;
}
bb22: {
StorageDead(_10);
StorageDead(_9);
goto -> bb23;
}
bb23: {
_1 = const ();
goto -> bb24;
}
bb24: {
StorageDead(_8);
StorageDead(_5);
StorageDead(_4);
StorageDead(_2);
StorageDead(_1);
StorageLive(_11);
_11 = always_true() -> [return: bb25, unwind: bb38];
_11 = always_true() -> [return: bb23, unwind: bb34];
}
bb23: {
switchInt(move _11) -> [0: bb25, otherwise: bb24];
}
bb24: {
goto -> bb32;
}
bb25: {
switchInt(move _11) -> [0: bb27, otherwise: bb26];
goto -> bb26;
}
bb26: {
goto -> bb36;
StorageLive(_12);
_12 = E::f() -> [return: bb27, unwind: bb34];
}
bb27: {
goto -> bb28;
PlaceMention(_12);
_13 = discriminant(_12);
switchInt(move _13) -> [1: bb29, otherwise: bb28];
}
bb28: {
StorageLive(_12);
_12 = E::f() -> [return: bb29, unwind: bb38];
goto -> bb32;
}
bb29: {
PlaceMention(_12);
_13 = discriminant(_12);
switchInt(move _13) -> [1: bb33, otherwise: bb31];
falseEdge -> [real: bb31, imaginary: bb28];
}
bb30: {
FakeRead(ForMatchedPlace(None), _12);
unreachable;
goto -> bb28;
}
bb31: {
goto -> bb36;
_0 = const ();
goto -> bb33;
}
bb32: {
goto -> bb30;
_0 = const ();
goto -> bb33;
}
bb33: {
falseEdge -> [real: bb35, imaginary: bb31];
}
bb34: {
goto -> bb31;
}
bb35: {
_0 = const ();
goto -> bb37;
}
bb36: {
_0 = const ();
goto -> bb37;
}
bb37: {
StorageDead(_11);
StorageDead(_12);
return;
}
bb38 (cleanup): {
bb34 (cleanup): {
resume;
}
}

View File

@ -0,0 +1,5 @@
fn foo<T>() where T: for<'a> 'a {}
//~^ ERROR `for<...>` may only modify trait bounds, not lifetime bounds
//~| ERROR use of undeclared lifetime name `'a` [E0261]
fn main() {}

View File

@ -0,0 +1,25 @@
error: `for<...>` may only modify trait bounds, not lifetime bounds
--> $DIR/erroneous-lifetime-bound.rs:1:25
|
LL | fn foo<T>() where T: for<'a> 'a {}
| ^^^^
error[E0261]: use of undeclared lifetime name `'a`
--> $DIR/erroneous-lifetime-bound.rs:1:30
|
LL | fn foo<T>() where T: for<'a> 'a {}
| ^^ undeclared lifetime
|
= note: for more information on higher-ranked polymorphism, visit https://doc.rust-lang.org/nomicon/hrtb.html
help: consider making the bound lifetime-generic with a new `'a` lifetime
|
LL | fn foo<T>() where for<'a> T: for<'a> 'a {}
| +++++++
help: consider introducing lifetime `'a` here
|
LL | fn foo<'a, T>() where T: for<'a> 'a {}
| +++
error: aborting due to 2 previous errors
For more information about this error, try `rustc --explain E0261`.

View File

@ -0,0 +1,25 @@
//@ edition: 2021
#![feature(precise_capturing)]
fn polarity() -> impl Sized + ?use<> {}
//~^ ERROR expected identifier, found keyword `use`
//~| ERROR cannot find trait `r#use` in this scope
//~| WARN relaxing a default bound only does something for `?Sized`
//~| WARN relaxing a default bound only does something for `?Sized`
fn asyncness() -> impl Sized + async use<> {}
//~^ ERROR expected identifier, found keyword `use`
//~| ERROR cannot find trait `r#use` in this scope
//~| ERROR async closures are unstable
fn constness() -> impl Sized + const use<> {}
//~^ ERROR expected identifier, found keyword `use`
//~| ERROR cannot find trait `r#use` in this scope
//~| ERROR const trait impls are experimental
fn binder() -> impl Sized + for<'a> use<> {}
//~^ ERROR expected identifier, found keyword `use`
//~| ERROR cannot find trait `r#use` in this scope
fn main() {}

View File

@ -0,0 +1,87 @@
error: expected identifier, found keyword `use`
--> $DIR/bound-modifiers.rs:5:32
|
LL | fn polarity() -> impl Sized + ?use<> {}
| ^^^ expected identifier, found keyword
error: expected identifier, found keyword `use`
--> $DIR/bound-modifiers.rs:11:38
|
LL | fn asyncness() -> impl Sized + async use<> {}
| ^^^ expected identifier, found keyword
error: expected identifier, found keyword `use`
--> $DIR/bound-modifiers.rs:16:38
|
LL | fn constness() -> impl Sized + const use<> {}
| ^^^ expected identifier, found keyword
error: expected identifier, found keyword `use`
--> $DIR/bound-modifiers.rs:21:37
|
LL | fn binder() -> impl Sized + for<'a> use<> {}
| ^^^ expected identifier, found keyword
error[E0405]: cannot find trait `r#use` in this scope
--> $DIR/bound-modifiers.rs:5:32
|
LL | fn polarity() -> impl Sized + ?use<> {}
| ^^^ not found in this scope
error[E0405]: cannot find trait `r#use` in this scope
--> $DIR/bound-modifiers.rs:11:38
|
LL | fn asyncness() -> impl Sized + async use<> {}
| ^^^ not found in this scope
error[E0405]: cannot find trait `r#use` in this scope
--> $DIR/bound-modifiers.rs:16:38
|
LL | fn constness() -> impl Sized + const use<> {}
| ^^^ not found in this scope
error[E0405]: cannot find trait `r#use` in this scope
--> $DIR/bound-modifiers.rs:21:37
|
LL | fn binder() -> impl Sized + for<'a> use<> {}
| ^^^ not found in this scope
error[E0658]: async closures are unstable
--> $DIR/bound-modifiers.rs:11:32
|
LL | fn asyncness() -> impl Sized + async use<> {}
| ^^^^^
|
= note: see issue #62290 <https://github.com/rust-lang/rust/issues/62290> for more information
= help: add `#![feature(async_closure)]` to the crate attributes to enable
= note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
= help: to use an async block, remove the `||`: `async {`
error[E0658]: const trait impls are experimental
--> $DIR/bound-modifiers.rs:16:32
|
LL | fn constness() -> impl Sized + const use<> {}
| ^^^^^
|
= note: see issue #67792 <https://github.com/rust-lang/rust/issues/67792> for more information
= help: add `#![feature(const_trait_impl)]` to the crate attributes to enable
= note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
warning: relaxing a default bound only does something for `?Sized`; all other traits are not bound by default
--> $DIR/bound-modifiers.rs:5:31
|
LL | fn polarity() -> impl Sized + ?use<> {}
| ^^^^^^
warning: relaxing a default bound only does something for `?Sized`; all other traits are not bound by default
--> $DIR/bound-modifiers.rs:5:31
|
LL | fn polarity() -> impl Sized + ?use<> {}
| ^^^^^^
|
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
error: aborting due to 10 previous errors; 2 warnings emitted
Some errors have detailed explanations: E0405, E0658.
For more information about an error, try `rustc --explain E0405`.