Auto merge of #125045 - GuillaumeGomez:rollup-em6qdzw, r=GuillaumeGomez

Rollup of 4 pull requests

Successful merges:

 - #125021 (Update reference safety requirements)
 - #125022 (Migrate rustdoc scrape examples ordering)
 - #125030 (Fix some minor issues from the ui-test auto-porting)
 - #125036 (solve: all "non-structural" logging to trace)

r? `@ghost`
`@rustbot` modify labels: rollup
This commit is contained in:
bors 2024-05-12 13:33:39 +00:00
commit dde8cfa597
35 changed files with 173 additions and 99 deletions

View File

@ -20,7 +20,7 @@ use rustc_middle::traits::solve::{Certainty, Goal, QueryResult};
use rustc_middle::ty; use rustc_middle::ty;
impl<'tcx> EvalCtxt<'_, 'tcx> { impl<'tcx> EvalCtxt<'_, 'tcx> {
#[instrument(level = "debug", skip(self), ret)] #[instrument(level = "trace", skip(self), ret)]
pub(super) fn compute_alias_relate_goal( pub(super) fn compute_alias_relate_goal(
&mut self, &mut self,
goal: Goal<'tcx, (ty::Term<'tcx>, ty::Term<'tcx>, ty::AliasRelationDirection)>, goal: Goal<'tcx, (ty::Term<'tcx>, ty::Term<'tcx>, ty::AliasRelationDirection)>,
@ -50,7 +50,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
self.try_evaluate_added_goals()?; self.try_evaluate_added_goals()?;
let lhs = self.resolve_vars_if_possible(lhs); let lhs = self.resolve_vars_if_possible(lhs);
let rhs = self.resolve_vars_if_possible(rhs); let rhs = self.resolve_vars_if_possible(rhs);
debug!(?lhs, ?rhs); trace!(?lhs, ?rhs);
let variance = match direction { let variance = match direction {
ty::AliasRelationDirection::Equate => ty::Variance::Invariant, ty::AliasRelationDirection::Equate => ty::Variance::Invariant,

View File

@ -331,7 +331,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
.enter(|this| this.evaluate_added_goals_and_make_canonical_response(certainty)) .enter(|this| this.evaluate_added_goals_and_make_canonical_response(certainty))
} }
#[instrument(level = "debug", skip_all)] #[instrument(level = "trace", skip_all)]
fn assemble_non_blanket_impl_candidates<G: GoalKind<'tcx>>( fn assemble_non_blanket_impl_candidates<G: GoalKind<'tcx>>(
&mut self, &mut self,
goal: Goal<'tcx, G>, goal: Goal<'tcx, G>,
@ -447,7 +447,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
} }
} }
#[instrument(level = "debug", skip_all)] #[instrument(level = "trace", skip_all)]
fn assemble_blanket_impl_candidates<G: GoalKind<'tcx>>( fn assemble_blanket_impl_candidates<G: GoalKind<'tcx>>(
&mut self, &mut self,
goal: Goal<'tcx, G>, goal: Goal<'tcx, G>,
@ -470,7 +470,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
} }
} }
#[instrument(level = "debug", skip_all)] #[instrument(level = "trace", skip_all)]
fn assemble_builtin_impl_candidates<G: GoalKind<'tcx>>( fn assemble_builtin_impl_candidates<G: GoalKind<'tcx>>(
&mut self, &mut self,
goal: Goal<'tcx, G>, goal: Goal<'tcx, G>,
@ -544,7 +544,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
} }
} }
#[instrument(level = "debug", skip_all)] #[instrument(level = "trace", skip_all)]
fn assemble_param_env_candidates<G: GoalKind<'tcx>>( fn assemble_param_env_candidates<G: GoalKind<'tcx>>(
&mut self, &mut self,
goal: Goal<'tcx, G>, goal: Goal<'tcx, G>,
@ -561,7 +561,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
} }
} }
#[instrument(level = "debug", skip_all)] #[instrument(level = "trace", skip_all)]
fn assemble_alias_bound_candidates<G: GoalKind<'tcx>>( fn assemble_alias_bound_candidates<G: GoalKind<'tcx>>(
&mut self, &mut self,
goal: Goal<'tcx, G>, goal: Goal<'tcx, G>,
@ -665,7 +665,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
} }
} }
#[instrument(level = "debug", skip_all)] #[instrument(level = "trace", skip_all)]
fn assemble_object_bound_candidates<G: GoalKind<'tcx>>( fn assemble_object_bound_candidates<G: GoalKind<'tcx>>(
&mut self, &mut self,
goal: Goal<'tcx, G>, goal: Goal<'tcx, G>,
@ -756,7 +756,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
/// ///
/// To do so we add an ambiguous candidate in case such an unknown impl could /// To do so we add an ambiguous candidate in case such an unknown impl could
/// apply to the current goal. /// apply to the current goal.
#[instrument(level = "debug", skip_all)] #[instrument(level = "trace", skip_all)]
fn assemble_coherence_unknowable_candidates<G: GoalKind<'tcx>>( fn assemble_coherence_unknowable_candidates<G: GoalKind<'tcx>>(
&mut self, &mut self,
goal: Goal<'tcx, G>, goal: Goal<'tcx, G>,

View File

@ -17,7 +17,7 @@ use crate::solve::EvalCtxt;
// //
// For types with an "existential" binder, i.e. coroutine witnesses, we also // For types with an "existential" binder, i.e. coroutine witnesses, we also
// instantiate the binder with placeholders eagerly. // instantiate the binder with placeholders eagerly.
#[instrument(level = "debug", skip(ecx), ret)] #[instrument(level = "trace", skip(ecx), ret)]
pub(in crate::solve) fn instantiate_constituent_tys_for_auto_trait<'tcx>( pub(in crate::solve) fn instantiate_constituent_tys_for_auto_trait<'tcx>(
ecx: &EvalCtxt<'_, 'tcx>, ecx: &EvalCtxt<'_, 'tcx>,
ty: Ty<'tcx>, ty: Ty<'tcx>,
@ -96,7 +96,7 @@ pub(in crate::solve) fn instantiate_constituent_tys_for_auto_trait<'tcx>(
} }
} }
#[instrument(level = "debug", skip(ecx), ret)] #[instrument(level = "trace", skip(ecx), ret)]
pub(in crate::solve) fn instantiate_constituent_tys_for_sized_trait<'tcx>( pub(in crate::solve) fn instantiate_constituent_tys_for_sized_trait<'tcx>(
ecx: &EvalCtxt<'_, 'tcx>, ecx: &EvalCtxt<'_, 'tcx>,
ty: Ty<'tcx>, ty: Ty<'tcx>,
@ -160,7 +160,7 @@ pub(in crate::solve) fn instantiate_constituent_tys_for_sized_trait<'tcx>(
} }
} }
#[instrument(level = "debug", skip(ecx), ret)] #[instrument(level = "trace", skip(ecx), ret)]
pub(in crate::solve) fn instantiate_constituent_tys_for_copy_clone_trait<'tcx>( pub(in crate::solve) fn instantiate_constituent_tys_for_copy_clone_trait<'tcx>(
ecx: &EvalCtxt<'_, 'tcx>, ecx: &EvalCtxt<'_, 'tcx>,
ty: Ty<'tcx>, ty: Ty<'tcx>,

View File

@ -83,7 +83,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
/// the values inferred while solving the instantiated goal. /// the values inferred while solving the instantiated goal.
/// - `external_constraints`: additional constraints which aren't expressible /// - `external_constraints`: additional constraints which aren't expressible
/// using simple unification of inference variables. /// using simple unification of inference variables.
#[instrument(level = "debug", skip(self), ret)] #[instrument(level = "trace", skip(self), ret)]
pub(in crate::solve) fn evaluate_added_goals_and_make_canonical_response( pub(in crate::solve) fn evaluate_added_goals_and_make_canonical_response(
&mut self, &mut self,
certainty: Certainty, certainty: Certainty,
@ -166,7 +166,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
/// external constraints do not need to record that opaque, since if it is /// external constraints do not need to record that opaque, since if it is
/// further constrained by inference, that will be passed back in the var /// further constrained by inference, that will be passed back in the var
/// values. /// values.
#[instrument(level = "debug", skip(self), ret)] #[instrument(level = "trace", skip(self), ret)]
fn compute_external_query_constraints( fn compute_external_query_constraints(
&self, &self,
normalization_nested_goals: NestedNormalizationGoals<'tcx>, normalization_nested_goals: NestedNormalizationGoals<'tcx>,
@ -174,7 +174,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
// We only check for leaks from universes which were entered inside // We only check for leaks from universes which were entered inside
// of the query. // of the query.
self.infcx.leak_check(self.max_input_universe, None).map_err(|e| { self.infcx.leak_check(self.max_input_universe, None).map_err(|e| {
debug!(?e, "failed the leak check"); trace!(?e, "failed the leak check");
NoSolution NoSolution
})?; })?;
@ -334,7 +334,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
/// whether an alias is rigid by using the trait solver. When instantiating a response /// whether an alias is rigid by using the trait solver. When instantiating a response
/// from the solver we assume that the solver correctly handled aliases and therefore /// from the solver we assume that the solver correctly handled aliases and therefore
/// always relate them structurally here. /// always relate them structurally here.
#[instrument(level = "debug", skip(infcx))] #[instrument(level = "trace", skip(infcx))]
fn unify_query_var_values( fn unify_query_var_values(
infcx: &InferCtxt<'tcx>, infcx: &InferCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>, param_env: ty::ParamEnv<'tcx>,
@ -407,7 +407,7 @@ pub(in crate::solve) fn make_canonical_state<'tcx, T: TypeFoldable<TyCtxt<'tcx>>
/// This currently assumes that unifying the var values trivially succeeds. /// This currently assumes that unifying the var values trivially succeeds.
/// Adding any inference constraints which weren't present when originally /// Adding any inference constraints which weren't present when originally
/// computing the canonical query can result in bugs. /// computing the canonical query can result in bugs.
#[instrument(level = "debug", skip(infcx, span, param_env))] #[instrument(level = "trace", skip(infcx, span, param_env))]
pub(in crate::solve) fn instantiate_canonical_state<'tcx, T: TypeFoldable<TyCtxt<'tcx>>>( pub(in crate::solve) fn instantiate_canonical_state<'tcx, T: TypeFoldable<TyCtxt<'tcx>>>(
infcx: &InferCtxt<'tcx>, infcx: &InferCtxt<'tcx>,
span: Span, span: Span,

View File

@ -458,7 +458,7 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
} }
} }
#[instrument(level = "debug", skip(self))] #[instrument(level = "trace", skip(self))]
pub(super) fn add_normalizes_to_goal(&mut self, goal: Goal<'tcx, ty::NormalizesTo<'tcx>>) { pub(super) fn add_normalizes_to_goal(&mut self, goal: Goal<'tcx, ty::NormalizesTo<'tcx>>) {
self.inspect.add_normalizes_to_goal(self.infcx, self.max_input_universe, goal); self.inspect.add_normalizes_to_goal(self.infcx, self.max_input_universe, goal);
self.nested_goals.normalizes_to_goals.push(goal); self.nested_goals.normalizes_to_goals.push(goal);
@ -472,7 +472,7 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
// Recursively evaluates all the goals added to this `EvalCtxt` to completion, returning // Recursively evaluates all the goals added to this `EvalCtxt` to completion, returning
// the certainty of all the goals. // the certainty of all the goals.
#[instrument(level = "debug", skip(self))] #[instrument(level = "trace", skip(self))]
pub(super) fn try_evaluate_added_goals(&mut self) -> Result<Certainty, NoSolution> { pub(super) fn try_evaluate_added_goals(&mut self) -> Result<Certainty, NoSolution> {
self.inspect.start_evaluate_added_goals(); self.inspect.start_evaluate_added_goals();
let mut response = Ok(Certainty::overflow(false)); let mut response = Ok(Certainty::overflow(false));
@ -526,7 +526,7 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
unconstrained_goal, unconstrained_goal,
)?; )?;
// Add the nested goals from normalization to our own nested goals. // Add the nested goals from normalization to our own nested goals.
debug!(?nested_goals); trace!(?nested_goals);
goals.goals.extend(nested_goals); goals.goals.extend(nested_goals);
// Finally, equate the goal's RHS with the unconstrained var. // Finally, equate the goal's RHS with the unconstrained var.
@ -622,7 +622,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
/// ///
/// This is the case if the `term` does not occur in any other part of the predicate /// This is the case if the `term` does not occur in any other part of the predicate
/// and is able to name all other placeholder and inference variables. /// and is able to name all other placeholder and inference variables.
#[instrument(level = "debug", skip(self), ret)] #[instrument(level = "trace", skip(self), ret)]
pub(super) fn term_is_fully_unconstrained( pub(super) fn term_is_fully_unconstrained(
&self, &self,
goal: Goal<'tcx, ty::NormalizesTo<'tcx>>, goal: Goal<'tcx, ty::NormalizesTo<'tcx>>,
@ -718,7 +718,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
&& goal.param_env.visit_with(&mut visitor).is_continue() && goal.param_env.visit_with(&mut visitor).is_continue()
} }
#[instrument(level = "debug", skip(self, param_env), ret)] #[instrument(level = "trace", skip(self, param_env), ret)]
pub(super) fn eq<T: ToTrace<'tcx>>( pub(super) fn eq<T: ToTrace<'tcx>>(
&mut self, &mut self,
param_env: ty::ParamEnv<'tcx>, param_env: ty::ParamEnv<'tcx>,
@ -733,7 +733,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
self.add_goals(GoalSource::Misc, obligations.into_iter().map(|o| o.into())); self.add_goals(GoalSource::Misc, obligations.into_iter().map(|o| o.into()));
}) })
.map_err(|e| { .map_err(|e| {
debug!(?e, "failed to equate"); trace!(?e, "failed to equate");
NoSolution NoSolution
}) })
} }
@ -743,7 +743,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
/// Normally we emit a nested `AliasRelate` when equating an inference /// Normally we emit a nested `AliasRelate` when equating an inference
/// variable and an alias. This causes us to instead constrain the inference /// variable and an alias. This causes us to instead constrain the inference
/// variable to the alias without emitting a nested alias relate goals. /// variable to the alias without emitting a nested alias relate goals.
#[instrument(level = "debug", skip(self, param_env), ret)] #[instrument(level = "trace", skip(self, param_env), ret)]
pub(super) fn relate_rigid_alias_non_alias( pub(super) fn relate_rigid_alias_non_alias(
&mut self, &mut self,
param_env: ty::ParamEnv<'tcx>, param_env: ty::ParamEnv<'tcx>,
@ -781,7 +781,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
/// This sohuld only be used when we're either instantiating a previously /// This sohuld only be used when we're either instantiating a previously
/// unconstrained "return value" or when we're sure that all aliases in /// unconstrained "return value" or when we're sure that all aliases in
/// the types are rigid. /// the types are rigid.
#[instrument(level = "debug", skip(self, param_env), ret)] #[instrument(level = "trace", skip(self, param_env), ret)]
pub(super) fn eq_structurally_relating_aliases<T: ToTrace<'tcx>>( pub(super) fn eq_structurally_relating_aliases<T: ToTrace<'tcx>>(
&mut self, &mut self,
param_env: ty::ParamEnv<'tcx>, param_env: ty::ParamEnv<'tcx>,
@ -798,7 +798,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
Ok(()) Ok(())
} }
#[instrument(level = "debug", skip(self, param_env), ret)] #[instrument(level = "trace", skip(self, param_env), ret)]
pub(super) fn sub<T: ToTrace<'tcx>>( pub(super) fn sub<T: ToTrace<'tcx>>(
&mut self, &mut self,
param_env: ty::ParamEnv<'tcx>, param_env: ty::ParamEnv<'tcx>,
@ -813,12 +813,12 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
self.add_goals(GoalSource::Misc, obligations.into_iter().map(|o| o.into())); self.add_goals(GoalSource::Misc, obligations.into_iter().map(|o| o.into()));
}) })
.map_err(|e| { .map_err(|e| {
debug!(?e, "failed to subtype"); trace!(?e, "failed to subtype");
NoSolution NoSolution
}) })
} }
#[instrument(level = "debug", skip(self, param_env), ret)] #[instrument(level = "trace", skip(self, param_env), ret)]
pub(super) fn relate<T: ToTrace<'tcx>>( pub(super) fn relate<T: ToTrace<'tcx>>(
&mut self, &mut self,
param_env: ty::ParamEnv<'tcx>, param_env: ty::ParamEnv<'tcx>,
@ -834,7 +834,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
self.add_goals(GoalSource::Misc, obligations.into_iter().map(|o| o.into())); self.add_goals(GoalSource::Misc, obligations.into_iter().map(|o| o.into()));
}) })
.map_err(|e| { .map_err(|e| {
debug!(?e, "failed to relate"); trace!(?e, "failed to relate");
NoSolution NoSolution
}) })
} }
@ -859,7 +859,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
obligations.into_iter().map(|o| o.into()).collect() obligations.into_iter().map(|o| o.into()).collect()
}) })
.map_err(|e| { .map_err(|e| {
debug!(?e, "failed to equate"); trace!(?e, "failed to equate");
NoSolution NoSolution
}) })
} }

View File

@ -58,6 +58,7 @@ impl<'tcx, F> TraitProbeCtxt<'_, '_, 'tcx, F>
where where
F: FnOnce(&QueryResult<'tcx>) -> inspect::ProbeKind<'tcx>, F: FnOnce(&QueryResult<'tcx>) -> inspect::ProbeKind<'tcx>,
{ {
#[instrument(level = "debug", skip_all, fields(source = ?self.source))]
pub(in crate::solve) fn enter( pub(in crate::solve) fn enter(
self, self,
f: impl FnOnce(&mut EvalCtxt<'_, 'tcx>) -> QueryResult<'tcx>, f: impl FnOnce(&mut EvalCtxt<'_, 'tcx>) -> QueryResult<'tcx>,

View File

@ -119,7 +119,7 @@ impl<'tcx> FulfillmentCtxt<'tcx> {
} }
impl<'tcx> TraitEngine<'tcx> for FulfillmentCtxt<'tcx> { impl<'tcx> TraitEngine<'tcx> for FulfillmentCtxt<'tcx> {
#[instrument(level = "debug", skip(self, infcx))] #[instrument(level = "trace", skip(self, infcx))]
fn register_predicate_obligation( fn register_predicate_obligation(
&mut self, &mut self,
infcx: &InferCtxt<'tcx>, infcx: &InferCtxt<'tcx>,

View File

@ -82,7 +82,7 @@ impl<'tcx> Canonical<'tcx, Response<'tcx>> {
} }
impl<'a, 'tcx> EvalCtxt<'a, 'tcx> { impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
#[instrument(level = "debug", skip(self))] #[instrument(level = "trace", skip(self))]
fn compute_type_outlives_goal( fn compute_type_outlives_goal(
&mut self, &mut self,
goal: Goal<'tcx, TypeOutlivesPredicate<'tcx>>, goal: Goal<'tcx, TypeOutlivesPredicate<'tcx>>,
@ -92,7 +92,7 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)
} }
#[instrument(level = "debug", skip(self))] #[instrument(level = "trace", skip(self))]
fn compute_region_outlives_goal( fn compute_region_outlives_goal(
&mut self, &mut self,
goal: Goal<'tcx, RegionOutlivesPredicate<'tcx>>, goal: Goal<'tcx, RegionOutlivesPredicate<'tcx>>,
@ -102,7 +102,7 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)
} }
#[instrument(level = "debug", skip(self))] #[instrument(level = "trace", skip(self))]
fn compute_coerce_goal( fn compute_coerce_goal(
&mut self, &mut self,
goal: Goal<'tcx, CoercePredicate<'tcx>>, goal: Goal<'tcx, CoercePredicate<'tcx>>,
@ -117,7 +117,7 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
}) })
} }
#[instrument(level = "debug", skip(self))] #[instrument(level = "trace", skip(self))]
fn compute_subtype_goal( fn compute_subtype_goal(
&mut self, &mut self,
goal: Goal<'tcx, SubtypePredicate<'tcx>>, goal: Goal<'tcx, SubtypePredicate<'tcx>>,
@ -138,7 +138,7 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
} }
} }
#[instrument(level = "debug", skip(self))] #[instrument(level = "trace", skip(self))]
fn compute_well_formed_goal( fn compute_well_formed_goal(
&mut self, &mut self,
goal: Goal<'tcx, ty::GenericArg<'tcx>>, goal: Goal<'tcx, ty::GenericArg<'tcx>>,
@ -152,7 +152,7 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
} }
} }
#[instrument(level = "debug", skip(self))] #[instrument(level = "trace", skip(self))]
fn compute_const_evaluatable_goal( fn compute_const_evaluatable_goal(
&mut self, &mut self,
Goal { param_env, predicate: ct }: Goal<'tcx, ty::Const<'tcx>>, Goal { param_env, predicate: ct }: Goal<'tcx, ty::Const<'tcx>>,
@ -189,7 +189,7 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
} }
} }
#[instrument(level = "debug", skip(self), ret)] #[instrument(level = "trace", skip(self), ret)]
fn compute_const_arg_has_type_goal( fn compute_const_arg_has_type_goal(
&mut self, &mut self,
goal: Goal<'tcx, (ty::Const<'tcx>, Ty<'tcx>)>, goal: Goal<'tcx, (ty::Const<'tcx>, Ty<'tcx>)>,
@ -201,7 +201,7 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
} }
impl<'tcx> EvalCtxt<'_, 'tcx> { impl<'tcx> EvalCtxt<'_, 'tcx> {
#[instrument(level = "debug", skip(self, goals))] #[instrument(level = "trace", skip(self, goals))]
fn add_goals( fn add_goals(
&mut self, &mut self,
source: GoalSource, source: GoalSource,
@ -215,7 +215,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
/// Try to merge multiple possible ways to prove a goal, if that is not possible returns `None`. /// Try to merge multiple possible ways to prove a goal, if that is not possible returns `None`.
/// ///
/// In this case we tend to flounder and return ambiguity by calling `[EvalCtxt::flounder]`. /// In this case we tend to flounder and return ambiguity by calling `[EvalCtxt::flounder]`.
#[instrument(level = "debug", skip(self), ret)] #[instrument(level = "trace", skip(self), ret)]
fn try_merge_responses( fn try_merge_responses(
&mut self, &mut self,
responses: &[CanonicalResponse<'tcx>], responses: &[CanonicalResponse<'tcx>],
@ -241,7 +241,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
} }
/// If we fail to merge responses we flounder and return overflow or ambiguity. /// If we fail to merge responses we flounder and return overflow or ambiguity.
#[instrument(level = "debug", skip(self), ret)] #[instrument(level = "trace", skip(self), ret)]
fn flounder(&mut self, responses: &[CanonicalResponse<'tcx>]) -> QueryResult<'tcx> { fn flounder(&mut self, responses: &[CanonicalResponse<'tcx>]) -> QueryResult<'tcx> {
if responses.is_empty() { if responses.is_empty() {
return Err(NoSolution); return Err(NoSolution);
@ -263,7 +263,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
/// This function is necessary in nearly all cases before matching on a type. /// This function is necessary in nearly all cases before matching on a type.
/// Not doing so is likely to be incomplete and therefore unsound during /// Not doing so is likely to be incomplete and therefore unsound during
/// coherence. /// coherence.
#[instrument(level = "debug", skip(self, param_env), ret)] #[instrument(level = "trace", skip(self, param_env), ret)]
fn structurally_normalize_ty( fn structurally_normalize_ty(
&mut self, &mut self,
param_env: ty::ParamEnv<'tcx>, param_env: ty::ParamEnv<'tcx>,

View File

@ -162,7 +162,7 @@ impl<'tcx> FallibleTypeFolder<TyCtxt<'tcx>> for NormalizationFolder<'_, 'tcx> {
Ok(t) Ok(t)
} }
#[instrument(level = "debug", skip(self), ret)] #[instrument(level = "trace", skip(self), ret)]
fn try_fold_ty(&mut self, ty: Ty<'tcx>) -> Result<Ty<'tcx>, Self::Error> { fn try_fold_ty(&mut self, ty: Ty<'tcx>) -> Result<Ty<'tcx>, Self::Error> {
let infcx = self.at.infcx; let infcx = self.at.infcx;
debug_assert_eq!(ty, infcx.shallow_resolve(ty)); debug_assert_eq!(ty, infcx.shallow_resolve(ty));
@ -189,7 +189,7 @@ impl<'tcx> FallibleTypeFolder<TyCtxt<'tcx>> for NormalizationFolder<'_, 'tcx> {
} }
} }
#[instrument(level = "debug", skip(self), ret)] #[instrument(level = "trace", skip(self), ret)]
fn try_fold_const(&mut self, ct: ty::Const<'tcx>) -> Result<ty::Const<'tcx>, Self::Error> { fn try_fold_const(&mut self, ct: ty::Const<'tcx>) -> Result<ty::Const<'tcx>, Self::Error> {
let infcx = self.at.infcx; let infcx = self.at.infcx;
debug_assert_eq!(ct, infcx.shallow_resolve_const(ct)); debug_assert_eq!(ct, infcx.shallow_resolve_const(ct));

View File

@ -3,7 +3,7 @@ use rustc_middle::traits::solve::{Certainty, Goal, QueryResult};
use rustc_middle::ty; use rustc_middle::ty;
impl<'tcx> EvalCtxt<'_, 'tcx> { impl<'tcx> EvalCtxt<'_, 'tcx> {
#[instrument(level = "debug", skip(self), ret)] #[instrument(level = "trace", skip(self), ret)]
pub(super) fn normalize_anon_const( pub(super) fn normalize_anon_const(
&mut self, &mut self,
goal: Goal<'tcx, ty::NormalizesTo<'tcx>>, goal: Goal<'tcx, ty::NormalizesTo<'tcx>>,

View File

@ -25,7 +25,7 @@ mod opaque_types;
mod weak_types; mod weak_types;
impl<'tcx> EvalCtxt<'_, 'tcx> { impl<'tcx> EvalCtxt<'_, 'tcx> {
#[instrument(level = "debug", skip(self), ret)] #[instrument(level = "trace", skip(self), ret)]
pub(super) fn compute_normalizes_to_goal( pub(super) fn compute_normalizes_to_goal(
&mut self, &mut self,
goal: Goal<'tcx, NormalizesTo<'tcx>>, goal: Goal<'tcx, NormalizesTo<'tcx>>,
@ -59,7 +59,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
/// Normalize the given alias by at least one step. If the alias is rigid, this /// Normalize the given alias by at least one step. If the alias is rigid, this
/// returns `NoSolution`. /// returns `NoSolution`.
#[instrument(level = "debug", skip(self), ret)] #[instrument(level = "trace", skip(self), ret)]
fn normalize_at_least_one_step( fn normalize_at_least_one_step(
&mut self, &mut self,
goal: Goal<'tcx, NormalizesTo<'tcx>>, goal: Goal<'tcx, NormalizesTo<'tcx>>,
@ -897,7 +897,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for NormalizesTo<'tcx> {
/// ///
/// FIXME: We should merge these 3 implementations as it's likely that they otherwise /// FIXME: We should merge these 3 implementations as it's likely that they otherwise
/// diverge. /// diverge.
#[instrument(level = "debug", skip(ecx, param_env), ret)] #[instrument(level = "trace", skip(ecx, param_env), ret)]
fn fetch_eligible_assoc_item_def<'tcx>( fn fetch_eligible_assoc_item_def<'tcx>(
ecx: &EvalCtxt<'_, 'tcx>, ecx: &EvalCtxt<'_, 'tcx>,
param_env: ty::ParamEnv<'tcx>, param_env: ty::ParamEnv<'tcx>,
@ -920,7 +920,7 @@ fn fetch_eligible_assoc_item_def<'tcx>(
let poly_trait_ref = ecx.resolve_vars_if_possible(goal_trait_ref); let poly_trait_ref = ecx.resolve_vars_if_possible(goal_trait_ref);
!poly_trait_ref.still_further_specializable() !poly_trait_ref.still_further_specializable()
} else { } else {
debug!(?node_item.item.def_id, "not eligible due to default"); trace!(?node_item.item.def_id, "not eligible due to default");
false false
} }
}; };

View File

@ -5,7 +5,7 @@ use rustc_middle::traits::solve::{Certainty, Goal, QueryResult};
use rustc_middle::ty::{self, ProjectionPredicate}; use rustc_middle::ty::{self, ProjectionPredicate};
impl<'tcx> EvalCtxt<'_, 'tcx> { impl<'tcx> EvalCtxt<'_, 'tcx> {
#[instrument(level = "debug", skip(self), ret)] #[instrument(level = "trace", skip(self), ret)]
pub(super) fn compute_projection_goal( pub(super) fn compute_projection_goal(
&mut self, &mut self,
goal: Goal<'tcx, ProjectionPredicate<'tcx>>, goal: Goal<'tcx, ProjectionPredicate<'tcx>>,

View File

@ -130,7 +130,7 @@ impl<'tcx> SearchGraph<'tcx> {
} }
/// Update the stack and reached depths on cache hits. /// Update the stack and reached depths on cache hits.
#[instrument(level = "debug", skip(self))] #[instrument(level = "trace", skip(self))]
fn on_cache_hit(&mut self, additional_depth: usize, encountered_overflow: bool) { fn on_cache_hit(&mut self, additional_depth: usize, encountered_overflow: bool) {
let reached_depth = self.stack.next_index().plus(additional_depth); let reached_depth = self.stack.next_index().plus(additional_depth);
if let Some(last) = self.stack.raw.last_mut() { if let Some(last) = self.stack.raw.last_mut() {
@ -296,6 +296,7 @@ impl<'tcx> SearchGraph<'tcx> {
} }
self.on_cache_hit(reached_depth, encountered_overflow); self.on_cache_hit(reached_depth, encountered_overflow);
debug!("global cache hit");
return result; return result;
} }
@ -315,6 +316,7 @@ impl<'tcx> SearchGraph<'tcx> {
.filter(|p| !Self::stack_coinductive_from(tcx, &self.stack, p.head)) .filter(|p| !Self::stack_coinductive_from(tcx, &self.stack, p.head))
}) })
{ {
debug!("provisional cache hit");
// We have a nested goal which is already in the provisional cache, use // We have a nested goal which is already in the provisional cache, use
// its result. We do not provide any usage kind as that should have been // its result. We do not provide any usage kind as that should have been
// already set correctly while computing the cache entry. // already set correctly while computing the cache entry.

View File

@ -1130,7 +1130,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
}, },
); );
if let Some(def_id) = disqualifying_impl { if let Some(def_id) = disqualifying_impl {
debug!(?def_id, ?goal, "disqualified auto-trait implementation"); trace!(?def_id, ?goal, "disqualified auto-trait implementation");
// No need to actually consider the candidate here, // No need to actually consider the candidate here,
// since we do that in `consider_impl_candidate`. // since we do that in `consider_impl_candidate`.
return Some(Err(NoSolution)); return Some(Err(NoSolution));
@ -1171,7 +1171,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
}) })
} }
#[instrument(level = "debug", skip(self))] #[instrument(level = "trace", skip(self))]
pub(super) fn compute_trait_goal( pub(super) fn compute_trait_goal(
&mut self, &mut self,
goal: Goal<'tcx, TraitPredicate<'tcx>>, goal: Goal<'tcx, TraitPredicate<'tcx>>,

View File

@ -1467,8 +1467,9 @@ mod prim_usize {}
/// For all types, `T: ?Sized`, and for all `t: &T` or `t: &mut T`, when such values cross an API /// For all types, `T: ?Sized`, and for all `t: &T` or `t: &mut T`, when such values cross an API
/// boundary, the following invariants must generally be upheld: /// boundary, the following invariants must generally be upheld:
/// ///
/// * `t` is non-null
/// * `t` is aligned to `align_of_val(t)` /// * `t` is aligned to `align_of_val(t)`
/// * `t` is dereferenceable for `size_of_val(t)` many bytes /// * if `size_of_val(t) > 0`, then `t` is dereferenceable for `size_of_val(t)` many bytes
/// ///
/// If `t` points at address `a`, being "dereferenceable" for N bytes means that the memory range /// If `t` points at address `a`, being "dereferenceable" for N bytes means that the memory range
/// `[a, a + N)` is all contained within a single [allocated object]. /// `[a, a + N)` is all contained within a single [allocated object].

View File

@ -54,6 +54,15 @@ pub fn static_lib(name: &str) -> PathBuf {
tmp_dir().join(static_lib_name(name)) tmp_dir().join(static_lib_name(name))
} }
pub fn python_command() -> Command {
let python_path = std::env::var("PYTHON").expect("PYTHON environment variable does not exist");
Command::new(python_path)
}
pub fn source_path() -> PathBuf {
std::env::var("S").expect("S variable does not exist").into()
}
/// Construct the static library name based on the platform. /// Construct the static library name based on the platform.
pub fn static_lib_name(name: &str) -> String { pub fn static_lib_name(name: &str) -> String {
// See tools.mk (irrelevant lines omitted): // See tools.mk (irrelevant lines omitted):

View File

@ -1,5 +1,5 @@
use std::env; use std::env;
use std::ffi::OsString; use std::ffi::{OsStr, OsString};
use std::io::Write; use std::io::Write;
use std::path::Path; use std::path::Path;
use std::process::{Command, Output, Stdio}; use std::process::{Command, Output, Stdio};
@ -176,6 +176,13 @@ impl Rustc {
self self
} }
/// Specify the crate name.
pub fn crate_name<S: AsRef<OsStr>>(&mut self, name: S) -> &mut Self {
self.cmd.arg("--crate-name");
self.cmd.arg(name.as_ref());
self
}
/// Get the [`Output`][::std::process::Output] of the finished process. /// Get the [`Output`][::std::process::Output] of the finished process.
#[track_caller] #[track_caller]
pub fn command_output(&mut self) -> ::std::process::Output { pub fn command_output(&mut self) -> ::std::process::Output {

View File

@ -1,4 +1,5 @@
use std::env; use std::env;
use std::ffi::OsStr;
use std::io::Write; use std::io::Write;
use std::path::Path; use std::path::Path;
use std::process::{Command, Output, Stdio}; use std::process::{Command, Output, Stdio};
@ -45,6 +46,21 @@ impl Rustdoc {
Self { cmd, stdin: None } Self { cmd, stdin: None }
} }
/// Specify where an external library is located.
pub fn extern_<P: AsRef<Path>>(&mut self, crate_name: &str, path: P) -> &mut Self {
assert!(
!crate_name.contains(|c: char| c.is_whitespace() || c == '\\' || c == '/'),
"crate name cannot contain whitespace or path separators"
);
let path = path.as_ref().to_string_lossy();
self.cmd.arg("--extern");
self.cmd.arg(format!("{crate_name}={path}"));
self
}
/// Specify path to the input file. /// Specify path to the input file.
pub fn input<P: AsRef<Path>>(&mut self, path: P) -> &mut Self { pub fn input<P: AsRef<Path>>(&mut self, path: P) -> &mut Self {
self.cmd.arg(path.as_ref()); self.cmd.arg(path.as_ref());
@ -107,6 +123,20 @@ impl Rustdoc {
self self
} }
/// Specify the crate type.
pub fn crate_type(&mut self, crate_type: &str) -> &mut Self {
self.cmd.arg("--crate-type");
self.cmd.arg(crate_type);
self
}
/// Specify the crate name.
pub fn crate_name<S: AsRef<OsStr>>(&mut self, name: S) -> &mut Self {
self.cmd.arg("--crate-name");
self.cmd.arg(name.as_ref());
self
}
#[track_caller] #[track_caller]
pub fn run_fail_assert_exit_code(&mut self, code: i32) -> Output { pub fn run_fail_assert_exit_code(&mut self, code: i32) -> Output {
let caller_location = std::panic::Location::caller(); let caller_location = std::panic::Location::caller();

View File

@ -248,7 +248,6 @@ run-make/rustdoc-io-error/Makefile
run-make/rustdoc-scrape-examples-invalid-expr/Makefile run-make/rustdoc-scrape-examples-invalid-expr/Makefile
run-make/rustdoc-scrape-examples-macros/Makefile run-make/rustdoc-scrape-examples-macros/Makefile
run-make/rustdoc-scrape-examples-multiple/Makefile run-make/rustdoc-scrape-examples-multiple/Makefile
run-make/rustdoc-scrape-examples-ordering/Makefile
run-make/rustdoc-scrape-examples-remap/Makefile run-make/rustdoc-scrape-examples-remap/Makefile
run-make/rustdoc-scrape-examples-test/Makefile run-make/rustdoc-scrape-examples-test/Makefile
run-make/rustdoc-scrape-examples-whitespace/Makefile run-make/rustdoc-scrape-examples-whitespace/Makefile

View File

@ -1,7 +1,4 @@
// // We specify incremental here because we want to test the partitioning for incremental compilation
// We specify incremental here because we want to test the partitioning for
//@ incremental compilation
// We specify opt-level=0 because `drop_in_place` is `Internal` when optimizing // We specify opt-level=0 because `drop_in_place` is `Internal` when optimizing
//@ incremental //@ incremental
//@ compile-flags:-Zprint-mono-items=lazy //@ compile-flags:-Zprint-mono-items=lazy

View File

@ -1,6 +1,4 @@
// // We specify incremental here because we want to test the partitioning for incremental compilation
// We specify incremental here because we want to test the partitioning for
//@ incremental compilation
//@ incremental //@ incremental
//@ compile-flags:-Zprint-mono-items=eager -Zshare-generics=y //@ compile-flags:-Zprint-mono-items=eager -Zshare-generics=y

View File

@ -1,5 +1,4 @@
// We specify incremental here because we want to test the partitioning for // We specify incremental here because we want to test the partitioning for incremental compilation
//@ incremental compilation
//@ incremental //@ incremental
//@ compile-flags:-Zprint-mono-items=lazy //@ compile-flags:-Zprint-mono-items=lazy
//@ compile-flags:-Ccodegen-units=3 //@ compile-flags:-Ccodegen-units=3

View File

@ -1,6 +1,4 @@
// // We specify incremental here because we want to test the partitioning for incremental compilation
// We specify incremental here because we want to test the partitioning for
//@ incremental compilation
//@ incremental //@ incremental
//@ compile-flags:-Zprint-mono-items=lazy //@ compile-flags:-Zprint-mono-items=lazy
//@ compile-flags:-Zinline-in-all-cgus //@ compile-flags:-Zinline-in-all-cgus

View File

@ -1,6 +1,4 @@
// // We specify incremental here because we want to test the partitioning for incremental compilation
// We specify incremental here because we want to test the partitioning for
//@ incremental compilation
// We specify opt-level=0 because `drop_in_place` is `Internal` when optimizing // We specify opt-level=0 because `drop_in_place` is `Internal` when optimizing
//@ incremental //@ incremental
//@ compile-flags:-Zprint-mono-items=lazy //@ compile-flags:-Zprint-mono-items=lazy

View File

@ -1,5 +1,4 @@
// We specify incremental here because we want to test the partitioning for // We specify incremental here because we want to test the partitioning for incremental compilation
//@ incremental compilation
//@ incremental //@ incremental
//@ compile-flags:-Zprint-mono-items=eager //@ compile-flags:-Zprint-mono-items=eager

View File

@ -1,6 +1,4 @@
// // We specify incremental here because we want to test the partitioning for incremental compilation
// We specify incremental here because we want to test the partitioning for
//@ incremental compilation
//@ incremental //@ incremental
//@ compile-flags:-Zprint-mono-items=lazy //@ compile-flags:-Zprint-mono-items=lazy
//@ compile-flags:-Zinline-in-all-cgus=no //@ compile-flags:-Zinline-in-all-cgus=no

View File

@ -1,6 +1,4 @@
// // We specify incremental here because we want to test the partitioning for incremental compilation
// We specify incremental here because we want to test the partitioning for
//@ incremental compilation
//@ incremental //@ incremental
//@ compile-flags:-Zprint-mono-items=lazy //@ compile-flags:-Zprint-mono-items=lazy
//@ compile-flags:-Zinline-in-all-cgus //@ compile-flags:-Zinline-in-all-cgus

View File

@ -1,6 +1,4 @@
// // We specify incremental here because we want to test the partitioning for incremental compilation
// We specify incremental here because we want to test the partitioning for
//@ incremental compilation
//@ incremental //@ incremental
//@ compile-flags:-Zprint-mono-items=lazy //@ compile-flags:-Zprint-mono-items=lazy
//@ compile-flags:-Zinline-in-all-cgus //@ compile-flags:-Zinline-in-all-cgus

View File

@ -3,9 +3,7 @@
// much sense at the moment. // much sense at the moment.
//@ ignore-test //@ ignore-test
// // We specify incremental here because we want to test the partitioning for incremental compilation
// We specify incremental here because we want to test the partitioning for
//@ incremental compilation
//@ incremental //@ incremental
//@ compile-flags:-Zprint-mono-items=lazy //@ compile-flags:-Zprint-mono-items=lazy

View File

@ -1,5 +1,4 @@
// We specify incremental here because we want to test the partitioning for // We specify incremental here because we want to test the partitioning for incremental compilation
//@ incremental compilation
//@ incremental //@ incremental
//@ compile-flags:-Zprint-mono-items=eager //@ compile-flags:-Zprint-mono-items=eager

View File

@ -1,4 +1,3 @@
//
//@ no-prefer-dynamic //@ no-prefer-dynamic
// NOTE: We always compile this test with -Copt-level=0 because higher opt-levels // NOTE: We always compile this test with -Copt-level=0 because higher opt-levels
// prevent drop-glue from participating in share-generics. // prevent drop-glue from participating in share-generics.

View File

@ -1,5 +1,4 @@
// We specify incremental here because we want to test the partitioning for // We specify incremental here because we want to test the partitioning for incremental compilation
//@ incremental compilation
//@ incremental //@ incremental
//@ compile-flags:-Zprint-mono-items=lazy //@ compile-flags:-Zprint-mono-items=lazy

View File

@ -1,7 +1,4 @@
// // We specify incremental here because we want to test the partitioning for incremental compilation
// We specify incremental here because we want to test the partitioning for
//@ incremental compilation
//@ incremental //@ incremental
//@ compile-flags:-Zprint-mono-items=lazy //@ compile-flags:-Zprint-mono-items=lazy
//@ compile-flags:-Zinline-in-all-cgus //@ compile-flags:-Zinline-in-all-cgus

View File

@ -1,5 +0,0 @@
deps := ex1 ex2
include ../rustdoc-scrape-examples-multiple/scrape.mk
all: scrape

View File

@ -0,0 +1,55 @@
use run_make_support::{python_command, rustc, rustdoc, source_path, tmp_dir};
use std::fs::read_dir;
use std::path::Path;
/// Scrapes call-site examples from every `examples/*.rs` file, documents the
/// library with those examples attached, and verifies the generated HTML with
/// `htmldocck.py` (regression test for scraped-example ordering).
fn main() {
    let lib_dir = tmp_dir();
    let out_dir = tmp_dir().join("rustdoc");
    let crate_name = "foobar";

    // Collect every `.rs` example file that the scrape step must process.
    let deps = read_dir("examples")
        .unwrap()
        .filter_map(|entry| entry.ok().map(|e| e.path()))
        .filter(|path| path.is_file() && path.extension().is_some_and(|ext| ext == "rs"))
        .collect::<Vec<_>>();

    // Build the library's metadata so the examples can link against it.
    rustc().input("src/lib.rs").crate_name(crate_name).crate_type("lib").emit("metadata").run();

    // Scrape each example into its own `.calls` file.
    let mut out_deps = Vec::with_capacity(deps.len());
    for dep in deps {
        let dep_stem = dep.file_stem().unwrap();
        let out_example = out_dir.join(format!("{}.calls", dep_stem.to_str().unwrap()));
        rustdoc()
            .input(&dep)
            .crate_name(&dep_stem)
            .crate_type("bin")
            .output(&out_dir)
            .extern_(crate_name, lib_dir.join(format!("lib{crate_name}.rmeta")))
            .arg("-Zunstable-options")
            .arg("--scrape-examples-output-path")
            .arg(&out_example)
            .arg("--scrape-examples-target-crate")
            .arg(crate_name)
            .run();
        out_deps.push(out_example);
    }

    // Document the library itself, feeding in all scraped example files.
    let mut rustdoc = rustdoc();
    rustdoc
        .input("src/lib.rs")
        .output(&out_dir)
        .crate_name(crate_name)
        .crate_type("lib")
        .arg("-Zunstable-options");
    for dep in out_deps {
        rustdoc.arg("--with-examples").arg(dep);
    }
    rustdoc.run();

    // Check the generated docs with htmldocck.
    // BUG FIX: `Path::join` with an ABSOLUTE component replaces the base path
    // entirely, so the original `join("/src/etc/htmldocck.py")` resolved to
    // the filesystem root, not the source tree. Use a relative component.
    let status = python_command()
        .arg(source_path().join("src/etc/htmldocck.py"))
        .arg(out_dir)
        .arg("src/lib.rs")
        .status()
        .unwrap();
    // BUG FIX: the original discarded the `success()` bool, so a failing
    // htmldocck run could never fail this test. Assert on it explicitly.
    assert!(status.success(), "htmldocck failed");
}