Rollup merge of #121071 - nnethercote:fewer-delayed-bugs, r=oli-obk

Use fewer delayed bugs.

For some cases where it's clear that an error has already occurred, e.g.:
- there's a comment stating exactly that, or
- things like HIR lowering, where we are lowering an error kind

The commit also tweaks some comments around delayed bug sites.
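
The pattern applied throughout the diff below: instead of calling `span_delayed_bug` (which only turns into an ICE at the end of compilation if no error was ever emitted), either assert up front that `dcx().has_errors()` already returns `Some`, or reuse the `ErrorGuaranteed` it hands back. Here is a minimal, self-contained sketch of the difference; `ToyDiagCtxt` is a made-up stand-in for this illustration, not the real `DiagCtxt` API:

```rust
// Toy sketch only (not rustc code): shows the shape of replacing a delayed bug
// with an assertion that an error has already been emitted. `ToyDiagCtxt` and
// its methods merely mimic the API used in the diff below
// (`span_delayed_bug`, `has_errors`).

#[derive(Clone, Copy, Debug)]
struct ErrorGuaranteed;

#[derive(Default)]
struct ToyDiagCtxt {
    errors: usize,
    delayed_bugs: Vec<String>,
}

impl ToyDiagCtxt {
    /// Emit a real, user-visible error.
    fn emit_err(&mut self, msg: &str) -> ErrorGuaranteed {
        eprintln!("error: {msg}");
        self.errors += 1;
        ErrorGuaranteed
    }

    /// Old style: record a bug that only becomes an ICE at the end of
    /// compilation if no real error was emitted by then.
    fn delayed_bug(&mut self, msg: &str) -> ErrorGuaranteed {
        self.delayed_bugs.push(msg.to_string());
        ErrorGuaranteed
    }

    /// New style: report whether an error has already been emitted, handing
    /// back its guarantee so callers can reuse it.
    fn has_errors(&self) -> Option<ErrorGuaranteed> {
        (self.errors > 0).then_some(ErrorGuaranteed)
    }
}

fn main() {
    let mut dcx = ToyDiagCtxt::default();
    dcx.emit_err("something went wrong earlier");

    // Before this PR: delay a bug and hope an error was emitted earlier.
    let _old: ErrorGuaranteed = dcx.delayed_bug("lowering an error kind");

    // After this PR: insist that an error already exists. If that assumption
    // is wrong, the unwrap/assert fails right here rather than much later.
    let _new: ErrorGuaranteed = dcx.has_errors().unwrap();
    assert!(dcx.has_errors().is_some(), "an error should already have been emitted");
}
```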

r? `@oli-obk`
Oli Scherer 2024-02-14 11:53:42 +01:00 committed by GitHub
commit 638f5259fe
23 changed files with 88 additions and 90 deletions


@@ -323,9 +323,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
                 )
             }
             ExprKind::Yield(opt_expr) => self.lower_expr_yield(e.span, opt_expr.as_deref()),
-            ExprKind::Err => {
-                hir::ExprKind::Err(self.dcx().span_delayed_bug(e.span, "lowered ExprKind::Err"))
-            }
+            ExprKind::Err => hir::ExprKind::Err(self.dcx().has_errors().unwrap()),
             ExprKind::Try(sub_expr) => self.lower_expr_try(e.span, sub_expr),
             ExprKind::Paren(_) | ExprKind::ForLoop { .. } => {


@@ -1068,7 +1068,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
     fn lower_block_expr_opt(&mut self, span: Span, block: Option<&Block>) -> hir::Expr<'hir> {
         match block {
             Some(block) => self.lower_block_expr(block),
-            None => self.expr_err(span, self.dcx().span_delayed_bug(span, "no block")),
+            None => self.expr_err(span, self.dcx().has_errors().unwrap()),
         }
     }


@@ -1285,9 +1285,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
     fn lower_ty_direct(&mut self, t: &Ty, itctx: ImplTraitContext) -> hir::Ty<'hir> {
         let kind = match &t.kind {
             TyKind::Infer => hir::TyKind::Infer,
-            TyKind::Err => {
-                hir::TyKind::Err(self.dcx().span_delayed_bug(t.span, "TyKind::Err lowered"))
-            }
+            TyKind::Err => hir::TyKind::Err(self.dcx().has_errors().unwrap()),
             // Lower the anonymous structs or unions in a nested lowering context.
             //
             // ```


@@ -117,18 +117,14 @@ struct CfgChecker<'a, 'tcx> {
 impl<'a, 'tcx> CfgChecker<'a, 'tcx> {
     #[track_caller]
     fn fail(&self, location: Location, msg: impl AsRef<str>) {
-        let span = self.body.source_info(location).span;
-        // We use `span_delayed_bug` as we might see broken MIR when other errors have already
-        // occurred.
-        self.tcx.dcx().span_delayed_bug(
-            span,
-            format!(
-                "broken MIR in {:?} ({}) at {:?}:\n{}",
-                self.body.source.instance,
-                self.when,
-                location,
-                msg.as_ref()
-            ),
+        // We might see broken MIR when other errors have already occurred.
+        assert!(
+            self.tcx.dcx().has_errors().is_some(),
+            "broken MIR in {:?} ({}) at {:?}:\n{}",
+            self.body.source.instance,
+            self.when,
+            location,
+            msg.as_ref(),
         );
     }


@@ -34,10 +34,10 @@ pub(super) fn failed_to_match_macro<'cx>(
     if try_success_result.is_ok() {
         // Nonterminal parser recovery might turn failed matches into successful ones,
         // but for that it must have emitted an error already
-        tracker
-            .cx
-            .dcx()
-            .span_delayed_bug(sp, "Macro matching returned a success on the second try");
+        assert!(
+            tracker.cx.dcx().has_errors().is_some(),
+            "Macro matching returned a success on the second try"
+        );
     }

     if let Some(result) = tracker.result {


@@ -758,8 +758,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
                 // since we should have emitten an error for them earlier, and they will
                 // not be well-formed!
                 if polarity == ty::ImplPolarity::Negative {
-                    self.tcx().dcx().span_delayed_bug(
-                        binding.span,
+                    assert!(
+                        self.tcx().dcx().has_errors().is_some(),
                         "negative trait bounds should not have bindings",
                     );
                     continue;


@@ -1865,13 +1865,13 @@ fn check_variances_for_type_defn<'tcx>(
         let hir_param = &hir_generics.params[index];

         if ty_param.def_id != hir_param.def_id.into() {
-            // valid programs always have lifetimes before types in the generic parameter list
+            // Valid programs always have lifetimes before types in the generic parameter list.
             // ty_generics are normalized to be in this required order, and variances are built
             // from ty generics, not from hir generics. but we need hir generics to get
-            // a span out
+            // a span out.
             //
-            // if they aren't in the same order, then the user has written invalid code, and already
-            // got an error about it (or I'm wrong about this)
+            // If they aren't in the same order, then the user has written invalid code, and already
+            // got an error about it (or I'm wrong about this).
             tcx.dcx().span_delayed_bug(
                 hir_param.span,
                 "hir generics and ty generics in different order",


@@ -85,7 +85,7 @@ pub(super) fn check_item(
         (_, _, Unsafety::Unsafe, Negative) => {
             // Reported in AST validation
-            tcx.dcx().span_delayed_bug(tcx.def_span(def_id), "unsafe negative impl");
+            assert!(tcx.dcx().has_errors().is_some(), "unsafe negative impl");
             Ok(())
         }
         (_, _, Unsafety::Normal, Negative)


@@ -139,10 +139,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                 | ty::Never
                 | ty::Dynamic(_, _, ty::DynStar)
                 | ty::Error(_) => {
-                    let reported = self
+                    let guar = self
                         .dcx()
                         .span_delayed_bug(span, format!("`{t:?}` should be sized but is not?"));
-                    return Err(reported);
+                    return Err(guar);
                 }
             })
         }


@@ -221,8 +221,8 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
         if base_ty.is_none() {
             // When encountering `return [0][0]` outside of a `fn` body we can encounter a base
             // that isn't in the type table. We assume more relevant errors have already been
-            // emitted, so we delay an ICE if none have. (#64638)
-            self.tcx().dcx().span_delayed_bug(e.span, format!("bad base: `{base:?}`"));
+            // emitted. (#64638)
+            assert!(self.tcx().dcx().has_errors().is_some(), "bad base: `{base:?}`");
         }
         if let Some(base_ty) = base_ty
             && let ty::Ref(_, base_ty_inner, _) = *base_ty.kind()


@@ -802,14 +802,12 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
             }

             // Errors in earlier passes can yield error variables without
-            // resolution errors here; delay ICE in favor of those errors.
-            self.tcx().dcx().span_delayed_bug(
-                self.var_infos[node_idx].origin.span(),
-                format!(
-                    "collect_error_for_expanding_node() could not find \
-                     error for var {node_idx:?} in universe {node_universe:?}, lower_bounds={lower_bounds:#?}, \
-                     upper_bounds={upper_bounds:#?}"
-                ),
+            // resolution errors here; ICE if no errors have been emitted yet.
+            assert!(
+                self.tcx().dcx().has_errors().is_some(),
+                "collect_error_for_expanding_node() could not find error for var {node_idx:?} in \
+                 universe {node_universe:?}, lower_bounds={lower_bounds:#?}, \
+                 upper_bounds={upper_bounds:#?}",
             );
         }


@@ -300,9 +300,9 @@
                 self.components_must_outlive(origin, subcomponents, region, category);
             }
             Component::UnresolvedInferenceVariable(v) => {
-                // ignore this, we presume it will yield an error
-                // later, since if a type variable is not resolved by
-                // this point it never will be
+                // Ignore this, we presume it will yield an error later,
+                // since if a type variable is not resolved by this point
+                // it never will be.
                 self.tcx.dcx().span_delayed_bug(
                     origin.span(),
                     format!("unresolved inference variable in outlives: {v:?}"),


@@ -172,13 +172,13 @@ impl<'cx, 'tcx> VerifyBoundCx<'cx, 'tcx> {
                 self.bound_from_components(components, visited)
             }
             Component::UnresolvedInferenceVariable(v) => {
-                // ignore this, we presume it will yield an error
-                // later, since if a type variable is not resolved by
-                // this point it never will be
+                // Ignore this, we presume it will yield an error later, since
+                // if a type variable is not resolved by this point it never
+                // will be.
                 self.tcx
                     .dcx()
                     .delayed_bug(format!("unresolved inference variable in outlives: {v:?}"));
-                // add a bound that never holds
+                // Add a bound that never holds.
                 VerifyBound::AnyBound(vec![])
             }
         }


@@ -431,14 +431,13 @@ pub fn check_ast_node_inner<'a, T: EarlyLintPass>(
     // If not, that means that we somehow buffered a lint for a node id
     // that was not lint-checked (perhaps it doesn't exist?). This is a bug.
     for (id, lints) in cx.context.buffered.map {
-        for early_lint in lints {
-            sess.dcx().span_delayed_bug(
-                early_lint.span,
-                format!(
-                    "failed to process buffered lint here (dummy = {})",
-                    id == ast::DUMMY_NODE_ID
-                ),
+        if !lints.is_empty() {
+            assert!(
+                sess.dcx().has_errors().is_some(),
+                "failed to process buffered lint here (dummy = {})",
+                id == ast::DUMMY_NODE_ID
             );
-            break;
         }
     }
 }


@@ -33,7 +33,7 @@ struct UnsafetyVisitor<'a, 'tcx> {
     body_target_features: &'tcx [Symbol],
     /// When inside the LHS of an assignment to a field, this is the type
     /// of the LHS and the span of the assignment expression.
-    assignment_info: Option<(Ty<'tcx>, Span)>,
+    assignment_info: Option<Ty<'tcx>>,
     in_union_destructure: bool,
     param_env: ParamEnv<'tcx>,
     inside_adt: bool,
@@ -473,10 +473,15 @@ impl<'a, 'tcx> Visitor<'a, 'tcx> for UnsafetyVisitor<'a, 'tcx> {
                 if let ty::Adt(adt_def, _) = lhs.ty.kind()
                     && adt_def.is_union()
                 {
-                    if let Some((assigned_ty, assignment_span)) = self.assignment_info {
+                    if let Some(assigned_ty) = self.assignment_info {
                         if assigned_ty.needs_drop(self.tcx, self.param_env) {
-                            // This would be unsafe, but should be outright impossible since we reject such unions.
-                            self.tcx.dcx().span_delayed_bug(assignment_span, format!("union fields that need dropping should be impossible: {assigned_ty}"));
+                            // This would be unsafe, but should be outright impossible since we
+                            // reject such unions.
+                            assert!(
+                                self.tcx.dcx().has_errors().is_some(),
+                                "union fields that need dropping should be impossible: \
+                                 {assigned_ty}"
+                            );
                         }
                     } else {
                         self.requires_unsafe(expr.span, AccessToUnionField);
@@ -492,14 +497,15 @@ impl<'a, 'tcx> Visitor<'a, 'tcx> for UnsafetyVisitor<'a, 'tcx> {
                     self.requires_unsafe(expr.span, MutationOfLayoutConstrainedField);
                 }

-                // Second, check for accesses to union fields
-                // don't have any special handling for AssignOp since it causes a read *and* write to lhs
+                // Second, check for accesses to union fields. Don't have any
+                // special handling for AssignOp since it causes a read *and*
+                // write to lhs.
                 if matches!(expr.kind, ExprKind::Assign { .. }) {
-                    self.assignment_info = Some((lhs.ty, expr.span));
+                    self.assignment_info = Some(lhs.ty);
                     visit::walk_expr(self, lhs);
                     self.assignment_info = None;
                     visit::walk_expr(self, &self.thir()[rhs]);
-                    return; // we have already visited everything by now
+                    return; // We have already visited everything by now.
                 }
             }
             ExprKind::Borrow { borrow_kind, arg } => {


@@ -153,8 +153,7 @@ impl<'tcx> ConstToPat<'tcx> {
         // a hard error when we don't have a valtree or when we find something in
         // the valtree that is not structural; then this can all be made a lot simpler.

-        let structural =
-            traits::search_for_structural_match_violation(self.span, self.tcx(), cv.ty());
+        let structural = traits::search_for_structural_match_violation(self.tcx(), cv.ty());
         debug!(
             "search_for_structural_match_violation cv.ty: {:?} returned: {:?}",
             cv.ty(),


@@ -243,10 +243,11 @@ impl<'tcx> Visitor<'tcx> for UnsafetyChecker<'_, 'tcx> {
                 // old value is being dropped.
                 let assigned_ty = place.ty(&self.body.local_decls, self.tcx).ty;
                 if assigned_ty.needs_drop(self.tcx, self.param_env) {
-                    // This would be unsafe, but should be outright impossible since we reject such unions.
-                    self.tcx.dcx().span_delayed_bug(
-                        self.source_info.span,
-                        format!("union fields that need dropping should be impossible: {assigned_ty}")
+                    // This would be unsafe, but should be outright impossible since we reject
+                    // such unions.
+                    assert!(
+                        self.tcx.dcx().has_errors().is_some(),
+                        "union fields that need dropping should be impossible: {assigned_ty}"
                     );
                 }
             } else {


@@ -265,7 +265,7 @@ fn mir_const_qualif(tcx: TyCtxt<'_>, def: LocalDefId) -> ConstQualifs {
     let body = &tcx.mir_const(def).borrow();

     if body.return_ty().references_error() {
-        tcx.dcx().span_delayed_bug(body.span, "mir_const_qualif: MIR had errors");
+        assert!(tcx.dcx().has_errors().is_some(), "mir_const_qualif: MIR had errors");
         return Default::default();
     }


@@ -429,16 +429,16 @@
             let formatter = query.format_value();
             if old_hash != new_hash {
                 // We have an inconsistency. This can happen if one of the two
-                // results is tainted by errors. In this case, delay a bug to
-                // ensure compilation is doomed.
-                qcx.dep_context().sess().dcx().delayed_bug(format!(
+                // results is tainted by errors.
+                assert!(
+                    qcx.dep_context().sess().dcx().has_errors().is_some(),
                     "Computed query value for {:?}({:?}) is inconsistent with fed value,\n\
                      computed={:#?}\nfed={:#?}",
                     query.dep_kind(),
                     key,
                     formatter(&result),
                     formatter(&cached_result),
-                ));
+                );
             }
         }
     }


@@ -62,9 +62,10 @@ pub fn is_const_evaluatable<'tcx>(
         match unexpanded_ct.kind() {
             ty::ConstKind::Expr(_) => {
-                // FIXME(generic_const_exprs): we have a `ConstKind::Expr` which is fully concrete, but
-                // currently it is not possible to evaluate `ConstKind::Expr` so we are unable to tell if it
-                // is evaluatable or not. For now we just ICE until this is implemented.
+                // FIXME(generic_const_exprs): we have a `ConstKind::Expr` which is fully concrete,
+                // but currently it is not possible to evaluate `ConstKind::Expr` so we are unable
+                // to tell if it is evaluatable or not. For now we just ICE until this is
+                // implemented.
                 Err(NotConstEvaluatable::Error(tcx.dcx().span_delayed_bug(
                     span,
                     "evaluating `ConstKind::Expr` is not currently supported",


@@ -236,9 +236,9 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
             }
         }

-        // It could be that we don't report an error because we have seen an `ErrorReported` from another source.
-        // We should probably be able to fix most of these, but some are delayed bugs that get a proper error
-        // after this function.
+        // It could be that we don't report an error because we have seen an `ErrorReported` from
+        // another source. We should probably be able to fix most of these, but some are delayed
+        // bugs that get a proper error after this function.
         reported.unwrap_or_else(|| self.dcx().delayed_bug("failed to report fulfillment errors"))
     }
@@ -519,7 +519,11 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
                     trait_ref,
                     span,
                 ) {
-                    GetSafeTransmuteErrorAndReason::Silent => return self.dcx().span_delayed_bug(span, "silent safe transmute error"),
+                    GetSafeTransmuteErrorAndReason::Silent => {
+                        return self.dcx().span_delayed_bug(
+                            span, "silent safe transmute error"
+                        );
+                    }
                     GetSafeTransmuteErrorAndReason::Error {
                         err_msg,
                         safe_transmute_explanation,


@@ -555,7 +555,7 @@ fn virtual_call_violations_for_method<'tcx>(
     // NOTE: This check happens last, because it results in a lint, and not a
     // hard error.
-    if tcx.predicates_of(method.def_id).predicates.iter().any(|&(pred, span)| {
+    if tcx.predicates_of(method.def_id).predicates.iter().any(|&(pred, _span)| {
         // dyn Trait is okay:
         //
         // trait Trait {
@@ -594,7 +594,10 @@ fn virtual_call_violations_for_method<'tcx>(
             // would already have reported an error at the definition of the
             // auto trait.
             if pred_trait_ref.args.len() != 1 {
-                tcx.dcx().span_delayed_bug(span, "auto traits cannot have generic parameters");
+                assert!(
+                    tcx.dcx().has_errors().is_some(),
+                    "auto traits cannot have generic parameters"
+                );
             }
             return false;
         }


@@ -1,7 +1,6 @@
 use rustc_data_structures::fx::FxHashSet;
 use rustc_hir as hir;
 use rustc_middle::ty::{self, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitor};
-use rustc_span::Span;
 use std::ops::ControlFlow;

 /// This method traverses the structure of `ty`, trying to find an
@@ -30,19 +29,16 @@ use std::ops::ControlFlow;
 /// that arose when the requirement was not enforced completely, see
 /// Rust RFC 1445, rust-lang/rust#61188, and rust-lang/rust#62307.
 pub fn search_for_structural_match_violation<'tcx>(
-    span: Span,
     tcx: TyCtxt<'tcx>,
     ty: Ty<'tcx>,
 ) -> Option<Ty<'tcx>> {
-    ty.visit_with(&mut Search { tcx, span, seen: FxHashSet::default() }).break_value()
+    ty.visit_with(&mut Search { tcx, seen: FxHashSet::default() }).break_value()
 }

 /// This implements the traversal over the structure of a given type to try to
 /// find instances of ADTs (specifically structs or enums) that do not implement
 /// `StructuralPartialEq`.
 struct Search<'tcx> {
-    span: Span,
-
     tcx: TyCtxt<'tcx>,

     /// Tracks ADTs previously encountered during search, so that
@@ -138,7 +134,6 @@ impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for Search<'tcx> {
                 bug!("unexpected type during structural-match checking: {:?}", ty);
             }
             ty::Error(_) => {
-                self.tcx.dcx().span_delayed_bug(self.span, "ty::Error in structural-match check");
                 // We still want to check other types after encountering an error,
                 // as this may still emit relevant errors.
                 return ControlFlow::Continue(());