Auto merge of #131797 - matthiaskrgr:rollup-lzpze2k, r=matthiaskrgr

Rollup of 9 pull requests

Successful merges:

 - #130989 (Don't check unsize goal in MIR validation when opaques remain)
 - #131657 (Rustfmt `for<'a> async` correctly)
 - #131691 (Delay ambiguous intra-doc link resolution after `Cache` has been populated)
 - #131730 (Refactor some `core::fmt` macros)
 - #131751 (Rename `can_coerce` to `may_coerce`, and then structurally resolve correctly in the probe)
 - #131753 (Unify `secondary_span` and `swap_secondary_and_primary` args in `note_type_err`)
 - #131776 (Emscripten: Xfail backtrace ui tests)
 - #131777 (Fix trivially_copy_pass_by_ref in stable_mir)
 - #131778 (Fix needless_lifetimes in stable_mir)

r? `@ghost`
`@rustbot` modify labels: rollup
bors 2024-10-16 20:50:53 +00:00
commit 798fb83f7d
47 changed files with 610 additions and 248 deletions

View File

@@ -592,14 +592,13 @@ pub(super) fn collect_return_position_impl_trait_in_trait_tys<'tcx>(
            &cause,
            hir.get_if_local(impl_m.def_id)
                .and_then(|node| node.fn_decl())
-                .map(|decl| (decl.output.span(), Cow::from("return type in trait"))),
+                .map(|decl| (decl.output.span(), Cow::from("return type in trait"), false)),
            Some(infer::ValuePairs::Terms(ExpectedFound {
                expected: trait_return_ty.into(),
                found: impl_return_ty.into(),
            })),
            terr,
            false,
-            false,
        );
        return Err(diag.emit());
    }
@@ -1018,14 +1017,13 @@ fn report_trait_method_mismatch<'tcx>(
    infcx.err_ctxt().note_type_err(
        &mut diag,
        &cause,
-        trait_err_span.map(|sp| (sp, Cow::from("type in trait"))),
+        trait_err_span.map(|sp| (sp, Cow::from("type in trait"), false)),
        Some(infer::ValuePairs::PolySigs(ExpectedFound {
            expected: ty::Binder::dummy(trait_sig),
            found: ty::Binder::dummy(impl_sig),
        })),
        terr,
        false,
-        false,
    );
    diag.emit()
@@ -1825,14 +1823,13 @@ fn compare_const_predicate_entailment<'tcx>(
        infcx.err_ctxt().note_type_err(
            &mut diag,
            &cause,
-            trait_c_span.map(|span| (span, Cow::from("type in trait"))),
+            trait_c_span.map(|span| (span, Cow::from("type in trait"), false)),
            Some(infer::ValuePairs::Terms(ExpectedFound {
                expected: trait_ty.into(),
                found: impl_ty.into(),
            })),
            terr,
            false,
-            false,
        );
        return Err(diag.emit());
    };

View File

@@ -652,7 +652,6 @@ pub fn check_function_signature<'tcx>(
            })),
            err,
            false,
-            false,
        );
        return Err(diag.emit());
    }

View File

@@ -235,8 +235,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
            Some(ret_coercion) => {
                let ret_ty = ret_coercion.borrow().expected_ty();
                let ret_ty = self.infcx.shallow_resolve(ret_ty);
-                self.can_coerce(arm_ty, ret_ty)
-                    && prior_arm.is_none_or(|(_, ty, _)| self.can_coerce(ty, ret_ty))
+                self.may_coerce(arm_ty, ret_ty)
+                    && prior_arm.is_none_or(|(_, ty, _)| self.may_coerce(ty, ret_ty))
                    // The match arms need to unify for the case of `impl Trait`.
                    && !matches!(ret_ty.kind(), ty::Alias(ty::Opaque, ..))
            }

View File

@@ -409,7 +409,7 @@ impl<'a, 'tcx> CastCheck<'tcx> {
        let mut sugg_mutref = false;
        if let ty::Ref(reg, cast_ty, mutbl) = *self.cast_ty.kind() {
            if let ty::RawPtr(expr_ty, _) = *self.expr_ty.kind()
-                && fcx.can_coerce(
+                && fcx.may_coerce(
                    Ty::new_ref(fcx.tcx, fcx.tcx.lifetimes.re_erased, expr_ty, mutbl),
                    self.cast_ty,
                )
@@ -418,14 +418,14 @@ impl<'a, 'tcx> CastCheck<'tcx> {
            } else if let ty::Ref(expr_reg, expr_ty, expr_mutbl) = *self.expr_ty.kind()
                && expr_mutbl == Mutability::Not
                && mutbl == Mutability::Mut
-                && fcx.can_coerce(Ty::new_mut_ref(fcx.tcx, expr_reg, expr_ty), self.cast_ty)
+                && fcx.may_coerce(Ty::new_mut_ref(fcx.tcx, expr_reg, expr_ty), self.cast_ty)
            {
                sugg_mutref = true;
            }
            if !sugg_mutref
                && sugg == None
-                && fcx.can_coerce(
+                && fcx.may_coerce(
                    Ty::new_ref(fcx.tcx, reg, self.expr_ty, mutbl),
                    self.cast_ty,
                )
@@ -433,7 +433,7 @@ impl<'a, 'tcx> CastCheck<'tcx> {
                sugg = Some((format!("&{}", mutbl.prefix_str()), false));
            }
        } else if let ty::RawPtr(_, mutbl) = *self.cast_ty.kind()
-            && fcx.can_coerce(
+            && fcx.may_coerce(
                Ty::new_ref(fcx.tcx, fcx.tcx.lifetimes.re_erased, self.expr_ty, mutbl),
                self.cast_ty,
            )

View File

@@ -1084,24 +1084,42 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
        })
    }

-    /// Same as `coerce()`, but without side-effects.
+    /// Probe whether `expr_ty` can be coerced to `target_ty`. This has no side-effects,
+    /// and may return false positives if types are not yet fully constrained by inference.
    ///
-    /// Returns false if the coercion creates any obligations that result in
-    /// errors.
-    pub(crate) fn can_coerce(&self, expr_ty: Ty<'tcx>, target: Ty<'tcx>) -> bool {
-        // FIXME(-Znext-solver): We need to structurally resolve both types here.
-        let source = self.resolve_vars_with_obligations(expr_ty);
-        debug!("coercion::can_with_predicates({:?} -> {:?})", source, target);
+    /// Returns false if the coercion is not possible, or if the coercion creates any
+    /// sub-obligations that result in errors.
+    ///
+    /// This should only be used for diagnostics.
+    pub(crate) fn may_coerce(&self, expr_ty: Ty<'tcx>, target_ty: Ty<'tcx>) -> bool {
        let cause = self.cause(DUMMY_SP, ObligationCauseCode::ExprAssignable);
        // We don't ever need two-phase here since we throw out the result of the coercion.
        // We also just always set `coerce_never` to true, since this is a heuristic.
-        let coerce = Coerce::new(self, cause, AllowTwoPhase::No, true);
+        let coerce = Coerce::new(self, cause.clone(), AllowTwoPhase::No, true);
        self.probe(|_| {
-            let Ok(ok) = coerce.coerce(source, target) else {
+            // Make sure to structurally resolve the types, since we use
+            // the `TyKind`s heavily in coercion.
+            let ocx = ObligationCtxt::new(self);
+            let structurally_resolve = |ty| {
+                let ty = self.shallow_resolve(ty);
+                if self.next_trait_solver()
+                    && let ty::Alias(..) = ty.kind()
+                {
+                    ocx.structurally_normalize(&cause, self.param_env, ty)
+                } else {
+                    Ok(ty)
+                }
+            };
+            let Ok(expr_ty) = structurally_resolve(expr_ty) else {
+                return false;
+            };
+            let Ok(target_ty) = structurally_resolve(target_ty) else {
+                return false;
+            };
+            let Ok(ok) = coerce.coerce(expr_ty, target_ty) else {
                return false;
            };
-            let ocx = ObligationCtxt::new(self);
            ocx.register_obligations(ok.obligations);
            ocx.select_where_possible().is_empty()
        })
@@ -1370,7 +1388,7 @@ pub fn can_coerce<'tcx>(
 ) -> bool {
    let root_ctxt = crate::typeck_root_ctxt::TypeckRootCtxt::new(tcx, body_id);
    let fn_ctxt = FnCtxt::new(&root_ctxt, param_env, body_id);
-    fn_ctxt.can_coerce(ty, output_ty)
+    fn_ctxt.may_coerce(ty, output_ty)
 }

 /// CoerceMany encapsulates the pattern you should use when you have
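The new `may_coerce` above is documented as a pure probe: it may report false positives and should only drive diagnostics. A minimal, self-contained sketch of that probe-and-discard idea, with every name invented for illustration (this is not the rustc API):

// Sketch only: a "probe" evaluates a check against scratch state and discards
// whatever constraints it registers, so the caller observes no side effects.
#[derive(Clone, Default)]
struct InferState {
    constraints: Vec<String>,
}

impl InferState {
    /// Run `check` on a clone of the state and throw the clone away afterwards.
    fn probe<R>(&self, check: impl FnOnce(&mut InferState) -> R) -> R {
        let mut scratch = self.clone();
        check(&mut scratch)
        // `scratch` is dropped here; `self` is untouched.
    }
}

fn main() {
    let state = InferState::default();
    // Ask "could this coercion succeed?" without committing anything.
    let may_coerce = state.probe(|s| {
        s.constraints.push("u8 <: u32".to_owned());
        s.constraints.iter().all(|c| !c.contains("error"))
    });
    println!("coercion may succeed: {may_coerce}");
    assert!(state.constraints.is_empty()); // nothing leaked out of the probe
}

The point of the pattern is the final assertion: whatever the check pushed into the scratch state never reaches the real inference state.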

View File

@@ -1330,9 +1330,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
        let refs_can_coerce = |lhs: Ty<'tcx>, rhs: Ty<'tcx>| {
            let lhs = Ty::new_imm_ref(self.tcx, self.tcx.lifetimes.re_erased, lhs.peel_refs());
            let rhs = Ty::new_imm_ref(self.tcx, self.tcx.lifetimes.re_erased, rhs.peel_refs());
-            self.can_coerce(rhs, lhs)
+            self.may_coerce(rhs, lhs)
        };
-        let (applicability, eq) = if self.can_coerce(rhs_ty, lhs_ty) {
+        let (applicability, eq) = if self.may_coerce(rhs_ty, lhs_ty) {
            (Applicability::MachineApplicable, true)
        } else if refs_can_coerce(rhs_ty, lhs_ty) {
            // The lhs and rhs are likely missing some references in either side. Subsequent
@@ -1349,7 +1349,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
            let actual_lhs_ty = self.check_expr(rhs_expr);
            (
                Applicability::MaybeIncorrect,
-                self.can_coerce(rhs_ty, actual_lhs_ty)
+                self.may_coerce(rhs_ty, actual_lhs_ty)
                    || refs_can_coerce(rhs_ty, actual_lhs_ty),
            )
        } else if let ExprKind::Binary(
@@ -1363,7 +1363,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
            let actual_rhs_ty = self.check_expr(lhs_expr);
            (
                Applicability::MaybeIncorrect,
-                self.can_coerce(actual_rhs_ty, lhs_ty)
+                self.may_coerce(actual_rhs_ty, lhs_ty)
                    || refs_can_coerce(actual_rhs_ty, lhs_ty),
            )
        } else {
@@ -1414,7 +1414,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                self.param_env,
            )
            .may_apply();
-            if lhs_deref_ty_is_sized && self.can_coerce(rhs_ty, lhs_deref_ty) {
+            if lhs_deref_ty_is_sized && self.may_coerce(rhs_ty, lhs_deref_ty) {
                err.span_suggestion_verbose(
                    lhs.span.shrink_to_lo(),
                    "consider dereferencing here to assign to the mutably borrowed value",

View File

@@ -658,7 +658,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
            && fn_sig.inputs()[1..]
                .iter()
                .zip(input_types.iter())
-                .all(|(expected, found)| self.can_coerce(*expected, *found))
+                .all(|(expected, found)| self.may_coerce(*expected, *found))
            && fn_sig.inputs()[1..].len() == input_types.len()
        {
            err.span_suggestion_verbose(
@@ -722,7 +722,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
            let expectation = Expectation::rvalue_hint(self, expected_input_ty);
            let coerced_ty = expectation.only_has_type(self).unwrap_or(formal_input_ty);
-            let can_coerce = self.can_coerce(arg_ty, coerced_ty);
+            let can_coerce = self.may_coerce(arg_ty, coerced_ty);
            if !can_coerce {
                return Compatibility::Incompatible(Some(ty::error::TypeError::Sorts(
                    ty::error::ExpectedFound::new(true, coerced_ty, arg_ty),
@@ -802,7 +802,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                    provided_arg_tys.iter().map(|(ty, _)| *ty).skip(mismatch_idx + tys.len()),
                ),
            ) {
-                if !self.can_coerce(provided_ty, *expected_ty) {
+                if !self.may_coerce(provided_ty, *expected_ty) {
                    satisfied = false;
                    break;
                }
@@ -1023,7 +1023,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
            std::iter::zip(formal_and_expected_inputs.iter(), removed_arg_tys.iter()).all(
                |((expected_ty, _), (provided_ty, _))| {
                    !provided_ty.references_error()
-                        && self.can_coerce(*provided_ty, *expected_ty)
+                        && self.may_coerce(*provided_ty, *expected_ty)
                },
            )
        };
@@ -1114,7 +1114,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                None,
                Some(trace.values),
                e,
-                false,
                true,
            );
        }
@@ -2124,7 +2123,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
            let expr_ty = self.typeck_results.borrow().expr_ty(expr);
            let return_ty = fn_sig.output();
            if !matches!(expr.kind, hir::ExprKind::Ret(..))
-                && self.can_coerce(expr_ty, return_ty)
+                && self.may_coerce(expr_ty, return_ty)
            {
                found_semi = true;
            }

View File

@@ -261,7 +261,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
        if let hir::ExprKind::MethodCall(hir::PathSegment { ident: method, .. }, recv_expr, &[], _) =
            expr.kind
            && let Some(recv_ty) = self.typeck_results.borrow().expr_ty_opt(recv_expr)
-            && self.can_coerce(recv_ty, expected)
+            && self.may_coerce(recv_ty, expected)
            && let name = method.name.as_str()
            && (name.starts_with("to_") || name.starts_with("as_") || name == "into")
        {
@@ -349,7 +349,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
            return true;
        }
-        if self.suggest_fn_call(err, expr, found, |output| self.can_coerce(output, expected))
+        if self.suggest_fn_call(err, expr, found, |output| self.may_coerce(output, expected))
            && let ty::FnDef(def_id, ..) = *found.kind()
            && let Some(sp) = self.tcx.hir().span_if_local(def_id)
        {
@@ -568,7 +568,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
        if self.tcx.hir().is_inside_const_context(hir_id) || !expected.is_box() || found.is_box() {
            return false;
        }
-        if self.can_coerce(Ty::new_box(self.tcx, found), expected) {
+        if self.may_coerce(Ty::new_box(self.tcx, found), expected) {
            let suggest_boxing = match found.kind() {
                ty::Tuple(tuple) if tuple.is_empty() => {
                    errors::SuggestBoxing::Unit { start: span.shrink_to_lo(), end: span }
@@ -663,7 +663,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
        };
        match expected.kind() {
            ty::Adt(def, _) if Some(def.did()) == pin_did => {
-                if self.can_coerce(pin_box_found, expected) {
+                if self.may_coerce(pin_box_found, expected) {
                    debug!("can coerce {:?} to {:?}, suggesting Box::pin", pin_box_found, expected);
                    match found.kind() {
                        ty::Adt(def, _) if def.is_box() => {
@@ -689,7 +689,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                        }
                    }
                    true
-                } else if self.can_coerce(pin_found, expected) {
+                } else if self.may_coerce(pin_found, expected) {
                    match found.kind() {
                        ty::Adt(def, _) if def.is_box() => {
                            err.help("use `Box::pin`");
@@ -701,7 +701,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                        false
                    }
                }
-            ty::Adt(def, _) if def.is_box() && self.can_coerce(box_found, expected) => {
+            ty::Adt(def, _) if def.is_box() && self.may_coerce(box_found, expected) => {
                // Check if the parent expression is a call to Pin::new. If it
                // is and we were expecting a Box, ergo Pin<Box<expected>>, we
                // can suggest Box::pin.
@@ -884,7 +884,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
        let ty = Binder::bind_with_vars(ty, bound_vars);
        let ty = self.normalize(hir_ty.span, ty);
        let ty = self.tcx.instantiate_bound_regions_with_erased(ty);
-        if self.can_coerce(expected, ty) {
+        if self.may_coerce(expected, ty) {
            err.subdiagnostic(errors::ExpectedReturnTypeLabel::Other {
                span: hir_ty.span,
                expected,
@@ -1141,12 +1141,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                    ty::Asyncness::No => ty,
                };
                let ty = self.normalize(expr.span, ty);
-                self.can_coerce(found, ty)
+                self.may_coerce(found, ty)
            }
            hir::FnRetTy::DefaultReturn(_) if in_closure => {
                self.ret_coercion.as_ref().map_or(false, |ret| {
                    let ret_ty = ret.borrow().expected_ty();
-                    self.can_coerce(found, ret_ty)
+                    self.may_coerce(found, ret_ty)
                })
            }
            _ => false,
@@ -1510,7 +1510,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
            provided_ty
        };
-        if !self.can_coerce(expected_ty, dummy_ty) {
+        if !self.may_coerce(expected_ty, dummy_ty) {
            return;
        }
        let msg = format!("use `{adt_name}::map_or` to deref inner value of `{adt_name}`");
@@ -1534,7 +1534,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
        expected_ty: Ty<'tcx>,
    ) {
        if let ty::Slice(elem_ty) | ty::Array(elem_ty, _) = expected_ty.kind() {
-            if self.can_coerce(blk_ty, *elem_ty)
+            if self.may_coerce(blk_ty, *elem_ty)
                && blk.stmts.is_empty()
                && blk.rules == hir::BlockCheckMode::DefaultBlock
            {
@@ -1744,7 +1744,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
        if item_ty.has_param() {
            return false;
        }
-        if self.can_coerce(item_ty, expected_ty) {
+        if self.may_coerce(item_ty, expected_ty) {
            err.span_suggestion_verbose(
                segment.ident.span,
                format!("try referring to the associated const `{capitalized_name}` instead",),
@@ -1804,7 +1804,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
            // diagnostic in cases where we have `(&&T).clone()` and we expect `T`).
            && !results.expr_adjustments(callee_expr).iter().any(|adj| matches!(adj.kind, ty::adjustment::Adjust::Deref(..)))
            // Check that we're in fact trying to clone into the expected type
-            && self.can_coerce(*pointee_ty, expected_ty)
+            && self.may_coerce(*pointee_ty, expected_ty)
            && let trait_ref = ty::TraitRef::new(self.tcx, clone_trait_did, [expected_ty])
            // And the expected type doesn't implement `Clone`
            && !self.predicate_must_hold_considering_regions(&traits::Obligation::new(
@@ -2022,7 +2022,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
        } else {
            return false;
        };
-        if is_ctor || !self.can_coerce(args.type_at(0), expected) {
+        if is_ctor || !self.may_coerce(args.type_at(0), expected) {
            return false;
        }
@@ -2293,7 +2293,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                    .then(|| " (its field is private, but it's local to this crate and its privacy can be changed)".to_string());
                let sole_field_ty = sole_field.ty(self.tcx, args);
-                if self.can_coerce(expr_ty, sole_field_ty) {
+                if self.may_coerce(expr_ty, sole_field_ty) {
                    let variant_path =
                        with_no_trimmed_paths!(self.tcx.def_path_str(variant.def_id));
                    // FIXME #56861: DRYer prelude filtering
@@ -2401,7 +2401,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
        }
        let int_type = args.type_at(0);
-        if !self.can_coerce(expr_ty, int_type) {
+        if !self.may_coerce(expr_ty, int_type) {
            return false;
        }
@@ -2585,7 +2585,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                Ty::new_imm_ref(self.tcx, self.tcx.lifetimes.re_static, checked_ty)
            }
        };
-        if self.can_coerce(ref_ty, expected) {
+        if self.may_coerce(ref_ty, expected) {
            let mut sugg_sp = sp;
            if let hir::ExprKind::MethodCall(segment, receiver, args, _) = expr.kind {
                let clone_trait =

View File

@@ -1934,7 +1934,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
            && fn_sig.inputs()[1..]
                .iter()
                .zip(args.into_iter())
-                .all(|(expected, found)| self.can_coerce(*expected, *found))
+                .all(|(expected, found)| self.may_coerce(*expected, *found))
            && fn_sig.inputs()[1..].len() == args.len()
        {
            err.span_suggestion_verbose(
@@ -4148,7 +4148,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
            return false;
        };
-        if !self.can_coerce(output, expected) {
+        if !self.may_coerce(output, expected) {
            return false;
        }

View File

@@ -1780,7 +1780,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
        } else if inexistent_fields.len() == 1 {
            match pat_field.pat.kind {
                PatKind::Lit(expr)
-                    if !self.can_coerce(
+                    if !self.may_coerce(
                        self.typeck_results.borrow().expr_ty(expr),
                        self.field_ty(field.span, field_def, args),
                    ) => {}

View File

@@ -595,6 +595,17 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
        &self,
        pred: impl Upcast<TyCtxt<'tcx>, ty::Predicate<'tcx>>,
    ) -> bool {
+        let pred: ty::Predicate<'tcx> = pred.upcast(self.tcx);
+
+        // We sometimes have to use `defining_opaque_types` for predicates
+        // to succeed here and figuring out how exactly that should work
+        // is annoying. It is harmless enough to just not validate anything
+        // in that case. We still check this after analysis as all opaque
+        // types have been revealed at this point.
+        if pred.has_opaque_types() {
+            return true;
+        }
+
        let infcx = self.tcx.infer_ctxt().build();
        let ocx = ObligationCtxt::new(&infcx);
        ocx.register_obligation(Obligation::new(

View File

@@ -2302,7 +2302,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
                    })),
                    terr,
                    false,
-                    false,
                );
                diag.emit();
                self.abort.set(true);

View File

@@ -1127,18 +1127,14 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
    /// the message in `secondary_span` as the primary label, and apply the message that would
    /// otherwise be used for the primary label on the `secondary_span` `Span`. This applies on
    /// E0271, like `tests/ui/issues/issue-39970.stderr`.
-    #[instrument(
-        level = "debug",
-        skip(self, diag, secondary_span, swap_secondary_and_primary, prefer_label)
-    )]
+    #[instrument(level = "debug", skip(self, diag, secondary_span, prefer_label))]
    pub fn note_type_err(
        &self,
        diag: &mut Diag<'_>,
        cause: &ObligationCause<'tcx>,
-        secondary_span: Option<(Span, Cow<'static, str>)>,
+        secondary_span: Option<(Span, Cow<'static, str>, bool)>,
        mut values: Option<ValuePairs<'tcx>>,
        terr: TypeError<'tcx>,
-        swap_secondary_and_primary: bool,
        prefer_label: bool,
    ) {
        let span = cause.span();
@@ -1304,7 +1300,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
                diag.span_note(span, msg);
            }
        };
-        if let Some((sp, msg)) = secondary_span {
+        if let Some((secondary_span, secondary_msg, swap_secondary_and_primary)) = secondary_span {
            if swap_secondary_and_primary {
                let terr = if let Some(infer::ValuePairs::Terms(ExpectedFound {
                    expected, ..
@@ -1314,11 +1310,11 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
                } else {
                    terr.to_string(self.tcx)
                };
-                label_or_note(sp, terr);
-                label_or_note(span, msg);
+                label_or_note(secondary_span, terr);
+                label_or_note(span, secondary_msg);
            } else {
                label_or_note(span, terr.to_string(self.tcx));
-                label_or_note(sp, msg);
+                label_or_note(secondary_span, secondary_msg);
            }
        } else if let Some(values) = values
            && let Some((e, f)) = values.ty()
@@ -1788,7 +1784,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
            self.type_error_additional_suggestions(&trace, terr),
        );
        let mut diag = self.dcx().create_err(failure_code);
-        self.note_type_err(&mut diag, &trace.cause, None, Some(trace.values), terr, false, false);
+        self.note_type_err(&mut diag, &trace.cause, None, Some(trace.values), terr, false);
        diag
    }
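The change above folds the standalone `swap_secondary_and_primary` flag into the `secondary_span` tuple it only ever applied to, so the flag cannot be set without a secondary span. A rough standalone sketch of that API reshaping, with placeholder types rather than the real rustc signatures:

// Before: the bool travels separately and is meaningless when `secondary` is None.
fn note_before(secondary: Option<(u32, String)>, swap_secondary_and_primary: bool) {
    if let Some((span, msg)) = secondary {
        if swap_secondary_and_primary {
            println!("primary label at {span}: {msg}");
        } else {
            println!("secondary label at {span}: {msg}");
        }
    }
}

// After: the flag lives inside the tuple, next to the span it swaps with.
fn note_after(secondary: Option<(u32, String, bool)>) {
    if let Some((span, msg, swap)) = secondary {
        if swap {
            println!("primary label at {span}: {msg}");
        } else {
            println!("secondary label at {span}: {msg}");
        }
    }
}

fn main() {
    note_before(Some((7, "type in trait".into())), false);
    note_after(Some((7, "type in trait".into(), false)));
}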

View File

@@ -710,7 +710,6 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
            None,
            TypeError::Sorts(ty::error::ExpectedFound::new(true, expected_ty, ct_ty)),
            false,
-            false,
        );
        diag
    }
@@ -1435,6 +1434,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
                        cx.into_buffer()
                    }
                ))),
+                true,
            )),
            _ => None,
        }
@@ -1452,7 +1452,6 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
                ))
            }),
            err,
-            true,
            false,
        );
        self.note_obligation_cause(&mut diag, obligation);

View File

@@ -1028,7 +1028,7 @@ impl ProjectionElem {
            ProjectionElem::Field(_idx, fty) => Ok(*fty),
            ProjectionElem::Index(_) | ProjectionElem::ConstantIndex { .. } => Self::index_ty(ty),
            ProjectionElem::Subslice { from, to, from_end } => {
-                Self::subslice_ty(ty, from, to, from_end)
+                Self::subslice_ty(ty, *from, *to, *from_end)
            }
            ProjectionElem::Downcast(_) => Ok(ty),
            ProjectionElem::OpaqueCast(ty) | ProjectionElem::Subtype(ty) => Ok(*ty),
@@ -1039,13 +1039,13 @@ impl ProjectionElem {
        ty.kind().builtin_index().ok_or_else(|| error!("Cannot index non-array type: {ty:?}"))
    }
-    fn subslice_ty(ty: Ty, from: &u64, to: &u64, from_end: &bool) -> Result<Ty, Error> {
+    fn subslice_ty(ty: Ty, from: u64, to: u64, from_end: bool) -> Result<Ty, Error> {
        let ty_kind = ty.kind();
        match ty_kind {
            TyKind::RigidTy(RigidTy::Slice(..)) => Ok(ty),
            TyKind::RigidTy(RigidTy::Array(inner, _)) if !from_end => Ty::try_new_array(
                inner,
-                to.checked_sub(*from).ok_or_else(|| error!("Subslice overflow: {from}..{to}"))?,
+                to.checked_sub(from).ok_or_else(|| error!("Subslice overflow: {from}..{to}"))?,
            ),
            TyKind::RigidTy(RigidTy::Array(inner, size)) => {
                let size = size.eval_target_usize()?;
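The `subslice_ty` change above is the clippy `trivially_copy_pass_by_ref` fix from #131777: small `Copy` values such as `u64` and `bool` are cheaper and clearer passed by value than behind a reference. A tiny illustration with a hypothetical function (not the stable_mir API):

// Flagged by clippy::trivially_copy_pass_by_ref: the references cost as much
// as (or more than) simply copying the values.
fn subslice_len_by_ref(from: &u64, to: &u64, from_end: &bool) -> Option<u64> {
    if *from_end { None } else { to.checked_sub(*from) }
}

// Preferred: take the small Copy types by value; callers no longer need `&`.
fn subslice_len(from: u64, to: u64, from_end: bool) -> Option<u64> {
    if from_end { None } else { to.checked_sub(from) }
}

fn main() {
    assert_eq!(subslice_len_by_ref(&2, &5, &false), Some(3));
    assert_eq!(subslice_len(2, 5, false), Some(3));
}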

View File

@@ -477,7 +477,7 @@ pub struct PlaceRef<'a> {
    pub projection: &'a [ProjectionElem],
 }
-impl<'a> PlaceRef<'a> {
+impl PlaceRef<'_> {
    /// Get the type of this place.
    pub fn ty(&self, locals: &[LocalDecl]) -> Result<Ty, Error> {
        self.projection.iter().fold(Ok(locals[self.local].ty), |place_ty, elem| elem.ty(place_ty?))
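The `impl<'a> PlaceRef<'a>` to `impl PlaceRef<'_>` change is the clippy `needless_lifetimes` fix from #131778: when a lifetime parameter is declared but never named inside the impl, it can be elided with `'_`. A minimal sketch with an invented type:

struct Bytes<'a>(&'a [u8]);

// clippy::needless_lifetimes: `'a` would be declared but never referred to.
// impl<'a> Bytes<'a> {
//     fn len(&self) -> usize { self.0.len() }
// }

// Preferred: elide the unused parameter.
impl Bytes<'_> {
    fn len(&self) -> usize {
        self.0.len()
    }
}

fn main() {
    let b = Bytes(b"hello");
    println!("{}", b.len());
}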

View File

@@ -196,39 +196,40 @@ where
 }

 macro_rules! floating {
-    ($ty:ident) => {
+    ($($ty:ident)*) => {
+        $(
            #[stable(feature = "rust1", since = "1.0.0")]
            impl Debug for $ty {
                fn fmt(&self, fmt: &mut Formatter<'_>) -> Result {
                    float_to_general_debug(fmt, self)
                }
            }

            #[stable(feature = "rust1", since = "1.0.0")]
            impl Display for $ty {
                fn fmt(&self, fmt: &mut Formatter<'_>) -> Result {
                    float_to_decimal_display(fmt, self)
                }
            }

            #[stable(feature = "rust1", since = "1.0.0")]
            impl LowerExp for $ty {
                fn fmt(&self, fmt: &mut Formatter<'_>) -> Result {
                    float_to_exponential_common(fmt, self, false)
                }
            }

            #[stable(feature = "rust1", since = "1.0.0")]
            impl UpperExp for $ty {
                fn fmt(&self, fmt: &mut Formatter<'_>) -> Result {
                    float_to_exponential_common(fmt, self, true)
                }
            }
+        )*
    };
 }

-floating! { f32 }
-floating! { f64 }
+floating! { f32 f64 }

 #[stable(feature = "rust1", since = "1.0.0")]
 impl Debug for f16 {
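The `core::fmt` refactor above (from #131730) replaces one macro invocation per float type with a single invocation whose macro arm iterates over a `$($ty:ident)*` repetition. A standalone sketch of the same pattern with an invented trait:

trait Describe {
    fn describe() -> &'static str;
}

// One arm matches any number of type names; the `$( ... )*` block expands
// once per matched identifier, so `impl_describe! { f32 f64 }` replaces two
// separate invocations.
macro_rules! impl_describe {
    ($($ty:ident)*) => {
        $(
            impl Describe for $ty {
                fn describe() -> &'static str {
                    stringify!($ty)
                }
            }
        )*
    };
}

impl_describe! { f32 f64 }

fn main() {
    println!("{} {}", f32::describe(), f64::describe());
}

The win is purely organizational: the generated impls are identical, but the list of types lives in one place.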

View File

@@ -1,18 +1,17 @@
 use crate::fmt::{Debug, Formatter, Result};

 macro_rules! floating {
-    ($ty:ident) => {
+    ($($ty:ident)*) => {
+        $(
            #[stable(feature = "rust1", since = "1.0.0")]
            impl Debug for $ty {
                #[inline]
                fn fmt(&self, _fmt: &mut Formatter<'_>) -> Result {
-                    panic!("floating point support is turned off");
+                    panic!("floating point fmt support is turned off");
                }
            }
+        )*
    };
 }

-floating! { f16 }
-floating! { f32 }
-floating! { f64 }
-floating! { f128 }
+floating! { f16 f32 f64 f128 }

View File

@@ -20,33 +20,22 @@ trait DisplayInt:
 macro_rules! impl_int {
    ($($t:ident)*) => (
        $(impl DisplayInt for $t {
            fn zero() -> Self { 0 }
            fn from_u8(u: u8) -> Self { u as Self }
            fn to_u8(&self) -> u8 { *self as u8 }
            #[cfg(not(any(target_pointer_width = "64", target_arch = "wasm32")))]
            fn to_u32(&self) -> u32 { *self as u32 }
            fn to_u64(&self) -> u64 { *self as u64 }
            fn to_u128(&self) -> u128 { *self as u128 }
        })*
-    )
-}
-macro_rules! impl_uint {
-    ($($t:ident)*) => (
-        $(impl DisplayInt for $t {
-            fn zero() -> Self { 0 }
-            fn from_u8(u: u8) -> Self { u as Self }
-            fn to_u8(&self) -> u8 { *self as u8 }
-            #[cfg(not(any(target_pointer_width = "64", target_arch = "wasm32")))]
-            fn to_u32(&self) -> u32 { *self as u32 }
-            fn to_u64(&self) -> u64 { *self as u64 }
-            fn to_u128(&self) -> u128 { *self as u128 }
-        })*
    )
 }

-impl_int! { i8 i16 i32 i64 i128 isize }
-impl_uint! { u8 u16 u32 u64 u128 usize }
+impl_int! {
+    i8 i16 i32 i64 i128 isize
+    u8 u16 u32 u64 u128 usize
+}

 /// A type that represents a specific radix
 ///
@@ -178,26 +167,25 @@ integer! { i16, u16 }
 integer! { i32, u32 }
 integer! { i64, u64 }
 integer! { i128, u128 }

-macro_rules! debug {
-    ($($T:ident)*) => {$(
-        #[stable(feature = "rust1", since = "1.0.0")]
-        impl fmt::Debug for $T {
-            #[inline]
-            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-                if f.debug_lower_hex() {
-                    fmt::LowerHex::fmt(self, f)
-                } else if f.debug_upper_hex() {
-                    fmt::UpperHex::fmt(self, f)
-                } else {
-                    fmt::Display::fmt(self, f)
-                }
-            }
-        }
-    )*};
-}
-debug! {
-    i8 i16 i32 i64 i128 isize
-    u8 u16 u32 u64 u128 usize
-}
+macro_rules! impl_Debug {
+    ($($T:ident)*) => {
+        $(
+            #[stable(feature = "rust1", since = "1.0.0")]
+            impl fmt::Debug for $T {
+                #[inline]
+                fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+                    if f.debug_lower_hex() {
+                        fmt::LowerHex::fmt(self, f)
+                    } else if f.debug_upper_hex() {
+                        fmt::UpperHex::fmt(self, f)
+                    } else {
+                        fmt::Display::fmt(self, f)
+                    }
+                }
+            }
+        )*
+    };
+}

// 2 digit decimal look up table
@@ -521,6 +509,11 @@ macro_rules! impl_Exp {
    };
 }

+impl_Debug! {
+    i8 i16 i32 i64 i128 isize
+    u8 u16 u32 u64 u128 usize
+}
+
 // Include wasm32 in here since it doesn't reflect the native pointer size, and
 // often cares strongly about getting a smaller code size.
 #[cfg(any(target_pointer_width = "64", target_arch = "wasm32"))]

View File

@@ -1228,15 +1228,14 @@ impl Attributes {
        for attr in self.other_attrs.lists(sym::doc).filter(|a| a.has_name(sym::alias)) {
            if let Some(values) = attr.meta_item_list() {
                for l in values {
-                    match l.lit().unwrap().kind {
-                        ast::LitKind::Str(s, _) => {
-                            aliases.insert(s);
-                        }
-                        _ => unreachable!(),
+                    if let Some(lit) = l.lit()
+                        && let ast::LitKind::Str(s, _) = lit.kind
+                    {
+                        aliases.insert(s);
                    }
                }
-            } else {
-                aliases.insert(attr.value_str().unwrap());
+            } else if let Some(value) = attr.value_str() {
+                aliases.insert(value);
            }
        }
        aliases.into_iter().collect::<Vec<_>>().into()

View File

@@ -29,8 +29,9 @@ use crate::clean::inline::build_external_trait;
 use crate::clean::{self, ItemId};
 use crate::config::{Options as RustdocOptions, OutputFormat, RenderOptions};
 use crate::formats::cache::Cache;
+use crate::passes;
 use crate::passes::Condition::*;
-use crate::passes::{self};
+use crate::passes::collect_intra_doc_links::LinkCollector;

 pub(crate) struct DocContext<'tcx> {
    pub(crate) tcx: TyCtxt<'tcx>,
@@ -427,6 +428,9 @@ pub(crate) fn run_global_ctxt(
    info!("Executing passes");

+    let mut visited = FxHashMap::default();
+    let mut ambiguous = FxIndexMap::default();
+
    for p in passes::defaults(show_coverage) {
        let run = match p.condition {
            Always => true,
@@ -436,18 +440,30 @@ pub(crate) fn run_global_ctxt(
        };
        if run {
            debug!("running pass {}", p.pass.name);
-            krate = tcx.sess.time(p.pass.name, || (p.pass.run)(krate, &mut ctxt));
+            if let Some(run_fn) = p.pass.run {
+                krate = tcx.sess.time(p.pass.name, || run_fn(krate, &mut ctxt));
+            } else {
+                let (k, LinkCollector { visited_links, ambiguous_links, .. }) =
+                    passes::collect_intra_doc_links::collect_intra_doc_links(krate, &mut ctxt);
+                krate = k;
+                visited = visited_links;
+                ambiguous = ambiguous_links;
+            }
        }
    }

    tcx.sess.time("check_lint_expectations", || tcx.check_expectations(Some(sym::rustdoc)));

+    krate = tcx.sess.time("create_format_cache", || Cache::populate(&mut ctxt, krate));
+
+    let mut collector =
+        LinkCollector { cx: &mut ctxt, visited_links: visited, ambiguous_links: ambiguous };
+    collector.resolve_ambiguities();
+
    if let Some(guar) = tcx.dcx().has_errors() {
        return Err(guar);
    }

-    krate = tcx.sess.time("create_format_cache", || Cache::populate(&mut ctxt, krate));
-
    Ok((krate, ctxt.render_options, ctxt.cache))
 }

View File

@@ -20,7 +20,7 @@ use crate::visit::DocVisitor;
 pub(crate) const CALCULATE_DOC_COVERAGE: Pass = Pass {
    name: "calculate-doc-coverage",
-    run: calculate_doc_coverage,
+    run: Some(calculate_doc_coverage),
    description: "counts the number of items with and without documentation",
 };

View File

@@ -20,7 +20,7 @@ use crate::visit::DocVisitor;
 pub(crate) const CHECK_DOC_TEST_VISIBILITY: Pass = Pass {
    name: "check_doc_test_visibility",
-    run: check_doc_test_visibility,
+    run: Some(check_doc_test_visibility),
    description: "run various visibility-related lints on doctests",
 };

View File

@@ -9,12 +9,12 @@ use std::ops::Range;
 use pulldown_cmark::LinkType;
 use rustc_ast::util::comments::may_have_doc_links;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet};
+use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet};
 use rustc_data_structures::intern::Interned;
 use rustc_errors::{Applicability, Diag, DiagMessage};
 use rustc_hir::def::Namespace::*;
 use rustc_hir::def::{DefKind, Namespace, PerNS};
-use rustc_hir::def_id::{CRATE_DEF_ID, DefId};
+use rustc_hir::def_id::{CRATE_DEF_ID, DefId, LOCAL_CRATE};
 use rustc_hir::{Mutability, Safety};
 use rustc_middle::ty::{Ty, TyCtxt};
 use rustc_middle::{bug, span_bug, ty};
@@ -30,23 +30,27 @@ use smallvec::{SmallVec, smallvec};
 use tracing::{debug, info, instrument, trace};

 use crate::clean::utils::find_nearest_parent_module;
-use crate::clean::{self, Crate, Item, ItemLink, PrimitiveType};
+use crate::clean::{self, Crate, Item, ItemId, ItemLink, PrimitiveType};
 use crate::core::DocContext;
 use crate::html::markdown::{MarkdownLink, MarkdownLinkRange, markdown_links};
 use crate::lint::{BROKEN_INTRA_DOC_LINKS, PRIVATE_INTRA_DOC_LINKS};
 use crate::passes::Pass;
 use crate::visit::DocVisitor;

-pub(crate) const COLLECT_INTRA_DOC_LINKS: Pass = Pass {
-    name: "collect-intra-doc-links",
-    run: collect_intra_doc_links,
-    description: "resolves intra-doc links",
-};
+pub(crate) const COLLECT_INTRA_DOC_LINKS: Pass =
+    Pass { name: "collect-intra-doc-links", run: None, description: "resolves intra-doc links" };

-fn collect_intra_doc_links(krate: Crate, cx: &mut DocContext<'_>) -> Crate {
-    let mut collector = LinkCollector { cx, visited_links: FxHashMap::default() };
+pub(crate) fn collect_intra_doc_links<'a, 'tcx>(
+    krate: Crate,
+    cx: &'a mut DocContext<'tcx>,
+) -> (Crate, LinkCollector<'a, 'tcx>) {
+    let mut collector = LinkCollector {
+        cx,
+        visited_links: FxHashMap::default(),
+        ambiguous_links: FxIndexMap::default(),
+    };
    collector.visit_crate(&krate);
-    krate
+    (krate, collector)
 }

 fn filter_assoc_items_by_name_and_namespace<'a>(
@@ -61,7 +65,7 @@ fn filter_assoc_items_by_name_and_namespace<'a>(
 }

 #[derive(Copy, Clone, Debug, Hash, PartialEq)]
-enum Res {
+pub(crate) enum Res {
    Def(DefKind, DefId),
    Primitive(PrimitiveType),
 }
@@ -234,7 +238,7 @@ impl UrlFragment {
 }

 #[derive(Clone, Debug, Hash, PartialEq, Eq)]
-struct ResolutionInfo {
+pub(crate) struct ResolutionInfo {
    item_id: DefId,
    module_id: DefId,
    dis: Option<Disambiguator>,
@@ -243,18 +247,64 @@ struct ResolutionInfo {
 }

 #[derive(Clone)]
-struct DiagnosticInfo<'a> {
+pub(crate) struct DiagnosticInfo<'a> {
    item: &'a Item,
    dox: &'a str,
    ori_link: &'a str,
    link_range: MarkdownLinkRange,
 }

-struct LinkCollector<'a, 'tcx> {
-    cx: &'a mut DocContext<'tcx>,
+pub(crate) struct OwnedDiagnosticInfo {
+    item: Item,
+    dox: String,
+    ori_link: String,
+    link_range: MarkdownLinkRange,
+}
+
+impl From<DiagnosticInfo<'_>> for OwnedDiagnosticInfo {
+    fn from(f: DiagnosticInfo<'_>) -> Self {
+        Self {
+            item: f.item.clone(),
+            dox: f.dox.to_string(),
+            ori_link: f.ori_link.to_string(),
+            link_range: f.link_range.clone(),
+        }
+    }
+}
+
+impl OwnedDiagnosticInfo {
+    pub(crate) fn into_info(&self) -> DiagnosticInfo<'_> {
+        DiagnosticInfo {
+            item: &self.item,
+            ori_link: &self.ori_link,
+            dox: &self.dox,
+            link_range: self.link_range.clone(),
+        }
+    }
+}
+
+pub(crate) struct LinkCollector<'a, 'tcx> {
+    pub(crate) cx: &'a mut DocContext<'tcx>,
    /// Cache the resolved links so we can avoid resolving (and emitting errors for) the same link.
    /// The link will be `None` if it could not be resolved (i.e. the error was cached).
-    visited_links: FxHashMap<ResolutionInfo, Option<(Res, Option<UrlFragment>)>>,
+    pub(crate) visited_links: FxHashMap<ResolutionInfo, Option<(Res, Option<UrlFragment>)>>,
+    /// According to `rustc_resolve`, these links are ambiguous.
+    ///
+    /// However, we cannot link to an item that has been stripped from the documentation. If all
+    /// but one of the "possibilities" are stripped, then there is no real ambiguity. To determine
+    /// if an ambiguity is real, we delay resolving them until after `Cache::populate`, then filter
+    /// every item that doesn't have a cached path.
+    ///
+    /// We could get correct results by simply delaying everything. This would have fewer happy
+    /// codepaths, but we want to distinguish different kinds of error conditions, and this is easy
+    /// to do by resolving links as soon as possible.
+    pub(crate) ambiguous_links: FxIndexMap<(ItemId, String), Vec<AmbiguousLinks>>,
+}
+
+pub(crate) struct AmbiguousLinks {
+    link_text: Box<str>,
+    diag_info: OwnedDiagnosticInfo,
+    resolved: Vec<(Res, Option<UrlFragment>)>,
 }

 impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
@@ -1001,6 +1051,10 @@ impl LinkCollector<'_, '_> {
        }
    }

+    pub(crate) fn save_link(&mut self, item_id: ItemId, link: ItemLink) {
+        self.cx.cache.intra_doc_links.entry(item_id).or_default().insert(link);
+    }
+
    /// This is the entry point for resolving an intra-doc link.
    ///
    /// FIXME(jynelson): this is way too many arguments
@@ -1024,7 +1078,7 @@ impl LinkCollector<'_, '_> {
        pp_link.as_ref().map_err(|err| err.report(self.cx, diag_info.clone())).ok()?;
        let disambiguator = *disambiguator;

-        let (mut res, fragment) = self.resolve_with_disambiguator_cached(
+        let mut resolved = self.resolve_with_disambiguator_cached(
            ResolutionInfo {
                item_id,
                module_id,
@@ -1040,6 +1094,139 @@ impl LinkCollector<'_, '_> {
            false,
        )?;

+        if resolved.len() > 1 {
+            let links = AmbiguousLinks {
+                link_text: link_text.clone(),
+                diag_info: diag_info.into(),
+                resolved,
+            };
+            self.ambiguous_links
+                .entry((item.item_id, path_str.to_string()))
+                .or_default()
+                .push(links);
+            None
+        } else if let Some((res, fragment)) = resolved.pop() {
+            self.compute_link(res, fragment, path_str, disambiguator, diag_info, link_text)
+        } else {
+            None
+        }
+    }
+
+    /// Returns `true` if a link could be generated from the given intra-doc information.
+    ///
+    /// This is a very light version of `format::href_with_root_path` since we're only interested
+    /// about whether we can generate a link to an item or not.
+    ///
+    /// * If `original_did` is local, then we check if the item is reexported or public.
+    /// * If `original_did` is not local, then we check if the crate it comes from is a direct
+    ///   public dependency.
+    fn validate_link(&self, original_did: DefId) -> bool {
+        let tcx = self.cx.tcx;
+        let def_kind = tcx.def_kind(original_did);
+        let did = match def_kind {
+            DefKind::AssocTy | DefKind::AssocFn | DefKind::AssocConst | DefKind::Variant => {
+                // documented on their parent's page
+                tcx.parent(original_did)
+            }
+            // If this a constructor, we get the parent (either a struct or a variant) and then
+            // generate the link for this item.
+            DefKind::Ctor(..) => return self.validate_link(tcx.parent(original_did)),
+            DefKind::ExternCrate => {
+                // Link to the crate itself, not the `extern crate` item.
+                if let Some(local_did) = original_did.as_local() {
+                    tcx.extern_mod_stmt_cnum(local_did).unwrap_or(LOCAL_CRATE).as_def_id()
+                } else {
+                    original_did
+                }
+            }
+            _ => original_did,
+        };
+
+        let cache = &self.cx.cache;
+        if !original_did.is_local()
+            && !cache.effective_visibilities.is_directly_public(tcx, did)
+            && !cache.document_private
+            && !cache.primitive_locations.values().any(|&id| id == did)
+        {
+            return false;
+        }
+
+        cache.paths.get(&did).is_some()
+            || cache.external_paths.get(&did).is_some()
+            || !did.is_local()
+    }
+
+    #[allow(rustc::potential_query_instability)]
+    pub(crate) fn resolve_ambiguities(&mut self) {
+        let mut ambiguous_links = mem::take(&mut self.ambiguous_links);
+        for ((item_id, path_str), info_items) in ambiguous_links.iter_mut() {
+            for info in info_items {
+                info.resolved.retain(|(res, _)| match res {
+                    Res::Def(_, def_id) => self.validate_link(*def_id),
+                    // Primitive types are always valid.
+                    Res::Primitive(_) => true,
+                });
+                let diag_info = info.diag_info.into_info();
+                match info.resolved.len() {
+                    1 => {
+                        let (res, fragment) = info.resolved.pop().unwrap();
+                        if let Some(link) = self.compute_link(
+                            res,
+                            fragment,
+                            path_str,
+                            None,
+                            diag_info,
+                            &info.link_text,
+                        ) {
+                            self.save_link(*item_id, link);
+                        }
+                    }
+                    0 => {
+                        report_diagnostic(
+                            self.cx.tcx,
+                            BROKEN_INTRA_DOC_LINKS,
+                            format!("all items matching `{path_str}` are private or doc(hidden)"),
+                            &diag_info,
+                            |diag, sp, _| {
+                                if let Some(sp) = sp {
+                                    diag.span_label(sp, "unresolved link");
+                                } else {
+                                    diag.note("unresolved link");
+                                }
+                            },
+                        );
+                    }
+                    _ => {
+                        let candidates = info
+                            .resolved
+                            .iter()
+                            .map(|(res, fragment)| {
+                                let def_id = if let Some(UrlFragment::Item(def_id)) = fragment {
+                                    Some(*def_id)
+                                } else {
+                                    None
+                                };
+                                (*res, def_id)
+                            })
+                            .collect::<Vec<_>>();
+                        ambiguity_error(self.cx, &diag_info, path_str, &candidates, true);
+                    }
+                }
+            }
+        }
+    }
+
+    fn compute_link(
+        &mut self,
+        mut res: Res,
+        fragment: Option<UrlFragment>,
+        path_str: &str,
+        disambiguator: Option<Disambiguator>,
+        diag_info: DiagnosticInfo<'_>,
+        link_text: &Box<str>,
+    ) -> Option<ItemLink> {
        // Check for a primitive which might conflict with a module
        // Report the ambiguity and require that the user specify which one they meant.
        // FIXME: could there ever be a primitive not in the type namespace?
@@ -1055,7 +1242,7 @@ impl LinkCollector<'_, '_> {
            } else {
                // `[char]` when a `char` module is in scope
                let candidates = &[(res, res.def_id(self.cx.tcx)), (prim, None)];
-                ambiguity_error(self.cx, &diag_info, path_str, candidates);
+                ambiguity_error(self.cx, &diag_info, path_str, candidates, true);
                return None;
            }
        }
@@ -1085,7 +1272,7 @@ impl LinkCollector<'_, '_> {
                }

                res.def_id(self.cx.tcx).map(|page_id| ItemLink {
-                    link: Box::<str>::from(&*ori_link.link),
+                    link: Box::<str>::from(&*diag_info.ori_link),
                    link_text: link_text.clone(),
                    page_id,
                    fragment,
@@ -1107,7 +1294,7 @@ impl LinkCollector<'_, '_> {
                let page_id = clean::register_res(self.cx, rustc_hir::def::Res::Def(kind, id));

                Some(ItemLink {
-                    link: Box::<str>::from(&*ori_link.link),
+                    link: Box::<str>::from(&*diag_info.ori_link),
                    link_text: link_text.clone(),
                    page_id,
                    fragment,
@@ -1220,10 +1407,10 @@ impl LinkCollector<'_, '_> {
        // If this call is intended to be recoverable, then pass true to silence.
        // This is only recoverable when path is failed to resolved.
        recoverable: bool,
-    ) -> Option<(Res, Option<UrlFragment>)> {
+    ) -> Option<Vec<(Res, Option<UrlFragment>)>> {
        if let Some(res) = self.visited_links.get(&key) {
            if res.is_some() || cache_errors {
-                return res.clone();
+                return res.clone().map(|r| vec![r]);
            }
        }
@@ -1248,13 +1435,14 @@ impl LinkCollector<'_, '_> {
        // and after removing duplicated kinds, only one remains, the `ambiguity_error` function
        // won't emit an error. So at this point, we can just take the first candidate as it was
        // the first retrieved and use it to generate the link.
-        if let [candidate, _candidate2, ..] = *candidates
-            && !ambiguity_error(self.cx, &diag, &key.path_str, &candidates)
-        {
-            candidates = vec![candidate];
+        if let [candidate, _candidate2, ..] = *candidates {
+            if !ambiguity_error(self.cx, &diag, &key.path_str, &candidates, false) {
+                candidates = vec![candidate];
+            }
        }

-        if let &[(res, def_id)] = candidates.as_slice() {
+        let mut out = Vec::with_capacity(candidates.len());
+        for (res, def_id) in candidates {
            let fragment = match (&key.extra_fragment, def_id) {
                (Some(_), Some(def_id)) => {
                    report_anchor_conflict(self.cx, diag, def_id);
@@ -1264,15 +1452,14 @@ impl LinkCollector<'_, '_> {
                (None, Some(def_id)) => Some(UrlFragment::Item(def_id)),
                (None, None) => None,
            };
-            let r = Some((res, fragment));
-            self.visited_links.insert(key, r.clone());
-            return r;
+            out.push((res, fragment));
        }

-        if cache_errors {
+        if let [r] = out.as_slice() {
+            self.visited_links.insert(key, Some(r.clone()));
+        } else if cache_errors {
            self.visited_links.insert(key, None);
        }
-        None
+        Some(out)
    }

    /// After parsing the disambiguator, resolve the main part of the link.
@@ -2046,6 +2233,7 @@ fn ambiguity_error(
    diag_info: &DiagnosticInfo<'_>,
    path_str: &str,
    candidates: &[(Res, Option<DefId>)],
+    emit_error: bool,
 ) -> bool {
    let mut descrs = FxHashSet::default();
    let kinds = candidates
@@ -2061,6 +2249,8 @@ fn ambiguity_error(
        // There is no way for users to disambiguate at this point, so better return the first
        // candidate and not show a warning.
        return false;
+    } else if !emit_error {
+        return true;
    }

    let mut msg = format!("`{path_str}` is ");
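The machinery above implements #131691: links that `rustc_resolve` reports as ambiguous are stashed in `ambiguous_links` and only resolved in `resolve_ambiguities()` once `Cache::populate` has run, so candidates that were stripped from the rendered docs can be filtered out before deciding whether a real ambiguity exists. A much-simplified, self-contained sketch of that two-phase idea (all types invented, not the rustdoc API):

use std::collections::HashMap;

#[derive(Default)]
struct Resolver {
    /// Links with more than one candidate, kept until the cache is ready.
    ambiguous: HashMap<String, Vec<&'static str>>,
    /// Populated late (stands in for rustdoc's `Cache::populate`).
    documented: Vec<&'static str>,
}

impl Resolver {
    fn record(&mut self, link: &str, candidates: Vec<&'static str>) -> Option<&'static str> {
        if candidates.len() == 1 {
            return Some(candidates[0]); // unambiguous: resolve eagerly
        }
        // Ambiguous: defer until the documentation cache exists.
        self.ambiguous.insert(link.to_owned(), candidates);
        None
    }

    fn resolve_ambiguities(&mut self) -> Vec<(String, Option<&'static str>)> {
        let ambiguous = std::mem::take(&mut self.ambiguous);
        ambiguous
            .into_iter()
            .map(|(link, mut candidates)| {
                // Drop candidates that never made it into the rendered docs.
                candidates.retain(|c| self.documented.contains(c));
                let resolved = if candidates.len() == 1 { Some(candidates[0]) } else { None };
                (link, resolved)
            })
            .collect()
    }
}

fn main() {
    let mut r = Resolver::default();
    assert_eq!(r.record("Foo", vec!["crate::a::Foo", "crate::b::Foo"]), None);
    r.documented = vec!["crate::a::Foo"]; // only one candidate is actually documented
    println!("{:?}", r.resolve_ambiguities());
}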

View File

@@ -16,7 +16,7 @@ use crate::visit::DocVisitor;
 pub(crate) const COLLECT_TRAIT_IMPLS: Pass = Pass {
    name: "collect-trait-impls",
-    run: collect_trait_impls,
+    run: Some(collect_trait_impls),
    description: "retrieves trait impls for items in the crate",
 };

View File

@@ -14,7 +14,7 @@ use crate::core::DocContext;
 use crate::visit::DocVisitor;

 pub(crate) const RUN_LINTS: Pass =
-    Pass { name: "run-lints", run: run_lints, description: "runs some of rustdoc's lints" };
+    Pass { name: "run-lints", run: Some(run_lints), description: "runs some of rustdoc's lints" };

 struct Linter<'a, 'tcx> {
    cx: &'a mut DocContext<'tcx>,

View File

@@ -47,7 +47,7 @@ pub(crate) use self::lint::RUN_LINTS;
 #[derive(Copy, Clone)]
 pub(crate) struct Pass {
    pub(crate) name: &'static str,
-    pub(crate) run: fn(clean::Crate, &mut DocContext<'_>) -> clean::Crate,
+    pub(crate) run: Option<fn(clean::Crate, &mut DocContext<'_>) -> clean::Crate>,
    pub(crate) description: &'static str,
 }
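Making `run` an `Option` lets the intra-doc-link pass stay in the pass table (so it keeps its position and `--passes` listing) while carrying `None`, and the driver special-cases it, as the `run_global_ctxt` hunk earlier shows. A small sketch of the same pattern with invented types:

struct Ctxt {
    log: Vec<String>,
}

#[derive(Copy, Clone)]
struct Pass {
    name: &'static str,
    /// `None` means "listed here, but driven specially by the caller".
    run: Option<fn(&mut Ctxt)>,
}

fn strip_hidden(cx: &mut Ctxt) {
    cx.log.push("strip-hidden".into());
}

const PASSES: &[Pass] = &[
    Pass { name: "strip-hidden", run: Some(strip_hidden) },
    Pass { name: "collect-intra-doc-links", run: None },
];

fn main() {
    let mut cx = Ctxt { log: Vec::new() };
    for p in PASSES {
        match p.run {
            Some(run_fn) => run_fn(&mut cx),
            // The special pass needs extra state threaded in and out,
            // so the driver handles it out of band.
            None => cx.log.push(format!("{} handled separately", p.name)),
        }
    }
    println!("{:?}", cx.log);
}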

View File

@@ -13,7 +13,7 @@ use crate::passes::Pass;
 pub(crate) const PROPAGATE_DOC_CFG: Pass = Pass {
    name: "propagate-doc-cfg",
-    run: propagate_doc_cfg,
+    run: Some(propagate_doc_cfg),
    description: "propagates `#[doc(cfg(...))]` to child items",
 };

View File

@@ -16,7 +16,7 @@ use crate::passes::Pass;
 pub(crate) const PROPAGATE_STABILITY: Pass = Pass {
    name: "propagate-stability",
-    run: propagate_stability,
+    run: Some(propagate_stability),
    description: "propagates stability to child items",
 };

View File

@@ -8,7 +8,7 @@ use crate::passes::Pass;
 pub(crate) const STRIP_ALIASED_NON_LOCAL: Pass = Pass {
    name: "strip-aliased-non-local",
-    run: strip_aliased_non_local,
+    run: Some(strip_aliased_non_local),
    description: "strips all non-local private aliased items from the output",
 };


@ -16,7 +16,7 @@ use crate::passes::{ImplStripper, Pass};
 pub(crate) const STRIP_HIDDEN: Pass = Pass {
     name: "strip-hidden",
-    run: strip_hidden,
+    run: Some(strip_hidden),
     description: "strips all `#[doc(hidden)]` items from the output",
 };


@ -8,7 +8,7 @@ use crate::passes::{ImportStripper, Pass};
 pub(crate) const STRIP_PRIV_IMPORTS: Pass = Pass {
     name: "strip-priv-imports",
-    run: strip_priv_imports,
+    run: Some(strip_priv_imports),
     description: "strips all private import statements (`use`, `extern crate`) from a crate",
 };


@ -8,7 +8,7 @@ use crate::passes::{ImplStripper, ImportStripper, Pass, Stripper};
 pub(crate) const STRIP_PRIVATE: Pass = Pass {
     name: "strip-private",
-    run: strip_private,
+    run: Some(strip_private),
     description: "strips all private items from a crate which cannot be seen externally, \
         implies strip-priv-imports",
 };


@ -613,26 +613,8 @@ impl Rewrite for ast::GenericBound {
             ast::GenericBound::Trait(ref poly_trait_ref) => {
                 let snippet = context.snippet(self.span());
                 let has_paren = snippet.starts_with('(') && snippet.ends_with(')');
-                let ast::TraitBoundModifiers {
-                    constness,
-                    asyncness,
-                    polarity,
-                } = poly_trait_ref.modifiers;
-                let mut constness = constness.as_str().to_string();
-                if !constness.is_empty() {
-                    constness.push(' ');
-                }
-                let mut asyncness = asyncness.as_str().to_string();
-                if !asyncness.is_empty() {
-                    asyncness.push(' ');
-                }
-                let polarity = polarity.as_str();
-                let shape = shape
-                    .offset_left(constness.len() + polarity.len())
-                    .max_width_error(shape.width, self.span())?;
                 poly_trait_ref
                     .rewrite_result(context, shape)
-                    .map(|s| format!("{constness}{asyncness}{polarity}{s}"))
                     .map(|s| if has_paren { format!("({})", s) } else { s })
             }
             ast::GenericBound::Use(ref args, span) => {
@ -756,19 +738,41 @@ impl Rewrite for ast::PolyTraitRef {
     }

     fn rewrite_result(&self, context: &RewriteContext<'_>, shape: Shape) -> RewriteResult {
-        if let Some(lifetime_str) = rewrite_bound_params(context, shape, &self.bound_generic_params)
+        let (binder, shape) = if let Some(lifetime_str) =
+            rewrite_bound_params(context, shape, &self.bound_generic_params)
         {
             // 6 is "for<> ".len()
             let extra_offset = lifetime_str.len() + 6;
             let shape = shape
                 .offset_left(extra_offset)
                 .max_width_error(shape.width, self.span)?;
-            let path_str = self.trait_ref.rewrite_result(context, shape)?;
-            Ok(format!("for<{lifetime_str}> {path_str}"))
+            (format!("for<{lifetime_str}> "), shape)
         } else {
-            self.trait_ref.rewrite_result(context, shape)
+            (String::new(), shape)
+        };
+        let ast::TraitBoundModifiers {
+            constness,
+            asyncness,
+            polarity,
+        } = self.modifiers;
+        let mut constness = constness.as_str().to_string();
+        if !constness.is_empty() {
+            constness.push(' ');
         }
+        let mut asyncness = asyncness.as_str().to_string();
+        if !asyncness.is_empty() {
+            asyncness.push(' ');
+        }
+        let polarity = polarity.as_str();
+        let shape = shape
+            .offset_left(constness.len() + polarity.len())
+            .max_width_error(shape.width, self.span)?;
+        let path_str = self.trait_ref.rewrite_result(context, shape)?;
+        Ok(format!(
+            "{binder}{constness}{asyncness}{polarity}{path_str}"
+        ))
     }
 }
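
The net effect of moving the modifier handling into `PolyTraitRef` is the assembly order in the final `format!`: the `for<..>` binder comes first, then the `const`/`async`/polarity modifiers, then the trait path. A standalone illustration of that ordering (plain string assembly, not rustfmt code):

    // Mirrors the `{binder}{constness}{asyncness}{polarity}{path_str}` format
    // string above; each piece already carries its trailing space when present.
    fn assemble(binder: &str, constness: &str, asyncness: &str, polarity: &str, path: &str) -> String {
        format!("{binder}{constness}{asyncness}{polarity}{path}")
    }

    fn main() {
        let formatted = assemble("for<'a> ", "", "async ", "", "Fn(&'a ())");
        assert_eq!(formatted, "for<'a> async Fn(&'a ())");
        println!("{formatted}");
    }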


@ -1,3 +1,5 @@
 // rustfmt-edition: 2018
 fn foo() -> impl async Fn() {}
+
+fn bar() -> impl for<'a> async Fn(&'a ()) {}


@ -1,10 +0,0 @@
//@ known-bug: #130921
//@ compile-flags: -Zvalidate-mir -Copt-level=0 --crate-type lib
pub fn hello() -> [impl Sized; 2] {
    if false {
        let x = hello();
        let _: &[i32] = &x;
    }
    todo!()
}


@ -0,0 +1,15 @@
// This test ensures that ambiguities (not) resolved at a later stage still emit an error.
#![deny(rustdoc::broken_intra_doc_links)]
#![crate_name = "foo"]
#[doc(hidden)]
pub struct Thing {}
#[allow(non_snake_case)]
#[doc(hidden)]
pub fn Thing() {}
/// Do stuff with [`Thing`].
//~^ ERROR all items matching `Thing` are private or doc(hidden)
pub fn repro(_: Thing) {}


@ -0,0 +1,14 @@
error: all items matching `Thing` are private or doc(hidden)
  --> $DIR/filter-out-private-2.rs:13:21
   |
LL | /// Do stuff with [`Thing`].
   |                     ^^^^^ unresolved link
   |
note: the lint level is defined here
  --> $DIR/filter-out-private-2.rs:3:9
   |
LL | #![deny(rustdoc::broken_intra_doc_links)]
   |         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

error: aborting due to 1 previous error


@ -0,0 +1,13 @@
// This test ensures that ambiguities resolved at a later stage still emit an error.
#![deny(rustdoc::broken_intra_doc_links)]
#![crate_name = "foo"]
pub struct Thing {}
#[allow(non_snake_case)]
pub fn Thing() {}
/// Do stuff with [`Thing`].
//~^ ERROR `Thing` is both a function and a struct
pub fn repro(_: Thing) {}


@ -0,0 +1,22 @@
error: `Thing` is both a function and a struct
  --> $DIR/filter-out-private.rs:11:21
   |
LL | /// Do stuff with [`Thing`].
   |                     ^^^^^ ambiguous link
   |
note: the lint level is defined here
  --> $DIR/filter-out-private.rs:3:9
   |
LL | #![deny(rustdoc::broken_intra_doc_links)]
   |         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
help: to link to the function, add parentheses
   |
LL | /// Do stuff with [`Thing()`].
   |                          ++
help: to link to the struct, prefix with `struct@`
   |
LL | /// Do stuff with [`struct@Thing`].
   |                     +++++++

error: aborting due to 1 previous error


@ -0,0 +1,26 @@
// This test ensures that private/hidden items don't create ambiguity.
// This is a regression test for <https://github.com/rust-lang/rust/issues/130233>.
#![deny(rustdoc::broken_intra_doc_links)]
#![crate_name = "foo"]
pub struct Thing {}
#[doc(hidden)]
#[allow(non_snake_case)]
pub fn Thing() {}
pub struct Bar {}
#[allow(non_snake_case)]
fn Bar() {}
//@ has 'foo/fn.repro.html'
//@ has - '//*[@class="toggle top-doc"]/*[@class="docblock"]//a/@href' 'struct.Thing.html'
/// Do stuff with [`Thing`].
pub fn repro(_: Thing) {}
//@ has 'foo/fn.repro2.html'
//@ has - '//*[@class="toggle top-doc"]/*[@class="docblock"]//a/@href' 'struct.Bar.html'
/// Do stuff with [`Bar`].
pub fn repro2(_: Bar) {}


@ -9,6 +9,7 @@
 //@ ignore-musl musl doesn't support dynamic libraries (at least when the original test was written).
 //@ needs-unwind
 //@ compile-flags: -g -Copt-level=0 -Cstrip=none -Cforce-frame-pointers=yes
+//@ ignore-emscripten Requires custom symbolization code
 //@ aux-crate: dylib_dep_helper=dylib-dep-helper.rs
 //@ aux-crate: auxiliary=dylib-dep-helper-aux.rs
 //@ run-pass


@ -10,6 +10,7 @@
 //@ compile-flags: -Cstrip=none -Cdebuginfo=line-tables-only
 //@ ignore-android FIXME #17520
 //@ ignore-fuchsia Backtraces not symbolized
+//@ ignore-emscripten Requires custom symbolization code
 //@ needs-unwind
 //@ aux-build: line-tables-only-helper.rs


@ -0,0 +1,12 @@
//@ check-pass
//@ compile-flags: -Zvalidate-mir
fn hello() -> &'static [impl Sized; 0] {
    if false {
        let x = hello();
        let _: &[i32] = x;
    }
    &[]
}
fn main() {}


@ -0,0 +1,19 @@
//@ compile-flags: -Znext-solver
trait Mirror {
    type Assoc;
}

impl<T> Mirror for T {
    type Assoc = T;
}

fn arg() -> &'static [i32; 1] { todo!() }
fn arg_error(x: <fn() as Mirror>::Assoc, y: ()) { todo!() }

fn main() {
    // Should suggest to reverse the args...
    // but if we don't normalize the expected, then we don't.
    arg_error((), || ());
    //~^ ERROR arguments to this function are incorrect
}
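
As the comments note, the swap suggestion only fires if the expected type is normalized first. For reference, the projection in `arg_error`'s signature is trivially normalizable; this standalone snippet (independent of the test) shows that `<fn() as Mirror>::Assoc` is just `fn()`:

    trait Mirror {
        type Assoc;
    }

    impl<T> Mirror for T {
        type Assoc = T;
    }

    fn main() {
        // A value of the projected type is just a function pointer.
        let f: <fn() as Mirror>::Assoc = main as fn();
        let _: fn() = f; // same type once the projection is normalized
    }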


@ -0,0 +1,21 @@
error[E0308]: arguments to this function are incorrect
  --> $DIR/coerce-in-may-coerce.rs:17:5
   |
LL |     arg_error((), || ());
   |     ^^^^^^^^^ --  ----- expected `()`, found `{closure@$DIR/coerce-in-may-coerce.rs:17:19: 17:21}`
   |               |
   |               expected `<fn() as Mirror>::Assoc`, found `()`
   |
note: function defined here
  --> $DIR/coerce-in-may-coerce.rs:12:4
   |
LL | fn arg_error(x: <fn() as Mirror>::Assoc, y: ()) { todo!() }
   |    ^^^^^^^^^ --------------------------  -----
help: swap these arguments
   |
LL |     arg_error(|| (), ());
   |               ~~~~~~~~~~~

error: aborting due to 1 previous error

For more information about this error, try `rustc --explain E0308`.


@ -1,3 +1,15 @@
+error[E0308]: mismatched types
+  --> $DIR/lazy_subtyping_of_opaques.rs:11:5
+   |
+LL | fn reify_as_tait() -> Thunk<Tait> {
+   |                       ----------- expected `Thunk<_>` because of return type
+LL |
+LL |     Thunk::new(|cont| cont)
+   |     ^^^^^^^^^^^^^^^^^^^^^^^ expected `Thunk<_>`, found `()`
+   |
+   = note: expected struct `Thunk<_>`
+              found unit type `()`
+
 error[E0277]: expected a `FnOnce()` closure, found `()`
   --> $DIR/lazy_subtyping_of_opaques.rs:11:23
    |
@ -12,19 +24,13 @@ error[E0277]: expected a `FnOnce()` closure, found `()`
    |
 LL | fn reify_as_tait() -> Thunk<Tait> {
    |                       ^^^^^^^^^^^ expected an `FnOnce()` closure, found `()`
-LL |
-LL |     Thunk::new(|cont| cont)
-   |     ----------------------- return type was inferred to be `{type error}` here
    |
    = help: the trait `FnOnce()` is not implemented for `()`
    = note: wrap the `()` in a closure with no arguments: `|| { /* code */ }`
-
-error[E0308]: mismatched types
-  --> $DIR/lazy_subtyping_of_opaques.rs:11:5
-   |
-LL |     Thunk::new(|cont| cont)
-   |     ^^^^^^^^^^^^^^^^^^^^^^^ expected `Thunk<_>`, found `()`
-   |
-   = note: expected struct `Thunk<_>`
-              found unit type `()`

 error: aborting due to 3 previous errors

 Some errors have detailed explanations: E0277, E0308.