Mirror of https://github.com/rust-lang/rust.git, synced 2024-11-25 08:13:41 +00:00
Rollup merge of #110153 - DaniPopes:compiler-typos, r=Nilstrieb

Fix typos in compiler

I ran [`typos -w compiler`](https://github.com/crate-ci/typos) to fix typos in the `compiler` directory. Refs #110150
commit 331e7c3659
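For anyone reproducing this kind of cleanup, the change was generated mechanically rather than typed by hand. A minimal sketch of the workflow, assuming the `typos` binary is installed from the `typos-cli` crate (the crate name and the dry-run step are assumptions here; only the `typos -w compiler` invocation is taken from the commit message):

```sh
# Assumption: the checker is distributed on crates.io as `typos-cli`.
cargo install typos-cli

# Report suspected typos under compiler/ without modifying any files.
typos compiler

# Write the corrections in place, as was done for this commit.
typos -w compiler
```

After the automated pass, renamed identifiers (e.g. `ConstraintGraphDirecton` -> `ConstraintGraphDirection`) still need a manual review plus a full build, since a spelling fix to a public name touches every use site, as the diff below shows.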
@@ -94,7 +94,7 @@ impl FormatArguments {
 }
 if !matches!(arg.kind, FormatArgumentKind::Captured(..)) {
 // This is an explicit argument.
-// Make sure that all arguments so far are explcit.
+// Make sure that all arguments so far are explicit.
 assert_eq!(
 self.num_explicit_args,
 self.arguments.len(),
@@ -137,7 +137,7 @@ pub struct AsyncNonMoveClosureNotSupported {

 #[derive(Diagnostic, Clone, Copy)]
 #[diag(ast_lowering_functional_record_update_destructuring_assignment)]
-pub struct FunctionalRecordUpdateDestructuringAssignemnt {
+pub struct FunctionalRecordUpdateDestructuringAssignment {
 #[primary_span]
 #[suggestion(code = "", applicability = "machine-applicable")]
 pub span: Span,
@@ -1,6 +1,6 @@
 use super::errors::{
 AsyncGeneratorsNotSupported, AsyncNonMoveClosureNotSupported, AwaitOnlyInAsyncFnAndBlocks,
-BaseExpressionDoubleDot, ClosureCannotBeStatic, FunctionalRecordUpdateDestructuringAssignemnt,
+BaseExpressionDoubleDot, ClosureCannotBeStatic, FunctionalRecordUpdateDestructuringAssignment,
 GeneratorTooManyParameters, InclusiveRangeWithNoEnd, NotSupportedForLifetimeBinderAsyncClosure,
 UnderscoreExprLhsAssign,
 };
@@ -434,7 +434,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
 // `if let pat = val` or `if foo && let pat = val`, as we _do_ want `val` to live beyond the
 // condition in this case.
 //
-// In order to mantain the drop behavior for the non `let` parts of the condition,
+// In order to maintain the drop behavior for the non `let` parts of the condition,
 // we still wrap them in terminating scopes, e.g. `if foo && let pat = val` essentially
 // gets transformed into `if { let _t = foo; _t } && let pat = val`
 match &cond.kind {
@@ -1232,7 +1232,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
 );
 let fields_omitted = match &se.rest {
 StructRest::Base(e) => {
-self.tcx.sess.emit_err(FunctionalRecordUpdateDestructuringAssignemnt {
+self.tcx.sess.emit_err(FunctionalRecordUpdateDestructuringAssignment {
 span: e.span,
 });
 true
@@ -13,7 +13,7 @@ use crate::{
 /// The construct graph organizes the constraints by their end-points.
 /// It can be used to view a `R1: R2` constraint as either an edge `R1
 /// -> R2` or `R2 -> R1` depending on the direction type `D`.
-pub(crate) struct ConstraintGraph<D: ConstraintGraphDirecton> {
+pub(crate) struct ConstraintGraph<D: ConstraintGraphDirection> {
 _direction: D,
 first_constraints: IndexVec<RegionVid, Option<OutlivesConstraintIndex>>,
 next_constraints: IndexVec<OutlivesConstraintIndex, Option<OutlivesConstraintIndex>>,
@@ -25,7 +25,7 @@ pub(crate) type ReverseConstraintGraph = ConstraintGraph<Reverse>;

 /// Marker trait that controls whether a `R1: R2` constraint
 /// represents an edge `R1 -> R2` or `R2 -> R1`.
-pub(crate) trait ConstraintGraphDirecton: Copy + 'static {
+pub(crate) trait ConstraintGraphDirection: Copy + 'static {
 fn start_region(c: &OutlivesConstraint<'_>) -> RegionVid;
 fn end_region(c: &OutlivesConstraint<'_>) -> RegionVid;
 fn is_normal() -> bool;
@@ -38,7 +38,7 @@ pub(crate) trait ConstraintGraphDirecton: Copy + 'static {
 #[derive(Copy, Clone, Debug)]
 pub(crate) struct Normal;

-impl ConstraintGraphDirecton for Normal {
+impl ConstraintGraphDirection for Normal {
 fn start_region(c: &OutlivesConstraint<'_>) -> RegionVid {
 c.sup
 }
@@ -59,7 +59,7 @@ impl ConstraintGraphDirecton for Normal {
 #[derive(Copy, Clone, Debug)]
 pub(crate) struct Reverse;

-impl ConstraintGraphDirecton for Reverse {
+impl ConstraintGraphDirection for Reverse {
 fn start_region(c: &OutlivesConstraint<'_>) -> RegionVid {
 c.sub
 }
@@ -73,7 +73,7 @@ impl ConstraintGraphDirecton for Reverse {
 }
 }

-impl<D: ConstraintGraphDirecton> ConstraintGraph<D> {
+impl<D: ConstraintGraphDirection> ConstraintGraph<D> {
 /// Creates a "dependency graph" where each region constraint `R1:
 /// R2` is treated as an edge `R1 -> R2`. We use this graph to
 /// construct SCCs for region inference but also for error
@@ -133,7 +133,7 @@ impl<D: ConstraintGraphDirecton> ConstraintGraph<D> {
 }
 }

-pub(crate) struct Edges<'s, 'tcx, D: ConstraintGraphDirecton> {
+pub(crate) struct Edges<'s, 'tcx, D: ConstraintGraphDirection> {
 graph: &'s ConstraintGraph<D>,
 constraints: &'s OutlivesConstraintSet<'tcx>,
 pointer: Option<OutlivesConstraintIndex>,
@@ -141,7 +141,7 @@ pub(crate) struct Edges<'s, 'tcx, D: ConstraintGraphDirecton> {
 static_region: RegionVid,
 }

-impl<'s, 'tcx, D: ConstraintGraphDirecton> Iterator for Edges<'s, 'tcx, D> {
+impl<'s, 'tcx, D: ConstraintGraphDirection> Iterator for Edges<'s, 'tcx, D> {
 type Item = OutlivesConstraint<'tcx>;

 fn next(&mut self) -> Option<Self::Item> {
@@ -174,13 +174,13 @@ impl<'s, 'tcx, D: ConstraintGraphDirecton> Iterator for Edges<'s, 'tcx, D> {
 /// This struct brings together a constraint set and a (normal, not
 /// reverse) constraint graph. It implements the graph traits and is
 /// usd for doing the SCC computation.
-pub(crate) struct RegionGraph<'s, 'tcx, D: ConstraintGraphDirecton> {
+pub(crate) struct RegionGraph<'s, 'tcx, D: ConstraintGraphDirection> {
 set: &'s OutlivesConstraintSet<'tcx>,
 constraint_graph: &'s ConstraintGraph<D>,
 static_region: RegionVid,
 }

-impl<'s, 'tcx, D: ConstraintGraphDirecton> RegionGraph<'s, 'tcx, D> {
+impl<'s, 'tcx, D: ConstraintGraphDirection> RegionGraph<'s, 'tcx, D> {
 /// Creates a "dependency graph" where each region constraint `R1:
 /// R2` is treated as an edge `R1 -> R2`. We use this graph to
 /// construct SCCs for region inference but also for error
@@ -202,11 +202,11 @@ impl<'s, 'tcx, D: ConstraintGraphDirecton> RegionGraph<'s, 'tcx, D> {
 }
 }

-pub(crate) struct Successors<'s, 'tcx, D: ConstraintGraphDirecton> {
+pub(crate) struct Successors<'s, 'tcx, D: ConstraintGraphDirection> {
 edges: Edges<'s, 'tcx, D>,
 }

-impl<'s, 'tcx, D: ConstraintGraphDirecton> Iterator for Successors<'s, 'tcx, D> {
+impl<'s, 'tcx, D: ConstraintGraphDirection> Iterator for Successors<'s, 'tcx, D> {
 type Item = RegionVid;

 fn next(&mut self) -> Option<Self::Item> {
@@ -214,23 +214,25 @@ impl<'s, 'tcx, D: ConstraintGraphDirecton> Iterator for Successors<'s, 'tcx, D>
 }
 }

-impl<'s, 'tcx, D: ConstraintGraphDirecton> graph::DirectedGraph for RegionGraph<'s, 'tcx, D> {
+impl<'s, 'tcx, D: ConstraintGraphDirection> graph::DirectedGraph for RegionGraph<'s, 'tcx, D> {
 type Node = RegionVid;
 }

-impl<'s, 'tcx, D: ConstraintGraphDirecton> graph::WithNumNodes for RegionGraph<'s, 'tcx, D> {
+impl<'s, 'tcx, D: ConstraintGraphDirection> graph::WithNumNodes for RegionGraph<'s, 'tcx, D> {
 fn num_nodes(&self) -> usize {
 self.constraint_graph.first_constraints.len()
 }
 }

-impl<'s, 'tcx, D: ConstraintGraphDirecton> graph::WithSuccessors for RegionGraph<'s, 'tcx, D> {
+impl<'s, 'tcx, D: ConstraintGraphDirection> graph::WithSuccessors for RegionGraph<'s, 'tcx, D> {
 fn successors(&self, node: Self::Node) -> <Self as graph::GraphSuccessors<'_>>::Iter {
 self.outgoing_regions(node)
 }
 }

-impl<'s, 'tcx, D: ConstraintGraphDirecton> graph::GraphSuccessors<'_> for RegionGraph<'s, 'tcx, D> {
+impl<'s, 'tcx, D: ConstraintGraphDirection> graph::GraphSuccessors<'_>
+for RegionGraph<'s, 'tcx, D>
+{
 type Item = RegionVid;
 type Iter = Successors<'s, 'tcx, D>;
 }
@@ -2600,7 +2600,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
 self.implicit_region_bound,
 self.param_env,
 location.to_locations(),
-DUMMY_SP, // irrelevant; will be overrided.
+DUMMY_SP, // irrelevant; will be overridden.
 ConstraintCategory::Boring, // same as above.
 &mut self.borrowck_context.constraints,
 )
@@ -42,7 +42,7 @@ struct MacroInput {
 fmtstr: P<Expr>,
 args: FormatArguments,
 /// Whether the first argument was a string literal or a result from eager macro expansion.
-/// If it's not a string literal, we disallow implicit arugment capturing.
+/// If it's not a string literal, we disallow implicit argument capturing.
 ///
 /// This does not correspond to whether we can treat spans to the literal normally, as the whole
 /// invocation might be the result of another macro expansion, in which case this flag may still be true.
@@ -141,7 +141,7 @@ codegen_ssa_msvc_missing_linker = the msvc targets depend on the msvc linker but

 codegen_ssa_check_installed_visual_studio = please ensure that Visual Studio 2017 or later, or Build Tools for Visual Studio were installed with the Visual C++ option.

-codegen_ssa_unsufficient_vs_code_product = VS Code is a different product, and is not sufficient.
+codegen_ssa_insufficient_vs_code_product = VS Code is a different product, and is not sufficient.

 codegen_ssa_processing_dymutil_failed = processing debug info with `dsymutil` failed: {$status}
 .note = {$output}
@@ -923,7 +923,7 @@ fn link_natively<'a>(
 if sess.target.is_like_msvc && linker_not_found {
 sess.emit_note(errors::MsvcMissingLinker);
 sess.emit_note(errors::CheckInstalledVisualStudio);
-sess.emit_note(errors::UnsufficientVSCodeProduct);
+sess.emit_note(errors::InsufficientVSCodeProduct);
 }
 sess.abort_if_errors();
 }
@@ -405,8 +405,8 @@ pub struct MsvcMissingLinker;
 pub struct CheckInstalledVisualStudio;

 #[derive(Diagnostic)]
-#[diag(codegen_ssa_unsufficient_vs_code_product)]
-pub struct UnsufficientVSCodeProduct;
+#[diag(codegen_ssa_insufficient_vs_code_product)]
+pub struct InsufficientVSCodeProduct;

 #[derive(Diagnostic)]
 #[diag(codegen_ssa_processing_dymutil_failed)]
@@ -205,7 +205,7 @@ pub(crate) fn turn_into_const_value<'tcx>(
 let cid = key.value;
 let def_id = cid.instance.def.def_id();
 let is_static = tcx.is_static(def_id);
-// This is just accessing an already computed constant, so no need to check alginment here.
+// This is just accessing an already computed constant, so no need to check alignment here.
 let ecx = mk_eval_cx(
 tcx,
 tcx.def_span(key.value.instance.def_id()),
@@ -135,4 +135,4 @@ expand_proc_macro_panicked =
 .help = message: {$message}

 expand_proc_macro_derive_tokens =
-proc-macro derive produced unparseable tokens
+proc-macro derive produced unparsable tokens
@@ -309,7 +309,7 @@ declare_features! (
 (active, associated_type_defaults, "1.2.0", Some(29661), None),
 /// Allows `async || body` closures.
 (active, async_closure, "1.37.0", Some(62290), None),
-/// Alows async functions to be declared, implemented, and used in traits.
+/// Allows async functions to be declared, implemented, and used in traits.
 (incomplete, async_fn_in_trait, "1.66.0", Some(91611), None),
 /// Allows `extern "C-unwind" fn` to enable unwinding across ABI boundaries.
 (active, c_unwind, "1.52.0", Some(74990), None),
@@ -58,7 +58,7 @@ impl<'tcx> Bounds<'tcx> {
 pub fn push_sized(&mut self, tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, span: Span) {
 let sized_def_id = tcx.require_lang_item(LangItem::Sized, Some(span));
 let trait_ref = ty::Binder::dummy(tcx.mk_trait_ref(sized_def_id, [ty]));
-// Preferrable to put this obligation first, since we report better errors for sized ambiguity.
+// Preferable to put this obligation first, since we report better errors for sized ambiguity.
 self.predicates.insert(0, (trait_ref.without_const().to_predicate(tcx), span));
 }

@@ -308,7 +308,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 let rcvr_ty = self.node_ty(rcvr.hir_id);
 // Get the evaluated type *after* calling the method call, so that the influence
 // of the arguments can be reflected in the receiver type. The receiver
-// expression has the type *before* theis analysis is done.
+// expression has the type *before* this analysis is done.
 let ty = match self.lookup_probe_for_diagnostic(
 segment.ident,
 rcvr_ty,
@@ -120,7 +120,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 ty
 }

-pub(super) fn check_expr_coercable_to_type(
+pub(super) fn check_expr_coercible_to_type(
 &self,
 expr: &'tcx hir::Expr<'tcx>,
 expected: Ty<'tcx>,
@@ -1128,7 +1128,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 }
 };

-// This is (basically) inlined `check_expr_coercable_to_type`, but we want
+// This is (basically) inlined `check_expr_coercible_to_type`, but we want
 // to suggest an additional fixup here in `suggest_deref_binop`.
 let rhs_ty = self.check_expr_with_hint(&rhs, lhs_ty);
 if let (_, Some(mut diag)) =
@@ -1401,7 +1401,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {

 let (element_ty, t) = match uty {
 Some(uty) => {
-self.check_expr_coercable_to_type(&element, uty, None);
+self.check_expr_coercible_to_type(&element, uty, None);
 (uty, uty)
 }
 None => {
@@ -1478,7 +1478,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 let elt_ts_iter = elts.iter().enumerate().map(|(i, e)| match flds {
 Some(fs) if i < fs.len() => {
 let ety = fs[i];
-self.check_expr_coercable_to_type(&e, ety, None);
+self.check_expr_coercible_to_type(&e, ety, None);
 ety
 }
 _ => self.check_expr_with_expectation(&e, NoExpectation),
@@ -2869,7 +2869,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 ) -> Ty<'tcx> {
 match self.resume_yield_tys {
 Some((resume_ty, yield_ty)) => {
-self.check_expr_coercable_to_type(&value, yield_ty, None);
+self.check_expr_coercible_to_type(&value, yield_ty, None);

 resume_ty
 }
@@ -2878,7 +2878,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 // information. Hence, we check the source of the yield expression here and check its
 // value's type against `()` (this check should always hold).
 None if src.is_await() => {
-self.check_expr_coercable_to_type(&value, self.tcx.mk_unit(), None);
+self.check_expr_coercible_to_type(&value, self.tcx.mk_unit(), None);
 self.tcx.mk_unit()
 }
 _ => {
@@ -466,7 +466,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 /// obligation. Hence we refine the `expr` "outwards-in" and bail at the first kind of expression/impl we don't recognize.
 ///
 /// This function returns a `Result<&Expr, &Expr>` - either way, it returns the `Expr` whose span should be
-/// reported as an error. If it is `Ok`, then it means it refined successfull. If it is `Err`, then it may be
+/// reported as an error. If it is `Ok`, then it means it refined successful. If it is `Err`, then it may be
 /// only a partial success - but it cannot be refined even further.
 fn blame_specific_expr_if_possible_for_derived_predicate_obligation(
 &self,
@@ -534,7 +534,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 /// - in_ty: `(Option<Vec<T>, bool)`
 /// we would drill until we arrive at `vec![1, 2, 3]`.
 ///
-/// If successful, we return `Ok(refined_expr)`. If unsuccesful, we return `Err(partially_refined_expr`),
+/// If successful, we return `Ok(refined_expr)`. If unsuccessful, we return `Err(partially_refined_expr`),
 /// which will go as far as possible. For example, given `(foo(), false)` instead, we would drill to
 /// `foo()` and then return `Err("foo()")`.
 ///
@@ -1413,7 +1413,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 self.demand_eqtype(init.span, local_ty, init_ty);
 init_ty
 } else {
-self.check_expr_coercable_to_type(init, local_ty, None)
+self.check_expr_coercible_to_type(init, local_ty, None)
 }
 }

@@ -280,7 +280,7 @@ fn typeck_with_fallback<'tcx>(
 // Gather locals in statics (because of block expressions).
 GatherLocalsVisitor::new(&fcx).visit_body(body);

-fcx.check_expr_coercable_to_type(&body.value, expected_type, None);
+fcx.check_expr_coercible_to_type(&body.value, expected_type, None);

 fcx.write_ty(id, expected_type);
 };
@@ -300,7 +300,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 };

 // We could pass the file for long types into these two, but it isn't strictly necessary
-// given how targetted they are.
+// given how targeted they are.
 if self.suggest_wrapping_range_with_parens(
 tcx,
 rcvr_ty,
@@ -103,9 +103,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 match BinOpCategory::from(op) {
 BinOpCategory::Shortcircuit => {
 // && and || are a simple case.
-self.check_expr_coercable_to_type(lhs_expr, tcx.types.bool, None);
+self.check_expr_coercible_to_type(lhs_expr, tcx.types.bool, None);
 let lhs_diverges = self.diverges.get();
-self.check_expr_coercable_to_type(rhs_expr, tcx.types.bool, None);
+self.check_expr_coercible_to_type(rhs_expr, tcx.types.bool, None);

 // Depending on the LHS' value, the RHS can never execute.
 self.diverges.set(lhs_diverges);
@@ -255,7 +255,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 );

 // see `NB` above
-let rhs_ty = self.check_expr_coercable_to_type(rhs_expr, rhs_ty_var, Some(lhs_expr));
+let rhs_ty = self.check_expr_coercible_to_type(rhs_expr, rhs_ty_var, Some(lhs_expr));
 let rhs_ty = self.resolve_vars_with_obligations(rhs_ty);

 let return_ty = match result {
@@ -174,7 +174,7 @@ infer_region_explanation = {$pref_kind ->

 infer_outlives_content = lifetime of reference outlives lifetime of borrowed content...
 infer_outlives_bound = lifetime of the source pointer does not outlive lifetime bound of the object type
-infer_fullfill_req_lifetime = the type `{$ty}` does not fulfill the required lifetime
+infer_fulfill_req_lifetime = the type `{$ty}` does not fulfill the required lifetime
 infer_lf_bound_not_satisfied = lifetime bound not satisfied
 infer_borrowed_too_long = a value of type `{$ty}` is borrowed for too long
 infer_ref_longer_than_data = in type `{$ty}`, reference has a longer lifetime than the data it references
@@ -53,7 +53,7 @@ pub struct AnnotationRequired<'a> {
 // Copy of `AnnotationRequired` for E0283
 #[derive(Diagnostic)]
 #[diag(infer_type_annotations_needed, code = "E0283")]
-pub struct AmbigousImpl<'a> {
+pub struct AmbiguousImpl<'a> {
 #[primary_span]
 pub span: Span,
 pub source_kind: &'static str,
@@ -942,8 +942,8 @@ pub struct OutlivesBound<'a> {
 }

 #[derive(Diagnostic)]
-#[diag(infer_fullfill_req_lifetime, code = "E0477")]
-pub struct FullfillReqLifetime<'a> {
+#[diag(infer_fulfill_req_lifetime, code = "E0477")]
+pub struct FulfillReqLifetime<'a> {
 #[primary_span]
 pub span: Span,
 pub ty: Ty<'a>,
@@ -1829,7 +1829,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
 // will try to hide in some case such as `async fn`, so
 // to make an error more use friendly we will
 // avoid to suggest a mismatch type with a
-// type that the user usually are not usign
+// type that the user usually are not using
 // directly such as `impl Future<Output = u8>`.
 if !self.tcx.ty_is_opaque_future(found_ty) {
 diag.note_expected_found_extra(
@@ -1,5 +1,5 @@
 use crate::errors::{
-AmbigousImpl, AmbigousReturn, AnnotationRequired, InferenceBadError, NeedTypeInfoInGenerator,
+AmbigousReturn, AmbiguousImpl, AnnotationRequired, InferenceBadError, NeedTypeInfoInGenerator,
 SourceKindMultiSuggestion, SourceKindSubdiag,
 };
 use crate::infer::error_reporting::TypeErrCtxt;
@@ -358,7 +358,7 @@ impl<'tcx> InferCtxt<'tcx> {
 bad_label,
 }
 .into_diagnostic(&self.tcx.sess.parse_sess.span_diagnostic),
-TypeAnnotationNeeded::E0283 => AmbigousImpl {
+TypeAnnotationNeeded::E0283 => AmbiguousImpl {
 span,
 source_kind,
 source_name,
@@ -563,7 +563,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
 bad_label: None,
 }
 .into_diagnostic(&self.tcx.sess.parse_sess.span_diagnostic),
-TypeAnnotationNeeded::E0283 => AmbigousImpl {
+TypeAnnotationNeeded::E0283 => AmbiguousImpl {
 span,
 source_kind,
 source_name: &name,
@@ -1,5 +1,5 @@
 use crate::errors::{
-note_and_explain, FullfillReqLifetime, LfBoundNotSatisfied, OutlivesBound, OutlivesContent,
+note_and_explain, FulfillReqLifetime, LfBoundNotSatisfied, OutlivesBound, OutlivesContent,
 RefLongerThanData, RegionOriginNote, WhereClauseSuggestions,
 };
 use crate::fluent_generated as fluent;
@@ -176,7 +176,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
 let note = note_and_explain::RegionExplanation::new(
 self.tcx, sub, opt_span, prefix, suffix,
 );
-FullfillReqLifetime { span, ty: self.resolve_vars_if_possible(ty), note }
+FulfillReqLifetime { span, ty: self.resolve_vars_if_possible(ty), note }
 .into_diagnostic(&self.tcx.sess.parse_sess.span_diagnostic)
 }
 infer::RelateRegionParamBound(span) => {
@@ -91,7 +91,7 @@ lint_ty_qualified = usage of qualified `ty::{$ty}`
 lint_lintpass_by_hand = implementing `LintPass` by hand
 .help = try using `declare_lint_pass!` or `impl_lint_pass!` instead

-lint_non_existant_doc_keyword = found non-existing keyword `{$keyword}` used in `#[doc(keyword = "...")]`
+lint_non_existent_doc_keyword = found non-existing keyword `{$keyword}` used in `#[doc(keyword = "...")]`
 .help = only existing keywords are allowed in core/std

 lint_diag_out_of_impl =
@@ -107,7 +107,7 @@ lint_cstring_ptr = getting the inner pointer of a temporary `CString`
 .note = pointers do not have a lifetime; when calling `as_ptr` the `CString` will be deallocated at the end of the statement because nothing is referencing it as far as the type system is concerned
 .help = for more information, see https://doc.rust-lang.org/reference/destructors.html

-lint_multple_supertrait_upcastable = `{$ident}` is object-safe and has multiple supertraits
+lint_multiple_supertrait_upcastable = `{$ident}` is object-safe and has multiple supertraits

 lint_identifier_non_ascii_char = identifier contains non-ASCII characters

@@ -2,7 +2,7 @@
 //! Clippy.

 use crate::lints::{
-BadOptAccessDiag, DefaultHashTypesDiag, DiagOutOfImpl, LintPassByHand, NonExistantDocKeyword,
+BadOptAccessDiag, DefaultHashTypesDiag, DiagOutOfImpl, LintPassByHand, NonExistentDocKeyword,
 QueryInstability, TyQualified, TykindDiag, TykindKind, UntranslatableDiag,
 };
 use crate::{EarlyContext, EarlyLintPass, LateContext, LateLintPass, LintContext};
@@ -334,7 +334,7 @@ impl<'tcx> LateLintPass<'tcx> for ExistingDocKeyword {
 cx.emit_spanned_lint(
 EXISTING_DOC_KEYWORD,
 attr.span,
-NonExistantDocKeyword { keyword },
+NonExistentDocKeyword { keyword },
 );
 }
 }
@@ -424,7 +424,7 @@ impl LateLintPass<'_> for Diagnostics {
 }

 declare_tool_lint! {
-/// The `bad_opt_access` lint detects accessing options by field instad of
+/// The `bad_opt_access` lint detects accessing options by field instead of
 /// the wrapper function.
 pub rustc::BAD_OPT_ACCESS,
 Deny,
@@ -25,7 +25,7 @@ declare_lint! {
 ///
 /// fn main() {
 /// #[warn(let_underscore_drop)]
-/// // SomeStuct is dropped immediately instead of at end of scope,
+/// // SomeStruct is dropped immediately instead of at end of scope,
 /// // so "Dropping SomeStruct" is printed before "end of main".
 /// // The order of prints would be reversed if SomeStruct was bound to
 /// // a name (such as "_foo").
@@ -3,7 +3,7 @@ use crate::{
 fluent_generated as fluent,
 late::unerased_lint_store,
 lints::{
-DeprecatedLintName, IgnoredUnlessCrateSpecified, OverruledAtributeLint,
+DeprecatedLintName, IgnoredUnlessCrateSpecified, OverruledAttributeLint,
 RenamedOrRemovedLint, RenamedOrRemovedLintSuggestion, UnknownLint, UnknownLintSuggestion,
 },
 };
@@ -612,7 +612,7 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> {
 self.emit_spanned_lint(
 FORBIDDEN_LINT_GROUPS,
 src.span().into(),
-OverruledAtributeLint {
+OverruledAttributeLint {
 overruled: src.span(),
 lint_level: level.as_str(),
 lint_source: src.name(),
@@ -806,9 +806,9 @@ pub struct TyQualified {
 pub struct LintPassByHand;

 #[derive(LintDiagnostic)]
-#[diag(lint_non_existant_doc_keyword)]
+#[diag(lint_non_existent_doc_keyword)]
 #[help]
-pub struct NonExistantDocKeyword {
+pub struct NonExistentDocKeyword {
 pub keyword: Symbol,
 }

@@ -875,7 +875,7 @@ impl AddToDiagnostic for NonBindingLetSub {
 // levels.rs
 #[derive(LintDiagnostic)]
 #[diag(lint_overruled_attribute)]
-pub struct OverruledAtributeLint<'a> {
+pub struct OverruledAttributeLint<'a> {
 #[label]
 pub overruled: Span,
 pub lint_level: &'a str,
@@ -947,7 +947,7 @@ pub struct CStringPtr {

 // multiple_supertrait_upcastable.rs
 #[derive(LintDiagnostic)]
-#[diag(lint_multple_supertrait_upcastable)]
+#[diag(lint_multiple_supertrait_upcastable)]
 pub struct MultipleSupertraitUpcastable {
 pub ident: Ident,
 }
@@ -1422,7 +1422,7 @@ pub struct UnusedResult<'a> {
 pub ty: Ty<'a>,
 }

-// FIXME(davidtwco): this isn't properly translatable becauses of the
+// FIXME(davidtwco): this isn't properly translatable because of the
 // pre/post strings
 #[derive(LintDiagnostic)]
 #[diag(lint_unused_closure)]
@@ -1433,7 +1433,7 @@ pub struct UnusedClosure<'a> {
 pub post: &'a str,
 }

-// FIXME(davidtwco): this isn't properly translatable becauses of the
+// FIXME(davidtwco): this isn't properly translatable because of the
 // pre/post strings
 #[derive(LintDiagnostic)]
 #[diag(lint_unused_generator)]
@@ -1444,7 +1444,7 @@ pub struct UnusedGenerator<'a> {
 pub post: &'a str,
 }

-// FIXME(davidtwco): this isn't properly translatable becauses of the pre/post
+// FIXME(davidtwco): this isn't properly translatable because of the pre/post
 // strings
 pub struct UnusedDef<'a, 'b> {
 pub pre: &'a str,
@@ -250,7 +250,7 @@ impl EarlyLintPass for NonAsciiIdents {
 let latin_augmented_script_set = AugmentedScriptSet::for_char('A');
 script_states.insert(latin_augmented_script_set, ScriptSetUsage::Verified);

-let mut has_suspicous = false;
+let mut has_suspicious = false;
 for (symbol, &sp) in symbols.iter() {
 let symbol_str = symbol.as_str();
 for ch in symbol_str.chars() {
@@ -278,14 +278,14 @@ impl EarlyLintPass for NonAsciiIdents {
 if !is_potential_mixed_script_confusable_char(ch) {
 ScriptSetUsage::Verified
 } else {
-has_suspicous = true;
+has_suspicious = true;
 ScriptSetUsage::Suspicious(vec![ch], sp)
 }
 });
 }
 }

-if has_suspicous {
+if has_suspicious {
 let verified_augmented_script_sets = script_states
 .iter()
 .flat_map(|(k, v)| match v {
@@ -532,7 +532,7 @@ pub enum BuiltinLintDiagnostics {
 AmbiguousGlobReexports {
 /// The name for which collision(s) have occurred.
 name: String,
-/// The name space for whihc the collision(s) occurred in.
+/// The name space for which the collision(s) occurred in.
 namespace: String,
 /// Span where the name is first re-exported.
 first_reexport_span: Span,
@@ -58,7 +58,7 @@ impl<'tcx> UnifyValue for UnifiedRegion<'tcx> {

 fn unify_values(value1: &Self, value2: &Self) -> Result<Self, NoError> {
 // We pick the value of the least universe because it is compatible with more variables.
-// This is *not* neccessary for soundness, but it allows more region variables to be
+// This is *not* necessary for soundness, but it allows more region variables to be
 // resolved to the said value.
 #[cold]
 fn min_universe<'tcx>(r1: Region<'tcx>, r2: Region<'tcx>) -> Region<'tcx> {
@@ -1,6 +1,6 @@
 /// A macro for triggering an ICE.
 /// Calling `bug` instead of panicking will result in a nicer error message and should
-/// therefore be prefered over `panic`/`unreachable` or others.
+/// therefore be preferred over `panic`/`unreachable` or others.
 ///
 /// If you have a span available, you should use [`span_bug`] instead.
 ///
@@ -63,7 +63,7 @@ impl InitMask {
 }

 /// Sets a specified range to a value. If the range is out-of-bounds, the mask will grow to
-/// accomodate it entirely.
+/// accommodate it entirely.
 pub fn set_range(&mut self, range: AllocRange, new_state: bool) {
 let start = range.start;
 let end = range.end();
@@ -14,7 +14,7 @@ use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
 #[derive(HashStable)]
 pub struct ProvenanceMap<Prov = AllocId> {
 /// Provenance in this map applies from the given offset for an entire pointer-size worth of
-/// bytes. Two entires in this map are always at least a pointer size apart.
+/// bytes. Two entries in this map are always at least a pointer size apart.
 ptrs: SortedMap<Size, Prov>,
 /// Provenance in this map only applies to the given single byte.
 /// This map is disjoint from the previous. It will always be empty when
@@ -37,7 +37,7 @@ pub fn erase<T: EraseType>(src: T) -> Erase<T> {
 #[inline(always)]
 pub fn restore<T: EraseType>(value: Erase<T>) -> T {
 let value: Erased<<T as EraseType>::Result> = value;
-// SAFETY: Due to the use of impl Trait in `Erase` the only way to safetly create an instance
+// SAFETY: Due to the use of impl Trait in `Erase` the only way to safely create an instance
 // of `Erase` is to call `erase`, so we know that `value.data` is a valid instance of `T` of
 // the right size.
 unsafe { transmute_copy(&value.data) }
@@ -26,7 +26,7 @@ pub trait Key: Sized {
 //
 // ...But r-a doesn't support them yet and using a default here causes r-a to not infer
 // return types of queries which is very annoying. Thus, until r-a support associated
-// type defaults, plese restrain from using them here <3
+// type defaults, please restrain from using them here <3
 //
 // r-a issue: <https://github.com/rust-lang/rust-analyzer/issues/13693>
 type CacheSelector;
@@ -97,7 +97,7 @@ rustc_queries! {

 /// Gives access to the HIR ID for the given `LocalDefId` owner `key` if any.
 ///
-/// Definitions that were generated with no HIR, would be feeded to return `None`.
+/// Definitions that were generated with no HIR, would be fed to return `None`.
 query opt_local_def_id_to_hir_id(key: LocalDefId) -> Option<hir::HirId>{
 desc { |tcx| "getting HIR ID of `{}`", tcx.def_path_str(key.to_def_id()) }
 feedable
@@ -1347,7 +1347,7 @@ pub trait PrettyPrinter<'tcx>:
 p!(write("{}::{}", self.tcx().crate_name(def.did.krate), self.tcx().def_path(def.did).to_string_no_crate_verbose()))
 }
 }
-defkind => bug!("`{:?}` has unexpcted defkind {:?}", ct, defkind),
+defkind => bug!("`{:?}` has unexpected defkind {:?}", ct, defkind),
 }
 }
 ty::ConstKind::Infer(infer_ct) => {
@@ -593,7 +593,7 @@ pub struct MultipleMutBorrows {
 #[primary_span]
 pub span: Span,
 #[subdiagnostic]
-pub occurences: Vec<Conflict>,
+pub occurrences: Vec<Conflict>,
 }

 #[derive(Diagnostic)]
@@ -602,7 +602,7 @@ pub struct AlreadyBorrowed {
 #[primary_span]
 pub span: Span,
 #[subdiagnostic]
-pub occurences: Vec<Conflict>,
+pub occurrences: Vec<Conflict>,
 }

 #[derive(Diagnostic)]
@@ -611,7 +611,7 @@ pub struct AlreadyMutBorrowed {
 #[primary_span]
 pub span: Span,
 #[subdiagnostic]
-pub occurences: Vec<Conflict>,
+pub occurrences: Vec<Conflict>,
 }

 #[derive(Diagnostic)]
@@ -620,7 +620,7 @@ pub struct MovedWhileBorrowed {
 #[primary_span]
 pub span: Span,
 #[subdiagnostic]
-pub occurences: Vec<Conflict>,
+pub occurrences: Vec<Conflict>,
 }

 #[derive(Subdiagnostic)]
@@ -966,30 +966,30 @@ fn check_borrow_conflicts_in_at_patterns<'tcx>(cx: &MatchVisitor<'_, '_, 'tcx>,
 let report_mut_ref = !conflicts_mut_ref.is_empty();
 let report_move_conflict = !conflicts_move.is_empty();

-let mut occurences = match mut_outer {
+let mut occurrences = match mut_outer {
 Mutability::Mut => vec![Conflict::Mut { span: pat.span, name }],
 Mutability::Not => vec![Conflict::Ref { span: pat.span, name }],
 };
-occurences.extend(conflicts_mut_mut);
-occurences.extend(conflicts_mut_ref);
-occurences.extend(conflicts_move);
+occurrences.extend(conflicts_mut_mut);
+occurrences.extend(conflicts_mut_ref);
+occurrences.extend(conflicts_move);

 // Report errors if any.
 if report_mut_mut {
 // Report mutability conflicts for e.g. `ref mut x @ Some(ref mut y)`.
-sess.emit_err(MultipleMutBorrows { span: pat.span, occurences });
+sess.emit_err(MultipleMutBorrows { span: pat.span, occurrences });
 } else if report_mut_ref {
 // Report mutability conflicts for e.g. `ref x @ Some(ref mut y)` or the converse.
 match mut_outer {
 Mutability::Mut => {
-sess.emit_err(AlreadyMutBorrowed { span: pat.span, occurences });
+sess.emit_err(AlreadyMutBorrowed { span: pat.span, occurrences });
 }
 Mutability::Not => {
-sess.emit_err(AlreadyBorrowed { span: pat.span, occurences });
+sess.emit_err(AlreadyBorrowed { span: pat.span, occurrences });
 }
 };
 } else if report_move_conflict {
 // Report by-ref and by-move conflicts, e.g. `ref x @ y`.
-sess.emit_err(MovedWhileBorrowed { span: pat.span, occurences });
+sess.emit_err(MovedWhileBorrowed { span: pat.span, occurrences });
 }
 }
@@ -394,8 +394,8 @@ where
 ) -> io::Result<()> {
 let diffs = StateDiffCollector::run(body, block, self.results.results(), self.style);

-let mut befores = diffs.before.map(|v| v.into_iter());
-let mut afters = diffs.after.into_iter();
+let mut diffs_before = diffs.before.map(|v| v.into_iter());
+let mut diffs_after = diffs.after.into_iter();

 let next_in_dataflow_order = |it: &mut std::vec::IntoIter<_>| {
 if A::Direction::IS_FORWARD { it.next().unwrap() } else { it.next_back().unwrap() }
@@ -405,8 +405,8 @@ where
 let statement_str = format!("{statement:?}");
 let index_str = format!("{i}");

-let after = next_in_dataflow_order(&mut afters);
-let before = befores.as_mut().map(next_in_dataflow_order);
+let after = next_in_dataflow_order(&mut diffs_after);
+let before = diffs_before.as_mut().map(next_in_dataflow_order);

 self.write_row(w, &index_str, &statement_str, |_this, w, fmt| {
 if let Some(before) = before {
@@ -417,11 +417,11 @@ where
 })?;
 }

-let after = next_in_dataflow_order(&mut afters);
-let before = befores.as_mut().map(next_in_dataflow_order);
+let after = next_in_dataflow_order(&mut diffs_after);
+let before = diffs_before.as_mut().map(next_in_dataflow_order);

-assert!(afters.is_empty());
-assert!(befores.as_ref().map_or(true, ExactSizeIterator::is_empty));
+assert!(diffs_after.is_empty());
+assert!(diffs_before.as_ref().map_or(true, ExactSizeIterator::is_empty));

 let terminator = body[block].terminator();
 let mut terminator_str = String::new();
@@ -83,7 +83,7 @@
 //! that ever have their address taken. Of course that requires actually having alias analysis
 //! (and a model to build it on), so this might be a bit of a ways off.
 //!
-//! * Various perf improvents. There are a bunch of comments in here marked `PERF` with ideas for
+//! * Various perf improvements. There are a bunch of comments in here marked `PERF` with ideas for
 //! how to do things more efficiently. However, the complexity of the pass as a whole should be
 //! kept in mind.
 //!
@@ -99,7 +99,7 @@ where
 //
 // This generates a `switchInt() -> [0: 0, 1: 1, otherwise: unreachable]`, which allows us or LLVM to
 // turn it into just `x` later. Without the unreachable, such a transformation would be illegal.
-// If the otherwise branch is unreachable, we can delete all other unreacahble targets, as they will
+// If the otherwise branch is unreachable, we can delete all other unreachable targets, as they will
 // still point to the unreachable and therefore not lose reachability information.
 let reachable_iter = targets.iter().filter(|(_, bb)| !is_unreachable(*bb));

@@ -474,7 +474,7 @@ fn collect_and_partition_mono_items(tcx: TyCtxt<'_>, (): ()) -> (&DefIdSet, &[Co
 (tcx.arena.alloc(mono_items), codegen_units)
 }

-/// Outputs stats about instantation counts and estimated size, per `MonoItem`'s
+/// Outputs stats about instantiation counts and estimated size, per `MonoItem`'s
 /// def, to a file in the given output directory.
 fn dump_mono_items_stats<'tcx>(
 tcx: TyCtxt<'tcx>,
@@ -21,7 +21,7 @@ pub struct TokenTreeDiagInfo {
 pub matching_block_spans: Vec<(Span, Span)>,
 }

-pub fn same_identation_level(sm: &SourceMap, open_sp: Span, close_sp: Span) -> bool {
+pub fn same_indentation_level(sm: &SourceMap, open_sp: Span, close_sp: Span) -> bool {
 match (sm.span_to_margin(open_sp), sm.span_to_margin(close_sp)) {
 (Some(open_padding), Some(close_padding)) => open_padding == close_padding,
 _ => false,
@@ -67,13 +67,13 @@ pub fn report_suspicious_mismatch_block(
 let mut matched_spans: Vec<(Span, bool)> = diag_info
 .matching_block_spans
 .iter()
-.map(|&(open, close)| (open.with_hi(close.lo()), same_identation_level(sm, open, close)))
+.map(|&(open, close)| (open.with_hi(close.lo()), same_indentation_level(sm, open, close)))
 .collect();

 // sort by `lo`, so the large block spans in the front
 matched_spans.sort_by_key(|(span, _)| span.lo());

-// We use larger block whose identation is well to cover those inner mismatched blocks
+// We use larger block whose indentation is well to cover those inner mismatched blocks
 // O(N^2) here, but we are on error reporting path, so it is fine
 for i in 0..matched_spans.len() {
 let (block_span, same_ident) = matched_spans[i];
@@ -1,5 +1,5 @@
 use super::diagnostics::report_suspicious_mismatch_block;
-use super::diagnostics::same_identation_level;
+use super::diagnostics::same_indentation_level;
 use super::diagnostics::TokenTreeDiagInfo;
 use super::{StringReader, UnmatchedDelim};
 use rustc_ast::token::{self, Delimiter, Token};
@@ -153,7 +153,7 @@ impl<'a> TokenTreesReader<'a> {
 unclosed_delimiter = Some(sp);
 };
 for (brace, brace_span) in &self.diag_info.open_braces {
-if same_identation_level(&sm, self.token.span, *brace_span)
+if same_indentation_level(&sm, self.token.span, *brace_span)
 && brace == &close_delim
 {
 // high likelihood of these two corresponding
@@ -2767,7 +2767,7 @@ impl<'a> Parser<'a> {
 (token::DotDotEq, token::Gt)
 ) {
 // `error_inclusive_range_match_arrow` handles cases like `0..=> {}`,
-// so we supress the error here
+// so we suppress the error here
 err.delay_as_bug();
 this.bump();
 } else {
@@ -909,7 +909,7 @@ fn find_width_map_from_snippet(
 // Strip quotes.
 let snippet = &snippet[1..snippet.len() - 1];

-// Macros like `println` add a newline at the end. That technically doens't make them "literals" anymore, but it's fine
+// Macros like `println` add a newline at the end. That technically doesn't make them "literals" anymore, but it's fine
 // since we will never need to point our spans there, so we lie about it here by ignoring it.
 // Since there might actually be newlines in the source code, we need to normalize away all trailing newlines.
 // If we only trimmed it off the input, `format!("\n")` would cause a mismatch as here we they actually match up.
@@ -42,7 +42,7 @@ resolve_try_adding_local_generic_param_on_method =
 try adding a local generic parameter in this method instead

 resolve_help_try_using_local_generic_param =
-try using a local generic paramter instead
+try using a local generic parameter instead

 resolve_name_is_already_used_as_generic_parameter =
 the name `{$name}` is already used for a generic parameter in this item's generic parameters
@@ -598,7 +598,7 @@ struct LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
 /// The current set of local scopes for types and values.
 ribs: PerNS<Vec<Rib<'a>>>,

-/// Previous poped `rib`, only used for diagnostic.
+/// Previous popped `rib`, only used for diagnostic.
 last_block_rib: Option<Rib<'a>>,

 /// The current set of local scopes, for labels.
@@ -1652,7 +1652,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
 misc2: AmbiguityErrorMisc::None,
 };
 if !self.matches_previous_ambiguity_error(&ambiguity_error) {
-// avoid dumplicated span information to be emitt out
+// avoid duplicated span information to be emitt out
 self.ambiguity_errors.push(ambiguity_error);
 }
 }
@@ -219,7 +219,7 @@ fn find_best_match_for_name_impl(
 }

 // We have a tie among several candidates, try to select the best among them ignoring substrings.
-// For example, the candidates list `force_capture`, `capture`, and user inputed `forced_capture`,
+// For example, the candidates list `force_capture`, `capture`, and user inputted `forced_capture`,
 // we select `force_capture` with a extra round of edit distance calculation.
 if next_candidates.len() > 1 {
 debug_assert!(use_substring_score);
@@ -1207,7 +1207,7 @@ impl HygieneEncodeContext {
 // a `SyntaxContext` that we haven't seen before
 while !self.latest_ctxts.lock().is_empty() || !self.latest_expns.lock().is_empty() {
 debug!(
-"encode_hygiene: Serializing a round of {:?} SyntaxContextDatas: {:?}",
+"encode_hygiene: Serializing a round of {:?} SyntaxContextData: {:?}",
 self.latest_ctxts.lock().len(),
 self.latest_ctxts
 );
@@ -755,7 +755,7 @@ impl FromStr for Conv {
 "AmdGpuKernel" => Ok(Conv::AmdGpuKernel),
 "AvrInterrupt" => Ok(Conv::AvrInterrupt),
 "AvrNonBlockingInterrupt" => Ok(Conv::AvrNonBlockingInterrupt),
-_ => Err(format!("'{s}' is not a valid value for entry function call convetion.")),
+_ => Err(format!("'{s}' is not a valid value for entry function call convention.")),
 }
 }
 }
@@ -123,7 +123,7 @@ pub enum Lld {
 /// target properties, in accordance with the first design goal.
 ///
 /// The first component of the flavor is tightly coupled with the compilation target,
-/// while the `Cc` and `Lld` flags can vary withing the same target.
+/// while the `Cc` and `Lld` flags can vary within the same target.
 #[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)]
 pub enum LinkerFlavor {
 /// Unix-like linker with GNU extensions (both naked and compiler-wrapped forms).
@@ -13,7 +13,7 @@ use rustc_middle::ty::TypeVisitableExt;
 use rustc_middle::ty::{self, Ty};
 use rustc_middle::ty::{TypeFoldable, TypeFolder, TypeSuperFoldable};

-/// Whether we're canonicalizing a query input or the query reponse.
+/// Whether we're canonicalizing a query input or the query response.
 ///
 /// When canonicalizing an input we're in the context of the caller
 /// while canonicalizing the response happens in the context of the
@@ -21,7 +21,7 @@ use rustc_middle::ty::{TypeFoldable, TypeFolder, TypeSuperFoldable};
 #[derive(Debug, Clone, Copy)]
 pub enum CanonicalizeMode {
 Input,
-/// FIXME: We currently return region constraints refering to
+/// FIXME: We currently return region constraints referring to
 /// placeholders and inference variables from a binder instantiated
 /// inside of the query.
 ///
@@ -42,7 +42,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
 ///
 /// - `var_values`: a map from bound variables in the canonical goal to
 /// the values inferred while solving the instantiated goal.
-/// - `external_constraints`: additional constraints which aren't expressable
+/// - `external_constraints`: additional constraints which aren't expressible
 /// using simple unification of inference variables.
 #[instrument(level = "debug", skip(self))]
 pub(in crate::solve) fn evaluate_added_goals_and_make_canonical_response(
@@ -113,7 +113,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
 }

 /// This returns the substitutions to instantiate the bound variables of
-/// the canonical reponse. This depends on the `original_values` for the
+/// the canonical response. This depends on the `original_values` for the
 /// bound variables.
 fn compute_query_response_substitution(
 &self,
@@ -153,7 +153,7 @@ impl<'tcx> SearchGraph<'tcx> {
 /// coinductive cycles.
 ///
 /// When we encounter a coinductive cycle, we have to prove the final result of that cycle
-/// while we are still computing that result. Because of this we continously recompute the
+/// while we are still computing that result. Because of this we continuously recompute the
 /// cycle until the result of the previous iteration is equal to the final result, at which
 /// point we are done.
 ///
@@ -45,7 +45,7 @@ impl OverflowData {
 /// Updating the current limit when hitting overflow.
 fn deal_with_overflow(&mut self) {
 // When first hitting overflow we reduce the overflow limit
-// for all future goals to prevent hangs if there's an exponental
+// for all future goals to prevent hangs if there's an exponential
 // blowup.
 self.current_limit.0 = self.default_limit.0 / 8;
 }
@@ -294,7 +294,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 return;
 }

-// Keep this funtion in sync with extract_tupled_inputs_and_output_from_callable
+// Keep this function in sync with extract_tupled_inputs_and_output_from_callable
 // until the old solver (and thus this function) is removed.

 // Okay to skip binder because what we are inspecting doesn't involve bound regions.
@@ -406,7 +406,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
 }

 match obligation.self_ty().skip_binder().kind() {
-// Fast path to avoid evaluating an obligation that trivally holds.
+// Fast path to avoid evaluating an obligation that trivially holds.
 // There may be more bounds, but these are checked by the regular path.
 ty::FnPtr(..) => return false,
 // These may potentially implement `FnPtr`
@@ -12,7 +12,7 @@ ty_utils_array_not_supported = array construction is not supported in generic co

 ty_utils_block_not_supported = blocks are not supported in generic constants

-ty_utils_never_to_any_not_supported = converting nevers to any is not supported in generic constants
+ty_utils_never_to_any_not_supported = coercing the `never` type is not supported in generic constants

 ty_utils_tuple_not_supported = tuple construction is not supported in generic constants

@@ -54,4 +54,4 @@ ty_utils_multiple_array_fields_simd_type = monomorphising SIMD type `{$ty}` with

 ty_utils_oversized_simd_type = monomorphising SIMD type `{$ty}` of length greater than {$max_lanes}

-ty_utils_non_primative_simd_type = monomorphising SIMD type `{$ty}` with a non-primitive-scalar (integer/float/pointer) element type `{$e_ty}`
+ty_utils_non_primitive_simd_type = monomorphising SIMD type `{$ty}` with a non-primitive-scalar (integer/float/pointer) element type `{$e_ty}`
@@ -95,7 +95,7 @@ pub struct OversizedSimdType<'tcx> {
 }

 #[derive(Diagnostic)]
-#[diag(ty_utils_non_primative_simd_type)]
+#[diag(ty_utils_non_primitive_simd_type)]
 pub struct NonPrimitiveSimdType<'tcx> {
 pub ty: Ty<'tcx>,
 pub e_ty: Ty<'tcx>,
@@ -322,7 +322,7 @@ fn layout_of_uncached<'tcx>(
 if fi.ty(tcx, substs) != f0_ty {
 tcx.sess.delay_span_bug(
 DUMMY_SP,
-"#[repr(simd)] was applied to an ADT with hetrogeneous field type",
+"#[repr(simd)] was applied to an ADT with heterogeneous field type",
 );
 return Err(LayoutError::Unknown(ty));
 }
@@ -83,7 +83,7 @@ pub trait CollectAndApply<T, R>: Sized {
 /// Produce a result of type `Self::Output` from `iter`. The result will
 /// typically be produced by applying `f` on the elements produced by
 /// `iter`, though this may not happen in some impls, e.g. if an error
-/// occured during iteration.
+/// occurred during iteration.
 fn collect_and_apply<I, F>(iter: I, f: F) -> Self::Output
 where
 I: Iterator<Item = Self>,
@@ -4,7 +4,7 @@
 extern crate derive_bad;

 #[derive(A)]
-//~^ ERROR proc-macro derive produced unparseable tokens
+//~^ ERROR proc-macro derive produced unparsable tokens
 //~| ERROR expected `:`, found `}`
 struct A; //~ ERROR the name `A` is defined multiple times

@@ -9,7 +9,7 @@ LL | #[derive(A)]
 |
 = note: this error originates in the derive macro `A` (in Nightly builds, run with -Z macro-backtrace for more info)

-error: proc-macro derive produced unparseable tokens
+error: proc-macro derive produced unparsable tokens
 --> $DIR/derive-bad.rs:6:10
 |
 LL | #[derive(A)]
@@ -5,7 +5,7 @@ extern crate issue_91800_macro;

 #[derive(MyTrait)]
 //~^ ERROR macros that expand to items must be delimited with braces or followed by a semicolon
-//~| ERROR proc-macro derive produced unparseable tokens
+//~| ERROR proc-macro derive produced unparsable tokens
 #[attribute_macro]
 //~^ ERROR macros that expand to items must be delimited with braces or followed by a semicolon
 struct MyStruct;
@@ -6,7 +6,7 @@ LL | #[derive(MyTrait)]
 |
 = note: this error originates in the derive macro `MyTrait` (in Nightly builds, run with -Z macro-backtrace for more info)

-error: proc-macro derive produced unparseable tokens
+error: proc-macro derive produced unparsable tokens
 --> $DIR/issue-91800.rs:6:10
 |
 LL | #[derive(MyTrait)]