Fix typos in compiler

DaniPopes 2023-04-10 22:02:52 +02:00
parent a73288371e
commit 677357d32b
71 changed files with 140 additions and 136 deletions


@@ -94,7 +94,7 @@ impl FormatArguments {
 }
 if !matches!(arg.kind, FormatArgumentKind::Captured(..)) {
 // This is an explicit argument.
-// Make sure that all arguments so far are explcit.
+// Make sure that all arguments so far are explicit.
 assert_eq!(
 self.num_explicit_args,
 self.arguments.len(),
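Aside: `FormatArgumentKind::Captured` marks arguments that `format_args!` captured implicitly from the surrounding scope, and the assertion above enforces that explicit arguments always precede captured ones in the argument list. A minimal illustration of the two kinds, in ordinary user code rather than rustc internals:

    let x = 1;
    println!("{}", x); // explicit positional argument
    println!("{x}");   // implicit capture of `x` from the enclosing scope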


@@ -137,7 +137,7 @@ pub struct AsyncNonMoveClosureNotSupported {
 #[derive(Diagnostic, Clone, Copy)]
 #[diag(ast_lowering_functional_record_update_destructuring_assignment)]
-pub struct FunctionalRecordUpdateDestructuringAssignemnt {
+pub struct FunctionalRecordUpdateDestructuringAssignment {
 #[primary_span]
 #[suggestion(code = "", applicability = "machine-applicable")]
 pub span: Span,


@@ -1,6 +1,6 @@
 use super::errors::{
 AsyncGeneratorsNotSupported, AsyncNonMoveClosureNotSupported, AwaitOnlyInAsyncFnAndBlocks,
-BaseExpressionDoubleDot, ClosureCannotBeStatic, FunctionalRecordUpdateDestructuringAssignemnt,
+BaseExpressionDoubleDot, ClosureCannotBeStatic, FunctionalRecordUpdateDestructuringAssignment,
 GeneratorTooManyParameters, InclusiveRangeWithNoEnd, NotSupportedForLifetimeBinderAsyncClosure,
 UnderscoreExprLhsAssign,
 };
@@ -434,7 +434,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
 // `if let pat = val` or `if foo && let pat = val`, as we _do_ want `val` to live beyond the
 // condition in this case.
 //
-// In order to mantain the drop behavior for the non `let` parts of the condition,
+// In order to maintain the drop behavior for the non `let` parts of the condition,
 // we still wrap them in terminating scopes, e.g. `if foo && let pat = val` essentially
 // gets transformed into `if { let _t = foo; _t } && let pat = val`
 match &cond.kind {
@@ -1232,7 +1232,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
 );
 let fields_omitted = match &se.rest {
 StructRest::Base(e) => {
-self.tcx.sess.emit_err(FunctionalRecordUpdateDestructuringAssignemnt {
+self.tcx.sess.emit_err(FunctionalRecordUpdateDestructuringAssignment {
 span: e.span,
 });
 true


@@ -13,7 +13,7 @@ use crate::{
 /// The construct graph organizes the constraints by their end-points.
 /// It can be used to view a `R1: R2` constraint as either an edge `R1
 /// -> R2` or `R2 -> R1` depending on the direction type `D`.
-pub(crate) struct ConstraintGraph<D: ConstraintGraphDirecton> {
+pub(crate) struct ConstraintGraph<D: ConstraintGraphDirection> {
 _direction: D,
 first_constraints: IndexVec<RegionVid, Option<OutlivesConstraintIndex>>,
 next_constraints: IndexVec<OutlivesConstraintIndex, Option<OutlivesConstraintIndex>>,
@@ -25,7 +25,7 @@ pub(crate) type ReverseConstraintGraph = ConstraintGraph<Reverse>;
 /// Marker trait that controls whether a `R1: R2` constraint
 /// represents an edge `R1 -> R2` or `R2 -> R1`.
-pub(crate) trait ConstraintGraphDirecton: Copy + 'static {
+pub(crate) trait ConstraintGraphDirection: Copy + 'static {
 fn start_region(c: &OutlivesConstraint<'_>) -> RegionVid;
 fn end_region(c: &OutlivesConstraint<'_>) -> RegionVid;
 fn is_normal() -> bool;
@@ -38,7 +38,7 @@ pub(crate) trait ConstraintGraphDirecton: Copy + 'static {
 #[derive(Copy, Clone, Debug)]
 pub(crate) struct Normal;
-impl ConstraintGraphDirecton for Normal {
+impl ConstraintGraphDirection for Normal {
 fn start_region(c: &OutlivesConstraint<'_>) -> RegionVid {
 c.sup
 }
@@ -59,7 +59,7 @@ impl ConstraintGraphDirecton for Normal {
 #[derive(Copy, Clone, Debug)]
 pub(crate) struct Reverse;
-impl ConstraintGraphDirecton for Reverse {
+impl ConstraintGraphDirection for Reverse {
 fn start_region(c: &OutlivesConstraint<'_>) -> RegionVid {
 c.sub
 }
@@ -73,7 +73,7 @@ impl ConstraintGraphDirecton for Reverse {
 }
 }
-impl<D: ConstraintGraphDirecton> ConstraintGraph<D> {
+impl<D: ConstraintGraphDirection> ConstraintGraph<D> {
 /// Creates a "dependency graph" where each region constraint `R1:
 /// R2` is treated as an edge `R1 -> R2`. We use this graph to
 /// construct SCCs for region inference but also for error
@@ -133,7 +133,7 @@ impl<D: ConstraintGraphDirecton> ConstraintGraph<D> {
 }
 }
-pub(crate) struct Edges<'s, 'tcx, D: ConstraintGraphDirecton> {
+pub(crate) struct Edges<'s, 'tcx, D: ConstraintGraphDirection> {
 graph: &'s ConstraintGraph<D>,
 constraints: &'s OutlivesConstraintSet<'tcx>,
 pointer: Option<OutlivesConstraintIndex>,
@@ -141,7 +141,7 @@ pub(crate) struct Edges<'s, 'tcx, D: ConstraintGraphDirecton> {
 static_region: RegionVid,
 }
-impl<'s, 'tcx, D: ConstraintGraphDirecton> Iterator for Edges<'s, 'tcx, D> {
+impl<'s, 'tcx, D: ConstraintGraphDirection> Iterator for Edges<'s, 'tcx, D> {
 type Item = OutlivesConstraint<'tcx>;
 fn next(&mut self) -> Option<Self::Item> {
@@ -174,13 +174,13 @@ impl<'s, 'tcx, D: ConstraintGraphDirecton> Iterator for Edges<'s, 'tcx, D> {
 /// This struct brings together a constraint set and a (normal, not
 /// reverse) constraint graph. It implements the graph traits and is
 /// usd for doing the SCC computation.
-pub(crate) struct RegionGraph<'s, 'tcx, D: ConstraintGraphDirecton> {
+pub(crate) struct RegionGraph<'s, 'tcx, D: ConstraintGraphDirection> {
 set: &'s OutlivesConstraintSet<'tcx>,
 constraint_graph: &'s ConstraintGraph<D>,
 static_region: RegionVid,
 }
-impl<'s, 'tcx, D: ConstraintGraphDirecton> RegionGraph<'s, 'tcx, D> {
+impl<'s, 'tcx, D: ConstraintGraphDirection> RegionGraph<'s, 'tcx, D> {
 /// Creates a "dependency graph" where each region constraint `R1:
 /// R2` is treated as an edge `R1 -> R2`. We use this graph to
 /// construct SCCs for region inference but also for error
@@ -202,11 +202,11 @@ impl<'s, 'tcx, D: ConstraintGraphDirecton> RegionGraph<'s, 'tcx, D> {
 }
 }
-pub(crate) struct Successors<'s, 'tcx, D: ConstraintGraphDirecton> {
+pub(crate) struct Successors<'s, 'tcx, D: ConstraintGraphDirection> {
 edges: Edges<'s, 'tcx, D>,
 }
-impl<'s, 'tcx, D: ConstraintGraphDirecton> Iterator for Successors<'s, 'tcx, D> {
+impl<'s, 'tcx, D: ConstraintGraphDirection> Iterator for Successors<'s, 'tcx, D> {
 type Item = RegionVid;
 fn next(&mut self) -> Option<Self::Item> {
@@ -214,23 +214,25 @@ impl<'s, 'tcx, D: ConstraintGraphDirecton> Iterator for Successors<'s, 'tcx, D>
 }
 }
-impl<'s, 'tcx, D: ConstraintGraphDirecton> graph::DirectedGraph for RegionGraph<'s, 'tcx, D> {
+impl<'s, 'tcx, D: ConstraintGraphDirection> graph::DirectedGraph for RegionGraph<'s, 'tcx, D> {
 type Node = RegionVid;
 }
-impl<'s, 'tcx, D: ConstraintGraphDirecton> graph::WithNumNodes for RegionGraph<'s, 'tcx, D> {
+impl<'s, 'tcx, D: ConstraintGraphDirection> graph::WithNumNodes for RegionGraph<'s, 'tcx, D> {
 fn num_nodes(&self) -> usize {
 self.constraint_graph.first_constraints.len()
 }
 }
-impl<'s, 'tcx, D: ConstraintGraphDirecton> graph::WithSuccessors for RegionGraph<'s, 'tcx, D> {
+impl<'s, 'tcx, D: ConstraintGraphDirection> graph::WithSuccessors for RegionGraph<'s, 'tcx, D> {
 fn successors(&self, node: Self::Node) -> <Self as graph::GraphSuccessors<'_>>::Iter {
 self.outgoing_regions(node)
 }
 }
-impl<'s, 'tcx, D: ConstraintGraphDirecton> graph::GraphSuccessors<'_> for RegionGraph<'s, 'tcx, D> {
+impl<'s, 'tcx, D: ConstraintGraphDirection> graph::GraphSuccessors<'_>
+for RegionGraph<'s, 'tcx, D>
+{
 type Item = RegionVid;
 type Iter = Successors<'s, 'tcx, D>;
 }


@@ -2600,7 +2600,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
 self.implicit_region_bound,
 self.param_env,
 location.to_locations(),
-DUMMY_SP, // irrelevant; will be overrided.
+DUMMY_SP, // irrelevant; will be overridden.
 ConstraintCategory::Boring, // same as above.
 &mut self.borrowck_context.constraints,
 )


@@ -40,7 +40,7 @@ struct MacroInput {
 fmtstr: P<Expr>,
 args: FormatArguments,
 /// Whether the first argument was a string literal or a result from eager macro expansion.
-/// If it's not a string literal, we disallow implicit arugment capturing.
+/// If it's not a string literal, we disallow implicit argument capturing.
 ///
 /// This does not correspond to whether we can treat spans to the literal normally, as the whole
 /// invocation might be the result of another macro expansion, in which case this flag may still be true.


@@ -141,7 +141,7 @@ codegen_ssa_msvc_missing_linker = the msvc targets depend on the msvc linker but
 codegen_ssa_check_installed_visual_studio = please ensure that Visual Studio 2017 or later, or Build Tools for Visual Studio were installed with the Visual C++ option.
-codegen_ssa_unsufficient_vs_code_product = VS Code is a different product, and is not sufficient.
+codegen_ssa_insufficient_vs_code_product = VS Code is a different product, and is not sufficient.
 codegen_ssa_processing_dymutil_failed = processing debug info with `dsymutil` failed: {$status}
 .note = {$output}


@@ -923,7 +923,7 @@ fn link_natively<'a>(
 if sess.target.is_like_msvc && linker_not_found {
 sess.emit_note(errors::MsvcMissingLinker);
 sess.emit_note(errors::CheckInstalledVisualStudio);
-sess.emit_note(errors::UnsufficientVSCodeProduct);
+sess.emit_note(errors::InsufficientVSCodeProduct);
 }
 sess.abort_if_errors();
 }


@@ -405,8 +405,8 @@ pub struct MsvcMissingLinker;
 pub struct CheckInstalledVisualStudio;
 #[derive(Diagnostic)]
-#[diag(codegen_ssa_unsufficient_vs_code_product)]
-pub struct UnsufficientVSCodeProduct;
+#[diag(codegen_ssa_insufficient_vs_code_product)]
+pub struct InsufficientVSCodeProduct;
 #[derive(Diagnostic)]
 #[diag(codegen_ssa_processing_dymutil_failed)]


@@ -205,7 +205,7 @@ pub(crate) fn turn_into_const_value<'tcx>(
 let cid = key.value;
 let def_id = cid.instance.def.def_id();
 let is_static = tcx.is_static(def_id);
-// This is just accessing an already computed constant, so no need to check alginment here.
+// This is just accessing an already computed constant, so no need to check alignment here.
 let ecx = mk_eval_cx(
 tcx,
 tcx.def_span(key.value.instance.def_id()),


@@ -135,4 +135,4 @@ expand_proc_macro_panicked =
 .help = message: {$message}
 expand_proc_macro_derive_tokens =
-proc-macro derive produced unparseable tokens
+proc-macro derive produced unparsable tokens


@@ -309,7 +309,7 @@ declare_features! (
 (active, associated_type_defaults, "1.2.0", Some(29661), None),
 /// Allows `async || body` closures.
 (active, async_closure, "1.37.0", Some(62290), None),
-/// Alows async functions to be declared, implemented, and used in traits.
+/// Allows async functions to be declared, implemented, and used in traits.
 (incomplete, async_fn_in_trait, "1.66.0", Some(91611), None),
 /// Allows `extern "C-unwind" fn` to enable unwinding across ABI boundaries.
 (active, c_unwind, "1.52.0", Some(74990), None),


@@ -58,7 +58,7 @@ impl<'tcx> Bounds<'tcx> {
 pub fn push_sized(&mut self, tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, span: Span) {
 let sized_def_id = tcx.require_lang_item(LangItem::Sized, Some(span));
 let trait_ref = ty::Binder::dummy(tcx.mk_trait_ref(sized_def_id, [ty]));
-// Preferrable to put this obligation first, since we report better errors for sized ambiguity.
+// Preferable to put this obligation first, since we report better errors for sized ambiguity.
 self.predicates.insert(0, (trait_ref.without_const().to_predicate(tcx), span));
 }


@@ -308,7 +308,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 let rcvr_ty = self.node_ty(rcvr.hir_id);
 // Get the evaluated type *after* calling the method call, so that the influence
 // of the arguments can be reflected in the receiver type. The receiver
-// expression has the type *before* theis analysis is done.
+// expression has the type *before* this analysis is done.
 let ty = match self.lookup_probe_for_diagnostic(
 segment.ident,
 rcvr_ty,


@@ -120,7 +120,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 ty
 }
-pub(super) fn check_expr_coercable_to_type(
+pub(super) fn check_expr_coercible_to_type(
 &self,
 expr: &'tcx hir::Expr<'tcx>,
 expected: Ty<'tcx>,
@@ -1128,7 +1128,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 }
 };
-// This is (basically) inlined `check_expr_coercable_to_type`, but we want
+// This is (basically) inlined `check_expr_coercible_to_type`, but we want
 // to suggest an additional fixup here in `suggest_deref_binop`.
 let rhs_ty = self.check_expr_with_hint(&rhs, lhs_ty);
 if let (_, Some(mut diag)) =
@@ -1401,7 +1401,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 let (element_ty, t) = match uty {
 Some(uty) => {
-self.check_expr_coercable_to_type(&element, uty, None);
+self.check_expr_coercible_to_type(&element, uty, None);
 (uty, uty)
 }
 None => {
@@ -1478,7 +1478,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 let elt_ts_iter = elts.iter().enumerate().map(|(i, e)| match flds {
 Some(fs) if i < fs.len() => {
 let ety = fs[i];
-self.check_expr_coercable_to_type(&e, ety, None);
+self.check_expr_coercible_to_type(&e, ety, None);
 ety
 }
 _ => self.check_expr_with_expectation(&e, NoExpectation),
@@ -2869,7 +2869,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 ) -> Ty<'tcx> {
 match self.resume_yield_tys {
 Some((resume_ty, yield_ty)) => {
-self.check_expr_coercable_to_type(&value, yield_ty, None);
+self.check_expr_coercible_to_type(&value, yield_ty, None);
 resume_ty
 }
@@ -2878,7 +2878,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 // information. Hence, we check the source of the yield expression here and check its
 // value's type against `()` (this check should always hold).
 None if src.is_await() => {
-self.check_expr_coercable_to_type(&value, self.tcx.mk_unit(), None);
+self.check_expr_coercible_to_type(&value, self.tcx.mk_unit(), None);
 self.tcx.mk_unit()
 }
 _ => {


@@ -466,7 +466,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 /// obligation. Hence we refine the `expr` "outwards-in" and bail at the first kind of expression/impl we don't recognize.
 ///
 /// This function returns a `Result<&Expr, &Expr>` - either way, it returns the `Expr` whose span should be
-/// reported as an error. If it is `Ok`, then it means it refined successfull. If it is `Err`, then it may be
+/// reported as an error. If it is `Ok`, then it means it refined successful. If it is `Err`, then it may be
 /// only a partial success - but it cannot be refined even further.
 fn blame_specific_expr_if_possible_for_derived_predicate_obligation(
 &self,
@@ -534,7 +534,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 /// - in_ty: `(Option<Vec<T>, bool)`
 /// we would drill until we arrive at `vec![1, 2, 3]`.
 ///
-/// If successful, we return `Ok(refined_expr)`. If unsuccesful, we return `Err(partially_refined_expr`),
+/// If successful, we return `Ok(refined_expr)`. If unsuccessful, we return `Err(partially_refined_expr`),
 /// which will go as far as possible. For example, given `(foo(), false)` instead, we would drill to
 /// `foo()` and then return `Err("foo()")`.
 ///


@@ -1413,7 +1413,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 self.demand_eqtype(init.span, local_ty, init_ty);
 init_ty
 } else {
-self.check_expr_coercable_to_type(init, local_ty, None)
+self.check_expr_coercible_to_type(init, local_ty, None)
 }
 }


@@ -280,7 +280,7 @@ fn typeck_with_fallback<'tcx>(
 // Gather locals in statics (because of block expressions).
 GatherLocalsVisitor::new(&fcx).visit_body(body);
-fcx.check_expr_coercable_to_type(&body.value, expected_type, None);
+fcx.check_expr_coercible_to_type(&body.value, expected_type, None);
 fcx.write_ty(id, expected_type);
 };


@@ -300,7 +300,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 };
 // We could pass the file for long types into these two, but it isn't strictly necessary
-// given how targetted they are.
+// given how targeted they are.
 if self.suggest_wrapping_range_with_parens(
 tcx,
 rcvr_ty,


@@ -103,9 +103,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 match BinOpCategory::from(op) {
 BinOpCategory::Shortcircuit => {
 // && and || are a simple case.
-self.check_expr_coercable_to_type(lhs_expr, tcx.types.bool, None);
+self.check_expr_coercible_to_type(lhs_expr, tcx.types.bool, None);
 let lhs_diverges = self.diverges.get();
-self.check_expr_coercable_to_type(rhs_expr, tcx.types.bool, None);
+self.check_expr_coercible_to_type(rhs_expr, tcx.types.bool, None);
 // Depending on the LHS' value, the RHS can never execute.
 self.diverges.set(lhs_diverges);
@@ -255,7 +255,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 );
 // see `NB` above
-let rhs_ty = self.check_expr_coercable_to_type(rhs_expr, rhs_ty_var, Some(lhs_expr));
+let rhs_ty = self.check_expr_coercible_to_type(rhs_expr, rhs_ty_var, Some(lhs_expr));
 let rhs_ty = self.resolve_vars_with_obligations(rhs_ty);
 let return_ty = match result {


@@ -174,7 +174,7 @@ infer_region_explanation = {$pref_kind ->
 infer_outlives_content = lifetime of reference outlives lifetime of borrowed content...
 infer_outlives_bound = lifetime of the source pointer does not outlive lifetime bound of the object type
-infer_fullfill_req_lifetime = the type `{$ty}` does not fulfill the required lifetime
+infer_fulfill_req_lifetime = the type `{$ty}` does not fulfill the required lifetime
 infer_lf_bound_not_satisfied = lifetime bound not satisfied
 infer_borrowed_too_long = a value of type `{$ty}` is borrowed for too long
 infer_ref_longer_than_data = in type `{$ty}`, reference has a longer lifetime than the data it references


@@ -53,7 +53,7 @@ pub struct AnnotationRequired<'a> {
 // Copy of `AnnotationRequired` for E0283
 #[derive(Diagnostic)]
 #[diag(infer_type_annotations_needed, code = "E0283")]
-pub struct AmbigousImpl<'a> {
+pub struct AmbiguousImpl<'a> {
 #[primary_span]
 pub span: Span,
 pub source_kind: &'static str,
@@ -954,8 +954,8 @@ pub struct OutlivesBound<'a> {
 }
 #[derive(Diagnostic)]
-#[diag(infer_fullfill_req_lifetime, code = "E0477")]
-pub struct FullfillReqLifetime<'a> {
+#[diag(infer_fulfill_req_lifetime, code = "E0477")]
+pub struct FulfillReqLifetime<'a> {
 #[primary_span]
 pub span: Span,
 pub ty: Ty<'a>,


@@ -1808,7 +1808,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
 // will try to hide in some case such as `async fn`, so
 // to make an error more use friendly we will
 // avoid to suggest a mismatch type with a
-// type that the user usually are not usign
+// type that the user usually are not using
 // directly such as `impl Future<Output = u8>`.
 if !self.tcx.ty_is_opaque_future(found_ty) {
 diag.note_expected_found_extra(


@@ -1,5 +1,5 @@
 use crate::errors::{
-AmbigousImpl, AmbigousReturn, AnnotationRequired, InferenceBadError, NeedTypeInfoInGenerator,
+AmbigousReturn, AmbiguousImpl, AnnotationRequired, InferenceBadError, NeedTypeInfoInGenerator,
 SourceKindMultiSuggestion, SourceKindSubdiag,
 };
 use crate::infer::error_reporting::TypeErrCtxt;
@@ -358,7 +358,7 @@ impl<'tcx> InferCtxt<'tcx> {
 bad_label,
 }
 .into_diagnostic(&self.tcx.sess.parse_sess.span_diagnostic),
-TypeAnnotationNeeded::E0283 => AmbigousImpl {
+TypeAnnotationNeeded::E0283 => AmbiguousImpl {
 span,
 source_kind,
 source_name,
@@ -563,7 +563,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
 bad_label: None,
 }
 .into_diagnostic(&self.tcx.sess.parse_sess.span_diagnostic),
-TypeAnnotationNeeded::E0283 => AmbigousImpl {
+TypeAnnotationNeeded::E0283 => AmbiguousImpl {
 span,
 source_kind,
 source_name: &name,


@@ -1,5 +1,5 @@
 use crate::errors::{
-note_and_explain, FullfillReqLifetime, LfBoundNotSatisfied, OutlivesBound, OutlivesContent,
+note_and_explain, FulfillReqLifetime, LfBoundNotSatisfied, OutlivesBound, OutlivesContent,
 RefLongerThanData, RegionOriginNote, WhereClauseSuggestions,
 };
 use crate::fluent_generated as fluent;
@@ -176,7 +176,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
 let note = note_and_explain::RegionExplanation::new(
 self.tcx, sub, opt_span, prefix, suffix,
 );
-FullfillReqLifetime { span, ty: self.resolve_vars_if_possible(ty), note }
+FulfillReqLifetime { span, ty: self.resolve_vars_if_possible(ty), note }
 .into_diagnostic(&self.tcx.sess.parse_sess.span_diagnostic)
 }
 infer::RelateRegionParamBound(span) => {


@@ -91,7 +91,7 @@ lint_ty_qualified = usage of qualified `ty::{$ty}`
 lint_lintpass_by_hand = implementing `LintPass` by hand
 .help = try using `declare_lint_pass!` or `impl_lint_pass!` instead
-lint_non_existant_doc_keyword = found non-existing keyword `{$keyword}` used in `#[doc(keyword = "...")]`
+lint_non_existent_doc_keyword = found non-existing keyword `{$keyword}` used in `#[doc(keyword = "...")]`
 .help = only existing keywords are allowed in core/std
 lint_diag_out_of_impl =
@@ -107,7 +107,7 @@ lint_cstring_ptr = getting the inner pointer of a temporary `CString`
 .note = pointers do not have a lifetime; when calling `as_ptr` the `CString` will be deallocated at the end of the statement because nothing is referencing it as far as the type system is concerned
 .help = for more information, see https://doc.rust-lang.org/reference/destructors.html
-lint_multple_supertrait_upcastable = `{$ident}` is object-safe and has multiple supertraits
+lint_multiple_supertrait_upcastable = `{$ident}` is object-safe and has multiple supertraits
 lint_identifier_non_ascii_char = identifier contains non-ASCII characters


@@ -2,7 +2,7 @@
 //! Clippy.
 use crate::lints::{
-BadOptAccessDiag, DefaultHashTypesDiag, DiagOutOfImpl, LintPassByHand, NonExistantDocKeyword,
+BadOptAccessDiag, DefaultHashTypesDiag, DiagOutOfImpl, LintPassByHand, NonExistentDocKeyword,
 QueryInstability, TyQualified, TykindDiag, TykindKind, UntranslatableDiag,
 };
 use crate::{EarlyContext, EarlyLintPass, LateContext, LateLintPass, LintContext};
@@ -334,7 +334,7 @@ impl<'tcx> LateLintPass<'tcx> for ExistingDocKeyword {
 cx.emit_spanned_lint(
 EXISTING_DOC_KEYWORD,
 attr.span,
-NonExistantDocKeyword { keyword },
+NonExistentDocKeyword { keyword },
 );
 }
 }
@@ -424,7 +424,7 @@ impl LateLintPass<'_> for Diagnostics {
 }
 declare_tool_lint! {
-/// The `bad_opt_access` lint detects accessing options by field instad of
+/// The `bad_opt_access` lint detects accessing options by field instead of
 /// the wrapper function.
 pub rustc::BAD_OPT_ACCESS,
 Deny,


@@ -25,7 +25,7 @@ declare_lint! {
 ///
 /// fn main() {
 /// #[warn(let_underscore_drop)]
-/// // SomeStuct is dropped immediately instead of at end of scope,
+/// // SomeStruct is dropped immediately instead of at end of scope,
 /// // so "Dropping SomeStruct" is printed before "end of main".
 /// // The order of prints would be reversed if SomeStruct was bound to
 /// // a name (such as "_foo").


@@ -3,7 +3,7 @@ use crate::{
 fluent_generated as fluent,
 late::unerased_lint_store,
 lints::{
-DeprecatedLintName, IgnoredUnlessCrateSpecified, OverruledAtributeLint,
+DeprecatedLintName, IgnoredUnlessCrateSpecified, OverruledAttributeLint,
 RenamedOrRemovedLint, RenamedOrRemovedLintSuggestion, UnknownLint, UnknownLintSuggestion,
 },
 };
@@ -612,7 +612,7 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> {
 self.emit_spanned_lint(
 FORBIDDEN_LINT_GROUPS,
 src.span().into(),
-OverruledAtributeLint {
+OverruledAttributeLint {
 overruled: src.span(),
 lint_level: level.as_str(),
 lint_source: src.name(),


@@ -806,9 +806,9 @@ pub struct TyQualified {
 pub struct LintPassByHand;
 #[derive(LintDiagnostic)]
-#[diag(lint_non_existant_doc_keyword)]
+#[diag(lint_non_existent_doc_keyword)]
 #[help]
-pub struct NonExistantDocKeyword {
+pub struct NonExistentDocKeyword {
 pub keyword: Symbol,
 }
@@ -875,7 +875,7 @@ impl AddToDiagnostic for NonBindingLetSub {
 // levels.rs
 #[derive(LintDiagnostic)]
 #[diag(lint_overruled_attribute)]
-pub struct OverruledAtributeLint<'a> {
+pub struct OverruledAttributeLint<'a> {
 #[label]
 pub overruled: Span,
 pub lint_level: &'a str,
@@ -947,7 +947,7 @@ pub struct CStringPtr {
 // multiple_supertrait_upcastable.rs
 #[derive(LintDiagnostic)]
-#[diag(lint_multple_supertrait_upcastable)]
+#[diag(lint_multiple_supertrait_upcastable)]
 pub struct MultipleSupertraitUpcastable {
 pub ident: Ident,
 }
@@ -1422,7 +1422,7 @@ pub struct UnusedResult<'a> {
 pub ty: Ty<'a>,
 }
-// FIXME(davidtwco): this isn't properly translatable becauses of the
+// FIXME(davidtwco): this isn't properly translatable because of the
 // pre/post strings
 #[derive(LintDiagnostic)]
 #[diag(lint_unused_closure)]
@@ -1433,7 +1433,7 @@ pub struct UnusedClosure<'a> {
 pub post: &'a str,
 }
-// FIXME(davidtwco): this isn't properly translatable becauses of the
+// FIXME(davidtwco): this isn't properly translatable because of the
 // pre/post strings
 #[derive(LintDiagnostic)]
 #[diag(lint_unused_generator)]
@@ -1444,7 +1444,7 @@ pub struct UnusedGenerator<'a> {
 pub post: &'a str,
 }
-// FIXME(davidtwco): this isn't properly translatable becauses of the pre/post
+// FIXME(davidtwco): this isn't properly translatable because of the pre/post
 // strings
 pub struct UnusedDef<'a, 'b> {
 pub pre: &'a str,


@@ -250,7 +250,7 @@ impl EarlyLintPass for NonAsciiIdents {
 let latin_augmented_script_set = AugmentedScriptSet::for_char('A');
 script_states.insert(latin_augmented_script_set, ScriptSetUsage::Verified);
-let mut has_suspicous = false;
+let mut has_suspicious = false;
 for (symbol, &sp) in symbols.iter() {
 let symbol_str = symbol.as_str();
 for ch in symbol_str.chars() {
@@ -278,14 +278,14 @@
 if !is_potential_mixed_script_confusable_char(ch) {
 ScriptSetUsage::Verified
 } else {
-has_suspicous = true;
+has_suspicious = true;
 ScriptSetUsage::Suspicious(vec![ch], sp)
 }
 });
 }
 }
-if has_suspicous {
+if has_suspicious {
 let verified_augmented_script_sets = script_states
 .iter()
 .flat_map(|(k, v)| match v {


@@ -532,7 +532,7 @@ pub enum BuiltinLintDiagnostics {
 AmbiguousGlobReexports {
 /// The name for which collision(s) have occurred.
 name: String,
-/// The name space for whihc the collision(s) occurred in.
+/// The name space for which the collision(s) occurred in.
 namespace: String,
 /// Span where the name is first re-exported.
 first_reexport_span: Span,


@@ -58,7 +58,7 @@ impl<'tcx> UnifyValue for UnifiedRegion<'tcx> {
 fn unify_values(value1: &Self, value2: &Self) -> Result<Self, NoError> {
 // We pick the value of the least universe because it is compatible with more variables.
-// This is *not* neccessary for soundness, but it allows more region variables to be
+// This is *not* necessary for soundness, but it allows more region variables to be
 // resolved to the said value.
 #[cold]
 fn min_universe<'tcx>(r1: Region<'tcx>, r2: Region<'tcx>) -> Region<'tcx> {


@@ -1,6 +1,6 @@
 /// A macro for triggering an ICE.
 /// Calling `bug` instead of panicking will result in a nicer error message and should
-/// therefore be prefered over `panic`/`unreachable` or others.
+/// therefore be preferred over `panic`/`unreachable` or others.
 ///
 /// If you have a span available, you should use [`span_bug`] instead.
 ///
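Aside: typical call sites for these macros look like the following (illustrative; both take `format!`-style arguments):

    // Report an impossible compiler state:
    bug!("unexpected const kind: {:?}", ct);
    // Prefer `span_bug!` when a `Span` for the offending code is at hand:
    span_bug!(expr.span, "expression should have been lowered already");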


@@ -63,7 +63,7 @@ impl InitMask {
 }
 /// Sets a specified range to a value. If the range is out-of-bounds, the mask will grow to
-/// accomodate it entirely.
+/// accommodate it entirely.
 pub fn set_range(&mut self, range: AllocRange, new_state: bool) {
 let start = range.start;
 let end = range.end();


@@ -14,7 +14,7 @@ use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
 #[derive(HashStable)]
 pub struct ProvenanceMap<Prov = AllocId> {
 /// Provenance in this map applies from the given offset for an entire pointer-size worth of
-/// bytes. Two entires in this map are always at least a pointer size apart.
+/// bytes. Two entries in this map are always at least a pointer size apart.
 ptrs: SortedMap<Size, Prov>,
 /// Provenance in this map only applies to the given single byte.
 /// This map is disjoint from the previous. It will always be empty when


@@ -37,7 +37,7 @@ pub fn erase<T: EraseType>(src: T) -> Erase<T> {
 #[inline(always)]
 pub fn restore<T: EraseType>(value: Erase<T>) -> T {
 let value: Erased<<T as EraseType>::Result> = value;
-// SAFETY: Due to the use of impl Trait in `Erase` the only way to safetly create an instance
+// SAFETY: Due to the use of impl Trait in `Erase` the only way to safely create an instance
 // of `Erase` is to call `erase`, so we know that `value.data` is a valid instance of `T` of
 // the right size.
 unsafe { transmute_copy(&value.data) }


@@ -26,7 +26,7 @@ pub trait Key: Sized {
 //
 // ...But r-a doesn't support them yet and using a default here causes r-a to not infer
 // return types of queries which is very annoying. Thus, until r-a support associated
-// type defaults, plese restrain from using them here <3
+// type defaults, please restrain from using them here <3
 //
 // r-a issue: <https://github.com/rust-lang/rust-analyzer/issues/13693>
 type CacheSelector;


@@ -97,7 +97,7 @@ rustc_queries! {
 /// Gives access to the HIR ID for the given `LocalDefId` owner `key` if any.
 ///
-/// Definitions that were generated with no HIR, would be feeded to return `None`.
+/// Definitions that were generated with no HIR, would be fed to return `None`.
 query opt_local_def_id_to_hir_id(key: LocalDefId) -> Option<hir::HirId>{
 desc { |tcx| "getting HIR ID of `{}`", tcx.def_path_str(key.to_def_id()) }
 feedable


@@ -1347,7 +1347,7 @@ pub trait PrettyPrinter<'tcx>:
 p!(write("{}::{}", self.tcx().crate_name(def.did.krate), self.tcx().def_path(def.did).to_string_no_crate_verbose()))
 }
 }
-defkind => bug!("`{:?}` has unexpcted defkind {:?}", ct, defkind),
+defkind => bug!("`{:?}` has unexpected defkind {:?}", ct, defkind),
 }
 }
 ty::ConstKind::Infer(infer_ct) => {


@@ -593,7 +593,7 @@ pub struct MultipleMutBorrows {
 #[primary_span]
 pub span: Span,
 #[subdiagnostic]
-pub occurences: Vec<Conflict>,
+pub occurrences: Vec<Conflict>,
 }
 #[derive(Diagnostic)]
@@ -602,7 +602,7 @@ pub struct AlreadyBorrowed {
 #[primary_span]
 pub span: Span,
 #[subdiagnostic]
-pub occurences: Vec<Conflict>,
+pub occurrences: Vec<Conflict>,
 }
 #[derive(Diagnostic)]
@@ -611,7 +611,7 @@ pub struct AlreadyMutBorrowed {
 #[primary_span]
 pub span: Span,
 #[subdiagnostic]
-pub occurences: Vec<Conflict>,
+pub occurrences: Vec<Conflict>,
 }
 #[derive(Diagnostic)]
@@ -620,7 +620,7 @@ pub struct MovedWhileBorrowed {
 #[primary_span]
 pub span: Span,
 #[subdiagnostic]
-pub occurences: Vec<Conflict>,
+pub occurrences: Vec<Conflict>,
 }
 #[derive(Subdiagnostic)]


@@ -966,30 +966,30 @@ fn check_borrow_conflicts_in_at_patterns<'tcx>(cx: &MatchVisitor<'_, '_, 'tcx>,
 let report_mut_ref = !conflicts_mut_ref.is_empty();
 let report_move_conflict = !conflicts_move.is_empty();
-let mut occurences = match mut_outer {
+let mut occurrences = match mut_outer {
 Mutability::Mut => vec![Conflict::Mut { span: pat.span, name }],
 Mutability::Not => vec![Conflict::Ref { span: pat.span, name }],
 };
-occurences.extend(conflicts_mut_mut);
-occurences.extend(conflicts_mut_ref);
-occurences.extend(conflicts_move);
+occurrences.extend(conflicts_mut_mut);
+occurrences.extend(conflicts_mut_ref);
+occurrences.extend(conflicts_move);
 // Report errors if any.
 if report_mut_mut {
 // Report mutability conflicts for e.g. `ref mut x @ Some(ref mut y)`.
-sess.emit_err(MultipleMutBorrows { span: pat.span, occurences });
+sess.emit_err(MultipleMutBorrows { span: pat.span, occurrences });
 } else if report_mut_ref {
 // Report mutability conflicts for e.g. `ref x @ Some(ref mut y)` or the converse.
 match mut_outer {
 Mutability::Mut => {
-sess.emit_err(AlreadyMutBorrowed { span: pat.span, occurences });
+sess.emit_err(AlreadyMutBorrowed { span: pat.span, occurrences });
 }
 Mutability::Not => {
-sess.emit_err(AlreadyBorrowed { span: pat.span, occurences });
+sess.emit_err(AlreadyBorrowed { span: pat.span, occurrences });
 }
 };
 } else if report_move_conflict {
 // Report by-ref and by-move conflicts, e.g. `ref x @ y`.
-sess.emit_err(MovedWhileBorrowed { span: pat.span, occurences });
+sess.emit_err(MovedWhileBorrowed { span: pat.span, occurrences });
 }
 }
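Aside: the conflicts these errors report come from `@` patterns that would create overlapping borrows of the same place. A small illustration of what is and is not accepted (ordinary user code; the rejected form is shown only in a comment):

    fn main() {
        let opt = Some(String::from("hi"));
        // Two shared borrows through an `@` pattern are accepted:
        if let ref whole @ Some(ref inner) = opt {
            println!("{whole:?} / {inner}");
        }
        // By contrast, `ref mut whole @ Some(ref mut inner)` would be two
        // overlapping mutable borrows of one place, which is exactly the
        // `MultipleMutBorrows` case reported above.
    }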


@@ -394,8 +394,8 @@ where
 ) -> io::Result<()> {
 let diffs = StateDiffCollector::run(body, block, self.results.results(), self.style);
-let mut befores = diffs.before.map(|v| v.into_iter());
-let mut afters = diffs.after.into_iter();
+let mut diffs_before = diffs.before.map(|v| v.into_iter());
+let mut diffs_after = diffs.after.into_iter();
 let next_in_dataflow_order = |it: &mut std::vec::IntoIter<_>| {
 if A::Direction::IS_FORWARD { it.next().unwrap() } else { it.next_back().unwrap() }
@@ -405,8 +405,8 @@
 let statement_str = format!("{statement:?}");
 let index_str = format!("{i}");
-let after = next_in_dataflow_order(&mut afters);
-let before = befores.as_mut().map(next_in_dataflow_order);
+let after = next_in_dataflow_order(&mut diffs_after);
+let before = diffs_before.as_mut().map(next_in_dataflow_order);
 self.write_row(w, &index_str, &statement_str, |_this, w, fmt| {
 if let Some(before) = before {
@@ -417,11 +417,11 @@
 })?;
 }
-let after = next_in_dataflow_order(&mut afters);
-let before = befores.as_mut().map(next_in_dataflow_order);
+let after = next_in_dataflow_order(&mut diffs_after);
+let before = diffs_before.as_mut().map(next_in_dataflow_order);
-assert!(afters.is_empty());
-assert!(befores.as_ref().map_or(true, ExactSizeIterator::is_empty));
+assert!(diffs_after.is_empty());
+assert!(diffs_before.as_ref().map_or(true, ExactSizeIterator::is_empty));
 let terminator = body[block].terminator();
 let mut terminator_str = String::new();


@@ -83,7 +83,7 @@
 //! that ever have their address taken. Of course that requires actually having alias analysis
 //! (and a model to build it on), so this might be a bit of a ways off.
 //!
-//! * Various perf improvents. There are a bunch of comments in here marked `PERF` with ideas for
+//! * Various perf improvements. There are a bunch of comments in here marked `PERF` with ideas for
 //! how to do things more efficiently. However, the complexity of the pass as a whole should be
 //! kept in mind.
 //!


@@ -99,7 +99,7 @@ where
 //
 // This generates a `switchInt() -> [0: 0, 1: 1, otherwise: unreachable]`, which allows us or LLVM to
 // turn it into just `x` later. Without the unreachable, such a transformation would be illegal.
-// If the otherwise branch is unreachable, we can delete all other unreacahble targets, as they will
+// If the otherwise branch is unreachable, we can delete all other unreachable targets, as they will
 // still point to the unreachable and therefore not lose reachability information.
 let reachable_iter = targets.iter().filter(|(_, bb)| !is_unreachable(*bb));


@@ -474,7 +474,7 @@ fn collect_and_partition_mono_items(tcx: TyCtxt<'_>, (): ()) -> (&DefIdSet, &[Co
 (tcx.arena.alloc(mono_items), codegen_units)
 }
-/// Outputs stats about instantation counts and estimated size, per `MonoItem`'s
+/// Outputs stats about instantiation counts and estimated size, per `MonoItem`'s
 /// def, to a file in the given output directory.
 fn dump_mono_items_stats<'tcx>(
 tcx: TyCtxt<'tcx>,


@@ -21,7 +21,7 @@ pub struct TokenTreeDiagInfo {
 pub matching_block_spans: Vec<(Span, Span)>,
 }
-pub fn same_identation_level(sm: &SourceMap, open_sp: Span, close_sp: Span) -> bool {
+pub fn same_indentation_level(sm: &SourceMap, open_sp: Span, close_sp: Span) -> bool {
 match (sm.span_to_margin(open_sp), sm.span_to_margin(close_sp)) {
 (Some(open_padding), Some(close_padding)) => open_padding == close_padding,
 _ => false,
@@ -67,13 +67,13 @@ pub fn report_suspicious_mismatch_block(
 let mut matched_spans: Vec<(Span, bool)> = diag_info
 .matching_block_spans
 .iter()
-.map(|&(open, close)| (open.with_hi(close.lo()), same_identation_level(sm, open, close)))
+.map(|&(open, close)| (open.with_hi(close.lo()), same_indentation_level(sm, open, close)))
 .collect();
 // sort by `lo`, so the large block spans in the front
 matched_spans.sort_by_key(|(span, _)| span.lo());
-// We use larger block whose identation is well to cover those inner mismatched blocks
+// We use larger block whose indentation is well to cover those inner mismatched blocks
 // O(N^2) here, but we are on error reporting path, so it is fine
 for i in 0..matched_spans.len() {
 let (block_span, same_ident) = matched_spans[i];


@@ -1,5 +1,5 @@
 use super::diagnostics::report_suspicious_mismatch_block;
-use super::diagnostics::same_identation_level;
+use super::diagnostics::same_indentation_level;
 use super::diagnostics::TokenTreeDiagInfo;
 use super::{StringReader, UnmatchedDelim};
 use rustc_ast::token::{self, Delimiter, Token};
@@ -153,7 +153,7 @@ impl<'a> TokenTreesReader<'a> {
 unclosed_delimiter = Some(sp);
 };
 for (brace, brace_span) in &self.diag_info.open_braces {
-if same_identation_level(&sm, self.token.span, *brace_span)
+if same_indentation_level(&sm, self.token.span, *brace_span)
 && brace == &close_delim
 {
 // high likelihood of these two corresponding


@@ -2767,7 +2767,7 @@ impl<'a> Parser<'a> {
 (token::DotDotEq, token::Gt)
 ) {
 // `error_inclusive_range_match_arrow` handles cases like `0..=> {}`,
-// so we supress the error here
+// so we suppress the error here
 err.delay_as_bug();
 this.bump();
 } else {


@@ -909,7 +909,7 @@ fn find_width_map_from_snippet(
 // Strip quotes.
 let snippet = &snippet[1..snippet.len() - 1];
-// Macros like `println` add a newline at the end. That technically doens't make them "literals" anymore, but it's fine
+// Macros like `println` add a newline at the end. That technically doesn't make them "literals" anymore, but it's fine
 // since we will never need to point our spans there, so we lie about it here by ignoring it.
 // Since there might actually be newlines in the source code, we need to normalize away all trailing newlines.
 // If we only trimmed it off the input, `format!("\n")` would cause a mismatch as here we they actually match up.


@@ -42,7 +42,7 @@ resolve_try_adding_local_generic_param_on_method =
 try adding a local generic parameter in this method instead
 resolve_help_try_using_local_generic_param =
-try using a local generic paramter instead
+try using a local generic parameter instead
 resolve_name_is_already_used_as_generic_parameter =
 the name `{$name}` is already used for a generic parameter in this item's generic parameters


@@ -598,7 +598,7 @@ struct LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
 /// The current set of local scopes for types and values.
 ribs: PerNS<Vec<Rib<'a>>>,
-/// Previous poped `rib`, only used for diagnostic.
+/// Previous popped `rib`, only used for diagnostic.
 last_block_rib: Option<Rib<'a>>,
 /// The current set of local scopes, for labels.


@@ -1652,7 +1652,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
 misc2: AmbiguityErrorMisc::None,
 };
 if !self.matches_previous_ambiguity_error(&ambiguity_error) {
-// avoid dumplicated span information to be emitt out
+// avoid duplicated span information to be emitt out
 self.ambiguity_errors.push(ambiguity_error);
 }
 }


@@ -219,7 +219,7 @@ fn find_best_match_for_name_impl(
 }
 // We have a tie among several candidates, try to select the best among them ignoring substrings.
-// For example, the candidates list `force_capture`, `capture`, and user inputed `forced_capture`,
+// For example, the candidates list `force_capture`, `capture`, and user inputted `forced_capture`,
 // we select `force_capture` with a extra round of edit distance calculation.
 if next_candidates.len() > 1 {
 debug_assert!(use_substring_score);

View File

@ -1207,7 +1207,7 @@ impl HygieneEncodeContext {
// a `SyntaxContext` that we haven't seen before // a `SyntaxContext` that we haven't seen before
while !self.latest_ctxts.lock().is_empty() || !self.latest_expns.lock().is_empty() { while !self.latest_ctxts.lock().is_empty() || !self.latest_expns.lock().is_empty() {
debug!( debug!(
"encode_hygiene: Serializing a round of {:?} SyntaxContextDatas: {:?}", "encode_hygiene: Serializing a round of {:?} SyntaxContextData: {:?}",
self.latest_ctxts.lock().len(), self.latest_ctxts.lock().len(),
self.latest_ctxts self.latest_ctxts
); );

View File

@ -755,7 +755,7 @@ impl FromStr for Conv {
"AmdGpuKernel" => Ok(Conv::AmdGpuKernel), "AmdGpuKernel" => Ok(Conv::AmdGpuKernel),
"AvrInterrupt" => Ok(Conv::AvrInterrupt), "AvrInterrupt" => Ok(Conv::AvrInterrupt),
"AvrNonBlockingInterrupt" => Ok(Conv::AvrNonBlockingInterrupt), "AvrNonBlockingInterrupt" => Ok(Conv::AvrNonBlockingInterrupt),
_ => Err(format!("'{s}' is not a valid value for entry function call convetion.")), _ => Err(format!("'{s}' is not a valid value for entry function call convention.")),
} }
} }
} }

View File

@ -123,7 +123,7 @@ pub enum Lld {
/// target properties, in accordance with the first design goal. /// target properties, in accordance with the first design goal.
/// ///
/// The first component of the flavor is tightly coupled with the compilation target, /// The first component of the flavor is tightly coupled with the compilation target,
/// while the `Cc` and `Lld` flags can vary withing the same target. /// while the `Cc` and `Lld` flags can vary within the same target.
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)] #[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)]
pub enum LinkerFlavor { pub enum LinkerFlavor {
/// Unix-like linker with GNU extensions (both naked and compiler-wrapped forms). /// Unix-like linker with GNU extensions (both naked and compiler-wrapped forms).

View File

@ -13,7 +13,7 @@ use rustc_middle::ty::TypeVisitableExt;
use rustc_middle::ty::{self, Ty}; use rustc_middle::ty::{self, Ty};
use rustc_middle::ty::{TypeFoldable, TypeFolder, TypeSuperFoldable}; use rustc_middle::ty::{TypeFoldable, TypeFolder, TypeSuperFoldable};
/// Whether we're canonicalizing a query input or the query reponse. /// Whether we're canonicalizing a query input or the query response.
/// ///
/// When canonicalizing an input we're in the context of the caller /// When canonicalizing an input we're in the context of the caller
/// while canonicalizing the response happens in the context of the /// while canonicalizing the response happens in the context of the
@ -21,7 +21,7 @@ use rustc_middle::ty::{TypeFoldable, TypeFolder, TypeSuperFoldable};
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
pub enum CanonicalizeMode { pub enum CanonicalizeMode {
Input, Input,
/// FIXME: We currently return region constraints refering to /// FIXME: We currently return region constraints referring to
/// placeholders and inference variables from a binder instantiated /// placeholders and inference variables from a binder instantiated
/// inside of the query. /// inside of the query.
/// ///

View File

@ -42,7 +42,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
/// ///
/// - `var_values`: a map from bound variables in the canonical goal to /// - `var_values`: a map from bound variables in the canonical goal to
/// the values inferred while solving the instantiated goal. /// the values inferred while solving the instantiated goal.
/// - `external_constraints`: additional constraints which aren't expressable /// - `external_constraints`: additional constraints which aren't expressible
/// using simple unification of inference variables. /// using simple unification of inference variables.
#[instrument(level = "debug", skip(self))] #[instrument(level = "debug", skip(self))]
pub(in crate::solve) fn evaluate_added_goals_and_make_canonical_response( pub(in crate::solve) fn evaluate_added_goals_and_make_canonical_response(
@ -113,7 +113,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
} }
/// This returns the substitutions to instantiate the bound variables of /// This returns the substitutions to instantiate the bound variables of
/// the canonical reponse. This depends on the `original_values` for the /// the canonical response. This depends on the `original_values` for the
/// bound variables. /// bound variables.
fn compute_query_response_substitution( fn compute_query_response_substitution(
&self, &self,

View File

@ -153,7 +153,7 @@ impl<'tcx> SearchGraph<'tcx> {
/// coinductive cycles. /// coinductive cycles.
/// ///
/// When we encounter a coinductive cycle, we have to prove the final result of that cycle /// When we encounter a coinductive cycle, we have to prove the final result of that cycle
/// while we are still computing that result. Because of this we continously recompute the /// while we are still computing that result. Because of this we continuously recompute the
/// cycle until the result of the previous iteration is equal to the final result, at which /// cycle until the result of the previous iteration is equal to the final result, at which
/// point we are done. /// point we are done.
/// ///

View File

@ -45,7 +45,7 @@ impl OverflowData {
/// Updating the current limit when hitting overflow. /// Updating the current limit when hitting overflow.
fn deal_with_overflow(&mut self) { fn deal_with_overflow(&mut self) {
// When first hitting overflow we reduce the overflow limit // When first hitting overflow we reduce the overflow limit
// for all future goals to prevent hangs if there's an exponental // for all future goals to prevent hangs if there's an exponential
// blowup. // blowup.
self.current_limit.0 = self.default_limit.0 / 8; self.current_limit.0 = self.default_limit.0 / 8;
} }

View File

@ -294,7 +294,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
return; return;
} }
// Keep this funtion in sync with extract_tupled_inputs_and_output_from_callable // Keep this function in sync with extract_tupled_inputs_and_output_from_callable
// until the old solver (and thus this function) is removed. // until the old solver (and thus this function) is removed.
// Okay to skip binder because what we are inspecting doesn't involve bound regions. // Okay to skip binder because what we are inspecting doesn't involve bound regions.
@ -406,7 +406,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
} }
match obligation.self_ty().skip_binder().kind() { match obligation.self_ty().skip_binder().kind() {
// Fast path to avoid evaluating an obligation that trivally holds. // Fast path to avoid evaluating an obligation that trivially holds.
// There may be more bounds, but these are checked by the regular path. // There may be more bounds, but these are checked by the regular path.
ty::FnPtr(..) => return false, ty::FnPtr(..) => return false,
// These may potentially implement `FnPtr` // These may potentially implement `FnPtr`

View File

@ -12,7 +12,7 @@ ty_utils_array_not_supported = array construction is not supported in generic co
ty_utils_block_not_supported = blocks are not supported in generic constants ty_utils_block_not_supported = blocks are not supported in generic constants
ty_utils_never_to_any_not_supported = converting nevers to any is not supported in generic constants ty_utils_never_to_any_not_supported = converting never to any is not supported in generic constants
ty_utils_tuple_not_supported = tuple construction is not supported in generic constants ty_utils_tuple_not_supported = tuple construction is not supported in generic constants
@ -54,4 +54,4 @@ ty_utils_multiple_array_fields_simd_type = monomorphising SIMD type `{$ty}` with
ty_utils_oversized_simd_type = monomorphising SIMD type `{$ty}` of length greater than {$max_lanes} ty_utils_oversized_simd_type = monomorphising SIMD type `{$ty}` of length greater than {$max_lanes}
ty_utils_non_primative_simd_type = monomorphising SIMD type `{$ty}` with a non-primitive-scalar (integer/float/pointer) element type `{$e_ty}` ty_utils_non_primitive_simd_type = monomorphising SIMD type `{$ty}` with a non-primitive-scalar (integer/float/pointer) element type `{$e_ty}`

View File

@ -95,7 +95,7 @@ pub struct OversizedSimdType<'tcx> {
} }
#[derive(Diagnostic)] #[derive(Diagnostic)]
#[diag(ty_utils_non_primative_simd_type)] #[diag(ty_utils_non_primitive_simd_type)]
pub struct NonPrimitiveSimdType<'tcx> { pub struct NonPrimitiveSimdType<'tcx> {
pub ty: Ty<'tcx>, pub ty: Ty<'tcx>,
pub e_ty: Ty<'tcx>, pub e_ty: Ty<'tcx>,

View File

@ -322,7 +322,7 @@ fn layout_of_uncached<'tcx>(
if fi.ty(tcx, substs) != f0_ty { if fi.ty(tcx, substs) != f0_ty {
tcx.sess.delay_span_bug( tcx.sess.delay_span_bug(
DUMMY_SP, DUMMY_SP,
"#[repr(simd)] was applied to an ADT with hetrogeneous field type", "#[repr(simd)] was applied to an ADT with heterogeneous field type",
); );
return Err(LayoutError::Unknown(ty)); return Err(LayoutError::Unknown(ty));
} }

View File

@ -83,7 +83,7 @@ pub trait CollectAndApply<T, R>: Sized {
/// Produce a result of type `Self::Output` from `iter`. The result will /// Produce a result of type `Self::Output` from `iter`. The result will
/// typically be produced by applying `f` on the elements produced by /// typically be produced by applying `f` on the elements produced by
/// `iter`, though this may not happen in some impls, e.g. if an error /// `iter`, though this may not happen in some impls, e.g. if an error
/// occured during iteration. /// occurred during iteration.
fn collect_and_apply<I, F>(iter: I, f: F) -> Self::Output fn collect_and_apply<I, F>(iter: I, f: F) -> Self::Output
where where
I: Iterator<Item = Self>, I: Iterator<Item = Self>,

View File

@ -24,6 +24,8 @@ error[E0599]: no method named `extend` found for struct `Map` in the current sco
| |
LL | v.iter().map(|x| x * x).extend(std::iter::once(100)); LL | v.iter().map(|x| x * x).extend(std::iter::once(100));
| ^^^^^^ method not found in `Map<Iter<'_, i32>, [closure@method-not-found-generic-arg-elision.rs:87:18]>` | ^^^^^^ method not found in `Map<Iter<'_, i32>, [closure@method-not-found-generic-arg-elision.rs:87:18]>`
|
= note: the full type name has been written to '$TEST_BUILD_DIR/methods/method-not-found-generic-arg-elision/method-not-found-generic-arg-elision.long-type-15656389337579798043.txt'
error[E0599]: no method named `method` found for struct `Wrapper<bool>` in the current scope error[E0599]: no method named `method` found for struct `Wrapper<bool>` in the current scope
--> $DIR/method-not-found-generic-arg-elision.rs:90:13 --> $DIR/method-not-found-generic-arg-elision.rs:90:13

View File

@ -4,7 +4,7 @@
extern crate derive_bad; extern crate derive_bad;
#[derive(A)] #[derive(A)]
//~^ ERROR proc-macro derive produced unparseable tokens //~^ ERROR proc-macro derive produced unparsable tokens
//~| ERROR expected `:`, found `}` //~| ERROR expected `:`, found `}`
struct A; //~ ERROR the name `A` is defined multiple times struct A; //~ ERROR the name `A` is defined multiple times

View File

@ -9,7 +9,7 @@ LL | #[derive(A)]
| |
= note: this error originates in the derive macro `A` (in Nightly builds, run with -Z macro-backtrace for more info) = note: this error originates in the derive macro `A` (in Nightly builds, run with -Z macro-backtrace for more info)
error: proc-macro derive produced unparseable tokens error: proc-macro derive produced unparsable tokens
--> $DIR/derive-bad.rs:6:10 --> $DIR/derive-bad.rs:6:10
| |
LL | #[derive(A)] LL | #[derive(A)]

View File

@ -5,7 +5,7 @@ extern crate issue_91800_macro;
#[derive(MyTrait)] #[derive(MyTrait)]
//~^ ERROR macros that expand to items must be delimited with braces or followed by a semicolon //~^ ERROR macros that expand to items must be delimited with braces or followed by a semicolon
//~| ERROR proc-macro derive produced unparseable tokens //~| ERROR proc-macro derive produced unparsable tokens
#[attribute_macro] #[attribute_macro]
//~^ ERROR macros that expand to items must be delimited with braces or followed by a semicolon //~^ ERROR macros that expand to items must be delimited with braces or followed by a semicolon
struct MyStruct; struct MyStruct;

View File

@ -6,7 +6,7 @@ LL | #[derive(MyTrait)]
| |
= note: this error originates in the derive macro `MyTrait` (in Nightly builds, run with -Z macro-backtrace for more info) = note: this error originates in the derive macro `MyTrait` (in Nightly builds, run with -Z macro-backtrace for more info)
error: proc-macro derive produced unparseable tokens error: proc-macro derive produced unparsable tokens
--> $DIR/issue-91800.rs:6:10 --> $DIR/issue-91800.rs:6:10
| |
LL | #[derive(MyTrait)] LL | #[derive(MyTrait)]