Spelling - compiler

* account
* achieved
* advising
* always
* ambiguous
* analysis
* annotations
* appropriate
* build
* candidates
* cascading
* category
* character
* clarification
* compound
* conceptually
* constituent
* consts
* convenience
* corresponds
* debruijn
* debug
* debugable
* debuggable
* deterministic
* discriminant
* display
* documentation
* doesn't
* ellipsis
* erroneous
* evaluability
* evaluate
* evaluation
* explicitly
* fallible
* fulfill
* getting
* has
* highlighting
* illustrative
* imported
* incompatible
* infringing
* initialized
* into
* intrinsic
* introduced
* javascript
* liveness
* metadata
* monomorphization
* nonexistent
* nontrivial
* obligation
* obligations
* offset
* opaque
* opportunities
* opt-in
* outlive
* overlapping
* paragraph
* parentheses
* poisson
* precisely
* predecessors
* predicates
* preexisting
* propagated
* really
* reentrant
* referent
* responsibility
* rustonomicon
* shortcircuit
* simplifiable
* simplifications
* specify
* stabilized
* structurally
* suggestibility
* translatable
* transmuting
* two
* unclosed
* uninhabited
* visibility
* volatile
* workaround

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>
Josh Soref 2023-04-09 17:35:02 -04:00
parent 31656e7295
commit e09d0d2a29
101 changed files with 159 additions and 159 deletions


@ -231,7 +231,7 @@ ast_passes_feature_on_non_nightly = `#![feature]` may not be used on the {$chann
.suggestion = remove the attribute
.stable_since = the feature `{$name}` has been stable since `{$since}` and no longer requires an attribute to enable
ast_passes_incompatbile_features = `{$f1}` and `{$f2}` are incompatible, using them at the same time is not allowed
ast_passes_incompatible_features = `{$f1}` and `{$f2}` are incompatible, using them at the same time is not allowed
.help = remove one of these features
ast_passes_show_span = {$msg}


@ -677,7 +677,7 @@ impl AddToDiagnostic for StableFeature {
}
#[derive(Diagnostic)]
#[diag(ast_passes_incompatbile_features)]
#[diag(ast_passes_incompatible_features)]
#[help]
pub struct IncompatibleFeatures {
#[primary_span]


@ -1147,7 +1147,7 @@ pub fn mut_borrow_of_mutable_ref(local_decl: &LocalDecl<'_>, local_name: Option<
// suggest removing the `&mut`.
//
// Deliberately fall into this case for all implicit self types,
// so that we don't fall in to the next case with them.
// so that we don't fall into the next case with them.
kind == hir::ImplicitSelfKind::MutRef
}
_ if Some(kw::SelfLower) == local_name => {
@ -1235,7 +1235,7 @@ fn suggest_ampmut<'tcx>(
}
}
let (suggestability, highlight_span) = match opt_ty_info {
let (suggestibility, highlight_span) = match opt_ty_info {
// if this is a variable binding with an explicit type,
// try to highlight that for the suggestion.
Some(ty_span) => (true, ty_span),
@ -1256,7 +1256,7 @@ fn suggest_ampmut<'tcx>(
let ty_mut = local_decl.ty.builtin_deref(true).unwrap();
assert_eq!(ty_mut.mutbl, hir::Mutability::Not);
(
suggestability,
suggestibility,
highlight_span,
if local_decl.ty.is_ref() {
format!("&mut {}", ty_mut.ty)


@ -22,9 +22,9 @@ pub fn insert_reference_to_gdb_debug_scripts_section_global(bx: &mut Builder<'_,
bx.const_bitcast(get_or_insert_gdb_debug_scripts_section_global(bx), bx.type_i8p());
// Load just the first byte as that's all that's necessary to force
// LLVM to keep around the reference to the global.
let volative_load_instruction = bx.volatile_load(bx.type_i8(), gdb_debug_scripts_section);
let volatile_load_instruction = bx.volatile_load(bx.type_i8(), gdb_debug_scripts_section);
unsafe {
llvm::LLVMSetAlignment(volative_load_instruction, 1);
llvm::LLVMSetAlignment(volatile_load_instruction, 1);
}
}
}


@ -62,7 +62,7 @@ const SINGLE_VARIANT_VIRTUAL_DISR: u64 = 0;
/// In CPP-like mode, we generate a union with a field for each variant and an
/// explicit tag field. The field of each variant has a struct type
/// that encodes the discrimiant of the variant and it's data layout.
/// that encodes the discriminant of the variant and it's data layout.
/// The union also has a nested enumeration type that is only used for encoding
/// variant names in an efficient way. Its enumerator values do _not_ correspond
/// to the enum's discriminant values.


@ -69,7 +69,7 @@ mod declare;
mod errors;
mod intrinsic;
// The following is a work around that replaces `pub mod llvm;` and that fixes issue 53912.
// The following is a workaround that replaces `pub mod llvm;` and that fixes issue 53912.
#[path = "llvm/mod.rs"]
mod llvm_;
pub mod llvm {


@ -148,7 +148,7 @@ codegen_ssa_processing_dymutil_failed = processing debug info with `dsymutil` fa
codegen_ssa_unable_to_run_dsymutil = unable to run `dsymutil`: {$error}
codegen_ssa_stripping_debu_info_failed = stripping debug info with `{$util}` failed: {$status}
codegen_ssa_stripping_debug_info_failed = stripping debug info with `{$util}` failed: {$status}
.note = {$output}
codegen_ssa_unable_to_run = unable to run `{$util}`: {$error}


@ -872,7 +872,7 @@ fn execute_copy_from_cache_work_item<B: ExtraBackendMethods>(
let load_from_incr_comp_dir = |output_path: PathBuf, saved_path: &str| {
let source_file = in_incr_comp_dir(&incr_comp_session_dir, saved_path);
debug!(
"copying pre-existing module `{}` from {:?} to {}",
"copying preexisting module `{}` from {:?} to {}",
module.name,
source_file,
output_path.display()


@ -156,7 +156,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, did: LocalDefId) -> CodegenFnAttrs {
None => {
// Unfortunately, unconditionally using `llvm.used` causes
// issues in handling `.init_array` with the gold linker,
// but using `llvm.compiler.used` caused a nontrival amount
// but using `llvm.compiler.used` caused a nontrivial amount
// of unintentional ecosystem breakage -- particularly on
// Mach-O targets.
//


@ -424,7 +424,7 @@ pub struct UnableToRunDsymutil {
}
#[derive(Diagnostic)]
#[diag(codegen_ssa_stripping_debu_info_failed)]
#[diag(codegen_ssa_stripping_debug_info_failed)]
#[note]
pub struct StrippingDebugInfoFailed<'a> {
pub util: &'a str,


@ -784,7 +784,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
Abi::Scalar(scalar_layout) => {
if !scalar_layout.is_uninit_valid() {
// There is something to check here.
let scalar = self.read_scalar(op, "initiailized scalar value")?;
let scalar = self.read_scalar(op, "initialized scalar value")?;
self.visit_scalar(scalar, scalar_layout)?;
}
}
@ -794,7 +794,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
// the other must be init.
if !a_layout.is_uninit_valid() && !b_layout.is_uninit_valid() {
let (a, b) =
self.read_immediate(op, "initiailized scalar value")?.to_scalar_pair();
self.read_immediate(op, "initialized scalar value")?.to_scalar_pair();
self.visit_scalar(a, a_layout)?;
self.visit_scalar(b, b_layout)?;
}


@ -262,7 +262,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
// We sometimes have to use `defining_opaque_types` for subtyping
// to succeed here and figuring out how exactly that should work
// is annoying. It is harmless enough to just not validate anything
// in that case. We still check this after analysis as all opque
// in that case. We still check this after analysis as all opaque
// types have been revealed at this point.
if (src, dest).has_opaque_types() {
return true;


@ -557,7 +557,7 @@ impl SelfProfiler {
let crate_name = crate_name.unwrap_or("unknown-crate");
// HACK(eddyb) we need to pad the PID, strange as it may seem, as its
// length can behave as a source of entropy for heap addresses, when
// ASLR is disabled and the heap is otherwise determinic.
// ASLR is disabled and the heap is otherwise deterministic.
let pid: u32 = process::id();
let filename = format!("{crate_name}-{pid:07}.rustc_profile");
let path = output_directory.join(&filename);


@ -1,4 +1,4 @@
A struct pattern attempted to extract a non-existent field from a struct.
A struct pattern attempted to extract a nonexistent field from a struct.
Erroneous code example:


@ -32,7 +32,7 @@ error: [-, o]
This error is deliberately triggered with the `#[rustc_variance]` attribute
(`#![feature(rustc_attrs)]` must be enabled) and helps to show you the variance
of the type's generic parameters. You can read more about variance and
subtyping in [this section of the Rustnomicon]. For a more in depth look at
subtyping in [this section of the Rustonomicon]. For a more in depth look at
variance (including a more complete list of common variances) see
[this section of the Reference]. For information on how variance is implemented
in the compiler, see [this section of `rustc-dev-guide`].
@ -41,6 +41,6 @@ This error can be easily fixed by removing the `#[rustc_variance]` attribute,
the compiler's suggestion to comment it out can be applied automatically with
`rustfix`.
[this section of the Rustnomicon]: https://doc.rust-lang.org/nomicon/subtyping.html
[this section of the Rustonomicon]: https://doc.rust-lang.org/nomicon/subtyping.html
[this section of the Reference]: https://doc.rust-lang.org/reference/subtyping.html#variance
[this section of `rustc-dev-guide`]: https://rustc-dev-guide.rust-lang.org/variance.html


@ -29,7 +29,7 @@ If `no_restriction()` were to use `&T` instead of `&()` as an argument, the
compiler would have added an implied bound, causing this to compile.
This error can be resolved by explicitly naming the elided lifetime for `x` and
then explicily requiring that the generic parameter `T` outlives that lifetime:
then explicitly requiring that the generic parameter `T` outlives that lifetime:
```
fn no_restriction<'a, T: 'a>(x: &'a ()) -> &'a () {


@ -1,6 +1,6 @@
Plugin `..` only found in rlib format, but must be available in dylib format.
Erroronous code example:
Erroneous code example:
`rlib-plugin.rs`
```ignore (needs-linkage-with-other-tests)


@ -10,7 +10,7 @@ trait Hello {
}
```
In this example, we tried to use the non-existent associated type `You` of the
In this example, we tried to use the nonexistent associated type `You` of the
`Hello` trait. To fix this error, use an existing associated type:
```


@ -1,4 +1,4 @@
Attempted to access a non-existent field in a struct.
Attempted to access a nonexistent field in a struct.
Erroneous code example:


@ -1980,7 +1980,7 @@ impl EmitterWriter {
}
if let DisplaySuggestion::Add = show_code_change && is_item_attribute {
// The suggestion adds an entire line of code, ending on a newline, so we'll also
// print the *following* line, to provide context of what we're advicing people to
// print the *following* line, to provide context of what we're advising people to
// do. Otherwise you would only see contextless code that can be confused for
// already existing code, despite the colors and UI elements.
// We special case `#[derive(_)]\n` and other attribute suggestions, because those


@ -341,7 +341,7 @@ pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
Success(named_matches) => {
debug!("Parsed arm successfully");
// The matcher was `Success(..)`ful.
// Merge the gated spans from parsing the matcher with the pre-existing ones.
// Merge the gated spans from parsing the matcher with the preexisting ones.
sess.gated_spans.merge(gated_spans_snapshot);
return Ok((i, named_matches));
@ -873,7 +873,7 @@ impl<'tt> FirstSets<'tt> {
}
}
// Most `mbe::TokenTree`s are pre-existing in the matcher, but some are defined
// Most `mbe::TokenTree`s are preexisting in the matcher, but some are defined
// implicitly, such as opening/closing delimiters and sequence repetition ops.
// This type encapsulates both kinds. It implements `Clone` while avoiding the
// need for `mbe::TokenTree` to implement `Clone`.


@ -513,7 +513,7 @@ error: foo
}
#[test]
fn non_overlaping() {
fn non_overlapping() {
test_harness(
r#"
fn foo() {
@ -552,7 +552,7 @@ error: foo
}
#[test]
fn overlaping_start_and_end() {
fn overlapping_start_and_end() {
test_harness(
r#"
fn foo() {


@ -139,7 +139,7 @@ declare_features! (
/// Allows using `#[on_unimplemented(..)]` on traits.
/// (Moved to `rustc_attrs`.)
(removed, on_unimplemented, "1.40.0", None, None, None),
/// A way to temporarily opt out of opt in copy. This will *never* be accepted.
/// A way to temporarily opt out of opt-in copy. This will *never* be accepted.
(removed, opt_out_copy, "1.0.0", None, None, None),
/// Allows features specific to OIBIT (now called auto traits).
/// Renamed to `auto_traits`.


@ -2061,7 +2061,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
err.note("enum variants can't have type parameters");
let type_name = tcx.item_name(adt_def.did());
let msg = format!(
"you might have meant to specity type parameters on enum \
"you might have meant to specify type parameters on enum \
`{type_name}`"
);
let Some(args) = assoc_segment.args else { return; };


@ -83,7 +83,7 @@ fn visit_implementation_of_copy(tcx: TyCtxt<'_>, impl_did: LocalDefId) {
let cause = traits::ObligationCause::misc(span, impl_did);
match type_allowed_to_implement_copy(tcx, param_env, self_type, cause) {
Ok(()) => {}
Err(CopyImplementationError::InfrigingFields(fields)) => {
Err(CopyImplementationError::InfringingFields(fields)) => {
let mut err = struct_span_err!(
tcx.sess,
span,


@ -1333,7 +1333,7 @@ impl<'a, 'tcx> BoundVarContext<'a, 'tcx> {
// We may fail to resolve higher-ranked lifetimes that are mentioned by APIT.
// AST-based resolution does not care for impl-trait desugaring, which are the
// responibility of lowering. This may create a mismatch between the resolution
// responsibility of lowering. This may create a mismatch between the resolution
// AST found (`region_def_id`) which points to HRTB, and what HIR allows.
// ```
// fn foo(x: impl for<'a> Trait<'a, Assoc = impl Copy + 'a>) {}


@ -976,7 +976,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
/// Attempt to coerce an expression to a type, and return the
/// adjusted type of the expression, if successful.
/// Adjustments are only recorded if the coercion succeeded.
/// The expressions *must not* have any pre-existing adjustments.
/// The expressions *must not* have any preexisting adjustments.
pub fn try_coerce(
&self,
expr: &hir::Expr<'_>,
@ -1340,7 +1340,7 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> {
}
/// As an optimization, you can create a `CoerceMany` with a
/// pre-existing slice of expressions. In this case, you are
/// preexisting slice of expressions. In this case, you are
/// expected to pass each element in the slice to `coerce(...)` in
/// order. This is used with arrays in particular to avoid
/// needlessly cloning the slice.


@ -108,7 +108,7 @@ pub enum ExpectedReturnTypeLabel<'tcx> {
#[derive(Diagnostic)]
#[diag(hir_typeck_missing_parentheses_in_range, code = "E0689")]
pub struct MissingParentheseInRange {
pub struct MissingParenthesesInRange {
#[primary_span]
#[label(hir_typeck_missing_parentheses_in_range)]
pub span: Span,


@ -827,7 +827,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
QPath::TypeRelative(ref qself, ref segment) => {
// Don't use `self.to_ty`, since this will register a WF obligation.
// If we're trying to call a non-existent method on a trait
// If we're trying to call a nonexistent method on a trait
// (e.g. `MyTrait::missing_method`), then resolution will
// give us a `QPath::TypeRelative` with a trait object as
// `qself`. In that case, we want to avoid registering a WF obligation


@ -330,7 +330,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
/// expression mentioned.
///
/// `blame_specific_arg_if_possible` will find the most-specific expression anywhere inside
/// the provided function call expression, and mark it as responsible for the fullfillment
/// the provided function call expression, and mark it as responsible for the fulfillment
/// error.
fn blame_specific_arg_if_possible(
&self,


@ -794,7 +794,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
return;
};
// get all where BoundPredicates here, because they are used in to cases below
// get all where BoundPredicates here, because they are used in two cases below
let where_predicates = predicates
.iter()
.filter_map(|p| match p {


@ -650,7 +650,7 @@ fn check_must_not_suspend_ty<'tcx>(
},
)
}
// If drop tracking is enabled, we want to look through references, since the referrent
// If drop tracking is enabled, we want to look through references, since the referent
// may not be considered live across the await point.
ty::Ref(_region, ty, _mutability) if fcx.sess().opts.unstable_opts.drop_tracking => {
let descr_pre = &format!("{}reference{} to ", data.descr_pre, plural_suffix);


@ -1530,7 +1530,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
);
if pick.is_ok() {
let range_span = parent_expr.span.with_hi(expr.span.hi());
tcx.sess.emit_err(errors::MissingParentheseInRange {
tcx.sess.emit_err(errors::MissingParenthesesInRange {
span,
ty_str: ty_str.to_string(),
method_name: item_name.as_str().to_string(),


@ -1659,7 +1659,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
if tcx.sess.teach(&err.get_code().unwrap()) {
err.note(
"This error indicates that a struct pattern attempted to \
extract a non-existent field from a struct. Struct fields \
extract a nonexistent field from a struct. Struct fields \
are identified by the name used before the colon : so struct \
patterns should resemble the declaration of the struct type \
being matched.\n\n\


@ -223,7 +223,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let closure_hir_id = self.tcx.hir().local_def_id_to_hir_id(closure_def_id);
if should_do_rust_2021_incompatible_closure_captures_analysis(self.tcx, closure_hir_id) {
self.perform_2229_migration_anaysis(closure_def_id, body_id, capture_clause, span);
self.perform_2229_migration_analysis(closure_def_id, body_id, capture_clause, span);
}
let after_feature_tys = self.final_upvar_tys(closure_def_id);
@ -731,7 +731,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
/// Perform the migration analysis for RFC 2229, and emit lint
/// `disjoint_capture_drop_reorder` if needed.
fn perform_2229_migration_anaysis(
fn perform_2229_migration_analysis(
&self,
closure_def_id: LocalDefId,
body_id: hir::BodyId,


@ -139,7 +139,7 @@ pub fn check_dirty_clean_annotations(tcx: TyCtxt<'_>) {
return;
}
// can't add `#[rustc_clean]` etc without opting in to this feature
// can't add `#[rustc_clean]` etc without opting into this feature
if !tcx.features().rustc_attrs {
return;
}


@ -80,7 +80,7 @@ infer_subtype = ...so that the {$requirement ->
[no_else] `if` missing an `else` returns `()`
[fn_main_correct_type] `main` function has the correct type
[fn_start_correct_type] `#[start]` function has the correct type
[intristic_correct_type] intrinsic has the correct type
[intrinsic_correct_type] intrinsic has the correct type
[method_correct_type] method receiver has the correct type
*[other] types are compatible
}
@ -93,7 +93,7 @@ infer_subtype_2 = ...so that {$requirement ->
[no_else] `if` missing an `else` returns `()`
[fn_main_correct_type] `main` function has the correct type
[fn_start_correct_type] `#[start]` function has the correct type
[intristic_correct_type] intrinsic has the correct type
[intrinsic_correct_type] intrinsic has the correct type
[method_correct_type] method receiver has the correct type
*[other] types are compatible
}
@ -341,8 +341,8 @@ infer_await_note = calling an async function returns a future
infer_prlf_defined_with_sub = the lifetime `{$sub_symbol}` defined here...
infer_prlf_defined_without_sub = the lifetime defined here...
infer_prlf_must_oultive_with_sup = ...must outlive the lifetime `{$sup_symbol}` defined here
infer_prlf_must_oultive_without_sup = ...must outlive the lifetime defined here
infer_prlf_must_outlive_with_sup = ...must outlive the lifetime `{$sup_symbol}` defined here
infer_prlf_must_outlive_without_sup = ...must outlive the lifetime defined here
infer_prlf_known_limitation = this is a known limitation that will be removed in the future (see issue #100013 <https://github.com/rust-lang/rust/issues/100013> for more information)
infer_opaque_captures_lifetime = hidden type for `{$opaque_ty}` captures lifetime that does not appear in bounds
@ -380,7 +380,7 @@ infer_oc_no_else = `if` may be missing an `else` clause
infer_oc_no_diverge = `else` clause of `let...else` does not diverge
infer_oc_fn_main_correct_type = `main` function has wrong type
infer_oc_fn_start_correct_type = `#[start]` function has wrong type
infer_oc_intristic_correct_type = intrinsic has wrong type
infer_oc_intrinsic_correct_type = intrinsic has wrong type
infer_oc_method_correct_type = mismatched `self` parameter type
infer_oc_closure_selfref = closure/generator type that references itself
infer_oc_cant_coerce = cannot coerce intrinsics to function pointers


@ -71,7 +71,7 @@ pub struct AmbiguousImpl<'a> {
// Copy of `AnnotationRequired` for E0284
#[derive(Diagnostic)]
#[diag(infer_type_annotations_needed, code = "E0284")]
pub struct AmbigousReturn<'a> {
pub struct AmbiguousReturn<'a> {
#[primary_span]
pub span: Span,
pub source_kind: &'static str,
@ -1085,7 +1085,7 @@ pub enum PlaceholderRelationLfNotSatisfied {
span: Span,
#[note(infer_prlf_defined_with_sub)]
sub_span: Span,
#[note(infer_prlf_must_oultive_with_sup)]
#[note(infer_prlf_must_outlive_with_sup)]
sup_span: Span,
sub_symbol: Symbol,
sup_symbol: Symbol,
@ -1098,7 +1098,7 @@ pub enum PlaceholderRelationLfNotSatisfied {
span: Span,
#[note(infer_prlf_defined_with_sub)]
sub_span: Span,
#[note(infer_prlf_must_oultive_without_sup)]
#[note(infer_prlf_must_outlive_without_sup)]
sup_span: Span,
sub_symbol: Symbol,
#[note(infer_prlf_known_limitation)]
@ -1110,7 +1110,7 @@ pub enum PlaceholderRelationLfNotSatisfied {
span: Span,
#[note(infer_prlf_defined_without_sub)]
sub_span: Span,
#[note(infer_prlf_must_oultive_with_sup)]
#[note(infer_prlf_must_outlive_with_sup)]
sup_span: Span,
sup_symbol: Symbol,
#[note(infer_prlf_known_limitation)]
@ -1122,7 +1122,7 @@ pub enum PlaceholderRelationLfNotSatisfied {
span: Span,
#[note(infer_prlf_defined_without_sub)]
sub_span: Span,
#[note(infer_prlf_must_oultive_without_sup)]
#[note(infer_prlf_must_outlive_without_sup)]
sup_span: Span,
#[note(infer_prlf_known_limitation)]
note: (),
@ -1488,8 +1488,8 @@ pub enum ObligationCauseFailureCode {
#[subdiagnostic]
subdiags: Vec<TypeErrorAdditionalDiags>,
},
#[diag(infer_oc_intristic_correct_type, code = "E0308")]
IntristicCorrectType {
#[diag(infer_oc_intrinsic_correct_type, code = "E0308")]
IntrinsicCorrectType {
#[primary_span]
span: Span,
#[subdiagnostic]


@ -467,11 +467,11 @@ impl<'tcx> InferCtxt<'tcx> {
}
}
GenericArgKind::Const(result_value) => {
if let ty::ConstKind::Bound(debrujin, b) = result_value.kind() {
if let ty::ConstKind::Bound(debruijn, b) = result_value.kind() {
// ...in which case we would set `canonical_vars[0]` to `Some(const X)`.
// We only allow a `ty::INNERMOST` index in substitutions.
assert_eq!(debrujin, ty::INNERMOST);
assert_eq!(debruijn, ty::INNERMOST);
opt_values[b] = Some(*original_value);
}
}


@ -832,7 +832,7 @@ pub trait ObligationEmittingRelation<'tcx>: TypeRelation<'tcx> {
/// Register predicates that must hold in order for this relation to hold. Uses
/// a default obligation cause, [`ObligationEmittingRelation::register_obligations`] should
/// be used if control over the obligaton causes is required.
/// be used if control over the obligation causes is required.
fn register_predicates(&mut self, obligations: impl IntoIterator<Item: ToPredicate<'tcx>>);
/// Register an obligation that both constants must be equal to each other.


@ -178,7 +178,7 @@ impl<'tcx> TypeRelation<'tcx> for Equate<'_, '_, 'tcx> {
where
T: Relate<'tcx>,
{
// A binder is equal to itself if it's structually equal to itself
// A binder is equal to itself if it's structurally equal to itself
if a == b {
return Ok(a);
}


@ -2886,7 +2886,7 @@ impl<'tcx> ObligationCauseExt<'tcx> for ObligationCause<'tcx> {
LetElse => ObligationCauseFailureCode::NoDiverge { span, subdiags },
MainFunctionType => ObligationCauseFailureCode::FnMainCorrectType { span },
StartFunctionType => ObligationCauseFailureCode::FnStartCorrectType { span, subdiags },
IntrinsicType => ObligationCauseFailureCode::IntristicCorrectType { span, subdiags },
IntrinsicType => ObligationCauseFailureCode::IntrinsicCorrectType { span, subdiags },
MethodReceiver => ObligationCauseFailureCode::MethodCorrectType { span, subdiags },
// In the case where we have no more specific thing to
@ -2943,7 +2943,7 @@ impl IntoDiagnosticArg for ObligationCauseAsDiagArg<'_> {
IfExpressionWithNoElse => "no_else",
MainFunctionType => "fn_main_correct_type",
StartFunctionType => "fn_start_correct_type",
IntrinsicType => "intristic_correct_type",
IntrinsicType => "intrinsic_correct_type",
MethodReceiver => "method_correct_type",
_ => "other",
}


@ -1,5 +1,5 @@
use crate::errors::{
AmbigousReturn, AmbiguousImpl, AnnotationRequired, InferenceBadError, NeedTypeInfoInGenerator,
AmbiguousImpl, AmbiguousReturn, AnnotationRequired, InferenceBadError, NeedTypeInfoInGenerator,
SourceKindMultiSuggestion, SourceKindSubdiag,
};
use crate::infer::error_reporting::TypeErrCtxt;
@ -368,7 +368,7 @@ impl<'tcx> InferCtxt<'tcx> {
bad_label,
}
.into_diagnostic(&self.tcx.sess.parse_sess.span_diagnostic),
TypeAnnotationNeeded::E0284 => AmbigousReturn {
TypeAnnotationNeeded::E0284 => AmbiguousReturn {
span,
source_kind,
source_name,
@ -573,7 +573,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
bad_label: None,
}
.into_diagnostic(&self.tcx.sess.parse_sess.span_diagnostic),
TypeAnnotationNeeded::E0284 => AmbigousReturn {
TypeAnnotationNeeded::E0284 => AmbiguousReturn {
span,
source_kind,
source_name: &name,


@ -42,7 +42,7 @@ impl<'a, 'tcx> CombineFields<'a, 'tcx> {
// Next, we instantiate each bound region in the subtype
// with a fresh region variable. These region variables --
// but no other pre-existing region variables -- can name
// but no other preexisting region variables -- can name
// the placeholders.
let sub_prime = self.infcx.instantiate_binder_with_fresh_vars(span, HigherRankedType, sub);


@ -210,7 +210,7 @@ impl<'tcx> TypeRelation<'tcx> for Sub<'_, '_, 'tcx> {
where
T: Relate<'tcx>,
{
// A binder is always a subtype of itself if it's structually equal to itself
// A binder is always a subtype of itself if it's structurally equal to itself
if a == b {
return Ok(a);
}


@ -103,7 +103,7 @@ pub enum ProjectionCacheEntry<'tcx> {
/// if this field is set. Evaluation only
/// cares about the final result, so we don't
/// care about any region constraint side-effects
/// produced by evaluating the sub-boligations.
/// produced by evaluating the sub-obligations.
///
/// Additionally, we will clear out the sub-obligations
/// entirely if we ever evaluate the cache entry (along


@ -22,7 +22,7 @@
use crate::fluent_generated as fluent;
use crate::{
errors::BuiltinEllpisisInclusiveRangePatterns,
errors::BuiltinEllipsisInclusiveRangePatterns,
lints::{
BuiltinAnonymousParams, BuiltinBoxPointers, BuiltinClashingExtern,
BuiltinClashingExternSub, BuiltinConstNoMangle, BuiltinDeprecatedAttrLink,
@ -1711,13 +1711,13 @@ impl EarlyLintPass for EllipsisInclusiveRangePatterns {
}
}
let (parenthesise, endpoints) = match &pat.kind {
let (parentheses, endpoints) = match &pat.kind {
PatKind::Ref(subpat, _) => (true, matches_ellipsis_pat(&subpat)),
_ => (false, matches_ellipsis_pat(pat)),
};
if let Some((start, end, join)) = endpoints {
if parenthesise {
if parentheses {
self.node_id = Some(pat.id);
let end = expr_to_string(&end);
let replace = match start {
@ -1725,7 +1725,7 @@ impl EarlyLintPass for EllipsisInclusiveRangePatterns {
None => format!("&(..={})", end),
};
if join.edition() >= Edition::Edition2021 {
cx.sess().emit_err(BuiltinEllpisisInclusiveRangePatterns {
cx.sess().emit_err(BuiltinEllipsisInclusiveRangePatterns {
span: pat.span,
suggestion: pat.span,
replace,
@ -1743,7 +1743,7 @@ impl EarlyLintPass for EllipsisInclusiveRangePatterns {
} else {
let replace = "..=";
if join.edition() >= Edition::Edition2021 {
cx.sess().emit_err(BuiltinEllpisisInclusiveRangePatterns {
cx.sess().emit_err(BuiltinEllipsisInclusiveRangePatterns {
span: pat.span,
suggestion: join,
replace: replace.to_string(),
@ -2560,7 +2560,7 @@ impl<'tcx> LateLintPass<'tcx> for InvalidValue {
.subst(cx.tcx, substs)
.apply_any_module(cx.tcx, cx.param_env)
{
// Entirely skip uninhbaited variants.
// Entirely skip uninhabited variants.
Some(false) => return None,
// Forward the others, but remember which ones are definitely inhabited.
Some(true) => true,


@ -81,7 +81,7 @@ pub struct UnknownToolInScopedLint {
#[derive(Diagnostic)]
#[diag(lint_builtin_ellipsis_inclusive_range_patterns, code = "E0783")]
pub struct BuiltinEllpisisInclusiveRangePatterns {
pub struct BuiltinEllipsisInclusiveRangePatterns {
#[primary_span]
pub span: Span,
#[suggestion(style = "short", code = "{replace}", applicability = "machine-applicable")]


@ -1021,7 +1021,7 @@ declare_lint! {
declare_lint! {
/// The `invalid_alignment` lint detects dereferences of misaligned pointers during
/// constant evluation.
/// constant evaluation.
///
/// ### Example
///
@ -1854,7 +1854,7 @@ declare_lint! {
/// When new methods are added to traits in the standard library, they are
/// usually added in an "unstable" form which is only available on the
/// [nightly channel] with a [`feature` attribute]. If there is any
/// pre-existing code which extends a trait to have a method with the same
/// preexisting code which extends a trait to have a method with the same
/// name, then the names will collide. In the future, when the method is
/// stabilized, this will cause an error due to the ambiguity. This lint
/// is an early-warning to let you know that there may be a collision in


@ -811,7 +811,7 @@ LLVMRustOptimize(
ModulePassManager MPM;
bool NeedThinLTOBufferPasses = UseThinLTOBuffers;
if (!NoPrepopulatePasses) {
// The pre-link pipelines don't support O0 and require using budilO0DefaultPipeline() instead.
// The pre-link pipelines don't support O0 and require using buildO0DefaultPipeline() instead.
// At the same time, the LTO pipelines do support O0 and using them is required.
bool IsLTO = OptStage == LLVMRustOptStage::ThinLTO || OptStage == LLVMRustOptStage::FatLTO;
if (OptLevel == OptimizationLevel::O0 && !IsLTO) {


@ -117,7 +117,7 @@ pub(crate) struct CrateMetadata {
/// Additional data used for decoding `HygieneData` (e.g. `SyntaxContext`
/// and `ExpnId`).
/// Note that we store a `HygieneDecodeContext` for each `CrateMetadat`. This is
/// Note that we store a `HygieneDecodeContext` for each `CrateMetadata`. This is
/// because `SyntaxContext` ids are not globally unique, so we need
/// to track which ids we've decoded on a per-crate basis.
hygiene_context: HygieneDecodeContext,
@ -627,7 +627,7 @@ impl<'a, 'tcx> Decodable<DecodeContext<'a, 'tcx>> for Symbol {
let pos = d.read_usize();
let old_pos = d.opaque.position();
// move to str ofset and read
// move to str offset and read
d.opaque.set_position(pos);
let s = d.read_str();
let sym = Symbol::intern(s);


@ -102,7 +102,7 @@ impl<T: HasDataLayout> PointerArithmetic for T {}
/// This trait abstracts over the kind of provenance that is associated with a `Pointer`. It is
/// mostly opaque; the `Machine` trait extends it with some more operations that also have access to
/// some global state.
/// The `Debug` rendering is used to distplay bare provenance, and for the default impl of `fmt`.
/// The `Debug` rendering is used to display bare provenance, and for the default impl of `fmt`.
pub trait Provenance: Copy + fmt::Debug {
/// Says whether the `offset` field of `Pointer`s with this provenance is the actual physical address.
/// - If `false`, the offset *must* be relative. This means the bytes representing a pointer are


@ -251,7 +251,7 @@ pub enum StatementKind<'tcx> {
/// **Needs clarification**: The implication of the above idea would be that assignment implies
/// that the resulting value is initialized. I believe we could commit to this separately from
/// committing to whatever part of the memory model we would need to decide on to make the above
/// paragragh precise. Do we want to?
/// paragraph precise. Do we want to?
///
/// Assignments in which the types of the place and rvalue differ are not well-formed.
///
@ -997,7 +997,7 @@ pub type PlaceElem<'tcx> = ProjectionElem<Local, Ty<'tcx>>;
/// This is what is implemented in miri today. Are these the semantics we want for MIR? Is this
/// something we can even decide without knowing more about Rust's memory model?
///
/// **Needs clarifiation:** Is loading a place that has its variant index set well-formed? Miri
/// **Needs clarification:** Is loading a place that has its variant index set well-formed? Miri
/// currently implements it, but it seems like this may be something to check against in the
/// validator.
#[derive(Clone, PartialEq, TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]


@ -1,4 +1,4 @@
//! A subset of a mir body used for const evaluatability checking.
//! A subset of a mir body used for const evaluability checking.
use crate::ty::{
self, Const, EarlyBinder, Ty, TyCtxt, TypeFoldable, TypeFolder, TypeSuperFoldable,
TypeVisitableExt,


@ -68,7 +68,7 @@ pub enum TreatParams {
}
/// During fast-rejection, we have the choice of treating projection types
/// as either simplifyable or not, depending on whether we expect the projection
/// as either simplifiable or not, depending on whether we expect the projection
/// to be normalized/rigid.
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub enum TreatProjections {


@ -235,7 +235,7 @@ impl IntoDiagnostic<'_, !> for LayoutError<'_> {
}
}
// FIXME: Once the other errors that embed this error have been converted to translateable
// FIXME: Once the other errors that embed this error have been converted to translatable
// diagnostics, this Display impl should be removed.
impl<'tcx> fmt::Display for LayoutError<'tcx> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
@ -458,10 +458,10 @@ impl<'tcx> SizeSkeleton<'tcx> {
}
}
/// When creating the layout for types with abstract conts in their size (i.e. [usize; 4 * N]),
/// When creating the layout for types with abstract consts in their size (i.e. [usize; 4 * N]),
/// to ensure that they have a canonical order and can be compared directly we combine all
/// constants, and sort the other terms. This allows comparison of expressions of sizes,
/// allowing for things like transmutating between types that depend on generic consts.
/// allowing for things like transmuting between types that depend on generic consts.
/// This returns `None` if multiplication of constants overflows.
fn mul_sorted_consts<'tcx>(
tcx: TyCtxt<'tcx>,


@ -197,7 +197,7 @@ impl<'tcx> fmt::Debug for AliasTy<'tcx> {
// Atomic structs
//
// For things that don't carry any arena-allocated data (and are
// copy...), just add them to one of these lists as appropriat.
// copy...), just add them to one of these lists as appropriate.
// For things for which the type library provides traversal implementations
// for all Interners, we only need to provide a Lift implementation:


@ -642,7 +642,7 @@ impl<'tcx> TyCtxt<'tcx> {
}
}
/// Return the set of types that should be taken into accound when checking
/// Return the set of types that should be taken into account when checking
/// trait bounds on a generator's internal state.
pub fn generator_hidden_types(
self,
@ -1402,7 +1402,7 @@ pub fn is_trivially_const_drop(ty: Ty<'_>) -> bool {
}
/// Does the equivalent of
/// ```ignore (ilustrative)
/// ```ignore (illustrative)
/// let v = self.iter().map(|p| p.fold_with(folder)).collect::<SmallVec<[_; 8]>>();
/// folder.tcx().intern_*(&v)
/// ```


@ -163,13 +163,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
//
// [block: If(lhs)] -true-> [else_block: dest = (rhs)]
// | (false)
// [shortcurcuit_block: dest = false]
// [shortcircuit_block: dest = false]
//
// Or:
//
// [block: If(lhs)] -false-> [else_block: dest = (rhs)]
// | (true)
// [shortcurcuit_block: dest = true]
// [shortcircuit_block: dest = true]
let (shortcircuit_block, mut else_block, join_block) = (
this.cfg.start_new_block(),


@ -77,7 +77,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
| PatKind::Wild
| PatKind::Binding { .. }
| PatKind::Leaf { .. }
| PatKind::Deref { .. } => self.error_simplifyable(match_pair),
| PatKind::Deref { .. } => self.error_simplifiable(match_pair),
}
}
@ -173,7 +173,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
debug_assert_ne!(
target_blocks[idx.index()],
otherwise_block,
"no canididates for tested discriminant: {:?}",
"no candidates for tested discriminant: {:?}",
discr,
);
Some((discr.val, target_blocks[idx.index()]))
@ -181,7 +181,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
debug_assert_eq!(
target_blocks[idx.index()],
otherwise_block,
"found canididates for untested discriminant: {:?}",
"found candidates for untested discriminant: {:?}",
discr,
);
None
@ -499,7 +499,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
/// However, in some cases, the test may just not be relevant to candidate.
/// For example, suppose we are testing whether `foo.x == 22`, but in one
/// match arm we have `Foo { x: _, ... }`... in that case, the test for
/// what value `x` has has no particular relevance to this candidate. In
/// the value of `x` has no particular relevance to this candidate. In
/// such cases, this function just returns None without doing anything.
/// This is used by the overall `match_candidates` algorithm to structure
/// the match as a whole. See `match_candidates` for more details.
@ -763,8 +763,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
candidate.match_pairs.extend(consequent_match_pairs);
}
fn error_simplifyable<'pat>(&mut self, match_pair: &MatchPair<'pat, 'tcx>) -> ! {
span_bug!(match_pair.pattern.span, "simplifyable pattern found: {:?}", match_pair.pattern)
fn error_simplifiable<'pat>(&mut self, match_pair: &MatchPair<'pat, 'tcx>) -> ! {
span_bug!(match_pair.pattern.span, "simplifiable pattern found: {:?}", match_pair.pattern)
}
fn const_range_contains(


@ -156,7 +156,7 @@ impl<'tcx> ConstToPat<'tcx> {
if let Some(non_sm_ty) = structural {
if !self.type_may_have_partial_eq_impl(cv.ty()) {
// fatal avoids ICE from resolution of non-existent method (rare case).
// fatal avoids ICE from resolution of nonexistent method (rare case).
self.tcx()
.sess
.emit_fatal(TypeNotStructural { span: self.span, non_sm_ty: non_sm_ty });


@ -366,7 +366,7 @@ where
rustc_index::newtype_index!(
/// This index uniquely identifies a place.
///
/// Not every place has a `PlaceIndex`, and not every `PlaceIndex` correspondends to a tracked
/// Not every place has a `PlaceIndex`, and not every `PlaceIndex` corresponds to a tracked
/// place. However, every tracked place and all places along its projection have a `PlaceIndex`.
pub struct PlaceIndex {}
);


@ -10,7 +10,7 @@ use rustc_middle::mir::patch::MirPatch;
/// they are dropped from an aligned address.
///
/// For example, if we have something like
/// ```ignore (ilustrative)
/// ```ignore (illustrative)
/// #[repr(packed)]
/// struct Foo {
/// dealign: u8,
@ -25,7 +25,7 @@ use rustc_middle::mir::patch::MirPatch;
/// its address is not aligned.
///
/// Instead, we move `foo.data` to a local and drop that:
/// ```ignore (ilustrative)
/// ```ignore (illustrative)
/// storage.live(drop_temp)
/// drop_temp = foo.data;
/// drop(drop_temp) -> next


@ -59,7 +59,7 @@ impl<'tcx> MirPass<'tcx> for AddRetag {
let basic_blocks = body.basic_blocks.as_mut();
let local_decls = &body.local_decls;
let needs_retag = |place: &Place<'tcx>| {
!place.has_deref() // we're not eally interested in stores to "outside" locations, they are hard to keep track of anyway
!place.has_deref() // we're not really interested in stores to "outside" locations, they are hard to keep track of anyway
&& may_contain_reference(place.ty(&*local_decls, tcx).ty, /*depth*/ 3, tcx)
&& !local_decls[place.local].is_deref_temp()
};


@ -22,7 +22,7 @@ impl<'tcx> MirPass<'tcx> for ConstDebugInfo {
fn run_pass(&self, _tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
trace!("running ConstDebugInfo on {:?}", body.source);
for (local, constant) in find_optimization_oportunities(body) {
for (local, constant) in find_optimization_opportunities(body) {
for debuginfo in &mut body.var_debug_info {
if let VarDebugInfoContents::Place(p) = debuginfo.value {
if p.local == local && p.projection.is_empty() {
@ -45,7 +45,7 @@ struct LocalUseVisitor {
local_assignment_locations: IndexVec<Local, Option<Location>>,
}
fn find_optimization_oportunities<'tcx>(body: &Body<'tcx>) -> Vec<(Local, Constant<'tcx>)> {
fn find_optimization_opportunities<'tcx>(body: &Body<'tcx>) -> Vec<(Local, Constant<'tcx>)> {
let mut visitor = LocalUseVisitor {
local_mutating_uses: IndexVec::from_elem(0, &body.local_decls),
local_assignment_locations: IndexVec::from_elem(None, &body.local_decls),


@ -826,7 +826,7 @@ impl Visitor<'_> for CanConstProp {
| NonMutatingUse(NonMutatingUseContext::AddressOf)
| MutatingUse(MutatingUseContext::Borrow)
| MutatingUse(MutatingUseContext::AddressOf) => {
trace!("local {:?} can't be propagaged because it's used: {:?}", local, context);
trace!("local {:?} can't be propagated because it's used: {:?}", local, context);
self.can_const_prop[local] = ConstPropMode::NoPropagation;
}
}


@ -111,7 +111,7 @@ impl CoverageGraph {
if predecessors.len() > 1 {
"predecessors.len() > 1".to_owned()
} else {
format!("bb {} is not in precessors: {:?}", bb.index(), predecessors)
format!("bb {} is not in predecessors: {:?}", bb.index(), predecessors)
}
);
}


@ -351,7 +351,7 @@ impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
}
(FlatSet::Bottom, _) | (_, FlatSet::Bottom) => (FlatSet::Bottom, FlatSet::Bottom),
(_, _) => {
// Could attempt some algebraic simplifcations here.
// Could attempt some algebraic simplifications here.
(FlatSet::Top, FlatSet::Top)
}
}


@ -69,7 +69,7 @@
//! of this is that such liveness analysis can report more accurate results about whole locals at
//! a time. For example, consider:
//!
//! ```ignore (syntax-highliting-only)
//! ```ignore (syntax-highlighting-only)
//! _1 = u;
//! // unrelated code
//! _1.f1 = v;
@ -360,7 +360,7 @@ struct FilterInformation<'a, 'body, 'alloc, 'tcx> {
}
// We first implement some utility functions which we will expose removing candidates according to
// different needs. Throughout the livenss filtering, the `candidates` are only ever accessed
// different needs. Throughout the liveness filtering, the `candidates` are only ever accessed
// through these methods, and not directly.
impl<'alloc> Candidates<'alloc> {
/// Just `Vec::retain`, but the condition is inverted and we add debugging output


@ -24,7 +24,7 @@ use std::fmt;
/// In general, the compiler cannot determine at compile time whether a destructor will run or not.
///
/// At a high level, this pass refines Drop to only run the destructor if the
/// target is initialized. The way this is achievied is by inserting drop flags for every variable
/// target is initialized. The way this is achieved is by inserting drop flags for every variable
/// that may be dropped, and then using those flags to determine whether a destructor should run.
/// Once this is complete, Drop terminators in the MIR correspond to a call to the "drop glue" or
/// "drop shim" for the type of the dropped place.


@ -1869,7 +1869,7 @@ fn check_must_not_suspend_ty<'tcx>(
},
)
}
// If drop tracking is enabled, we want to look through references, since the referrent
// If drop tracking is enabled, we want to look through references, since the referent
// may not be considered live across the await point.
ty::Ref(_region, ty, _mutability) => {
let descr_pre = &format!("{}reference{} to ", data.descr_pre, plural_suffix);


@ -21,7 +21,7 @@ pub struct SsaLocals {
/// We often encounter MIR bodies with 1 or 2 basic blocks. In those cases, it's unnecessary to
/// actually compute dominators, we can just compare block indices because bb0 is always the first
/// block, and in any body all other blocks are always always dominated by bb0.
/// block, and in any body all other blocks are always dominated by bb0.
struct SmallDominators {
inner: Option<Dominators<BasicBlock>>,
}


@ -402,7 +402,7 @@ fn collect_roots(tcx: TyCtxt<'_>, mode: MonoItemCollectionMode) -> Vec<MonoItem<
}
/// Collect all monomorphized items reachable from `starting_point`, and emit a note diagnostic if a
/// post-monorphization error is encountered during a collection step.
/// post-monomorphization error is encountered during a collection step.
#[instrument(skip(tcx, visited, recursion_depths, recursion_limit, inlining_map), level = "debug")]
fn collect_items_rec<'tcx>(
tcx: TyCtxt<'tcx>,


@ -424,7 +424,7 @@ fn mono_item_visibility<'tcx>(
InstanceDef::Item(def) => def.did,
InstanceDef::DropGlue(def_id, Some(_)) => def_id,
// We match the visiblity of statics here
// We match the visibility of statics here
InstanceDef::ThreadLocalShim(def_id) => {
return static_visibility(tcx, can_be_internalized, def_id);
}


@ -67,7 +67,7 @@ pub(crate) fn parse_token_trees<'a>(
match token_trees {
Ok(stream) if unmatched_delims.is_empty() => Ok(stream),
_ => {
// Return error if there are unmatched delimiters or unclosng delimiters.
// Return error if there are unmatched delimiters or unclosed delimiters.
// We emit delimiter mismatch errors first, then emit the unclosing delimiter mismatch
// because the delimiter mismatch is more likely to be the root cause of error


@ -68,7 +68,7 @@ pub fn parse_meta<'a>(sess: &'a ParseSess, attr: &Attribute) -> PResult<'a, Meta
}
} else {
// The non-error case can happen with e.g. `#[foo = 1+1]`. The error case can
// happen with e.g. `#[foo = include_str!("non-existent-file.rs")]`; in that
// happen with e.g. `#[foo = include_str!("nonexistent-file.rs")]`; in that
// case we delay the error because an earlier error will have already been
// reported.
let msg = format!("unexpected expression: `{}`", pprust::expr_to_string(expr));


@ -744,7 +744,7 @@ impl<'a, 'tcx> Decodable<CacheDecoder<'a, 'tcx>> for Symbol {
let pos = d.read_usize();
let old_pos = d.opaque.position();
// move to str ofset and read
// move to str offset and read
d.opaque.set_position(pos);
let s = d.read_str();
let sym = Symbol::intern(s);


@ -1,4 +1,4 @@
query_system_reentrant = internal compiler error: re-entrant incremental verify failure, suppressing message
query_system_reentrant = internal compiler error: reentrant incremental verify failure, suppressing message
query_system_increment_compilation = internal compiler error: encountered incremental compilation error with {$dep_node}
.help = This is a known issue with the compiler. Run {$run_cmd} to allow your project to compile


@ -249,7 +249,7 @@ impl<K: DepKind> DepGraph<K> {
/// get an ICE. Normally, we would have tried (and failed) to mark
/// some other query green (e.g. `item_children`) which was used
/// to obtain `C`, which would prevent us from ever trying to force
/// a non-existent `D`.
/// a nonexistent `D`.
///
/// It might be possible to enforce that all `DepNode`s read during
/// deserialization already exist in the previous `DepGraph`. In


@ -63,7 +63,7 @@ pub trait QueryConfig<Qcx: QueryContext>: Copy {
fn handle_cycle_error(self) -> HandleCycleError;
fn hash_result(self) -> HashResult<Self::Value>;
// Just here for convernience and checking that the key matches the kind, don't override this.
// Just here for convenience and checking that the key matches the kind, don't override this.
fn construct_dep_node(self, tcx: Qcx::DepContext, key: &Self::Key) -> DepNode<Qcx::DepKind> {
DepNode::construct(tcx, self.dep_kind(), key)
}


@ -691,7 +691,7 @@ fn incremental_verify_ich_failed<Tcx>(
// which may result in another fingerprint mismatch while we're in the middle
// of processing this one. To avoid a double-panic (which kills the process
// before we can print out the query static), we print out a terse
// but 'safe' message if we detect a re-entrant call to this method.
// but 'safe' message if we detect a reentrant call to this method.
thread_local! {
static INSIDE_VERIFY_PANIC: Cell<bool> = const { Cell::new(false) };
};


@ -175,7 +175,7 @@ impl<'r, 'a, 'tcx> EffectiveVisibilitiesVisitor<'r, 'a, 'tcx> {
/// to not update anything and we can skip it.
///
/// We are checking this condition only if the correct value of private visibility is
/// cheaply available, otherwise it does't make sense performance-wise.
/// cheaply available, otherwise it doesn't make sense performance-wise.
///
/// `None` is returned if the update can be skipped,
/// and cheap private visibility is returned otherwise.


@ -22,7 +22,7 @@ pub(crate) struct UnderscoreLifetimeNameCannotBeUsedHere(#[primary_span] pub(cra
#[derive(Diagnostic)]
#[diag(resolve_crate_may_not_be_imported)]
pub(crate) struct CrateMayNotBeImprted(#[primary_span] pub(crate) Span);
pub(crate) struct CrateMayNotBeImported(#[primary_span] pub(crate) Span);
#[derive(Diagnostic)]
#[diag(resolve_crate_root_imports_must_be_named_explicitly)]


@ -1079,7 +1079,7 @@ impl<'a: 'ast, 'ast, 'tcx> Visitor<'ast> for LateResolutionVisitor<'a, '_, 'ast,
for rib in self.lifetime_ribs.iter().rev() {
match rib.kind {
// We are inside a `PolyTraitRef`. The lifetimes are
// to be intoduced in that (maybe implicit) `for<>` binder.
// to be introduced in that (maybe implicit) `for<>` binder.
LifetimeRibKind::Generics {
binder,
kind: LifetimeBinderKind::PolyTrait,
@ -3803,7 +3803,7 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
// use std::u8; // bring module u8 in scope
// fn f() -> u8 { // OK, resolves to primitive u8, not to std::u8
// u8::max_value() // OK, resolves to associated function <u8>::max_value,
// // not to non-existent std::u8::max_value
// // not to nonexistent std::u8::max_value
// }
//
// Such behavior is required for backward compatibility.


@ -91,7 +91,7 @@ fn import_candidate_to_enum_paths(suggestion: &ImportSuggestion) -> (String, Str
/// Description of an elided lifetime.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub(super) struct MissingLifetime {
/// Used to overwrite the resolution with the suggestion, to avoid cascasing errors.
/// Used to overwrite the resolution with the suggestion, to avoid cascading errors.
pub id: NodeId,
/// Where to suggest adding the lifetime.
pub span: Span,
@ -408,7 +408,7 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> {
}
let Some(path_last_segment) = path.last() else { return };
let item_str = path_last_segment.ident;
// Emit help message for fake-self from other languages (e.g., `this` in Javascript).
// Emit help message for fake-self from other languages (e.g., `this` in JavaScript).
if ["this", "my"].contains(&item_str.as_str()) {
err.span_suggestion_short(
span,


@ -483,7 +483,7 @@ impl SourceMap {
self.span_to_string(sp, FileNameDisplayPreference::Remapped)
}
/// Format the span location suitable for pretty printing anotations with relative line numbers
/// Format the span location suitable for pretty printing annotations with relative line numbers
pub fn span_to_relative_line_string(&self, sp: Span, relative_to: Span) -> String {
if self.files.borrow().source_files.is_empty() || sp.is_dummy() || relative_to.is_dummy() {
return "no-location".to_string();
@ -777,7 +777,7 @@ impl SourceMap {
/// Given a 'Span', tries to tell if it's wrapped by "<>" or "()"
/// the algorithm searches if the next character is '>' or ')' after skipping white space
/// then searches the previous charactoer to match '<' or '(' after skipping white space
/// then searches the previous character to match '<' or '(' after skipping white space
/// return true if wrapped by '<>' or '()'
pub fn span_wrapped_by_angle_or_parentheses(&self, span: Span) -> bool {
self.span_to_source(span, |src, start_index, end_index| {


@ -10,7 +10,7 @@
//! > self-consistent and sensible LLVM IR generation, but does not
//! > conform to any particular ABI.
//! >
//! > - Doxygen Doxumentation of `clang::DefaultABIInfo`
//! > - Doxygen Documentation of `clang::DefaultABIInfo`
//!
//! This calling convention may not match AVR-GCC in all cases.
//!


@ -2285,13 +2285,13 @@ impl Target {
}
}
} );
($key_name:ident, falliable_list) => ( {
($key_name:ident, fallible_list) => ( {
let name = (stringify!($key_name)).replace("_", "-");
obj.remove(&name).and_then(|j| {
if let Some(v) = j.as_array() {
match v.iter().map(|a| FromStr::from_str(a.as_str().unwrap())).collect() {
Ok(l) => { base.$key_name = l },
// FIXME: `falliable_list` can't re-use the `key!` macro for list
// FIXME: `fallible_list` can't re-use the `key!` macro for list
// elements and the error messages from that macro, so it has a bad
// generic message instead
Err(_) => return Some(Err(
@ -2610,7 +2610,7 @@ impl Target {
key!(has_thumb_interworking, bool);
key!(debuginfo_kind, DebuginfoKind)?;
key!(split_debuginfo, SplitDebuginfo)?;
key!(supported_split_debuginfo, falliable_list)?;
key!(supported_split_debuginfo, fallible_list)?;
key!(supported_sanitizers, SanitizerSet)?;
key!(default_adjusted_cabi, Option<Abi>)?;
key!(generate_arange_section, bool);


@ -12,7 +12,7 @@
//
// We have opted for these instead of one target per processor (e.g., `cortex-m0`, `cortex-m3`,
// etc) because the differences between some processors like the cortex-m0 and cortex-m1 are almost
// non-existent from the POV of codegen so it doesn't make sense to have separate targets for them.
// nonexistent from the POV of codegen so it doesn't make sense to have separate targets for them.
// And if differences exist between two processors under the same target, rustc flags can be used to
// optimize for one processor or the other.
//


@ -51,7 +51,7 @@ pub(super) enum CandidateSource {
BuiltinImpl,
/// An assumption from the environment.
///
/// More precicely we've used the `n-th` assumption in the `param_env`.
/// More precisely we've used the `n-th` assumption in the `param_env`.
///
/// ## Examples
///
@ -241,7 +241,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
// HACK: `_: Trait` is ambiguous, because it may be satisfied via a builtin rule,
// object bound, alias bound, etc. We are unable to determine this until we can at
// least structually resolve the type one layer.
// least structurally resolve the type one layer.
if goal.predicate.self_ty().is_ty_var() {
return vec![Candidate {
source: CandidateSource::BuiltinImpl,


@ -156,8 +156,8 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
}
}
GenericArgKind::Const(c) => {
if let ty::ConstKind::Bound(debrujin, b) = c.kind() {
assert_eq!(debrujin, ty::INNERMOST);
if let ty::ConstKind::Bound(debruijn, b) = c.kind() {
assert_eq!(debruijn, ty::INNERMOST);
opt_values[b] = Some(*original_value);
}
}
@ -177,7 +177,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
// As an optimization we sometimes avoid creating a new inference variable here.
//
// All new inference variables we create start out in the current universe of the caller.
// This is conceptionally wrong as these inference variables would be able to name
// This is conceptually wrong as these inference variables would be able to name
// more placeholders then they should be able to. However the inference variables have
// to "come from somewhere", so by equating them with the original values of the caller
// later on, we pull them down into their correct universe again.


@ -591,7 +591,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
Some(self.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS))
}
// These types cannot be structurally decomposed into constitutent
// These types cannot be structurally decomposed into constituent
// types, and therefore have no built-in auto impl.
ty::Dynamic(..)
| ty::Param(..)


@ -467,7 +467,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
}
}
self.tcx.sess.delay_span_bug(DUMMY_SP, "expected fullfillment errors")
self.tcx.sess.delay_span_bug(DUMMY_SP, "expected fulfillment errors")
}
/// Reports that an overflow has occurred and halts compilation. We
@ -2056,7 +2056,7 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
if candidates.iter().any(|c| matches!(c.similarity, CandidateSimilarity::Exact { .. })) {
// If any of the candidates is a perfect match, we don't want to show all of them.
// This is particularly relevant for the case of numeric types (as they all have the
// same cathegory).
// same category).
candidates.retain(|c| matches!(c.similarity, CandidateSimilarity::Exact { .. }));
}
candidates


@ -1381,7 +1381,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
}
}
// Issue #104961, we need to add parentheses properly for compond expressions
// Issue #104961, we need to add parentheses properly for compound expressions
// for example, `x.starts_with("hi".to_string() + "you")`
// should be `x.starts_with(&("hi".to_string() + "you"))`
let Some(body_id) = self.tcx.hir().maybe_body_owned_by(obligation.cause.body_id) else { return false; };


@ -14,7 +14,7 @@ use rustc_span::DUMMY_SP;
use super::outlives_bounds::InferCtxtExt;
pub enum CopyImplementationError<'tcx> {
InfrigingFields(Vec<(&'tcx ty::FieldDef, Ty<'tcx>, InfringingFieldsReason<'tcx>)>),
InfringingFields(Vec<(&'tcx ty::FieldDef, Ty<'tcx>, InfringingFieldsReason<'tcx>)>),
NotAnAdt,
HasDestructor,
}
@ -125,7 +125,7 @@ pub fn type_allowed_to_implement_copy<'tcx>(
}
if !infringing.is_empty() {
return Err(CopyImplementationError::InfrigingFields(infringing));
return Err(CopyImplementationError::InfringingFields(infringing));
}
if adt.has_dtor(tcx) {


@ -203,7 +203,7 @@ fn do_normalize_predicates<'tcx>(
}
};
debug!("do_normalize_predictes: normalized predicates = {:?}", predicates);
debug!("do_normalize_predicates: normalized predicates = {:?}", predicates);
// We can use the `elaborated_env` here; the region code only
// cares about declarations like `'a: 'b`.


@ -888,7 +888,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
let c1 = tcx.expand_abstract_consts(c1);
let c2 = tcx.expand_abstract_consts(c2);
debug!(
"evalaute_predicate_recursively: equating consts:\nc1= {:?}\nc2= {:?}",
"evaluate_predicate_recursively: equating consts:\nc1= {:?}\nc2= {:?}",
c1, c2
);


@ -285,7 +285,7 @@ pub(super) fn sanity_check_layout<'tcx>(
{
// These are never actually accessed anyway, so we can skip the coherence check
// for them. They also fail that check, since they have
// `Aggregate`/`Uninhbaited` ABI even when the main type is
// `Aggregate`/`Uninhabited` ABI even when the main type is
// `Scalar`/`ScalarPair`. (Note that sometimes, variants with fields have size
// 0, and sometimes, variants without fields have non-0 size.)
continue;


@ -243,7 +243,7 @@ fn drop_tys_helper<'tcx>(
} else {
let field_tys = adt_def.all_fields().map(|field| {
let r = tcx.type_of(field.did).subst(tcx, substs);
debug!("drop_tys_helper: Subst into {:?} with {:?} gettng {:?}", field, substs, r);
debug!("drop_tys_helper: Subst into {:?} with {:?} getting {:?}", field, substs, r);
r
});
if only_significant {


@ -4,7 +4,7 @@ error[E0026]: struct `Thing` does not have a field named `z`
LL | Thing { x, y, z } => {}
| ^ struct `Thing` does not have this field
|
= note: This error indicates that a struct pattern attempted to extract a non-existent field from a struct. Struct fields are identified by the name used before the colon : so struct patterns should resemble the declaration of the struct type being matched.
= note: This error indicates that a struct pattern attempted to extract a nonexistent field from a struct. Struct fields are identified by the name used before the colon : so struct patterns should resemble the declaration of the struct type being matched.
If you are using shorthand field patterns but want to refer to the struct field by a different name, you should rename it explicitly.

Some files were not shown because too many files have changed in this diff.