From a44a4edc0eb4361f998646891ba3728d3c9d9694 Mon Sep 17 00:00:00 2001
From: cui fliter
Date: Tue, 14 Nov 2023 23:06:50 +0800
Subject: [PATCH] Fix some typos

Signed-off-by: cui fliter
---
 compiler/rustc_codegen_llvm/src/callee.rs               | 2 +-
 compiler/rustc_const_eval/src/interpret/discriminant.rs | 4 ++--
 compiler/rustc_data_structures/src/sharded.rs           | 6 +++---
 compiler/rustc_data_structures/src/sync/lock.rs         | 4 ++--
 compiler/rustc_errors/src/markdown/parse.rs             | 2 +-
 compiler/rustc_lint/src/reference_casting.rs            | 2 +-
 compiler/rustc_parse/src/parser/ty.rs                   | 2 +-
 compiler/rustc_passes/src/dead.rs                       | 2 +-
 compiler/rustc_privacy/src/lib.rs                       | 2 +-
 9 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/compiler/rustc_codegen_llvm/src/callee.rs b/compiler/rustc_codegen_llvm/src/callee.rs
index d5778757caa..0c9f7f19551 100644
--- a/compiler/rustc_codegen_llvm/src/callee.rs
+++ b/compiler/rustc_codegen_llvm/src/callee.rs
@@ -59,7 +59,7 @@ pub fn get_fn<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>, instance: Instance<'tcx>) ->
             // To avoid this, we set the Storage Class to "DllImport" so that
             // LLVM will prefix the name with `__imp_`. Ideally, we'd like the
             // existing logic below to set the Storage Class, but it has an
-            // exemption for MinGW for backwards compatability.
+            // exemption for MinGW for backwards compatibility.
             let llfn = cx.declare_fn(
                 &common::i686_decorated_name(
                     &dllimport,
diff --git a/compiler/rustc_const_eval/src/interpret/discriminant.rs b/compiler/rustc_const_eval/src/interpret/discriminant.rs
index fd173670374..d9f583c1d1f 100644
--- a/compiler/rustc_const_eval/src/interpret/discriminant.rs
+++ b/compiler/rustc_const_eval/src/interpret/discriminant.rs
@@ -119,7 +119,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         if matches!(ty.kind(), ty::Adt(def, ..) if def.variants().is_empty()) {
             throw_ub!(UninhabitedEnumVariantRead(index))
         }
-        // For consisteny with `write_discriminant`, and to make sure that
+        // For consistency with `write_discriminant`, and to make sure that
         // `project_downcast` cannot fail due to strange layouts, we declare immediate UB
         // for uninhabited variants.
         if op.layout().for_variant(self, index).abi.is_uninhabited() {
@@ -236,7 +236,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                 variant
             }
         };
-        // For consisteny with `write_discriminant`, and to make sure that `project_downcast` cannot fail due to strange layouts, we declare immediate UB for uninhabited variants.
+        // For consistency with `write_discriminant`, and to make sure that `project_downcast` cannot fail due to strange layouts, we declare immediate UB for uninhabited variants.
         if op.layout().for_variant(self, index).abi.is_uninhabited() {
             throw_ub!(UninhabitedEnumVariantRead(index))
         }
diff --git a/compiler/rustc_data_structures/src/sharded.rs b/compiler/rustc_data_structures/src/sharded.rs
index 29516fffd6a..639f05c9e52 100644
--- a/compiler/rustc_data_structures/src/sharded.rs
+++ b/compiler/rustc_data_structures/src/sharded.rs
@@ -79,7 +79,7 @@ impl Sharded {
     pub fn lock_shard_by_value(&self, _val: &K) -> LockGuard<'_, T> {
         match self {
             Self::Single(single) => {
-                // Syncronization is disabled so use the `lock_assume_no_sync` method optimized
+                // Synchronization is disabled so use the `lock_assume_no_sync` method optimized
                 // for that case.

                 // SAFETY: We know `is_dyn_thread_safe` was false when creating the lock thus
@@ -102,7 +102,7 @@ impl Sharded {
     pub fn lock_shard_by_index(&self, _i: usize) -> LockGuard<'_, T> {
         match self {
             Self::Single(single) => {
-                // Syncronization is disabled so use the `lock_assume_no_sync` method optimized
+                // Synchronization is disabled so use the `lock_assume_no_sync` method optimized
                 // for that case.

                 // SAFETY: We know `is_dyn_thread_safe` was false when creating the lock thus
@@ -111,7 +111,7 @@ impl Sharded {
             }
             #[cfg(parallel_compiler)]
             Self::Shards(shards) => {
-                // Syncronization is enabled so use the `lock_assume_sync` method optimized
+                // Synchronization is enabled so use the `lock_assume_sync` method optimized
                 // for that case.

                 // SAFETY (get_unchecked): The index gets ANDed with the shard mask, ensuring it is
diff --git a/compiler/rustc_data_structures/src/sync/lock.rs b/compiler/rustc_data_structures/src/sync/lock.rs
index 339aebbf81a..040a8aa6b63 100644
--- a/compiler/rustc_data_structures/src/sync/lock.rs
+++ b/compiler/rustc_data_structures/src/sync/lock.rs
@@ -38,7 +38,7 @@ mod maybe_sync {
         lock: &'a Lock,
         marker: PhantomData<&'a mut T>,

-        /// The syncronization mode of the lock. This is explicitly passed to let LLVM relate it
+        /// The synchronization mode of the lock. This is explicitly passed to let LLVM relate it
         /// to the original lock operation.
         mode: Mode,
     }
@@ -142,7 +142,7 @@ mod maybe_sync {
                 .then(|| LockGuard { lock: self, marker: PhantomData, mode })
         }

-        /// This acquires the lock assuming syncronization is in a specific mode.
+        /// This acquires the lock assuming synchronization is in a specific mode.
         ///
         /// Safety
         /// This method must only be called with `Mode::Sync` if `might_be_dyn_thread_safe` was
diff --git a/compiler/rustc_errors/src/markdown/parse.rs b/compiler/rustc_errors/src/markdown/parse.rs
index d3a08da6283..67e4963fddf 100644
--- a/compiler/rustc_errors/src/markdown/parse.rs
+++ b/compiler/rustc_errors/src/markdown/parse.rs
@@ -329,7 +329,7 @@ fn parse_with_end_pat<'a>(
     end_sep: &[u8],
     ignore_esc: bool,
 ) -> Option<(&'a [u8], &'a [u8])> {
-    // Find positions that start with the end seperator
+    // Find positions that start with the end separator
     for idx in (0..buf.len()).filter(|idx| buf[*idx..].starts_with(end_sep)) {
         if !ignore_esc && idx > 0 && buf[idx - 1] == b'\\' {
             continue;
diff --git a/compiler/rustc_lint/src/reference_casting.rs b/compiler/rustc_lint/src/reference_casting.rs
index d44691b5e9b..82483ac7dc0 100644
--- a/compiler/rustc_lint/src/reference_casting.rs
+++ b/compiler/rustc_lint/src/reference_casting.rs
@@ -155,7 +155,7 @@ fn is_cast_from_const_to_mut<'tcx>(

     let start_ty = cx.typeck_results().node_type(e.hir_id);
     if let ty::Ref(_, inner_ty, Mutability::Not) = start_ty.kind() {
-        // If an UnsafeCell method is involved we need to additionaly check the
+        // If an UnsafeCell method is involved we need to additionally check the
         // inner type for the presence of the Freeze trait (ie does NOT contain
         // an UnsafeCell), since in that case we would incorrectly lint on valid casts.
         //
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
index be2cbaf3020..dc0f1396523 100644
--- a/compiler/rustc_parse/src/parser/ty.rs
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -135,7 +135,7 @@ impl<'a> Parser<'a> {
         )
     }

-    /// Parse a type suitable for a field defintion.
+    /// Parse a type suitable for a field definition.
     /// The difference from `parse_ty` is that this version
     /// allows anonymous structs and unions.
     pub fn parse_ty_for_field_def(&mut self) -> PResult<'a, P> {
diff --git a/compiler/rustc_passes/src/dead.rs b/compiler/rustc_passes/src/dead.rs
index 2e8c58b0241..6b2b842543a 100644
--- a/compiler/rustc_passes/src/dead.rs
+++ b/compiler/rustc_passes/src/dead.rs
@@ -314,7 +314,7 @@ impl<'tcx> MarkSymbolVisitor<'tcx> {
        // for the `#[expect]` case.
        //
        // Note that an item can and will be duplicated on the worklist with different
-       // `ComesFromAllowExpect`, particulary if it was added from the
+       // `ComesFromAllowExpect`, particularly if it was added from the
        // `effective_visibilities` query or from the `#[allow]`/`#[expect]` checks,
        // this "duplication" is essential as otherwise a function with `#[expect]`
        // called from a `pub fn` may be falsely reported as not live, falsely
diff --git a/compiler/rustc_privacy/src/lib.rs b/compiler/rustc_privacy/src/lib.rs
index 4bb7e65747f..4b3b82c59cf 100644
--- a/compiler/rustc_privacy/src/lib.rs
+++ b/compiler/rustc_privacy/src/lib.rs
@@ -1756,7 +1756,7 @@ impl<'tcx> PrivateItemsInPublicInterfacesChecker<'tcx, '_> {
        //     fn from(_: Priv) -> Pub {...}
        // }
        //
-       // lints shouldn't be emmited even if `from` effective visibility
+       // lints shouldn't be emitted even if `from` effective visibility
        // is larger than `Priv` nominal visibility and if `Priv` can leak
        // in some scenarios due to type inference.
        let impl_ev = EffectiveVisibility::of_impl::(