Auto merge of #13044 - dzvon:fix-typo, r=Veykril

fix: a bunch of typos

This PR fixes some typos detected by [typos].

There are also some typos in function names, variable names, and file names, which I have
left as they are; I am more confident that typos in comments should be fixed.

[typos]: https://github.com/crate-ci/typos
bors 2022-08-17 14:59:02 +00:00
commit 82ff740501
19 changed files with 23 additions and 23 deletions

@@ -345,7 +345,7 @@ impl CargoActor
//
// Because cargo only outputs one JSON object per line, we can
// simply skip a line if it doesn't parse, which just ignores any
-// erroneus output.
+// erroneous output.
let mut error = String::new();
let mut read_at_least_one_message = false;
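
The comment above describes the strategy for reading cargo's JSON message stream; a minimal sketch of that idea (assuming `serde_json` and a made-up, simplified message type, not the actual `CargoActor` code) might look like:

```rust
use serde::Deserialize;

// Hypothetical, heavily simplified message shape; real cargo messages carry
// much more information.
#[derive(Deserialize)]
struct Message {
    reason: String,
}

// One JSON object per line: lines that fail to parse (stray, erroneous
// output) are collected as plain text instead of aborting the whole read.
fn parse_cargo_output(output: &str) -> (Vec<Message>, String) {
    let mut messages = Vec::new();
    let mut error = String::new();
    for line in output.lines() {
        match serde_json::from_str::<Message>(line) {
            Ok(msg) => messages.push(msg),
            Err(_) => {
                error.push_str(line);
                error.push('\n');
            }
        }
    }
    (messages, error)
}
```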

@@ -45,7 +45,7 @@ impl Attrs {
kind: ProcMacroKind::CustomDerive { helpers: Box::new([]) },
}),
-// `#[proc_macro_derive(Trait, attibutes(helper1, helper2, ...))]`
+// `#[proc_macro_derive(Trait, attributes(helper1, helper2, ...))]`
[
TokenTree::Leaf(Leaf::Ident(trait_name)),
TokenTree::Leaf(Leaf::Punct(comma)),
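
For context, the token shape matched here corresponds to how a derive macro declares its helper attributes; a hypothetical declaration of that form (not taken from rust-analyzer) looks like:

```rust
// In a proc-macro crate: `attributes(...)` lists the helper attributes the
// derive accepts, which is exactly the `attributes(helper1, helper2, ...)`
// token tree parsed above.
use proc_macro::TokenStream;

#[proc_macro_derive(MyTrait, attributes(helper1, helper2))]
pub fn derive_my_trait(input: TokenStream) -> TokenStream {
    let _ = input;
    // A real derive would parse `input` and emit an `impl MyTrait for ...`.
    TokenStream::new()
}
```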

@@ -616,7 +616,7 @@ impl ExpansionInfo {
let token_id = match token_id_in_attr_input {
Some(token_id) => token_id,
-// the token is not inside an attribute's input so do the lookup in the macro_arg as ususal
+// the token is not inside an attribute's input so do the lookup in the macro_arg as usual
None => {
let relative_range =
token.value.text_range().checked_sub(self.arg.value.text_range().start())?;

@@ -29,7 +29,7 @@ use super::remove_unused_param::range_to_remove;
// Assist: extract_module
//
-// Extracts a selected region as seperate module. All the references, visibility and imports are
+// Extracts a selected region as separate module. All the references, visibility and imports are
// resolved.
//
// ```
@@ -105,7 +105,7 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
//
//- Thirdly, resolving all the imports this includes removing paths from imports
// outside the module, shifting/cloning them inside new module, or shifting the imports, or making
-// new import statemnts
+// new import statements
//We are getting item usages and record_fields together, record_fields
//for change_visibility and usages for first point mentioned above in the process
@@ -661,7 +661,7 @@ fn check_intersection_and_push(
import_path: TextRange,
) {
if import_paths_to_be_removed.len() > 0 {
-// Text ranges recieved here for imports are extended to the
+// Text ranges received here for imports are extended to the
// next/previous comma which can cause intersections among them
// and later deletion of these can cause panics similar
// to reported in #11766. So to mitigate it, we
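
The hunks above all belong to the `extract_module` assist. As a rough, made-up illustration of the transformation it describes (not the doc-test from the source), extracting a selection wraps the selected items in a new module and adjusts visibility so existing references keep resolving:

```rust
// Before the assist: the selected items live at the top level.
struct Config;
fn load() -> Config { Config }

// After "Extract module" (illustrative output only): the selection moves into
// a new module and the items become `pub(crate)` so old call sites still work.
mod modname {
    pub(crate) struct Config;
    pub(crate) fn load() -> Config { Config }
}
```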

@@ -88,7 +88,7 @@ pub(crate) fn replace_turbofish_with_explicit_type(
},
);
} else if let Some(InferType(t)) = let_stmt.ty() {
-// If there's a type inferrence underscore, we can offer to replace it with the type in
+// If there's a type inference underscore, we can offer to replace it with the type in
// the turbofish.
// let x: _ = fn::<...>();
let underscore_range = t.syntax().text_range();
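
The comment spells out the rewrite this branch offers; a tiny made-up before/after (not the assist's actual doc-test):

```rust
fn example() {
    // Before: the type lives only in the turbofish, the annotation is `_`.
    let before: _ = Vec::<u8>::new();

    // After the assist: the underscore is replaced by the type from the
    // turbofish, which is then no longer needed on the call.
    let after: Vec<u8> = Vec::new();

    let _ = (before, after);
}
```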

@@ -75,7 +75,7 @@ pub(crate) fn for_generic_parameter(ty: &ast::ImplTraitType) -> SmolStr {
/// In current implementation, the function tries to get the name from
/// the following sources:
///
-/// * if expr is an argument to function/method, use paramter name
+/// * if expr is an argument to function/method, use parameter name
/// * if expr is a function/method call, use function name
/// * expression type name if it exists (E.g. `()`, `fn() -> ()` or `!` do not have names)
/// * fallback: `var_name`
@@ -85,7 +85,7 @@ pub(crate) fn for_generic_parameter(ty: &ast::ImplTraitType) -> SmolStr {
/// Currently it sticks to the first name found.
// FIXME: Microoptimize and return a `SmolStr` here.
pub(crate) fn for_variable(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) -> String {
-// `from_param` does not benifit from stripping
+// `from_param` does not benefit from stripping
// it need the largest context possible
// so we check firstmost
if let Some(name) = from_param(expr, sema) {
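
A hypothetical illustration of the first two sources listed above (invented names, not actual output of this module):

```rust
// Suppose a name is needed for the expression `compute_total()` below:
//   * as an argument to `submit`, the parameter name `order_total` applies;
//   * as a function call, the function name itself is the next source.
fn submit(order_total: u64) {
    let _ = order_total;
}

fn compute_total() -> u64 {
    42
}

fn demo() {
    submit(compute_total());
}
```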

@@ -173,7 +173,7 @@ impl FormatStrParser {
}
}
(State::Expr, ':') if chars.peek().copied() == Some(':') => {
-// path seperator
+// path separator
current_expr.push_str("::");
chars.next();
}
@@ -185,7 +185,7 @@ impl FormatStrParser {
current_expr = String::new();
self.state = State::FormatOpts;
} else {
-// We're inside of braced expression, assume that it's a struct field name/value delimeter.
+// We're inside of braced expression, assume that it's a struct field name/value delimiter.
current_expr.push(chr);
}
}
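
A stripped-down sketch of the two `:` cases handled above: a peeked second `:` keeps the pair as a path separator, while a lone `:` either starts the format options or, inside a braced expression, acts as a field delimiter (illustrative only, not the real parser):

```rust
// Classify each `:` the way the states above do: `::` stays part of the
// path, a single `:` switches to format options (or is a struct-literal
// field delimiter when we are inside braces).
fn classify(expr: &str) {
    let mut chars = expr.chars().peekable();
    while let Some(c) = chars.next() {
        if c == ':' {
            if chars.peek() == Some(&':') {
                chars.next();
                println!("`::` -> path separator");
            } else {
                println!("`:` -> format options / field delimiter");
            }
        }
    }
}

fn main() {
    classify("foo::bar:>8"); // prints the path-separator case, then the options case
}
```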

@@ -159,7 +159,7 @@ pub mod some_module {
pub struct ThiiiiiirdStruct;
// contains all letters from the query, but not in the beginning, displayed second
pub struct AfterThirdStruct;
-// contains all letters from the query in the begginning, displayed first
+// contains all letters from the query in the beginning, displayed first
pub struct ThirdStruct;
}

@@ -82,7 +82,7 @@ impl Definition {
}
/// Textual range of the identifier which will change when renaming this
-/// `Definition`. Note that some definitions, like buitin types, can't be
+/// `Definition`. Note that some definitions, like builtin types, can't be
/// renamed.
pub fn range_for_rename(self, sema: &Semantics<'_, RootDatabase>) -> Option<FileRange> {
let res = match self {

@@ -750,7 +750,7 @@ fn main() {
enum Foo { A }
fn main() {
// FIXME: this should not bail out but current behavior is such as the old algorithm.
-// ExprValidator::validate_match(..) checks types of top level patterns incorrecly.
+// ExprValidator::validate_match(..) checks types of top level patterns incorrectly.
match Foo::A {
ref _x => {}
Foo::A => {}

@@ -321,7 +321,7 @@ struct MatchState<'t> {
/// The KleeneOp of this sequence if we are in a repetition.
sep_kind: Option<RepeatKind>,
-/// Number of tokens of seperator parsed
+/// Number of tokens of separator parsed
sep_parsed: Option<usize>,
/// Matched meta variables bindings
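
The "separator" counted here is the token between repetition elements in a macro-by-example pattern, for instance the comma in the (unrelated, illustrative) macro below:

```rust
// `$($x:expr),*` repeats `$x:expr` with `,` as the separator token, the kind
// of token `sep_parsed` counts while matching.
macro_rules! sum {
    ($($x:expr),*) => { 0 $(+ $x)* };
}

fn main() {
    assert_eq!(sum!(1, 2, 3), 6);
}
```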

@@ -3,7 +3,7 @@
//!
//! Pure model is represented by the [`base_db::CrateGraph`] from another crate.
//!
-//! In this crate, we are conserned with "real world" project models.
+//! In this crate, we are concerned with "real world" project models.
//!
//! Specifically, here we have a representation for a Cargo project
//! ([`CargoWorkspace`]) and for manually specified layout ([`ProjectJson`]).

@@ -770,7 +770,7 @@ fn handle_rustc_crates(
queue.push_back(root_pkg);
while let Some(pkg) = queue.pop_front() {
// Don't duplicate packages if they are dependended on a diamond pattern
-// N.B. if this line is ommitted, we try to analyse over 4_800_000 crates
+// N.B. if this line is omitted, we try to analyse over 4_800_000 crates
// which is not ideal
if rustc_pkg_crates.contains_key(&pkg) {
continue;
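
That `contains_key` guard is the classic visited-set check for a breadth-first walk over a dependency graph; a self-contained sketch of the pattern (generic names, not the actual `handle_rustc_crates` code):

```rust
use std::collections::{HashMap, HashSet, VecDeque};

// Walk a dependency graph breadth-first, processing each package once even
// when it is reachable along several paths (the "diamond" case above).
fn walk(root: &str, deps: &HashMap<&str, Vec<&str>>) -> Vec<String> {
    let mut queue = VecDeque::new();
    queue.push_back(root);
    let mut seen = HashSet::new();
    let mut order = Vec::new();
    while let Some(pkg) = queue.pop_front() {
        // Without this guard, shared dependencies are expanded once per path
        // that reaches them, which blows up combinatorially.
        if !seen.insert(pkg) {
            continue;
        }
        order.push(pkg.to_string());
        if let Some(ds) = deps.get(pkg) {
            queue.extend(ds.iter().copied());
        }
    }
    order
}
```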

@@ -52,7 +52,7 @@ impl Logger {
// merge chalk filter to our main filter (from RA_LOG env).
//
// The acceptable syntax of CHALK_DEBUG is `target[span{field=value}]=level`.
-// As the value should only affect chalk crates, we'd better mannually
+// As the value should only affect chalk crates, we'd better manually
// specify the target. And for simplicity, CHALK_DEBUG only accept the value
// that specify level.
let chalk_level_dir = std::env::var("CHALK_DEBUG")
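
A rough sketch of that idea, reading only a level from `CHALK_DEBUG` and pinning the target ourselves (the target name and directive syntax here are illustrative; the actual filter setup in rust-analyzer differs):

```rust
// Build an env-filter style directive such as "chalk=info" from CHALK_DEBUG,
// so the user supplies only the level and the target stays fixed.
// The "chalk" target name is a placeholder, not the real one.
fn chalk_filter_directive() -> Option<String> {
    let level = std::env::var("CHALK_DEBUG").ok()?;
    let level = level.trim();
    if level.is_empty() {
        return None;
    }
    Some(format!("chalk={level}"))
}
```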

@@ -1,4 +1,4 @@
-//! Things which exist to solve practial issues, but which shouldn't exist.
+//! Things which exist to solve practical issues, but which shouldn't exist.
//!
//! Please avoid adding new usages of the functions in this module

@@ -64,7 +64,7 @@ pub struct FileId(pub u32);
/// Storage for all files read by rust-analyzer.
///
-/// For more informations see the [crate-level](crate) documentation.
+/// For more information see the [crate-level](crate) documentation.
#[derive(Default)]
pub struct Vfs {
interner: PathInterner,
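
The `interner: PathInterner` field hints at the standard interning pattern behind `FileId`; a minimal generic sketch of that pattern (not the actual vfs implementation):

```rust
use std::collections::HashMap;

// Map each distinct path to a small integer id and back: ids stay cheap to
// copy and compare, while the interner owns the path strings.
#[derive(Default)]
struct Interner {
    ids: HashMap<String, u32>,
    paths: Vec<String>,
}

impl Interner {
    fn intern(&mut self, path: &str) -> u32 {
        if let Some(&id) = self.ids.get(path) {
            return id;
        }
        let id = self.paths.len() as u32;
        self.ids.insert(path.to_owned(), id);
        self.paths.push(path.to_owned());
        id
    }

    fn lookup(&self, id: u32) -> &str {
        &self.paths[id as usize]
    }
}
```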

@@ -485,7 +485,7 @@ Mind the code--architecture gap: at the moment, we are using fewer feature flags
### Serialization
In Rust, it is easy (often too easy) to add serialization to any type by adding `#[derive(Serialize)]`.
-This easiness is misleading -- serializable types impose significant backwards compatability constraints.
+This easiness is misleading -- serializable types impose significant backwards compatibility constraints.
If a type is serializable, then it is a part of some IPC boundary.
You often don't control the other side of this boundary, so changing serializable types is hard.
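
A concrete, hypothetical instance of that constraint, assuming serde:

```rust
use serde::Serialize;

// Once this struct crosses an IPC boundary as JSON, renaming `message` or
// changing the representation of `severity` breaks consumers we don't
// control, even though the same change would otherwise be a private refactor.
#[derive(Serialize)]
struct DiagnosticPayload {
    message: String,
    severity: u8,
}
```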

@@ -655,7 +655,7 @@ function crateGraph(ctx: Ctx, full: boolean): Cmd {
html, body { margin:0; padding:0; overflow:hidden }
svg { position:fixed; top:0; left:0; height:100%; width:100% }
-/* Disable the graphviz backgroud and fill the polygons */
+/* Disable the graphviz background and fill the polygons */
.graph > polygon { display:none; }
:is(.node,.edge) polygon { fill: white; }

@@ -158,7 +158,7 @@ export const getPathForExecutable = memoizeAsync(
try {
// hmm, `os.homedir()` seems to be infallible
-// it is not mentioned in docs and cannot be infered by the type signature...
+// it is not mentioned in docs and cannot be inferred by the type signature...
const standardPath = vscode.Uri.joinPath(
vscode.Uri.file(os.homedir()),
".cargo",