* a rule
* access
* after
* amount
* annotations
* assignment
* assist
* associated
* attribute
* borrowed
* built-in type
* clarification
* command
* const
* constructor
* corresponding
* counterparts
* curlies
* dependencies
* deterministic
* diagnostic
* duplicates
* edge
* edited
* efficient
* elsewhere
* execution
* expression
* extensions
* extracted
* fill
* github
* helper
* heuristic
* incomplete
* indent end
* inlay
* invocation
* lifetime
* looking
* maybe
* move
* mutability
* mutable
* necessarily
* necessary
* negative
* nonexistent
* occurred
* offsets
* offsetted
* overridden
* parameters
* params
* params_and_where_preds_in_scope
* paredit
* parent
* parentheses
* prepended if
* punctuation
* receive
* receiver
* referring
* repeated
* representing
* semantically
* separately
* shouldnot
* siblings
* similar
* something's
* statement
* struct
* structure
* surprise
* the
* this
* transparent
* unimplemented
* unnamed
* unnecessary
* unneeded
* unreachable
* unterminated
* utilities
* variant
* variants
* visibility
* work around (v)
* workaround

Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>
Josh Soref 2023-04-13 18:35:00 -04:00
parent 2c251a7e2b
commit bc7d84c3ce
83 changed files with 137 additions and 136 deletions

@@ -10,7 +10,7 @@ perform github releases but they all tend to have their set of drawbacks.
 Additionally nothing handles deleting releases which we need for our rolling
 `dev` release.
-To handle all this this action rolls-its-own implementation using the
+To handle all this action rolls-its-own implementation using the
 actions/toolkit repository and packages published there. These run in a Docker
 container and take various inputs to orchestrate the release from the build.

@@ -32,7 +32,7 @@ jobs:
 shell: bash
 run: |
 git config --global user.email "runner@gha.local"
-git config --global user.name "Github Action"
+git config --global user.name "GitHub Action"
 rm Cargo.lock
 # Fix names for crates that were published before switch to kebab-case.
 cargo workspaces rename --from base-db base_db

@@ -29,7 +29,7 @@ jobs:
 shell: bash
 run: |
 git config --global user.email "runner@gha.local"
-git config --global user.name "Github Action"
+git config --global user.name "GitHub Action"
 # Remove r-a crates from the workspaces so we don't auto-publish them as well
 sed -i 's/ "crates\/\*"//' ./Cargo.toml
 cargo workspaces publish --yes --exact --from-git --no-git-commit --allow-dirty

.vscode/launch.json vendored

@@ -72,7 +72,7 @@
 },
 {
 // Used for testing the extension with a local build of the LSP server (in `target/release`)
-// with all other extendions loaded.
+// with all other extensions loaded.
 "name": "Run With Extensions",
 "type": "extensionHost",
 "request": "launch",

@@ -3808,7 +3808,7 @@ impl<'a> Parser<'a> {
 if self.eat_keyword(keywords::Else) || !cond.returns() {
 let sp = self.sess.source_map().next_point(lo);
 let mut err = self.diagnostic()
-.struct_span_err(sp, "missing condition for `if` statemement");
+.struct_span_err(sp, "missing condition for `if` statement");
 err.span_label(sp, "expected if condition here");
 return Err(err)
 }

@@ -42,7 +42,7 @@ const MAX_PATH_LEN: usize = 15;
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
 pub enum PrefixKind {
 /// Causes paths to always start with either `self`, `super`, `crate` or a crate-name.
-/// This is the same as plain, just that paths will start with `self` iprepended f the path
+/// This is the same as plain, just that paths will start with `self` prepended if the path
 /// starts with an identifier that is not a crate.
 BySelf,
 /// Causes paths to ignore imports in the local module.

@@ -502,7 +502,7 @@ impl Binding {
 pub fn is_upvar(&self, relative_to: ExprId) -> bool {
 match self.owner {
 Some(x) => {
-// We assign expression ids in a way that outer closures will recieve
+// We assign expression ids in a way that outer closures will receive
 // a lower id
 x.into_raw() < relative_to.into_raw()
 }

@@ -98,7 +98,7 @@ fn#19 main#20(#21)#21 {#22
 );
 }
 #[test]
-fn float_field_acces_macro_input() {
+fn float_field_access_macro_input() {
 check(
 r#"
 macro_rules! foo {

@@ -52,7 +52,7 @@ impl Attrs {
 }
 // This fn is intended for `#[proc_macro_derive(..)]` and `#[rustc_builtin_macro(..)]`, which have
-// the same strucuture.
+// the same structure.
 #[rustfmt::skip]
 pub(crate) fn parse_macro_name_and_helper_attrs(tt: &[TokenTree]) -> Option<(Name, Box<[Name]>)> {
 match tt {

@@ -887,7 +887,7 @@ mod module;
 //- /module.rs
 #![cfg(NEVER)]
-struct AlsoShoulntAppear;
+struct AlsoShouldNotAppear;
 "#,
 expect![[r#"
 crate

@@ -694,7 +694,7 @@ impl ExpansionInfo {
 (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned())
 }
 MacroCallKind::Attr { attr_args, .. } => {
-// try unshifting the the token id, if unshifting fails, the token resides in the non-item attribute input
+// try unshifting the token id, if unshifting fails, the token resides in the non-item attribute input
 // note that the `TokenExpander::map_id_up` earlier only unshifts for declarative macros, so we don't double unshift with this
 match self.macro_arg_shift.unshift(token_id) {
 Some(unshifted) => {

@@ -207,7 +207,7 @@ impl ExprValidator {
 let report = compute_match_usefulness(&cx, &m_arms, scrut_ty);
-// FIXME Report unreacheble arms
+// FIXME Report unreachable arms
 // https://github.com/rust-lang/rust/blob/f31622a50/compiler/rustc_mir_build/src/thir/pattern/check_match.rs#L200
 let witnesses = report.non_exhaustiveness_witnesses;

@@ -82,7 +82,7 @@ fn expand_or_pat(pat: &Pat) -> Vec<&Pat> {
 pats
 }
-/// [Constructor] uses this in umimplemented variants.
+/// [Constructor] uses this in unimplemented variants.
 /// It allows porting match expressions from upstream algorithm without losing semantics.
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
 pub(super) enum Void {}

@@ -1,4 +1,4 @@
-//! Pattern untilities.
+//! Pattern utilities.
 //!
 //! Originates from `rustc_hir::pat_util`

@@ -755,7 +755,7 @@ pub(crate) enum Reachability {
 /// The arm is reachable. This additionally carries a set of or-pattern branches that have been
 /// found to be unreachable despite the overall arm being reachable. Used only in the presence
 /// of or-patterns, otherwise it stays empty.
-// FIXME: store ureachable subpattern IDs
+// FIXME: store unreachable subpattern IDs
 Reachable,
 /// The arm is unreachable.
 Unreachable,

@@ -483,7 +483,7 @@ pub(crate) struct InferenceContext<'a> {
 current_closure: Option<ClosureId>,
 /// Stores the list of closure ids that need to be analyzed before this closure. See the
 /// comment on `InferenceContext::sort_closures`
-closure_dependecies: FxHashMap<ClosureId, Vec<ClosureId>>,
+closure_dependencies: FxHashMap<ClosureId, Vec<ClosureId>>,
 deferred_closures: FxHashMap<ClosureId, Vec<(Ty, Ty, Vec<Ty>, ExprId)>>,
 }

@@ -555,7 +555,7 @@ impl<'a> InferenceContext<'a> {
 current_captures: vec![],
 current_closure: None,
 deferred_closures: FxHashMap::default(),
-closure_dependecies: FxHashMap::default(),
+closure_dependencies: FxHashMap::default(),
 }
 }

@@ -756,7 +756,7 @@ impl InferenceContext<'_> {
 let mut deferred_closures = mem::take(&mut self.deferred_closures);
 let mut dependents_count: FxHashMap<ClosureId, usize> =
 deferred_closures.keys().map(|x| (*x, 0)).collect();
-for (_, deps) in &self.closure_dependecies {
+for (_, deps) in &self.closure_dependencies {
 for dep in deps {
 *dependents_count.entry(*dep).or_default() += 1;
 }

@@ -768,7 +768,7 @@
 if let Some(d) = deferred_closures.remove(&x) {
 result.push((x, d));
 }
-for dep in self.closure_dependecies.get(&x).into_iter().flat_map(|x| x.iter()) {
+for dep in self.closure_dependencies.get(&x).into_iter().flat_map(|x| x.iter()) {
 let cnt = dependents_count.get_mut(dep).unwrap();
 *cnt -= 1;
 if *cnt == 0 {

@@ -287,7 +287,7 @@ impl<'a> InferenceContext<'a> {
 .intern(Interner);
 self.deferred_closures.entry(closure_id).or_default();
 if let Some(c) = self.current_closure {
-self.closure_dependecies.entry(c).or_default().push(closure_id);
+self.closure_dependencies.entry(c).or_default().push(closure_id);
 }
 (Some(closure_id), closure_ty, None)
 }

@@ -349,7 +349,7 @@ impl<'a> InferenceContext<'a> {
 self.table.resolve_completely(callee_ty.clone()).kind(Interner)
 {
 if let Some(par) = self.current_closure {
-self.closure_dependecies.entry(par).or_default().push(*c);
+self.closure_dependencies.entry(par).or_default().push(*c);
 }
 self.deferred_closures.entry(*c).or_default().push((
 derefed_callee.clone(),

@@ -148,7 +148,7 @@ pub type Guidance = chalk_solve::Guidance<Interner>;
 pub type WhereClause = chalk_ir::WhereClause<Interner>;
 /// A constant can have reference to other things. Memory map job is holding
-/// the neccessary bits of memory of the const eval session to keep the constant
+/// the necessary bits of memory of the const eval session to keep the constant
 /// meaningful.
 #[derive(Debug, Default, Clone, PartialEq, Eq)]
 pub struct MemoryMap(pub HashMap<usize, Vec<u8>>);

@@ -55,7 +55,7 @@ pub struct Local {
 /// This is what is implemented in miri today. Are these the semantics we want for MIR? Is this
 /// something we can even decide without knowing more about Rust's memory model?
 ///
-/// **Needs clarifiation:** Is loading a place that has its variant index set well-formed? Miri
+/// **Needs clarification:** Is loading a place that has its variant index set well-formed? Miri
 /// currently implements it, but it seems like this may be something to check against in the
 /// validator.
 #[derive(Debug, PartialEq, Eq, Clone)]

@@ -110,7 +110,7 @@ fn place_case(lvalue: &Place) -> ProjectionCase {
 /// Returns a map from basic blocks to the set of locals that might be ever initialized before
 /// the start of the block. Only `StorageDead` can remove something from this map, and we ignore
-/// `Uninit` and `drop` and similars after initialization.
+/// `Uninit` and `drop` and similar after initialization.
 fn ever_initialized_map(body: &MirBody) -> ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> {
 let mut result: ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> =
 body.basic_blocks.iter().map(|x| (x.0, ArenaMap::default())).collect();

@@ -123,7 +123,7 @@ impl Interval {
 }
 fn write_from_interval(&self, memory: &mut Evaluator<'_>, interval: Interval) -> Result<()> {
-// FIXME: this could be more efficent
+// FIXME: this could be more efficient
 let bytes = &interval.get(memory)?.to_vec();
 memory.write_memory(self.addr, bytes)
 }

@@ -692,7 +692,7 @@ impl Evaluator<'_> {
 Owned(r[0..lc.len()].into())
 }
 BinOp::Shl | BinOp::Shr => {
-let shift_amout = if r128 < 0 {
+let shift_amount = if r128 < 0 {
 return Err(MirEvalError::Panic(format!("Overflow in {op:?}")));
 } else if r128 > 128 {
 return Err(MirEvalError::Panic(format!("Overflow in {op:?}")));

@@ -700,8 +700,8 @@ impl Evaluator<'_> {
 r128 as u8
 };
 let r = match op {
-BinOp::Shl => l128 << shift_amout,
-BinOp::Shr => l128 >> shift_amout,
+BinOp::Shl => l128 << shift_amount,
+BinOp::Shr => l128 >> shift_amount,
 _ => unreachable!(),
 };
 Owned(r.to_le_bytes()[0..lc.len()].into())

@@ -966,7 +966,7 @@ impl Evaluator<'_> {
 fn make_by_layout(
 &mut self,
-size: usize, // Not neccessarily equal to variant_layout.size
+size: usize, // Not necessarily equal to variant_layout.size
 variant_layout: &Layout,
 tag: Option<(usize, usize, i128)>,
 values: impl Iterator<Item = Interval>,

@@ -1481,7 +1481,7 @@ impl Evaluator<'_> {
 is_dyn_method(self.db, self.trait_env.clone(), def, generic_args.clone())
 {
 // In the layout of current possible receiver, which at the moment of writing this code is one of
-// `&T`, `&mut T`, `Box<T>`, `Rc<T>`, `Arc<T>`, and `Pin<P>` where `P` is one of possible recievers,
+// `&T`, `&mut T`, `Box<T>`, `Rc<T>`, `Arc<T>`, and `Pin<P>` where `P` is one of possible receivers,
 // the vtable is exactly in the `[ptr_size..2*ptr_size]` bytes. So we can use it without branching on
 // the type.
 let ty = self

@@ -1206,7 +1206,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
 );
 let prev_label = if let Some(label) = label {
 // We should generate the end now, to make sure that it wouldn't change later. It is
-// bad as we may emit end (unneccessary unreachable block) for unterminating loop, but
+// bad as we may emit end (unnecessary unreachable block) for unterminating loop, but
 // it should not affect correctness.
 self.current_loop_end()?;
 self.labeled_loop_blocks

@@ -1278,7 +1278,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
 /// the appropriated places.
 fn push_storage_live(&mut self, b: BindingId, current: BasicBlockId) -> Result<()> {
 // Current implementation is wrong. It adds no `StorageDead` at the end of scope, and before each break
-// and continue. It just add a `StorageDead` before the `StorageLive`, which is not wrong, but unneeeded in
+// and continue. It just add a `StorageDead` before the `StorageLive`, which is not wrong, but unneeded in
 // the proper implementation. Due this limitation, implementing a borrow checker on top of this mir will falsely
 // allow this:
 //

@@ -23,7 +23,7 @@ impl MirLowerCtx<'_> {
 /// mismatched path block is `None`.
 ///
 /// By default, it will create a new block for mismatched path. If you already have one, you can provide it with
-/// `current_else` argument to save an unneccessary jump. If `current_else` isn't `None`, the result mismatched path
+/// `current_else` argument to save an unnecessary jump. If `current_else` isn't `None`, the result mismatched path
 /// wouldn't be `None` as well. Note that this function will add jumps to the beginning of the `current_else` block,
 /// so it should be an empty block.
 pub(super) fn pattern_match(

@@ -1060,7 +1060,7 @@ fn infix_parse<T, S>(_state: S, _level_code: &Fn(S)) -> T {
 loop {}
 }
-fn parse_arule() {
+fn parse_a_rule() {
 infix_parse((), &(|_recurse| ()))
 }
 "#,

@@ -4250,7 +4250,7 @@ impl Trait for () {
 }
 #[test]
-fn associted_type_in_struct_expr_path_enum() {
+fn associated_type_in_struct_expr_path_enum() {
 // FIXME: All annotation should be resolvable.
 // For lines marked as unstable, see rust-lang/rust#86935.
 // FIXME: Remove the comments once stablized.

@@ -801,7 +801,7 @@ fn precise_macro_call_location(
 ast: &MacroCallKind,
 db: &dyn HirDatabase,
 ) -> (InFile<SyntaxNodePtr>, Option<TextRange>, Option<String>, MacroKind) {
-// FIXME: maaybe we actually want slightly different ranges for the different macro diagnostics
+// FIXME: maybe we actually want slightly different ranges for the different macro diagnostics
 // - e.g. the full attribute for macro errors, but only the name for name resolution
 match ast {
 MacroCallKind::FnLike { ast_id, .. } => {

@@ -1522,7 +1522,7 @@ impl DefWithBody {
 match source_map.expr_syntax(expr) {
 Ok(expr) => acc.push(MissingUnsafe { expr }.into()),
 Err(SyntheticSyntax) => {
-// FIXME: Here and eslwhere in this file, the `expr` was
+// FIXME: Here and elsewhere in this file, the `expr` was
 // desugared, report or assert that this doesn't happen.
 }
 }

@@ -3654,7 +3654,7 @@ impl Type {
 self.as_adt()
 .and_then(|a| a.lifetime(db).and_then(|lt| Some((&lt.name).to_smol_str())))
 .into_iter()
-// add the type and const paramaters
+// add the type and const parameters
 .chain(self.type_and_const_arguments(db))
 }

@@ -252,7 +252,7 @@ impl Foo for S {
 }
 #[test]
-fn test_copied_overriden_members() {
+fn test_copied_overridden_members() {
 check_assist(
 add_missing_impl_members,
 r#"

@@ -203,7 +203,7 @@ fn relevance_score(
 // get the distance between the imported path and the current module
 // (prefer items that are more local)
 Some((item_module, current_module)) => {
-score -= module_distance_hueristic(db, current_module, &item_module) as i32;
+score -= module_distance_heuristic(db, current_module, &item_module) as i32;
 }
 // could not find relevant modules, so just use the length of the path as an estimate

@@ -214,7 +214,7 @@
 }
 /// A heuristic that gives a higher score to modules that are more separated.
-fn module_distance_hueristic(db: &dyn HirDatabase, current: &Module, item: &Module) -> usize {
+fn module_distance_heuristic(db: &dyn HirDatabase, current: &Module, item: &Module) -> usize {
 // get the path starting from the item to the respective crate roots
 let mut current_path = current.path_to_root(db);
 let mut item_path = item.path_to_root(db);

@@ -504,7 +504,7 @@ fn main() {
 }
 #[test]
-fn ignore_statements_aftert_if() {
+fn ignore_statements_after_if() {
 check_assist_not_applicable(
 convert_to_guarded_return,
 r#"

@@ -374,7 +374,7 @@ struct OutlivedLocal {
 /// Container of local variable usages
 ///
-/// Semanticall same as `UsageSearchResult`, but provides more convenient interface
+/// Semantically same as `UsageSearchResult`, but provides more convenient interface
 struct LocalUsages(ide_db::search::UsageSearchResult);
 impl LocalUsages {

@@ -1291,8 +1291,8 @@ fn find_non_trait_impl(trait_impl: &SyntaxNode) -> Option<ast::Impl> {
 let as_impl = ast::Impl::cast(trait_impl.clone())?;
 let impl_type = Some(impl_type_name(&as_impl)?);
-let sibblings = trait_impl.parent()?.children();
-sibblings
+let siblings = trait_impl.parent()?.children();
+siblings
 .filter_map(ast::Impl::cast)
 .find(|s| impl_type_name(s) == impl_type && !is_trait_impl(s))
 }

@@ -357,7 +357,7 @@ impl Module {
 fn change_visibility(&mut self, record_fields: Vec<SyntaxNode>) {
 let (mut replacements, record_field_parents, impls) =
-get_replacements_for_visibilty_change(&mut self.body_items, false);
+get_replacements_for_visibility_change(&mut self.body_items, false);
 let mut impl_items: Vec<ast::Item> = impls
 .into_iter()

@@ -366,7 +366,7 @@ impl Module {
 .collect();
 let (mut impl_item_replacements, _, _) =
-get_replacements_for_visibilty_change(&mut impl_items, true);
+get_replacements_for_visibility_change(&mut impl_items, true);
 replacements.append(&mut impl_item_replacements);

@@ -824,7 +824,7 @@ fn does_source_exists_outside_sel_in_same_mod(
 source_exists_outside_sel_in_same_mod
 }
-fn get_replacements_for_visibilty_change(
+fn get_replacements_for_visibility_change(
 items: &mut [ast::Item],
 is_clone_for_updated: bool,
 ) -> (

@@ -1236,7 +1236,8 @@ mod modname {
 }
 #[test]
-fn test_extract_module_for_correspoding_adt_of_impl_present_in_same_mod_but_not_in_selection() {
+fn test_extract_module_for_corresponding_adt_of_impl_present_in_same_mod_but_not_in_selection()
+{
 check_assist(
 extract_module,
 r"

@@ -1006,7 +1006,7 @@ enum X<'a, 'b, 'x> {
 }
 #[test]
-fn test_extract_struct_with_liftime_type_const() {
+fn test_extract_struct_with_lifetime_type_const() {
 check_assist(
 extract_struct_from_enum_variant,
 r#"

@@ -324,7 +324,7 @@ fn self_name(ast_func: &ast::Fn) -> Option<String> {
 self_partial_type(ast_func).map(|name| to_lower_snake_case(&name))
 }
-/// Heper function to get the name of the type of `self`
+/// Helper function to get the name of the type of `self`
 fn self_type(ast_func: &ast::Fn) -> Option<ast::Type> {
 ast_func.syntax().ancestors().find_map(ast::Impl::cast).and_then(|i| i.self_ty())
 }

@@ -350,7 +350,7 @@ fn self_type_without_lifetimes(ast_func: &ast::Fn) -> Option<String> {
 Some(name)
 }
-/// Heper function to get the name of the type of `self` without generic arguments
+/// Helper function to get the name of the type of `self` without generic arguments
 fn self_partial_type(ast_func: &ast::Fn) -> Option<String> {
 let mut self_type = self_type(ast_func)?.to_string();
 if let Some(idx) = self_type.find(|c| ['<', ' '].contains(&c)) {

@@ -893,14 +893,14 @@ fn filter_bounds_in_scope(
 let target_impl = target.parent().ancestors().find_map(ast::Impl::cast)?;
 let target_impl = ctx.sema.to_def(&target_impl)?;
 // It's sufficient to test only the first element of `generic_params` because of the order of
-// insertion (see `relevant_parmas_and_where_clauses()`).
+// insertion (see `params_and_where_preds_in_scope()`).
 let def = generic_params.first()?.self_ty_param.parent();
 if def != hir::GenericDef::Impl(target_impl) {
 return None;
 }
 // Now we know every element that belongs to an impl would be in scope at `target`, we can
-// filter them out just by lookint at their parent.
+// filter them out just by looking at their parent.
 generic_params.retain(|it| !matches!(it.self_ty_param.parent(), hir::GenericDef::Impl(_)));
 where_preds.retain(|it| {
 it.node.syntax().parent().and_then(|it| it.parent()).and_then(ast::Impl::cast).is_none()

@@ -1087,7 +1087,7 @@ fn calculate_necessary_visibility(
 }
 }
-// This is never intended to be used as a generic graph strucuture. If there's ever another need of
+// This is never intended to be used as a generic graph structure. If there's ever another need of
 // graph algorithm, consider adding a library for that (and replace the following).
 /// Minimally implemented directed graph structure represented by adjacency list.
 struct Graph {

@@ -2380,7 +2380,7 @@ mod s {
 }
 #[test]
-fn create_method_with_cursor_anywhere_on_call_expresion() {
+fn create_method_with_cursor_anywhere_on_call_expression() {
 check_assist(
 generate_function,
 r"

@@ -2487,7 +2487,7 @@ fn foo() {s::S::bar();}
 }
 #[test]
-fn create_static_method_with_cursor_anywhere_on_call_expresion() {
+fn create_static_method_with_cursor_anywhere_on_call_expression() {
 check_assist(
 generate_function,
 r"

@@ -174,7 +174,7 @@ pub(crate) fn generate_getter_impl(
 // this buf inserts a newline at the end of a getter
 // automatically, if one wants to add one more newline
 // for separating it from other assoc items, that needs
-// to be handled spearately
+// to be handled separately
 let mut getter_buf =
 generate_getter_from_info(ctx, &getter_info, record_field_info);

@@ -98,9 +98,9 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
 .fields()
 .enumerate()
 .filter_map(|(i, f)| {
-let contructor = trivial_constructors[i].clone();
-if contructor.is_some() {
-contructor
+let constructor = trivial_constructors[i].clone();
+if constructor.is_some() {
+constructor
 } else {
 Some(f.name()?.to_string())
 }

@@ -148,7 +148,7 @@ macro_rules! num {
 #[test]
 fn inline_macro_simple_not_applicable_broken_macro() {
 // FIXME: This is a bug. The macro should not expand, but it's
-// the same behaviour as the "Expand Macro Recursively" commmand
+// the same behaviour as the "Expand Macro Recursively" command
 // so it's presumably OK for the time being.
 check_assist(
 inline_macro,

@@ -66,7 +66,7 @@ fn generate_fn_def_assist(
 // if we have a self reference, use that
 Some(NeedsLifetime::SelfParam(self_param))
 } else {
-// otherwise, if there's a single reference parameter without a named liftime, use that
+// otherwise, if there's a single reference parameter without a named lifetime, use that
 let fn_params_without_lifetime: Vec<_> = param_list
 .params()
 .filter_map(|param| match param.ty() {

@@ -79,7 +79,7 @@ fn generate_fn_def_assist(
 match fn_params_without_lifetime.len() {
 1 => Some(fn_params_without_lifetime.into_iter().next()?),
 0 => None,
-// multiple unnnamed is invalid. assist is not applicable
+// multiple unnamed is invalid. assist is not applicable
 _ => return None,
 }
 };

@@ -386,7 +386,7 @@ fn foo() {
 }
 #[test]
-fn pull_assignment_up_if_missing_assigment_not_applicable() {
+fn pull_assignment_up_if_missing_assignment_not_applicable() {
 check_assist_not_applicable(
 pull_assignment_up,
 r#"

@@ -401,7 +401,7 @@ fn foo() {
 }
 #[test]
-fn pull_assignment_up_match_missing_assigment_not_applicable() {
+fn pull_assignment_up_match_missing_assignment_not_applicable() {
 check_assist_not_applicable(
 pull_assignment_up,
 r#"

@@ -507,7 +507,7 @@ fn main() {
 }
 #[test]
-fn struct_method_over_stuct_instance() {
+fn struct_method_over_struct_instance() {
 check_assist_not_applicable(
 qualify_method_call,
 r#"

@@ -525,7 +525,7 @@ fn main() {
 }
 #[test]
-fn trait_method_over_stuct_instance() {
+fn trait_method_over_struct_instance() {
 check_assist_not_applicable(
 qualify_method_call,
 r#"

@@ -124,7 +124,7 @@ mod tests {
 }
 #[test]
-fn remove_parens_doesnt_apply_weird_syntax_and_adge_cases() {
+fn remove_parens_doesnt_apply_weird_syntax_and_edge_cases() {
 // removing `()` would break code because {} would be counted as the loop/if body
 check_assist_not_applicable(remove_parentheses, r#"fn f() { for _ in $0(0..{3}) {} }"#);
 check_assist_not_applicable(remove_parentheses, r#"fn f() { for _ in $0(S {}) {} }"#);

@@ -232,7 +232,7 @@ fn b() { foo( ) }
 }
 #[test]
-fn remove_unused_surrounded_by_parms() {
+fn remove_unused_surrounded_by_params() {
 check_assist(
 remove_unused_param,
 r#"

@@ -31,14 +31,14 @@ pub(crate) fn replace_string_with_char(acc: &mut Assists, ctx: &AssistContext<'_
 if value.chars().take(2).count() != 1 {
 return None;
 }
-let quote_offets = token.quote_offsets()?;
+let quote_offsets = token.quote_offsets()?;
 acc.add(
 AssistId("replace_string_with_char", AssistKind::RefactorRewrite),
 "Replace string with char",
 target,
 |edit| {
-let (left, right) = quote_offets.quotes;
+let (left, right) = quote_offsets.quotes;
 edit.replace(left, '\'');
 edit.replace(right, '\'');
 if value == "'" {

@@ -609,14 +609,14 @@ fn classify_name_ref(
 _ => false,
 };
-let reciever_is_part_of_indivisible_expression = match &receiver {
+let receiver_is_part_of_indivisible_expression = match &receiver {
 Some(ast::Expr::IfExpr(_)) => {
 let next_token_kind = next_non_trivia_token(name_ref.syntax().clone()).map(|t| t.kind());
 next_token_kind == Some(SyntaxKind::ELSE_KW)
 },
 _ => false
 };
-if reciever_is_part_of_indivisible_expression {
+if receiver_is_part_of_indivisible_expression {
 return None;
 }

@@ -133,7 +133,7 @@ pub use crate::{
 ///
 /// Another case where this would be instrumental is macro expansion. We want to
 /// insert a fake ident and re-expand code. There's `expand_speculative` as a
-/// work-around for this.
+/// workaround for this.
 ///
 /// A different use-case is completion of injection (examples and links in doc
 /// comments). When computing completion for a path in a doc-comment, you want

@@ -74,7 +74,7 @@ fn render(
 item.insert_text(banged_name(&escaped_name)).lookup_by(banged_name(&name));
 }
 _ => {
-cov_mark::hit!(dont_insert_macro_call_parens_unncessary);
+cov_mark::hit!(dont_insert_macro_call_parens_unnecessary);
 item.insert_text(escaped_name);
 }
 };

@@ -140,8 +140,8 @@ mod tests {
 use crate::tests::check_edit;
 #[test]
-fn dont_insert_macro_call_parens_unncessary() {
-cov_mark::check!(dont_insert_macro_call_parens_unncessary);
+fn dont_insert_macro_call_parens_unnecessary() {
+cov_mark::check!(dont_insert_macro_call_parens_unnecessary);
 check_edit(
 "frobnicate",
 r#"

@@ -105,7 +105,7 @@ fn completion_list_with_config(
 include_keywords: bool,
 trigger_character: Option<char>,
 ) -> String {
-// filter out all but one builtintype completion for smaller test outputs
+// filter out all but one built-in type completion for smaller test outputs
 let items = get_all_items(config, ra_fixture, trigger_character);
 let items = items
 .into_iter()

@@ -667,7 +667,7 @@ fn main() {
 }
 #[test]
-fn varaiant_with_struct() {
+fn variant_with_struct() {
 check_empty(
 r#"
 pub struct YoloVariant {

@@ -81,7 +81,7 @@ impl Foo {
 }
 #[proc_macros::input_replace(
-fn suprise() {
+fn surprise() {
 Foo.$0
 }
 )]

@@ -114,7 +114,7 @@ impl Foo {
 }
 #[proc_macros::input_replace(
-fn suprise() {
+fn surprise() {
 Foo.f$0
 }
 )]

@@ -98,7 +98,7 @@ impl FromStr for AssistKind {
 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
 pub struct AssistId(pub &'static str, pub AssistKind);
-/// A way to control how many asssist to resolve during the assist resolution.
+/// A way to control how many assist to resolve during the assist resolution.
 /// When an assist is resolved, its edits are calculated that might be costly to always do by default.
 #[derive(Debug)]
 pub enum AssistResolveStrategy {

@@ -4230,7 +4230,7 @@ pub union GenericUnion<T: Copy> { // Unions with non-`Copy` fields are unstable.
 pub const THIS_IS_OKAY: GenericUnion<()> = GenericUnion { field: () };
 ```
-Like transarent `struct`s, a transparent `union` of type `U` has the same
+Like transparent `struct`s, a transparent `union` of type `U` has the same
 layout, size, and ABI as its single non-ZST field. If it is generic over a type
 `T`, and all its fields are ZSTs except for exactly one field of type `T`, then
 it has the same layout and ABI as `T` (even if `T` is a ZST when monomorphized).

@@ -6548,7 +6548,7 @@ subtracting elements in an Add impl."##,
 },
 Lint {
 label: "clippy::suspicious_assignment_formatting",
-description: r##"Checks for use of the non-existent `=*`, `=!` and `=-`
+description: r##"Checks for use of the nonexistent `=*`, `=!` and `=-`
 operators."##,
 },
 Lint {

@@ -181,7 +181,7 @@ impl SourceChangeBuilder {
 /// mutability, and different nodes in the same tree see the same mutations.
 ///
 /// The typical pattern for an assist is to find specific nodes in the read
-/// phase, and then get their mutable couterparts using `make_mut` in the
+/// phase, and then get their mutable counterparts using `make_mut` in the
 /// mutable state.
 pub fn make_syntax_mut(&mut self, node: SyntaxNode) -> SyntaxNode {
 self.mutated_tree.get_or_insert_with(|| TreeMutator::new(&node)).make_syntax_mut(&node)

@@ -92,7 +92,7 @@ pub fn lex_format_specifiers(
 let (_, second) = cloned.next().unwrap_or_default();
 match second {
 '<' | '^' | '>' => {
-// alignment specifier, first char specifies fillment
+// alignment specifier, first char specifies fill
 skip_char_and_emit(&mut chars, FormatSpecifier::Fill, &mut callback);
 skip_char_and_emit(&mut chars, FormatSpecifier::Align, &mut callback);
 }

@@ -1,7 +1,7 @@
 //! Tools to work with expressions present in format string literals for the `format_args!` family of macros.
 //! Primarily meant for assists and completions.
-/// Enum for represenging extraced format string args.
+/// Enum for representing extracted format string args.
 /// Can either be extracted expressions (which includes identifiers),
 /// or placeholders `{}`.
 #[derive(Debug, PartialEq, Eq)]

@@ -1,9 +1,9 @@
-//! Functionality for generating trivial contructors
+//! Functionality for generating trivial constructors
 use hir::StructKind;
 use syntax::ast;
-/// given a type return the trivial contructor (if one exists)
+/// given a type return the trivial constructor (if one exists)
 pub fn use_trivial_constructor(
 db: &crate::RootDatabase,
 path: ast::Path,

@@ -295,7 +295,7 @@ impl someStruct {
 }
 #[test]
-fn no_diagnostic_for_enum_varinats() {
+fn no_diagnostic_for_enum_variants() {
 check_diagnostics(
 r#"
 enum Option { Some, None }

@@ -368,7 +368,7 @@ fn main() {
 #[test]
 fn mutation_in_dead_code() {
 // This one is interesting. Dead code is not represented at all in the MIR, so
-// there would be no mutablility error for locals in dead code. Rustc tries to
+// there would be no mutability error for locals in dead code. Rustc tries to
 // not emit `unused_mut` in this case, but since it works without `mut`, and
 // special casing it is not trivial, we emit it.
 check_diagnostics(

@@ -773,7 +773,7 @@ fn fn_once(mut x: impl FnOnce(u8) -> u8) -> u8 {
 #[test]
 fn closure() {
-// FIXME: Diagnositc spans are too large
+// FIXME: Diagnostic spans are too large
 check_diagnostics(
 r#"
 //- minicore: copy, fn

@@ -45,7 +45,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField) -> Option<Vec<A
 }
 }
-// FIXME: We should fill out the call here, mvoe the cursor and trigger signature help
+// FIXME: We should fill out the call here, move the cursor and trigger signature help
 fn method_fix(
 ctx: &DiagnosticsContext<'_>,
 expr_ptr: &InFile<AstPtr<ast::Expr>>,

@@ -219,7 +219,7 @@ mod tests {
 }
 #[test]
-fn test_nagative_trait_bound() {
+fn test_negative_trait_bound() {
 let txt = r#"impl !Unpin for Test {}"#;
 check(
 txt,

@@ -66,7 +66,7 @@ pub(crate) fn annotations(ra_fixture: &str) -> (Analysis, FilePosition, Vec<(Fil
 (host.analysis(), FilePosition { file_id, offset }, annotations)
 }
-/// Creates analysis from a multi-file fixture with annonations without $0
+/// Creates analysis from a multi-file fixture with annotations without $0
 pub(crate) fn annotations_without_marker(ra_fixture: &str) -> (Analysis, Vec<(FileRange, String)>) {
 let mut host = AnalysisHost::default();
 let change_fixture = ChangeFixture::parse(ra_fixture);

@@ -3115,7 +3115,7 @@ mod Foo$0 {
 }
 #[test]
-fn hover_doc_outer_inner_attribue() {
+fn hover_doc_outer_inner_attribute() {
 check(
 r#"
 #[doc = "Be quick;"]

@@ -3146,7 +3146,7 @@ mod Foo$0 {
 }
 #[test]
-fn hover_doc_block_style_indentend() {
+fn hover_doc_block_style_indent_end() {
 check(
 r#"
 /**

@@ -4288,7 +4288,7 @@ fn hover_builtin() {
 check(
 r#"
 //- /main.rs crate:main deps:std
-cosnt _: &str$0 = ""; }
+const _: &str$0 = ""; }
 //- /libstd.rs crate:std
 /// Docs for prim_str

@@ -148,7 +148,7 @@ pub(super) fn hints(
 Some(())
 }
-/// Returns whatever the hint should be postfix and if we need to add paretheses on the inside and/or outside of `expr`,
+/// Returns whatever the hint should be postfix and if we need to add parentheses on the inside and/or outside of `expr`,
 /// if we are going to add (`postfix`) adjustments hints to it.
 fn mode_and_needs_parens_for_adjustment_hints(
 expr: &ast::Expr,

@@ -183,7 +183,7 @@ fn mode_and_needs_parens_for_adjustment_hints(
 }
 }
-/// Returns whatever we need to add paretheses on the inside and/or outside of `expr`,
+/// Returns whatever we need to add parentheses on the inside and/or outside of `expr`,
 /// if we are going to add (`postfix`) adjustments hints to it.
 fn needs_parens_for_adjustment_hints(expr: &ast::Expr, postfix: bool) -> (bool, bool) {
 // This is a very miserable pile of hacks...

@@ -194,10 +194,10 @@ fn needs_parens_for_adjustment_hints(expr: &ast::Expr, postfix: bool) -> (bool,
 // But we want to check what would happen if we add `*`/`.*` to the inner expression.
 // To check for inner we need `` expr.needs_parens_in(`*expr`) ``,
 // to check for outer we need `` `*expr`.needs_parens_in(parent) ``,
-// where "expr" is the `expr` parameter, `*expr` is the editted `expr`,
+// where "expr" is the `expr` parameter, `*expr` is the edited `expr`,
 // and "parent" is the parent of the original expression...
 //
-// For this we utilize mutable mutable trees, which is a HACK, but it works.
+// For this we utilize mutable trees, which is a HACK, but it works.
 //
 // FIXME: comeup with a better API for `needs_parens_in`, so that we don't have to do *this*

@@ -243,7 +243,7 @@ fn needs_parens_for_adjustment_hints(expr: &ast::Expr, postfix: bool) -> (bool,
 };
 // At this point
-// - `parent` is the parrent of the original expression
+// - `parent` is the parent of the original expression
 // - `dummy_expr` is the original expression wrapped in the operator we want (`*`/`.*`)
 // - `expr` is the clone of the original expression (with `dummy_expr` as the parent)

@@ -108,7 +108,7 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
 }
 try_cnt += 1;
 if try_cnt > 100 {
-panic!("invocaton fixture {name} cannot be generated.\n");
+panic!("invocation fixture {name} cannot be generated.\n");
 }
 }
 }

@@ -195,7 +195,7 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
 Op::Ignore { .. } | Op::Index { .. } => {}
 };
-// Simple linear congruential generator for determistic result
+// Simple linear congruential generator for deterministic result
 fn rand(seed: &mut usize) -> usize {
 let a = 1664525;
 let c = 1013904223;

@@ -332,7 +332,7 @@ struct MatchState<'t> {
 /// Cached result of meta variable parsing
 meta_result: Option<(TtIter<'t>, ExpandResult<Option<Fragment>>)>,
-/// Is error occuried in this state, will `poised` to "parent"
+/// Is error occurred in this state, will `poised` to "parent"
 is_error: bool,
 }

@@ -59,7 +59,7 @@ pub(super) fn ascription(p: &mut Parser<'_>) {
 assert!(p.at(T![:]));
 p.bump(T![:]);
 if p.at(T![=]) {
-// recover from `let x: = expr;`, `const X: = expr;` and similars
+// recover from `let x: = expr;`, `const X: = expr;` and similar
 // hopefully no type starts with `=`
 p.error("missing type");
 return;

@@ -60,7 +60,7 @@ SOURCE_FILE
 IDENT "doc"
 TOKEN_TREE
 L_PAREN "("
-STRING "\"Being validated is not affected by duplcates\""
+STRING "\"Being validated is not affected by duplicates\""
 R_PAREN ")"
 R_BRACK "]"
 WHITESPACE "\n "

@@ -3,7 +3,7 @@ fn inner() {
 //! As are ModuleDoc style comments
 {
 #![doc("Inner attributes are allowed in blocks used as statements")]
-#![doc("Being validated is not affected by duplcates")]
+#![doc("Being validated is not affected by duplicates")]
 //! As are ModuleDoc style comments
 };
 {

@@ -48,7 +48,7 @@ tracing-tree = "0.2.1"
 always-assert = "0.1.2"
 # These dependencies are unused, but we pin them to a version here to restrict them for our transitive dependencies
-# so that we don't pull in duplicates of their depdendenceies like windows-sys and syn 1 vs 2
+# so that we don't pull in duplicates of their dependencies like windows-sys and syn 1 vs 2
 # these would pull in serde 2
 thiserror = "=1.0.39"
 serde_repr = "=0.1.11"

View File

@ -338,7 +338,7 @@ config_data! {
inlayHints_closingBraceHints_minLines: usize = "25", inlayHints_closingBraceHints_minLines: usize = "25",
/// Whether to show inlay type hints for return types of closures. /// Whether to show inlay type hints for return types of closures.
inlayHints_closureReturnTypeHints_enable: ClosureReturnTypeHintsDef = "\"never\"", inlayHints_closureReturnTypeHints_enable: ClosureReturnTypeHintsDef = "\"never\"",
/// Closure notation in type and chaining inaly hints. /// Closure notation in type and chaining inlay hints.
inlayHints_closureStyle: ClosureStyle = "\"impl_fn\"", inlayHints_closureStyle: ClosureStyle = "\"impl_fn\"",
/// Whether to show enum variant discriminant hints. /// Whether to show enum variant discriminant hints.
inlayHints_discriminantHints_enable: DiscriminantHintsDef = "\"never\"", inlayHints_discriminantHints_enable: DiscriminantHintsDef = "\"never\"",
@ -488,7 +488,7 @@ config_data! {
/// When enabled, rust-analyzer will emit special token types for operator tokens instead /// When enabled, rust-analyzer will emit special token types for operator tokens instead
/// of the generic `operator` token type. /// of the generic `operator` token type.
semanticHighlighting_operator_specialization_enable: bool = "false", semanticHighlighting_operator_specialization_enable: bool = "false",
/// Use semantic tokens for punctuations. /// Use semantic tokens for punctuation.
/// ///
/// When disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when /// When disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when
/// they are tagged with modifiers or have a special role. /// they are tagged with modifiers or have a special role.
@ -496,7 +496,7 @@ config_data! {
/// When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro /// When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro
/// calls. /// calls.
semanticHighlighting_punctuation_separate_macro_bang: bool = "false", semanticHighlighting_punctuation_separate_macro_bang: bool = "false",
/// Use specialized semantic tokens for punctuations. /// Use specialized semantic tokens for punctuation.
/// ///
/// When enabled, rust-analyzer will emit special token types for punctuation tokens instead /// When enabled, rust-analyzer will emit special token types for punctuation tokens instead
/// of the generic `punctuation` token type. /// of the generic `punctuation` token type.
@ -1967,7 +1967,7 @@ fn get_field<T: DeserializeOwned>(
alias: Option<&'static str>, alias: Option<&'static str>,
default: &str, default: &str,
) -> T { ) -> T {
// XXX: check alias first, to work-around the VS Code where it pre-fills the // XXX: check alias first, to work around the VS Code where it pre-fills the
// defaults instead of sending an empty object. // defaults instead of sending an empty object.
alias alias
.into_iter() .into_iter()
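
The XXX comment in the hunk above encodes an ordering requirement: the deprecated alias key must be consulted before the current key, because VS Code pre-fills the current key with its default value. A minimal sketch of that lookup order, assuming serde and serde_json; the function and parameter names here are hypothetical, not the project's API:

    use serde::de::DeserializeOwned;
    use serde_json::Value;

    // Try the old (alias) key first, then the current key; fall back to the
    // documented default if neither yields a deserializable value.
    fn lookup_field<T: DeserializeOwned>(
        json: &mut serde_json::Map<String, Value>,
        field: &str,
        alias: Option<&str>,
        default: &str,
    ) -> T {
        alias
            .into_iter()
            .chain(std::iter::once(field))
            .filter_map(|key| json.remove(key))
            .find_map(|value| serde_json::from_value(value).ok())
            .unwrap_or_else(|| serde_json::from_str(default).expect("default must parse"))
    }

Consulting the alias first means a value the user actually set under the old name still wins over the editor-injected default under the new one.
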
@ -2199,8 +2199,8 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json
"enumDescriptions": [ "enumDescriptions": [
"Always show adjustment hints as prefix (`*expr`).", "Always show adjustment hints as prefix (`*expr`).",
"Always show adjustment hints as postfix (`expr.*`).", "Always show adjustment hints as postfix (`expr.*`).",
"Show prefix or postfix depending on which uses less parenthesis, prefering prefix.", "Show prefix or postfix depending on which uses less parenthesis, preferring prefix.",
"Show prefix or postfix depending on which uses less parenthesis, prefering postfix.", "Show prefix or postfix depending on which uses less parenthesis, preferring postfix.",
] ]
}, },
"CargoFeaturesDef" => set! { "CargoFeaturesDef" => set! {

View File

@ -288,7 +288,7 @@ impl GlobalState {
{ {
let raw_database = self.analysis_host.raw_database(); let raw_database = self.analysis_host.raw_database();
// FIXME: ideally we should only trigger a workspace fetch for non-library changes // FIXME: ideally we should only trigger a workspace fetch for non-library changes
// but somethings going wrong with the source root business when we add a new local // but something's going wrong with the source root business when we add a new local
// crate see https://github.com/rust-lang/rust-analyzer/issues/13029 // crate see https://github.com/rust-lang/rust-analyzer/issues/13029
if let Some(path) = workspace_structure_change { if let Some(path) = workspace_structure_change {
self.fetch_workspaces_queue self.fetch_workspaces_queue

View File

@ -36,7 +36,7 @@ pub fn main_loop(config: Config, connection: Connection) -> Result<()> {
// temporary bumped. This optimization backfires in our case: each time the // temporary bumped. This optimization backfires in our case: each time the
// `main_loop` schedules a task to run on a threadpool, the worker threads // `main_loop` schedules a task to run on a threadpool, the worker threads
// gets a higher priority, and (on a machine with fewer cores) displaces the // gets a higher priority, and (on a machine with fewer cores) displaces the
// main loop! We work-around this by marking the main loop as a // main loop! We work around this by marking the main loop as a
// higher-priority thread. // higher-priority thread.
// //
// https://docs.microsoft.com/en-us/windows/win32/procthread/scheduling-priorities // https://docs.microsoft.com/en-us/windows/win32/procthread/scheduling-priorities
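
The comment spells out the workaround only in prose; on Windows it boils down to a single API call. A sketch assuming the winapi crate (0.3) with its processthreadsapi feature enabled — the helper name and the inlined constant are illustrative, and the project's actual code may differ:

    // Raise the current (main-loop) thread above the default priority so the
    // threadpool workers' dynamic priority boost cannot displace it.
    #[cfg(windows)]
    fn raise_main_thread_priority() {
        use winapi::um::processthreadsapi::{GetCurrentThread, SetThreadPriority};
        // Value of THREAD_PRIORITY_ABOVE_NORMAL in the Win32 headers.
        const THREAD_PRIORITY_ABOVE_NORMAL: i32 = 1;
        // SAFETY: both calls only affect the calling thread's scheduling state.
        unsafe {
            SetThreadPriority(GetCurrentThread(), THREAD_PRIORITY_ABOVE_NORMAL);
        }
    }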

View File

@ -166,8 +166,8 @@ fn merge_errors(
} }
res.extend(new_errors.into_iter().map(|new_err| { res.extend(new_errors.into_iter().map(|new_err| {
// fighting borrow checker with a variable ;) // fighting borrow checker with a variable ;)
let offseted_range = new_err.range() + range_before_reparse.start(); let offsetted_range = new_err.range() + range_before_reparse.start();
new_err.with_range(offseted_range) new_err.with_range(offsetted_range)
})); }));
res res
} }
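
Beyond the borrow-checker aside, the hunk's arithmetic is worth spelling out: the reparser reports errors relative to the reparsed fragment, so each range has to be shifted by the fragment's start offset to land in whole-file coordinates. A self-contained illustration, assuming the text-size crate (the source of TextRange and TextSize here) and a hypothetical helper name:

    use text_size::{TextRange, TextSize};

    // Shift a fragment-local error range into file coordinates.
    fn offset_into_file(local: TextRange, fragment_start: TextSize) -> TextRange {
        local + fragment_start
    }

    fn main() {
        // An error at 2..5 inside a fragment starting at offset 10 in the file
        // maps to 12..15 in file coordinates.
        let local = TextRange::new(TextSize::from(2u32), TextSize::from(5u32));
        let shifted = offset_into_file(local, TextSize::from(10u32));
        assert_eq!(shifted, TextRange::new(TextSize::from(12u32), TextSize::from(15u32)));
    }
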
@ -408,7 +408,7 @@ enum Foo {
#[test] #[test]
fn reparse_str_token_with_error_fixed() { fn reparse_str_token_with_error_fixed() {
do_check(r#""unterinated$0$0"#, "\"", 12); do_check(r#""unterminated$0$0"#, "\"", 13);
} }
#[test] #[test]

View File

@ -175,14 +175,14 @@ pub(crate) fn validate_block_structure(root: &SyntaxNode) {
assert_eq!( assert_eq!(
node.parent(), node.parent(),
pair.parent(), pair.parent(),
"\nunpaired curlys:\n{}\n{:#?}\n", "\nunpaired curlies:\n{}\n{:#?}\n",
root.text(), root.text(),
root, root,
); );
assert!( assert!(
node.next_sibling_or_token().is_none() node.next_sibling_or_token().is_none()
&& pair.prev_sibling_or_token().is_none(), && pair.prev_sibling_or_token().is_none(),
"\nfloating curlys at {:?}\nfile:\n{}\nerror:\n{}\n", "\nfloating curlies at {:?}\nfile:\n{}\nerror:\n{}\n",
node, node,
root.text(), root.text(),
node, node,
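
The assertion messages above guard a structural invariant of the parse tree: every opening curly must have a matching closing curly under the same parent, with nothing left floating. A toy, tree-free version of the same invariant over plain text — purely illustrative, not the project's check:

    // Returns true when every `}` closes a previously opened `{` and no `{`
    // is left unclosed at the end of the input.
    fn curlies_are_paired(text: &str) -> bool {
        let mut depth: usize = 0;
        for ch in text.chars() {
            match ch {
                '{' => depth += 1,
                '}' => {
                    if depth == 0 {
                        return false; // floating `}` with no matching `{`
                    }
                    depth -= 1;
                }
                _ => {}
            }
        }
        depth == 0 // any unpaired `{` left open?
    }

    fn main() {
        assert!(curlies_are_paired("fn f() { if true { } }"));
        assert!(!curlies_are_paired("fn f() { } }"));
    }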

View File

@ -39,13 +39,13 @@
ast::Root::cast(self.syntax()).unwrap() ast::Root::cast(self.syntax()).unwrap()
} }
pub fn syntax(&self) -> SyntaxNodeRef { pub fn syntax(&self) -> SyntaxNodeRef {
self.root.brroowed() self.root.borrowed()
} }
mp_tree(root), mp_tree(root),
); );
assert!( assert!(
node.next_sibling().is_none() && pair.prev_sibling().is_none(), node.next_sibling().is_none() && pair.prev_sibling().is_none(),
"\nfloating curlys at {:?}\nfile:\n{}\nerror:\n{}\n", "\nfloating curlies at {:?}\nfile:\n{}\nerror:\n{}\n",
node, node,
root.text(), root.text(),
node.text(), node.text(),

View File

@ -333,7 +333,7 @@ Moreover, it would be cool if editors didn't need to implement even basic langua
### Unresolved Question ### Unresolved Question
* Should we return a nested brace structure, to allow paredit-like actions of jump *out* of the current brace pair? * Should we return a nested brace structure, to allow [paredit](https://paredit.org/)-like actions of jump *out* of the current brace pair?
This is how `SelectionRange` request works. This is how `SelectionRange` request works.
* Alternatively, should we perhaps flag certain `SelectionRange`s as being brace pairs? * Alternatively, should we perhaps flag certain `SelectionRange`s as being brace pairs?

View File

@ -477,7 +477,7 @@ Whether to show inlay type hints for return types of closures.
[[rust-analyzer.inlayHints.closureStyle]]rust-analyzer.inlayHints.closureStyle (default: `"impl_fn"`):: [[rust-analyzer.inlayHints.closureStyle]]rust-analyzer.inlayHints.closureStyle (default: `"impl_fn"`)::
+ +
-- --
Closure notation in type and chaining inaly hints. Closure notation in type and chaining inlay hints.
-- --
[[rust-analyzer.inlayHints.discriminantHints.enable]]rust-analyzer.inlayHints.discriminantHints.enable (default: `"never"`):: [[rust-analyzer.inlayHints.discriminantHints.enable]]rust-analyzer.inlayHints.discriminantHints.enable (default: `"never"`)::
+ +
@ -758,7 +758,7 @@ of the generic `operator` token type.
[[rust-analyzer.semanticHighlighting.punctuation.enable]]rust-analyzer.semanticHighlighting.punctuation.enable (default: `false`):: [[rust-analyzer.semanticHighlighting.punctuation.enable]]rust-analyzer.semanticHighlighting.punctuation.enable (default: `false`)::
+ +
-- --
Use semantic tokens for punctuations. Use semantic tokens for punctuation.
When disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when When disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when
they are tagged with modifiers or have a special role. they are tagged with modifiers or have a special role.
@ -772,7 +772,7 @@ calls.
[[rust-analyzer.semanticHighlighting.punctuation.specialization.enable]]rust-analyzer.semanticHighlighting.punctuation.specialization.enable (default: `false`):: [[rust-analyzer.semanticHighlighting.punctuation.specialization.enable]]rust-analyzer.semanticHighlighting.punctuation.specialization.enable (default: `false`)::
+ +
-- --
Use specialized semantic tokens for punctuations. Use specialized semantic tokens for punctuation.
When enabled, rust-analyzer will emit special token types for punctuation tokens instead When enabled, rust-analyzer will emit special token types for punctuation tokens instead
of the generic `punctuation` token type. of the generic `punctuation` token type.

View File

@ -1030,7 +1030,7 @@
] ]
}, },
"rust-analyzer.inlayHints.closureStyle": { "rust-analyzer.inlayHints.closureStyle": {
"markdownDescription": "Closure notation in type and chaining inaly hints.", "markdownDescription": "Closure notation in type and chaining inlay hints.",
"default": "impl_fn", "default": "impl_fn",
"type": "string", "type": "string",
"enum": [ "enum": [
@ -1094,8 +1094,8 @@
"enumDescriptions": [ "enumDescriptions": [
"Always show adjustment hints as prefix (`*expr`).", "Always show adjustment hints as prefix (`*expr`).",
"Always show adjustment hints as postfix (`expr.*`).", "Always show adjustment hints as postfix (`expr.*`).",
"Show prefix or postfix depending on which uses less parenthesis, prefering prefix.", "Show prefix or postfix depending on which uses less parenthesis, preferring prefix.",
"Show prefix or postfix depending on which uses less parenthesis, prefering postfix." "Show prefix or postfix depending on which uses less parenthesis, preferring postfix."
] ]
}, },
"rust-analyzer.inlayHints.lifetimeElisionHints.enable": { "rust-analyzer.inlayHints.lifetimeElisionHints.enable": {
@ -1381,7 +1381,7 @@
"type": "boolean" "type": "boolean"
}, },
"rust-analyzer.semanticHighlighting.punctuation.enable": { "rust-analyzer.semanticHighlighting.punctuation.enable": {
"markdownDescription": "Use semantic tokens for punctuations.\n\nWhen disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when\nthey are tagged with modifiers or have a special role.", "markdownDescription": "Use semantic tokens for punctuation.\n\nWhen disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when\nthey are tagged with modifiers or have a special role.",
"default": false, "default": false,
"type": "boolean" "type": "boolean"
}, },
@ -1391,7 +1391,7 @@
"type": "boolean" "type": "boolean"
}, },
"rust-analyzer.semanticHighlighting.punctuation.specialization.enable": { "rust-analyzer.semanticHighlighting.punctuation.specialization.enable": {
"markdownDescription": "Use specialized semantic tokens for punctuations.\n\nWhen enabled, rust-analyzer will emit special token types for punctuation tokens instead\nof the generic `punctuation` token type.", "markdownDescription": "Use specialized semantic tokens for punctuation.\n\nWhen enabled, rust-analyzer will emit special token types for punctuation tokens instead\nof the generic `punctuation` token type.",
"default": false, "default": false,
"type": "boolean" "type": "boolean"
}, },

View File

@ -157,7 +157,7 @@ export async function createTask(runnable: ra.Runnable, config: Config): Promise
cargoTask.presentationOptions.clear = true; cargoTask.presentationOptions.clear = true;
// Sadly, this doesn't prevent focus stealing if the terminal is currently // Sadly, this doesn't prevent focus stealing if the terminal is currently
// hidden, and will become revealed due to task exucution. // hidden, and will become revealed due to task execution.
cargoTask.presentationOptions.focus = false; cargoTask.presentationOptions.focus = false;
return cargoTask; return cargoTask;