Merge commit '3b7c7f97e4a7bb253a8d398ee4f8346f6cf2817b' into sync-from-ra

Laurențiu Nicola 2023-11-08 08:15:03 +02:00
commit 626bd33e1e
177 changed files with 14930 additions and 2099 deletions

View File

@ -19,6 +19,10 @@ jobs:
with:
fetch-depth: 0
# https://github.com/jlumbroso/free-disk-space/blob/main/action.yml
- name: Free up some disk space
run: sudo rm -rf /usr/local/lib/android /usr/share/dotnet /opt/ghc /usr/local/.ghcup
- name: Install Rust toolchain
run: rustup update --no-self-update stable

View File

@ -8,6 +8,7 @@ on:
branches:
- auto
- try
- automation/bors/try
env:
CARGO_INCREMENTAL: 0
@ -84,6 +85,7 @@ jobs:
run: cargo build --quiet ${{ env.USE_SYSROOT_ABI }}
- name: Test
if: matrix.os == 'ubuntu-latest' || github.event_name == 'push'
run: cargo test ${{ env.USE_SYSROOT_ABI }} -- --nocapture --quiet
- name: Switch to stable toolchain

View File

@ -140,11 +140,14 @@ jobs:
- name: Combine json
run: |
git clone --depth 1 https://$METRICS_TOKEN@github.com/rust-analyzer/metrics.git
mkdir ~/.ssh
echo "${{ secrets.METRICS_DEPLOY_KEY }}" > ~/.ssh/id_ed25519
chmod 600 ~/.ssh/id_ed25519
chmod 700 ~/.ssh
git clone --depth 1 git@github.com:rust-analyzer/metrics.git
jq -s ".[0] * .[1] * .[2] * .[3] * .[4] * .[5]" build.json self.json ripgrep-13.0.0.json webrender-2022.json diesel-1.4.8.json hyper-0.14.18.json -c >> metrics/metrics.json
cd metrics
git add .
git -c user.name=Bot -c user.email=dummy@example.com commit --message 📈
git push origin master
env:
METRICS_TOKEN: ${{ secrets.METRICS_TOKEN }}

View File

@ -32,12 +32,6 @@ version = "1.0.71"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8"
[[package]]
name = "anymap"
version = "1.0.0-beta.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f1f8f5a6f3d50d89e3797d7593a50f96bb2aaa20ca0cc7be1fb673232c91d72"
[[package]]
name = "arbitrary"
version = "1.3.0"
@ -177,9 +171,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "chalk-derive"
version = "0.92.0"
version = "0.93.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff5053a8a42dbff5279a82423946fc56dc1253b76cf211b2b3c14b3aad4e1281"
checksum = "264726159011fc7f22c23eb51f49021ece6e71bc358b96e7f2e842db0b14162b"
dependencies = [
"proc-macro2",
"quote",
@ -189,9 +183,9 @@ dependencies = [
[[package]]
name = "chalk-ir"
version = "0.92.0"
version = "0.93.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a56de2146a8ed0fcd54f4bd50db852f1de4eac9e1efe568494f106c21b77d2a"
checksum = "d65c17407d4c756b8f7f84344acb0fb96364d0298822743219bb25769b6d00df"
dependencies = [
"bitflags 1.3.2",
"chalk-derive",
@ -200,9 +194,9 @@ dependencies = [
[[package]]
name = "chalk-recursive"
version = "0.92.0"
version = "0.93.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5cc09e6e9531f3544989ef89b189e80fbc7ad9e2f73f1c5e03ddc9ffb0527463"
checksum = "80e2cf7b70bedaaf3a8cf3c93b6120c2bb65be89389124028e724d19e209686e"
dependencies = [
"chalk-derive",
"chalk-ir",
@ -213,9 +207,9 @@ dependencies = [
[[package]]
name = "chalk-solve"
version = "0.92.0"
version = "0.93.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b392e02b4c81ec76d3748da839fc70a5539b83d27c9030668463d34d5110b860"
checksum = "afc67c548d3854f64e97e67dc5b7c88513425c5bfa347cff96b7992ae6379288"
dependencies = [
"chalk-derive",
"chalk-ir",
@ -518,7 +512,6 @@ dependencies = [
name = "hir-def"
version = "0.0.0"
dependencies = [
"anymap",
"arrayvec",
"base-db",
"bitflags 2.3.2",
@ -531,8 +524,6 @@ dependencies = [
"fst",
"hashbrown 0.12.3",
"hir-expand",
"hkalbasi-rustc-ap-rustc_abi",
"hkalbasi-rustc-ap-rustc_index",
"indexmap 2.0.0",
"intern",
"itertools",
@ -541,7 +532,7 @@ dependencies = [
"mbe",
"once_cell",
"profile",
"ra-ap-rustc_parse_format",
"rustc-dependencies",
"rustc-hash",
"smallvec",
"stdx",
@ -594,7 +585,6 @@ dependencies = [
"expect-test",
"hir-def",
"hir-expand",
"hkalbasi-rustc-ap-rustc_index",
"intern",
"itertools",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -604,6 +594,7 @@ dependencies = [
"oorandom",
"profile",
"project-model",
"rustc-dependencies",
"rustc-hash",
"scoped-tls",
"smallvec",
@ -617,27 +608,6 @@ dependencies = [
"typed-arena",
]
[[package]]
name = "hkalbasi-rustc-ap-rustc_abi"
version = "0.0.20221221"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adabaadad9aa7576f97af02241cdf5554d62fb3d51a84cb05d77ba28edd3013f"
dependencies = [
"bitflags 1.3.2",
"hkalbasi-rustc-ap-rustc_index",
"tracing",
]
[[package]]
name = "hkalbasi-rustc-ap-rustc_index"
version = "0.0.20221221"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4d3c48474e09afb0f5efbd6f758e05411699301a113c47d454d28ec7059d00e"
dependencies = [
"arrayvec",
"smallvec",
]
[[package]]
name = "home"
version = "0.5.5"
@ -918,9 +888,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
version = "0.2.146"
version = "0.2.148"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f92be4933c13fd498862a9e02a3055f8a8d9c039ce33db97306fd5a6caa7f29b"
checksum = "9cdc71e17332e86d2e1d38c1f99edcb6288ee11b815fb1a4b049eaa2114d369b"
[[package]]
name = "libloading"
@ -1277,7 +1247,7 @@ dependencies = [
"drop_bomb",
"expect-test",
"limit",
"ra-ap-rustc_lexer",
"rustc-dependencies",
"sourcegen",
"stdx",
]
@ -1435,9 +1405,9 @@ dependencies = [
[[package]]
name = "protobuf"
version = "3.1.0"
version = "3.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ee4a7d8b91800c8f167a6268d1a1026607368e1adc84e98fe044aeb905302f7"
checksum = "b55bad9126f378a853655831eb7363b7b01b81d19f8cb1218861086ca4a1a61e"
dependencies = [
"once_cell",
"protobuf-support",
@ -1446,9 +1416,9 @@ dependencies = [
[[package]]
name = "protobuf-support"
version = "3.1.0"
version = "3.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ca157fe12fc7ee2e315f2f735e27df41b3d97cdd70ea112824dac1ffb08ee1c"
checksum = "a5d4d7b8601c814cfb36bcebb79f0e61e45e1e93640cf778837833bbed05c372"
dependencies = [
"thiserror",
]
@ -1483,10 +1453,31 @@ dependencies = [
]
[[package]]
name = "ra-ap-rustc_index"
version = "0.10.0"
name = "ra-ap-rustc_abi"
version = "0.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07b5fa61d34da18e148dc3a81f654488ea07f40938d8aefb17f8b64bb78c6120"
checksum = "7082716cb2bbcd8b5f062fe950cbbc87f3aba022d6da4168db35af6732a7f15d"
dependencies = [
"bitflags 1.3.2",
"ra-ap-rustc_index 0.18.0",
"tracing",
]
[[package]]
name = "ra-ap-rustc_index"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "643ca3609870b1778d9cd1f2a8e4ccb4af0f48f3637cc257a09494d087bd93dc"
dependencies = [
"arrayvec",
"smallvec",
]
[[package]]
name = "ra-ap-rustc_index"
version = "0.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19e14b1fc835d6992b128a03a3f3a8365ba9f03e1c656a1670305f63f30d786d"
dependencies = [
"arrayvec",
"smallvec",
@ -1494,9 +1485,19 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_lexer"
version = "0.10.0"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2e2f6b48422e4eed5218277ab7cc9733e60dd8f3167f4f36a49a0cafe4dc195"
checksum = "30ffd24f9ba4f1d25ff27ca1469b8d22a3bdfb12cf644fc8bfcb63121fa5da6b"
dependencies = [
"unicode-properties",
"unicode-xid",
]
[[package]]
name = "ra-ap-rustc_lexer"
version = "0.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1762abb25eb1e37c1823f62b5da0821bbcd870812318db084c9516c2f78d2dcd"
dependencies = [
"unicode-properties",
"unicode-xid",
@ -1504,12 +1505,12 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_parse_format"
version = "0.10.0"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3c7369ad01cc79f9e3513c9f6a6326f6b980100e4862a7ac71b9991c88108bb"
checksum = "207b5ac1a21d4926695e03b605ffb9f63d4968e0488e9197c04c512c37303aa7"
dependencies = [
"ra-ap-rustc_index",
"ra-ap-rustc_lexer",
"ra-ap-rustc_index 0.14.0",
"ra-ap-rustc_lexer 0.14.0",
]
[[package]]
@ -1594,10 +1595,12 @@ dependencies = [
"oorandom",
"parking_lot 0.12.1",
"parking_lot_core 0.9.6",
"parser",
"proc-macro-api",
"profile",
"project-model",
"rayon",
"rustc-dependencies",
"rustc-hash",
"scip",
"serde",
@ -1626,6 +1629,16 @@ version = "0.1.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76"
[[package]]
name = "rustc-dependencies"
version = "0.0.0"
dependencies = [
"ra-ap-rustc_abi",
"ra-ap-rustc_index 0.18.0",
"ra-ap-rustc_lexer 0.18.0",
"ra-ap-rustc_parse_format",
]
[[package]]
name = "rustc-hash"
version = "1.1.0"
@ -1678,9 +1691,9 @@ dependencies = [
[[package]]
name = "scip"
version = "0.1.1"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2bfbb10286f69fad7c78db71004b7839bf957788359fe0c479f029f9849136b"
checksum = "3e84d21062a3ba08d58870c8c36b0c005b2b2261c6ad1bf7042585427c781883"
dependencies = [
"protobuf",
]
@ -1853,9 +1866,9 @@ dependencies = [
"proc-macro2",
"profile",
"quote",
"ra-ap-rustc_lexer",
"rayon",
"rowan",
"rustc-dependencies",
"rustc-hash",
"smol_str",
"sourcegen",

View File

@ -79,6 +79,7 @@ toolchain = { path = "./crates/toolchain", version = "0.0.0" }
tt = { path = "./crates/tt", version = "0.0.0" }
vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
vfs = { path = "./crates/vfs", version = "0.0.0" }
rustc-dependencies = { path = "./crates/rustc-dependencies", version = "0.0.0" }
# local crates that aren't published to crates.io. These should not have versions.
proc-macro-test = { path = "./crates/proc-macro-test" }
@ -101,11 +102,6 @@ serde = { version = "1.0.156", features = ["derive"] }
serde_json = "1.0.96"
triomphe = { version = "0.1.8", default-features = false, features = ["std"] }
# can't upgrade due to dashmap depending on 0.12.3 currently
hashbrown = { version = "0.12.3", features = ["inline-more"], default-features = false }
rustc_lexer = { version = "0.10.0", package = "ra-ap-rustc_lexer" }
rustc_parse_format = { version = "0.10.0", package = "ra-ap-rustc_parse_format", default-features = false }
# Upstream broke this for us so we can't update it
rustc_abi = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_abi", default-features = false }
rustc_index = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_index", default-features = false }
hashbrown = { version = "0.12.3", features = [
"inline-more",
], default-features = false }

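The new rustc-dependencies workspace member referenced above gathers the vendored rustc crates behind a single facade, so downstream crates can write rustc_dependencies::abi, ::index, ::lexer, and ::parse_format instead of depending on each ra-ap-rustc_* crate directly. A rough, hypothetical sketch of such a facade follows; the module names and the in-rust-tree switch are inferred only from the use sites visible elsewhere in this diff, not copied from the actual crates/rustc-dependencies/src/lib.rs.

//! Hypothetical facade sketch; module names inferred from this diff's use sites.
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]

#[cfg(not(feature = "in-rust-tree"))]
pub mod abi {
    pub use ::ra_ap_rustc_abi::*;
}
#[cfg(not(feature = "in-rust-tree"))]
pub mod index {
    pub use ::ra_ap_rustc_index::*;
}
#[cfg(not(feature = "in-rust-tree"))]
pub mod lexer {
    pub use ::ra_ap_rustc_lexer::*;
}
#[cfg(not(feature = "in-rust-tree"))]
pub mod parse_format {
    pub use ::ra_ap_rustc_parse_format::*;
}

// With the in-rust-tree feature enabled, the same module names would instead
// re-export the in-tree rustc_* crates through rustc_private (extern crate).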
View File

@ -257,6 +257,7 @@ pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe {
) -> Result<Subtree, ProcMacroExpansionError>;
}
#[derive(Debug)]
pub enum ProcMacroExpansionError {
Panic(String),
/// Things like "proc macro server was killed by OOM".

View File

@ -75,7 +75,7 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug {
#[salsa::input]
fn crate_graph(&self) -> Arc<CrateGraph>;
/// The crate graph.
/// The proc macros.
#[salsa::input]
fn proc_macros(&self) -> Arc<ProcMacros>;
}

View File

@ -50,6 +50,7 @@ pub enum FlycheckConfig {
extra_args: Vec<String>,
extra_env: FxHashMap<String, String>,
ansi_color_output: bool,
target_dir: Option<PathBuf>,
},
CustomCommand {
command: String,
@ -308,6 +309,7 @@ impl FlycheckActor {
features,
extra_env,
ansi_color_output,
target_dir,
} => {
let mut cmd = Command::new(toolchain::cargo());
cmd.arg(command);
@ -340,6 +342,9 @@ impl FlycheckActor {
cmd.arg(features.join(" "));
}
}
if let Some(target_dir) = target_dir {
cmd.arg("--target-dir").arg(target_dir);
}
cmd.envs(extra_env);
(cmd, extra_args)
}

View File

@ -12,7 +12,6 @@ rust-version.workspace = true
doctest = false
[dependencies]
anymap = "1.0.0-beta.2"
arrayvec = "0.7.2"
bitflags = "2.1.0"
cov-mark = "2.0.0-pre.1"
@ -31,10 +30,7 @@ smallvec.workspace = true
hashbrown.workspace = true
triomphe.workspace = true
rustc_abi.workspace = true
rustc_index.workspace = true
rustc_parse_format.workspace = true
rustc-dependencies.workspace = true
# local deps
stdx.workspace = true
@ -48,8 +44,12 @@ cfg.workspace = true
tt.workspace = true
limit.workspace = true
[dev-dependencies]
expect-test = "1.4.0"
# local deps
test-utils.workspace = true
[features]
in-rust-tree = ["rustc-dependencies/in-rust-tree"]

View File

@ -215,6 +215,10 @@ impl Attrs {
self.doc_exprs().flat_map(|doc_expr| doc_expr.aliases().to_vec())
}
pub fn export_name(&self) -> Option<&SmolStr> {
self.by_key("export_name").string_value()
}
pub fn is_proc_macro(&self) -> bool {
self.by_key("proc_macro").exists()
}

View File

@ -57,7 +57,7 @@ pub struct Body {
pub type ExprPtr = AstPtr<ast::Expr>;
pub type ExprSource = InFile<ExprPtr>;
pub type PatPtr = Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>;
pub type PatPtr = AstPtr<Either<ast::Pat, ast::SelfParam>>;
pub type PatSource = InFile<PatPtr>;
pub type LabelPtr = AstPtr<ast::Label>;
@ -356,12 +356,12 @@ impl BodySourceMap {
}
pub fn node_pat(&self, node: InFile<&ast::Pat>) -> Option<PatId> {
let src = node.map(|it| Either::Left(AstPtr::new(it)));
let src = node.map(|it| AstPtr::new(it).wrap_left());
self.pat_map.get(&src).cloned()
}
pub fn node_self_param(&self, node: InFile<&ast::SelfParam>) -> Option<PatId> {
let src = node.map(|it| Either::Right(AstPtr::new(it)));
let src = node.map(|it| AstPtr::new(it).wrap_right());
self.pat_map.get(&src).cloned()
}

View File

@ -196,16 +196,12 @@ impl ExprCollector<'_> {
if let Some(self_param) =
param_list.self_param().filter(|_| attr_enabled.next().unwrap_or(false))
{
let ptr = AstPtr::new(&self_param);
let binding_id: la_arena::Idx<Binding> = self.alloc_binding(
name![self],
BindingAnnotation::new(
self_param.mut_token().is_some() && self_param.amp_token().is_none(),
false,
),
);
let param_pat =
self.alloc_pat(Pat::Bind { id: binding_id, subpat: None }, Either::Right(ptr));
let is_mutable =
self_param.mut_token().is_some() && self_param.amp_token().is_none();
let ptr = AstPtr::new(&Either::Right(self_param));
let binding_id: la_arena::Idx<Binding> =
self.alloc_binding(name![self], BindingAnnotation::new(is_mutable, false));
let param_pat = self.alloc_pat(Pat::Bind { id: binding_id, subpat: None }, ptr);
self.add_definition_to_binding(binding_id, param_pat);
self.body.params.push(param_pat);
}
@ -1260,8 +1256,8 @@ impl ExprCollector<'_> {
(Some(id), Pat::Bind { id, subpat })
};
let ptr = AstPtr::new(&pat);
let pat = self.alloc_pat(pattern, Either::Left(ptr));
let ptr = AstPtr::new(&Either::Left(pat));
let pat = self.alloc_pat(pattern, ptr);
if let Some(binding_id) = binding {
self.add_definition_to_binding(binding_id, pat);
}
@ -1395,7 +1391,7 @@ impl ExprCollector<'_> {
ast::Pat::MacroPat(mac) => match mac.macro_call() {
Some(call) => {
let macro_ptr = AstPtr::new(&call);
let src = self.expander.to_source(Either::Left(AstPtr::new(&pat)));
let src = self.expander.to_source(AstPtr::new(&Either::Left(pat)));
let pat =
self.collect_macro_call(call, macro_ptr, true, |this, expanded_pat| {
this.collect_pat_opt(expanded_pat, binding_list)
@ -1430,8 +1426,8 @@ impl ExprCollector<'_> {
Pat::Range { start, end }
}
};
let ptr = AstPtr::new(&pat);
self.alloc_pat(pattern, Either::Left(ptr))
let ptr = AstPtr::new(&Either::Left(pat));
self.alloc_pat(pattern, ptr)
}
fn collect_pat_opt(&mut self, pat: Option<ast::Pat>, binding_list: &mut BindingList) -> PatId {

View File

@ -1,7 +1,6 @@
//! Name resolution for expressions.
use hir_expand::name::Name;
use la_arena::{Arena, Idx, IdxRange, RawIdx};
use rustc_hash::FxHashMap;
use la_arena::{Arena, ArenaMap, Idx, IdxRange, RawIdx};
use triomphe::Arc;
use crate::{
@ -17,7 +16,7 @@ pub type ScopeId = Idx<ScopeData>;
pub struct ExprScopes {
scopes: Arena<ScopeData>,
scope_entries: Arena<ScopeEntry>,
scope_by_expr: FxHashMap<ExprId, ScopeId>,
scope_by_expr: ArenaMap<ExprId, ScopeId>,
}
#[derive(Debug, PartialEq, Eq)]
@ -77,10 +76,10 @@ impl ExprScopes {
}
pub fn scope_for(&self, expr: ExprId) -> Option<ScopeId> {
self.scope_by_expr.get(&expr).copied()
self.scope_by_expr.get(expr).copied()
}
pub fn scope_by_expr(&self) -> &FxHashMap<ExprId, ScopeId> {
pub fn scope_by_expr(&self) -> &ArenaMap<ExprId, ScopeId> {
&self.scope_by_expr
}
}
@ -94,7 +93,7 @@ impl ExprScopes {
let mut scopes = ExprScopes {
scopes: Arena::default(),
scope_entries: Arena::default(),
scope_by_expr: FxHashMap::default(),
scope_by_expr: ArenaMap::with_capacity(body.exprs.len()),
};
let mut root = scopes.root_scope();
scopes.add_params_bindings(body, root, &body.params);
@ -476,10 +475,7 @@ fn foo() {
.pat_syntax(*body.bindings[resolved.binding()].definitions.first().unwrap())
.unwrap();
let local_name = pat_src.value.either(
|it| it.syntax_node_ptr().to_node(file.syntax()),
|it| it.syntax_node_ptr().to_node(file.syntax()),
);
let local_name = pat_src.value.syntax_node_ptr().to_node(file.syntax());
assert_eq!(local_name.text_range(), expected_name.syntax().text_range());
}

View File

@ -15,9 +15,7 @@ use crate::{
attr::Attrs,
db::DefDatabase,
expander::{Expander, Mark},
item_tree::{
self, AssocItem, FnFlags, ItemTree, ItemTreeId, MacroCall, ModItem, Param, TreeId,
},
item_tree::{self, AssocItem, FnFlags, ItemTree, ItemTreeId, MacroCall, ModItem, TreeId},
macro_call_as_call_id, macro_id_to_def_id,
nameres::{
attr_resolution::ResolvedAttr,
@ -69,7 +67,7 @@ impl FunctionData {
let is_varargs = enabled_params
.clone()
.next_back()
.map_or(false, |param| matches!(item_tree[param], Param::Varargs));
.map_or(false, |param| item_tree[param].type_ref.is_none());
let mut flags = func.flags;
if is_varargs {
@ -105,10 +103,7 @@ impl FunctionData {
name: func.name.clone(),
params: enabled_params
.clone()
.filter_map(|id| match &item_tree[id] {
Param::Normal(ty) => Some(ty.clone()),
Param::Varargs => None,
})
.filter_map(|id| item_tree[id].type_ref.clone())
.collect(),
ret_type: func.ret_type.clone(),
attrs: item_tree.attrs(db, krate, ModItem::from(loc.id.value).into()),

View File

@ -11,7 +11,7 @@ use hir_expand::{
};
use intern::Interned;
use la_arena::{Arena, ArenaMap};
use rustc_abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions};
use rustc_dependencies::abi::{Align, Integer, IntegerType, ReprFlags, ReprOptions};
use syntax::ast::{self, HasName, HasVisibility};
use triomphe::Arc;

View File

@ -29,8 +29,8 @@ use std::{
ops::{Index, IndexMut},
};
use anymap::Map;
use rustc_hash::FxHashMap;
use stdx::anymap::Map;
pub struct Key<K, V, P = (K, V)> {
_phantom: PhantomData<(K, V, P)>,

View File

@ -2,7 +2,7 @@
use std::mem;
use hir_expand::name::Name;
use rustc_parse_format as parse;
use rustc_dependencies::parse_format as parse;
use syntax::{
ast::{self, IsString},
AstToken, SmolStr, TextRange,

View File

@ -1,7 +1,6 @@
//! A map of all publicly exported items in a crate.
use std::collections::hash_map::Entry;
use std::{fmt, hash::BuildHasherDefault};
use std::{collections::hash_map::Entry, fmt, hash::BuildHasherDefault};
use base_db::CrateId;
use fst::{self, Streamer};
@ -11,10 +10,12 @@ use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet, FxHasher};
use triomphe::Arc;
use crate::item_scope::ImportOrExternCrate;
use crate::{
db::DefDatabase, item_scope::ItemInNs, nameres::DefMap, visibility::Visibility, AssocItemId,
ModuleDefId, ModuleId, TraitId,
db::DefDatabase,
item_scope::{ImportOrExternCrate, ItemInNs},
nameres::DefMap,
visibility::Visibility,
AssocItemId, ModuleDefId, ModuleId, TraitId,
};
type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<FxHasher>>;
@ -94,7 +95,7 @@ fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> FxIndexMap<ItemIn
// We look only into modules that are public(ly reexported), starting with the crate root.
let root = def_map.module_id(DefMap::ROOT);
let mut worklist = vec![(root, 0)];
let mut worklist = vec![(root, 0u32)];
// Records items' minimum module depth.
let mut depth_map = FxHashMap::default();
@ -278,6 +279,8 @@ enum SearchMode {
/// Import map entry should contain all letters from the query string,
/// in the same order, but not necessarily adjacent.
Fuzzy,
/// Import map entry should match the query string by prefix.
Prefix,
}
/// Three possible ways to search for the name in associated and/or other items.
@ -319,6 +322,14 @@ impl Query {
Self { search_mode: SearchMode::Fuzzy, ..self }
}
pub fn prefix(self) -> Self {
Self { search_mode: SearchMode::Prefix, ..self }
}
pub fn exact(self) -> Self {
Self { search_mode: SearchMode::Exact, ..self }
}
/// Specifies whether we want to include associated items in the result.
pub fn assoc_search_mode(self, assoc_mode: AssocSearchMode) -> Self {
Self { assoc_mode, ..self }
@ -356,7 +367,8 @@ impl Query {
let query_string = if case_insensitive { &self.lowercased } else { &self.query };
match self.search_mode {
SearchMode::Exact => &input == query_string,
SearchMode::Exact => input == *query_string,
SearchMode::Prefix => input.starts_with(query_string),
SearchMode::Fuzzy => {
let mut input_chars = input.chars();
for query_char in query_string.chars() {

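As context for the SearchMode change diffed above (a Prefix variant added next to Exact and Fuzzy), here is a small self-contained sketch of how the three strategies differ. Names and signatures are illustrative only, not the import_map API; the real Query::import_matches also handles case folding.

// Illustrative only: exact, prefix, and fuzzy-subsequence matching.
enum Mode {
    Exact,
    Prefix,
    Fuzzy,
}

fn matches(mode: &Mode, query: &str, candidate: &str) -> bool {
    match mode {
        Mode::Exact => candidate == query,
        Mode::Prefix => candidate.starts_with(query),
        // Fuzzy: every query char must occur in the candidate, in order,
        // but not necessarily adjacent.
        Mode::Fuzzy => {
            let mut chars = candidate.chars();
            query.chars().all(|q| chars.any(|c| c == q))
        }
    }
}

fn main() {
    assert!(matches(&Mode::Exact, "fmt", "fmt"));
    assert!(matches(&Mode::Prefix, "fmt", "fmt_like"));
    assert!(matches(&Mode::Fuzzy, "fmt", "format"));
    assert!(!matches(&Mode::Fuzzy, "fmt", "tfm"));
}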
View File

@ -613,10 +613,17 @@ pub struct Function {
pub(crate) flags: FnFlags,
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Param {
Normal(Interned<TypeRef>),
Varargs,
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Param {
/// This is [`None`] for varargs
pub type_ref: Option<Interned<TypeRef>>,
pub ast_id: ParamAstId,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParamAstId {
Param(FileAstId<ast::Param>),
SelfParam(FileAstId<ast::SelfParam>),
}
bitflags::bitflags! {

View File

@ -295,8 +295,12 @@ impl<'a> Ctx<'a> {
}
}
};
let ty = Interned::new(self_type);
let idx = self.data().params.alloc(Param::Normal(ty));
let type_ref = Interned::new(self_type);
let ast_id = self.source_ast_id_map.ast_id(&self_param);
let idx = self.data().params.alloc(Param {
type_ref: Some(type_ref),
ast_id: ParamAstId::SelfParam(ast_id),
});
self.add_attrs(
idx.into(),
RawAttrs::new(self.db.upcast(), &self_param, self.hygiene()),
@ -305,11 +309,19 @@ impl<'a> Ctx<'a> {
}
for param in param_list.params() {
let idx = match param.dotdotdot_token() {
Some(_) => self.data().params.alloc(Param::Varargs),
Some(_) => {
let ast_id = self.source_ast_id_map.ast_id(&param);
self.data()
.params
.alloc(Param { type_ref: None, ast_id: ParamAstId::Param(ast_id) })
}
None => {
let type_ref = TypeRef::from_ast_opt(&self.body_ctx, param.ty());
let ty = Interned::new(type_ref);
self.data().params.alloc(Param::Normal(ty))
let ast_id = self.source_ast_id_map.ast_id(&param);
self.data()
.params
.alloc(Param { type_ref: Some(ty), ast_id: ParamAstId::Param(ast_id) })
}
};
self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &param, self.hygiene()));

View File

@ -261,15 +261,15 @@ impl Printer<'_> {
self.indented(|this| {
for param in params.clone() {
this.print_attrs_of(param, "\n");
match &this.tree[param] {
Param::Normal(ty) => {
match &this.tree[param].type_ref {
Some(ty) => {
if flags.contains(FnFlags::HAS_SELF_PARAM) {
w!(this, "self: ");
}
this.print_type_ref(ty);
wln!(this, ",");
}
Param::Varargs => {
None => {
wln!(this, "...");
}
};

View File

@ -8,6 +8,7 @@
//! actually true.
#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#[allow(unused)]
macro_rules! eprintln {
@ -48,7 +49,7 @@ pub mod visibility;
pub mod find_path;
pub mod import_map;
pub use rustc_abi as layout;
pub use rustc_dependencies::abi as layout;
use triomphe::Arc;
#[cfg(test)]
@ -72,6 +73,7 @@ use hir_expand::{
db::ExpandDatabase,
eager::expand_eager_macro_input,
hygiene::Hygiene,
name::Name,
proc_macro::ProcMacroExpander,
AstId, ExpandError, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind,
MacroDefId, MacroDefKind, UnresolvedMacro,
@ -173,6 +175,18 @@ impl ModuleId {
self.krate
}
pub fn name(self, db: &dyn db::DefDatabase) -> Option<Name> {
let def_map = self.def_map(db);
let parent = def_map[self.local_id].parent?;
def_map[parent].children.iter().find_map(|(name, module_id)| {
if *module_id == self.local_id {
Some(name.clone())
} else {
None
}
})
}
pub fn containing_module(self, db: &dyn db::DefDatabase) -> Option<ModuleId> {
self.def_map(db).containing_module(self.local_id)
}
@ -498,10 +512,7 @@ impl_from!(Macro2Id, MacroRulesId, ProcMacroId for MacroId);
impl MacroId {
pub fn is_attribute(self, db: &dyn db::DefDatabase) -> bool {
match self {
MacroId::ProcMacroId(it) => it.lookup(db).kind == ProcMacroKind::Attr,
_ => false,
}
matches!(self, MacroId::ProcMacroId(it) if it.lookup(db).kind == ProcMacroKind::Attr)
}
}

View File

@ -99,7 +99,7 @@ register_ast_id_node! {
TraitAlias,
TypeAlias,
Use,
AssocItem, BlockExpr, Variant, RecordField, TupleField, ConstArg
AssocItem, BlockExpr, Variant, RecordField, TupleField, ConstArg, Param, SelfParam
}
/// Maps items' `SyntaxNode`s to `ErasedFileAstId`s and back.

View File

@ -12,11 +12,15 @@ use syntax::{
use triomphe::Arc;
use crate::{
ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion,
builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander,
BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult,
ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId,
MacroDefKind, MacroFile, ProcMacroExpander,
ast_id_map::AstIdMap,
builtin_attr_macro::pseudo_derive_attr_expansion,
builtin_fn_macro::EagerExpander,
fixup,
hygiene::HygieneFrame,
name::{name, AsName},
tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
ExpandError, ExpandResult, ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind,
MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander,
};
/// Total limit on the number of tokens produced by any macro invocation.
@ -614,9 +618,25 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
err = error.clone().or(err);
}
// Set a hard limit for the expanded tt
if let Err(value) = check_tt_count(&tt) {
return value;
// Skip checking token tree limit for include! macro call
let skip_check_tt_count = match loc.kind {
MacroCallKind::FnLike { ast_id, expand_to: _ } => {
if let Some(name_ref) =
ast_id.to_node(db).path().and_then(|p| p.segment()).and_then(|s| s.name_ref())
{
name_ref.as_name() == name!(include)
} else {
false
}
}
_ => false,
};
if !skip_check_tt_count {
// Set a hard limit for the expanded tt
if let Err(value) = check_tt_count(&tt) {
return value;
}
}
ExpandResult { value: Arc::new(tt), err }

View File

@ -23,17 +23,17 @@ oorandom = "11.1.3"
tracing = "0.1.35"
rustc-hash = "1.1.0"
scoped-tls = "1.0.0"
chalk-solve = { version = "0.92.0", default-features = false }
chalk-ir = "0.92.0"
chalk-recursive = { version = "0.92.0", default-features = false }
chalk-derive = "0.92.0"
chalk-solve = { version = "0.93.0", default-features = false }
chalk-ir = "0.93.0"
chalk-recursive = { version = "0.93.0", default-features = false }
chalk-derive = "0.93.0"
la-arena.workspace = true
once_cell = "1.17.0"
triomphe.workspace = true
nohash-hasher.workspace = true
typed-arena = "2.0.1"
rustc_index.workspace = true
rustc-dependencies.workspace = true
# local deps
stdx.workspace = true
@ -56,3 +56,6 @@ project-model = { path = "../project-model" }
# local deps
test-utils.workspace = true
[features]
in-rust-tree = ["rustc-dependencies/in-rust-tree"]

View File

@ -1159,6 +1159,20 @@ fn pattern_matching_slice() {
"#,
33213,
);
check_number(
r#"
//- minicore: slice, index, coerce_unsized, copy
const fn f(mut slice: &[u32]) -> usize {
slice = match slice {
[0, rest @ ..] | rest => rest,
};
slice.len()
}
const GOAL: usize = f(&[]) + f(&[10]) + f(&[0, 100])
+ f(&[1000, 1000, 1000]) + f(&[0, 57, 34, 46, 10000, 10000]);
"#,
10,
);
}
#[test]

View File

@ -9,6 +9,7 @@
//! - constants (e.g. `const FOO: u8 = 10;`)
//! - static items (e.g. `static FOO: u8 = 10;`)
//! - match arm bindings (e.g. `foo @ Some(_)`)
//! - modules (e.g. `mod foo { ... }` or `mod foo;`)
mod case_conv;
@ -19,7 +20,7 @@ use hir_def::{
hir::{Pat, PatId},
src::HasSource,
AdtId, AttrDefId, ConstId, DefWithBodyId, EnumId, EnumVariantId, FunctionId, ItemContainerId,
Lookup, ModuleDefId, StaticId, StructId,
Lookup, ModuleDefId, ModuleId, StaticId, StructId,
};
use hir_expand::{
name::{AsName, Name},
@ -83,6 +84,7 @@ pub enum IdentType {
Structure,
Variable,
Variant,
Module,
}
impl fmt::Display for IdentType {
@ -97,6 +99,7 @@ impl fmt::Display for IdentType {
IdentType::Structure => "Structure",
IdentType::Variable => "Variable",
IdentType::Variant => "Variant",
IdentType::Module => "Module",
};
repr.fmt(f)
@ -132,6 +135,7 @@ impl<'a> DeclValidator<'a> {
pub(super) fn validate_item(&mut self, item: ModuleDefId) {
match item {
ModuleDefId::ModuleId(module_id) => self.validate_module(module_id),
ModuleDefId::FunctionId(func) => self.validate_func(func),
ModuleDefId::AdtId(adt) => self.validate_adt(adt),
ModuleDefId::ConstId(const_id) => self.validate_const(const_id),
@ -230,6 +234,55 @@ impl<'a> DeclValidator<'a> {
|| parent()
}
fn validate_module(&mut self, module_id: ModuleId) {
// Check whether non-snake case identifiers are allowed for this module.
if self.allowed(module_id.into(), allow::NON_SNAKE_CASE, false) {
return;
}
// Check the module name.
let Some(module_name) = module_id.name(self.db.upcast()) else { return };
let module_name_replacement =
module_name.as_str().and_then(to_lower_snake_case).map(|new_name| Replacement {
current_name: module_name,
suggested_text: new_name,
expected_case: CaseType::LowerSnakeCase,
});
if let Some(module_name_replacement) = module_name_replacement {
let module_data = &module_id.def_map(self.db.upcast())[module_id.local_id];
let module_src = module_data.declaration_source(self.db.upcast());
if let Some(module_src) = module_src {
let ast_ptr = match module_src.value.name() {
Some(name) => name,
None => {
never!(
"Replacement ({:?}) was generated for a module without a name: {:?}",
module_name_replacement,
module_src
);
return;
}
};
let diagnostic = IncorrectCase {
file: module_src.file_id,
ident_type: IdentType::Module,
ident: AstPtr::new(&ast_ptr),
expected_case: module_name_replacement.expected_case,
ident_text: module_name_replacement
.current_name
.display(self.db.upcast())
.to_string(),
suggested_text: module_name_replacement.suggested_text,
};
self.sink.push(diagnostic);
}
}
}
fn validate_func(&mut self, func: FunctionId) {
let data = self.db.function_data(func);
if matches!(func.lookup(self.db.upcast()).container, ItemContainerId::ExternBlockId(_)) {
@ -336,48 +389,44 @@ impl<'a> DeclValidator<'a> {
for (id, replacement) in pats_replacements {
if let Ok(source_ptr) = source_map.pat_syntax(id) {
if let Some(expr) = source_ptr.value.as_ref().left() {
if let Some(ptr) = source_ptr.value.clone().cast::<ast::IdentPat>() {
let root = source_ptr.file_syntax(self.db.upcast());
if let ast::Pat::IdentPat(ident_pat) = expr.to_node(&root) {
let parent = match ident_pat.syntax().parent() {
Some(parent) => parent,
None => continue,
};
let name_ast = match ident_pat.name() {
Some(name_ast) => name_ast,
None => continue,
};
let ident_pat = ptr.to_node(&root);
let parent = match ident_pat.syntax().parent() {
Some(parent) => parent,
None => continue,
};
let name_ast = match ident_pat.name() {
Some(name_ast) => name_ast,
None => continue,
};
let is_param = ast::Param::can_cast(parent.kind());
let is_param = ast::Param::can_cast(parent.kind());
// We have to check that it's either `let var = ...` or `var @ Variant(_)` statement,
// because e.g. match arms are patterns as well.
// In other words, we check that it's a named variable binding.
let is_binding = ast::LetStmt::can_cast(parent.kind())
|| (ast::MatchArm::can_cast(parent.kind())
&& ident_pat.at_token().is_some());
if !(is_param || is_binding) {
// This pattern is not an actual variable declaration, e.g. `Some(val) => {..}` match arm.
continue;
}
let ident_type =
if is_param { IdentType::Parameter } else { IdentType::Variable };
let diagnostic = IncorrectCase {
file: source_ptr.file_id,
ident_type,
ident: AstPtr::new(&name_ast),
expected_case: replacement.expected_case,
ident_text: replacement
.current_name
.display(self.db.upcast())
.to_string(),
suggested_text: replacement.suggested_text,
};
self.sink.push(diagnostic);
// We have to check that it's either `let var = ...` or `var @ Variant(_)` statement,
// because e.g. match arms are patterns as well.
// In other words, we check that it's a named variable binding.
let is_binding = ast::LetStmt::can_cast(parent.kind())
|| (ast::MatchArm::can_cast(parent.kind())
&& ident_pat.at_token().is_some());
if !(is_param || is_binding) {
// This pattern is not an actual variable declaration, e.g. `Some(val) => {..}` match arm.
continue;
}
let ident_type =
if is_param { IdentType::Parameter } else { IdentType::Variable };
let diagnostic = IncorrectCase {
file: source_ptr.file_id,
ident_type,
ident: AstPtr::new(&name_ast),
expected_case: replacement.expected_case,
ident_text: replacement.current_name.display(self.db.upcast()).to_string(),
suggested_text: replacement.suggested_text,
};
self.sink.push(diagnostic);
}
}
}

View File

@ -11,50 +11,7 @@ pub(crate) fn to_camel_case(ident: &str) -> Option<String> {
return None;
}
// Taken from rustc.
let ret = ident
.trim_matches('_')
.split('_')
.filter(|component| !component.is_empty())
.map(|component| {
let mut camel_cased_component = String::with_capacity(component.len());
let mut new_word = true;
let mut prev_is_lower_case = true;
for c in component.chars() {
// Preserve the case if an uppercase letter follows a lowercase letter, so that
// `camelCase` is converted to `CamelCase`.
if prev_is_lower_case && c.is_uppercase() {
new_word = true;
}
if new_word {
camel_cased_component.extend(c.to_uppercase());
} else {
camel_cased_component.extend(c.to_lowercase());
}
prev_is_lower_case = c.is_lowercase();
new_word = false;
}
camel_cased_component
})
.fold((String::new(), None), |(acc, prev): (_, Option<String>), next| {
// separate two components with an underscore if their boundary cannot
// be distinguished using an uppercase/lowercase case distinction
let join = prev
.and_then(|prev| {
let f = next.chars().next()?;
let l = prev.chars().last()?;
Some(!char_has_case(l) && !char_has_case(f))
})
.unwrap_or(false);
(acc + if join { "_" } else { "" } + &next, Some(next))
})
.0;
Some(ret)
Some(stdx::to_camel_case(ident))
}
/// Converts an identifier to a lower_snake_case form.
@ -97,7 +54,9 @@ fn is_camel_case(name: &str) -> bool {
&& !name.chars().any(|snd| {
let ret = match fst {
None => false,
Some(fst) => char_has_case(fst) && snd == '_' || char_has_case(snd) && fst == '_',
Some(fst) => {
stdx::char_has_case(fst) && snd == '_' || stdx::char_has_case(snd) && fst == '_'
}
};
fst = Some(snd);
@ -135,11 +94,6 @@ fn is_snake_case<F: Fn(char) -> bool>(ident: &str, wrong_case: F) -> bool {
})
}
// Taken from rustc.
fn char_has_case(c: char) -> bool {
c.is_lowercase() || c.is_uppercase()
}
#[cfg(test)]
mod tests {
use super::*;

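The hand-rolled conversion removed above now lives in stdx::to_camel_case (the diff keeps only the Some(stdx::to_camel_case(ident)) call). Based on the comments in the deleted code, the intended conversions are roughly the following; this is an illustrative check written for this note, not an existing test in the repository.

// Illustrative expectations, assuming stdx::to_camel_case preserves the
// behavior documented in the removed helper.
#[test]
fn camel_case_conversion_examples() {
    // Underscore-separated components are capitalized and joined.
    assert_eq!(stdx::to_camel_case("non_camel_case_name"), "NonCamelCaseName");
    // An uppercase letter after a lowercase one starts a new word,
    // so `camelCase` becomes `CamelCase` rather than `Camelcase`.
    assert_eq!(stdx::to_camel_case("camelCase"), "CamelCase");
    // Leading and trailing underscores are trimmed.
    assert_eq!(stdx::to_camel_case("_leading_"), "Leading");
}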
View File

@ -147,7 +147,7 @@ impl<'a> PatCtxt<'a> {
}
hir_def::hir::Pat::Bind { id, subpat, .. } => {
let bm = self.infer.binding_modes[id];
let bm = self.infer.binding_modes[pat];
ty = &self.infer[id];
let name = &self.body.bindings[id].name;
match (bm, ty.kind(Interner)) {

View File

@ -420,7 +420,19 @@ pub struct InferenceResult {
standard_types: InternedStandardTypes,
/// Stores the types which were implicitly dereferenced in pattern binding modes.
pub pat_adjustments: FxHashMap<PatId, Vec<Ty>>,
pub binding_modes: ArenaMap<BindingId, BindingMode>,
/// Stores the binding mode (`ref` in `let ref x = 2`) of bindings.
///
/// This one is tied to the `PatId` instead of `BindingId`, because in some rare cases, a binding in an
/// or pattern can have multiple binding modes. For example:
/// ```
/// fn foo(mut slice: &[u32]) -> usize {
/// slice = match slice {
/// [0, rest @ ..] | rest => rest,
/// };
/// }
/// ```
/// the first `rest` has implicit `ref` binding mode, but the second `rest` binding mode is `move`.
pub binding_modes: ArenaMap<PatId, BindingMode>,
pub expr_adjustments: FxHashMap<ExprId, Vec<Adjustment>>,
pub(crate) closure_info: FxHashMap<ClosureId, (Vec<CapturedItem>, FnTrait)>,
// FIXME: remove this field

View File

@ -679,7 +679,7 @@ impl InferenceContext<'_> {
| Pat::Range { .. } => {
update_result(CaptureKind::ByRef(BorrowKind::Shared));
}
Pat::Bind { id, .. } => match self.result.binding_modes[*id] {
Pat::Bind { id, .. } => match self.result.binding_modes[p] {
crate::BindingMode::Move => {
if self.is_ty_copy(self.result.type_of_binding[*id].clone()) {
update_result(CaptureKind::ByRef(BorrowKind::Shared));
@ -838,8 +838,8 @@ impl InferenceContext<'_> {
| Pat::ConstBlock(_)
| Pat::Path(_)
| Pat::Lit(_) => self.consume_place(place, pat.into()),
Pat::Bind { id, subpat: _ } => {
let mode = self.result.binding_modes[*id];
Pat::Bind { id: _, subpat: _ } => {
let mode = self.result.binding_modes[pat];
let capture_kind = match mode {
BindingMode::Move => {
self.consume_place(place, pat.into());

View File

@ -421,7 +421,7 @@ impl InferenceContext<'_> {
} else {
BindingMode::convert(mode)
};
self.result.binding_modes.insert(binding, mode);
self.result.binding_modes.insert(pat, mode);
let inner_ty = match subpat {
Some(subpat) => self.infer_pat(subpat, &expected, default_bm),

View File

@ -9,6 +9,10 @@ use hir_def::{
LocalEnumVariantId, LocalFieldId, StructId,
};
use la_arena::{Idx, RawIdx};
use rustc_dependencies::{
abi::AddressSpace,
index::{IndexSlice, IndexVec},
};
use stdx::never;
use triomphe::Arc;
@ -34,7 +38,7 @@ mod target;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct RustcEnumVariantIdx(pub LocalEnumVariantId);
impl rustc_index::vec::Idx for RustcEnumVariantIdx {
impl rustc_dependencies::index::Idx for RustcEnumVariantIdx {
fn new(idx: usize) -> Self {
RustcEnumVariantIdx(Idx::from_raw(RawIdx::from(idx as u32)))
}
@ -44,9 +48,28 @@ impl rustc_index::vec::Idx for RustcEnumVariantIdx {
}
}
pub type Layout = LayoutS<RustcEnumVariantIdx>;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct RustcFieldIdx(pub LocalFieldId);
impl RustcFieldIdx {
pub fn new(idx: usize) -> Self {
RustcFieldIdx(Idx::from_raw(RawIdx::from(idx as u32)))
}
}
impl rustc_dependencies::index::Idx for RustcFieldIdx {
fn new(idx: usize) -> Self {
RustcFieldIdx(Idx::from_raw(RawIdx::from(idx as u32)))
}
fn index(self) -> usize {
u32::from(self.0.into_raw()) as usize
}
}
pub type Layout = LayoutS<RustcFieldIdx, RustcEnumVariantIdx>;
pub type TagEncoding = hir_def::layout::TagEncoding<RustcEnumVariantIdx>;
pub type Variants = hir_def::layout::Variants<RustcEnumVariantIdx>;
pub type Variants = hir_def::layout::Variants<RustcFieldIdx, RustcEnumVariantIdx>;
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum LayoutError {
@ -66,7 +89,7 @@ struct LayoutCx<'a> {
impl<'a> LayoutCalculator for LayoutCx<'a> {
type TargetDataLayoutRef = &'a TargetDataLayout;
fn delay_bug(&self, txt: &str) {
fn delay_bug(&self, txt: String) {
never!("{}", txt);
}
@ -145,6 +168,8 @@ fn layout_of_simd_ty(
largest_niche: e_ly.largest_niche,
size,
align,
max_repr_align: None,
unadjusted_abi_align: align.abi,
}))
}
@ -230,7 +255,7 @@ pub fn layout_of_ty_query(
.map(|k| db.layout_of_ty(k.assert_ty_ref(Interner).clone(), trait_env.clone()))
.collect::<Result<Vec<_>, _>>()?;
let fields = fields.iter().map(|it| &**it).collect::<Vec<_>>();
let fields = fields.iter().collect::<Vec<_>>();
let fields = fields.iter().collect::<IndexVec<_, _>>();
cx.univariant(dl, &fields, &ReprOptions::default(), kind).ok_or(LayoutError::Unknown)?
}
TyKind::Array(element, count) => {
@ -255,6 +280,8 @@ pub fn layout_of_ty_query(
largest_niche,
align: element.align,
size,
max_repr_align: None,
unadjusted_abi_align: element.align.abi,
}
}
TyKind::Slice(element) => {
@ -266,11 +293,23 @@ pub fn layout_of_ty_query(
largest_niche: None,
align: element.align,
size: Size::ZERO,
max_repr_align: None,
unadjusted_abi_align: element.align.abi,
}
}
TyKind::Str => Layout {
variants: Variants::Single { index: struct_variant_idx() },
fields: FieldsShape::Array { stride: Size::from_bytes(1), count: 0 },
abi: Abi::Aggregate { sized: false },
largest_niche: None,
align: dl.i8_align,
size: Size::ZERO,
max_repr_align: None,
unadjusted_abi_align: dl.i8_align.abi,
},
// Potentially-wide pointers.
TyKind::Ref(_, _, pointee) | TyKind::Raw(_, pointee) => {
let mut data_ptr = scalar_unit(dl, Primitive::Pointer);
let mut data_ptr = scalar_unit(dl, Primitive::Pointer(AddressSpace::DATA));
if matches!(ty.kind(Interner), TyKind::Ref(..)) {
data_ptr.valid_range_mut().start = 1;
}
@ -294,7 +333,7 @@ pub fn layout_of_ty_query(
scalar_unit(dl, Primitive::Int(dl.ptr_sized_integer(), false))
}
TyKind::Dyn(..) => {
let mut vtable = scalar_unit(dl, Primitive::Pointer);
let mut vtable = scalar_unit(dl, Primitive::Pointer(AddressSpace::DATA));
vtable.valid_range_mut().start = 1;
vtable
}
@ -308,22 +347,7 @@ pub fn layout_of_ty_query(
cx.scalar_pair(data_ptr, metadata)
}
TyKind::FnDef(_, _) => layout_of_unit(&cx, dl)?,
TyKind::Str => Layout {
variants: Variants::Single { index: struct_variant_idx() },
fields: FieldsShape::Array { stride: Size::from_bytes(1), count: 0 },
abi: Abi::Aggregate { sized: false },
largest_niche: None,
align: dl.i8_align,
size: Size::ZERO,
},
TyKind::Never => Layout {
variants: Variants::Single { index: struct_variant_idx() },
fields: FieldsShape::Primitive,
abi: Abi::Uninhabited,
largest_niche: None,
align: dl.i8_align,
size: Size::ZERO,
},
TyKind::Never => cx.layout_of_never_type(),
TyKind::Dyn(_) | TyKind::Foreign(_) => {
let mut unit = layout_of_unit(&cx, dl)?;
match unit.abi {
@ -333,7 +357,7 @@ pub fn layout_of_ty_query(
unit
}
TyKind::Function(_) => {
let mut ptr = scalar_unit(dl, Primitive::Pointer);
let mut ptr = scalar_unit(dl, Primitive::Pointer(dl.instruction_address_space));
ptr.valid_range_mut().start = 1;
Layout::scalar(dl, ptr)
}
@ -363,7 +387,7 @@ pub fn layout_of_ty_query(
})
.collect::<Result<Vec<_>, _>>()?;
let fields = fields.iter().map(|it| &**it).collect::<Vec<_>>();
let fields = fields.iter().collect::<Vec<_>>();
let fields = fields.iter().collect::<IndexVec<_, _>>();
cx.univariant(dl, &fields, &ReprOptions::default(), StructKind::AlwaysSized)
.ok_or(LayoutError::Unknown)?
}
@ -398,9 +422,9 @@ pub fn layout_of_ty_recover(
}
fn layout_of_unit(cx: &LayoutCx<'_>, dl: &TargetDataLayout) -> Result<Layout, LayoutError> {
cx.univariant::<RustcEnumVariantIdx, &&Layout>(
cx.univariant::<RustcFieldIdx, RustcEnumVariantIdx, &&Layout>(
dl,
&[],
IndexSlice::empty(),
&ReprOptions::default(),
StructKind::AlwaysSized,
)

View File

@ -8,6 +8,7 @@ use hir_def::{
AdtId, EnumVariantId, LocalEnumVariantId, VariantId,
};
use la_arena::RawIdx;
use rustc_dependencies::index::IndexVec;
use smallvec::SmallVec;
use triomphe::Arc;
@ -20,8 +21,8 @@ use crate::{
use super::LayoutCx;
pub(crate) fn struct_variant_idx() -> RustcEnumVariantIdx {
RustcEnumVariantIdx(LocalEnumVariantId::from_raw(RawIdx::from(0)))
pub(crate) const fn struct_variant_idx() -> RustcEnumVariantIdx {
RustcEnumVariantIdx(LocalEnumVariantId::from_raw(RawIdx::from_u32(0)))
}
pub fn layout_of_adt_query(
@ -74,7 +75,7 @@ pub fn layout_of_adt_query(
.iter()
.map(|it| it.iter().map(|it| &**it).collect::<Vec<_>>())
.collect::<SmallVec<[_; 1]>>();
let variants = variants.iter().map(|it| it.iter().collect()).collect();
let variants = variants.iter().map(|it| it.iter().collect()).collect::<IndexVec<_, _>>();
let result = if matches!(def, AdtId::UnionId(..)) {
cx.layout_of_union(&repr, &variants).ok_or(LayoutError::Unknown)?
} else {
@ -105,7 +106,7 @@ pub fn layout_of_adt_query(
&& variants
.iter()
.next()
.and_then(|it| it.last().map(|it| !it.is_unsized()))
.and_then(|it| it.iter().last().map(|it| !it.is_unsized()))
.unwrap_or(true),
)
.ok_or(LayoutError::SizeOverflow)?
@ -119,7 +120,15 @@ fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound<u128>,
let attr = attrs.by_key(name).tt_values();
for tree in attr {
if let Some(it) = tree.token_trees.first() {
if let Ok(it) = it.to_string().parse() {
let text = it.to_string().replace('_', "");
let (text, base) = match text.as_bytes() {
[b'0', b'x', ..] => (&text[2..], 16),
[b'0', b'o', ..] => (&text[2..], 8),
[b'0', b'b', ..] => (&text[2..], 2),
_ => (&*text, 10),
};
if let Ok(it) = u128::from_str_radix(text, base) {
return Bound::Included(it);
}
}

View File

@ -186,9 +186,9 @@ fn capture_specific_fields() {
fn match_pattern() {
size_and_align_expr! {
struct X(i64, i32, (u8, i128));
let y: X = X(2, 5, (7, 3));
let _y: X = X(2, 5, (7, 3));
move |x: i64| {
match y {
match _y {
_ => x,
}
}

View File

@ -243,16 +243,16 @@ impl Default for ProjectionStore {
}
impl ProjectionStore {
fn shrink_to_fit(&mut self) {
pub fn shrink_to_fit(&mut self) {
self.id_to_proj.shrink_to_fit();
self.proj_to_id.shrink_to_fit();
}
fn intern_if_exist(&self, projection: &[PlaceElem]) -> Option<ProjectionId> {
pub fn intern_if_exist(&self, projection: &[PlaceElem]) -> Option<ProjectionId> {
self.proj_to_id.get(projection).copied()
}
fn intern(&mut self, projection: Box<[PlaceElem]>) -> ProjectionId {
pub fn intern(&mut self, projection: Box<[PlaceElem]>) -> ProjectionId {
let new_id = ProjectionId(self.proj_to_id.len() as u32);
match self.proj_to_id.entry(projection) {
Entry::Occupied(id) => *id.get(),
@ -267,20 +267,20 @@ impl ProjectionStore {
}
impl ProjectionId {
const EMPTY: ProjectionId = ProjectionId(0);
pub const EMPTY: ProjectionId = ProjectionId(0);
fn lookup(self, store: &ProjectionStore) -> &[PlaceElem] {
pub fn lookup(self, store: &ProjectionStore) -> &[PlaceElem] {
store.id_to_proj.get(&self).unwrap()
}
fn project(self, projection: PlaceElem, store: &mut ProjectionStore) -> ProjectionId {
pub fn project(self, projection: PlaceElem, store: &mut ProjectionStore) -> ProjectionId {
let mut current = self.lookup(store).to_vec();
current.push(projection);
store.intern(current.into())
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Place {
pub local: LocalId,
pub projection: ProjectionId,
@ -1007,7 +1007,7 @@ pub enum Rvalue {
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum StatementKind {
Assign(Place, Rvalue),
//FakeRead(Box<(FakeReadCause, Place)>),
FakeRead(Place),
//SetDiscriminant {
// place: Box<Place>,
// variant_index: VariantIdx,
@ -1109,7 +1109,9 @@ impl MirBody {
}
}
}
StatementKind::Deinit(p) => f(p, &mut self.projection_store),
StatementKind::FakeRead(p) | StatementKind::Deinit(p) => {
f(p, &mut self.projection_store)
}
StatementKind::StorageLive(_)
| StatementKind::StorageDead(_)
| StatementKind::Nop => (),

View File

@ -24,6 +24,7 @@ use super::{
pub enum MutabilityReason {
Mut { spans: Vec<MirSpan> },
Not,
Unused,
}
#[derive(Debug, Clone, PartialEq, Eq)]
@ -144,7 +145,8 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef>
}
}
},
StatementKind::Deinit(_)
StatementKind::FakeRead(_)
| StatementKind::Deinit(_)
| StatementKind::StorageLive(_)
| StatementKind::StorageDead(_)
| StatementKind::Nop => (),
@ -264,7 +266,10 @@ fn ever_initialized_map(
is_ever_initialized = false;
}
}
StatementKind::Deinit(_) | StatementKind::Nop | StatementKind::StorageLive(_) => (),
StatementKind::Deinit(_)
| StatementKind::FakeRead(_)
| StatementKind::Nop
| StatementKind::StorageLive(_) => (),
}
}
let Some(terminator) = &block.terminator else {
@ -331,16 +336,37 @@ fn ever_initialized_map(
result
}
fn push_mut_span(local: LocalId, span: MirSpan, result: &mut ArenaMap<LocalId, MutabilityReason>) {
match &mut result[local] {
MutabilityReason::Mut { spans } => spans.push(span),
it @ (MutabilityReason::Not | MutabilityReason::Unused) => {
*it = MutabilityReason::Mut { spans: vec![span] }
}
};
}
fn record_usage(local: LocalId, result: &mut ArenaMap<LocalId, MutabilityReason>) {
match &mut result[local] {
it @ MutabilityReason::Unused => {
*it = MutabilityReason::Not;
}
_ => (),
};
}
fn record_usage_for_operand(arg: &Operand, result: &mut ArenaMap<LocalId, MutabilityReason>) {
if let Operand::Copy(p) | Operand::Move(p) = arg {
record_usage(p.local, result);
}
}
fn mutability_of_locals(
db: &dyn HirDatabase,
body: &MirBody,
) -> ArenaMap<LocalId, MutabilityReason> {
let mut result: ArenaMap<LocalId, MutabilityReason> =
body.locals.iter().map(|it| (it.0, MutabilityReason::Not)).collect();
let mut push_mut_span = |local, span| match &mut result[local] {
MutabilityReason::Mut { spans } => spans.push(span),
it @ MutabilityReason::Not => *it = MutabilityReason::Mut { spans: vec![span] },
};
body.locals.iter().map(|it| (it.0, MutabilityReason::Unused)).collect();
let ever_init_maps = ever_initialized_map(db, body);
for (block_id, mut ever_init_map) in ever_init_maps.into_iter() {
let block = &body.basic_blocks[block_id];
@ -350,23 +376,51 @@ fn mutability_of_locals(
match place_case(db, body, place) {
ProjectionCase::Direct => {
if ever_init_map.get(place.local).copied().unwrap_or_default() {
push_mut_span(place.local, statement.span);
push_mut_span(place.local, statement.span, &mut result);
} else {
ever_init_map.insert(place.local, true);
}
}
ProjectionCase::DirectPart => {
// Partial initialization is not supported, so it is definitely `mut`
push_mut_span(place.local, statement.span);
push_mut_span(place.local, statement.span, &mut result);
}
ProjectionCase::Indirect => (),
ProjectionCase::Indirect => {
record_usage(place.local, &mut result);
}
}
match value {
Rvalue::CopyForDeref(p)
| Rvalue::Discriminant(p)
| Rvalue::Len(p)
| Rvalue::Ref(_, p) => {
record_usage(p.local, &mut result);
}
Rvalue::Use(o)
| Rvalue::Repeat(o, _)
| Rvalue::Cast(_, o, _)
| Rvalue::UnaryOp(_, o) => record_usage_for_operand(o, &mut result),
Rvalue::CheckedBinaryOp(_, o1, o2) => {
for o in [o1, o2] {
record_usage_for_operand(o, &mut result);
}
}
Rvalue::Aggregate(_, args) => {
for arg in args.iter() {
record_usage_for_operand(arg, &mut result);
}
}
Rvalue::ShallowInitBox(_, _) | Rvalue::ShallowInitBoxWithAlloc(_) => (),
}
if let Rvalue::Ref(BorrowKind::Mut { .. }, p) = value {
if place_case(db, body, p) != ProjectionCase::Indirect {
push_mut_span(p.local, statement.span);
push_mut_span(p.local, statement.span, &mut result);
}
}
}
StatementKind::FakeRead(p) => {
record_usage(p.local, &mut result);
}
StatementKind::StorageDead(p) => {
ever_init_map.insert(*p, false);
}
@ -386,15 +440,21 @@ fn mutability_of_locals(
| TerminatorKind::FalseEdge { .. }
| TerminatorKind::FalseUnwind { .. }
| TerminatorKind::GeneratorDrop
| TerminatorKind::SwitchInt { .. }
| TerminatorKind::Drop { .. }
| TerminatorKind::DropAndReplace { .. }
| TerminatorKind::Assert { .. }
| TerminatorKind::Yield { .. } => (),
TerminatorKind::Call { destination, .. } => {
TerminatorKind::SwitchInt { discr, targets: _ } => {
record_usage_for_operand(discr, &mut result);
}
TerminatorKind::Call { destination, args, func, .. } => {
record_usage_for_operand(func, &mut result);
for arg in args.iter() {
record_usage_for_operand(arg, &mut result);
}
if destination.projection.lookup(&body.projection_store).len() == 0 {
if ever_init_map.get(destination.local).copied().unwrap_or_default() {
push_mut_span(destination.local, MirSpan::Unknown);
push_mut_span(destination.local, MirSpan::Unknown, &mut result);
} else {
ever_init_map.insert(destination.local, true);
}

View File

@ -162,7 +162,7 @@ pub struct Evaluator<'a> {
not_special_fn_cache: RefCell<FxHashSet<FunctionId>>,
mir_or_dyn_index_cache: RefCell<FxHashMap<(FunctionId, Substitution), MirOrDynIndex>>,
/// Constantly dropping and creating `Locals` is very costly. We store
/// old locals that we normaly want to drop here, to reuse their allocations
/// old locals that we normally want to drop here, to reuse their allocations
/// later.
unused_locals_store: RefCell<FxHashMap<DefWithBodyId, Vec<Locals>>>,
cached_ptr_size: usize,
@ -375,10 +375,7 @@ impl MirEvalError {
Err(_) => continue,
},
MirSpan::PatId(p) => match source_map.pat_syntax(*p) {
Ok(s) => s.map(|it| match it {
Either::Left(e) => e.into(),
Either::Right(e) => e.into(),
}),
Ok(s) => s.map(|it| it.syntax_node_ptr()),
Err(_) => continue,
},
MirSpan::Unknown => continue,
@ -842,6 +839,7 @@ impl Evaluator<'_> {
}
StatementKind::Deinit(_) => not_supported!("de-init statement"),
StatementKind::StorageLive(_)
| StatementKind::FakeRead(_)
| StatementKind::StorageDead(_)
| StatementKind::Nop => (),
}
@ -2301,7 +2299,7 @@ impl Evaluator<'_> {
match self.get_mir_or_dyn_index(def, generic_args.clone(), locals, span)? {
MirOrDynIndex::Dyn(self_ty_idx) => {
// In the layout of current possible receiver, which at the moment of writing this code is one of
// `&T`, `&mut T`, `Box<T>`, `Rc<T>`, `Arc<T>`, and `Pin<P>` where `P` is one of possible recievers,
// `&T`, `&mut T`, `Box<T>`, `Rc<T>`, `Arc<T>`, and `Pin<P>` where `P` is one of possible receivers,
// the vtable is exactly in the `[ptr_size..2*ptr_size]` bytes. So we can use it without branching on
// the type.
let first_arg = arg_bytes.clone().next().unwrap();

View File

@ -1045,7 +1045,7 @@ impl Evaluator<'_> {
}
"transmute" => {
let [arg] = args else {
return Err(MirEvalError::TypeError("trasmute arg is not provided"));
return Err(MirEvalError::TypeError("transmute arg is not provided"));
};
destination.write_from_interval(self, arg.interval)
}
@ -1065,7 +1065,7 @@ impl Evaluator<'_> {
}
"ctlz" | "ctlz_nonzero" => {
let [arg] = args else {
return Err(MirEvalError::TypeError("cttz arg is not provided"));
return Err(MirEvalError::TypeError("ctlz arg is not provided"));
};
let result =
u128::from_le_bytes(pad16(arg.get(self)?, false)).leading_zeros() as usize;

View File

@ -529,6 +529,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
else {
return Ok(None);
};
self.push_fake_read(current, cond_place, expr_id.into());
let (then_target, else_target) =
self.pattern_match(current, None, cond_place, *pat)?;
self.write_bytes_to_place(
@ -668,6 +669,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
else {
return Ok(None);
};
self.push_fake_read(current, cond_place, expr_id.into());
let mut end = None;
for MatchArm { pat, guard, expr } in arms.iter() {
let (then, mut otherwise) =
@ -1299,6 +1301,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
return Ok(None);
};
if matches!(&self.body.exprs[lhs], Expr::Underscore) {
self.push_fake_read_for_operand(current, rhs_op, span);
return Ok(Some(current));
}
if matches!(
@ -1575,6 +1578,16 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.result.basic_blocks[block].statements.push(statement);
}
fn push_fake_read(&mut self, block: BasicBlockId, p: Place, span: MirSpan) {
self.push_statement(block, StatementKind::FakeRead(p).with_span(span));
}
fn push_fake_read_for_operand(&mut self, block: BasicBlockId, operand: Operand, span: MirSpan) {
if let Operand::Move(p) | Operand::Copy(p) = operand {
self.push_fake_read(block, p, span);
}
}
fn push_assignment(
&mut self,
block: BasicBlockId,
@ -1733,6 +1746,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
return Ok(None);
};
current = c;
self.push_fake_read(current, init_place, span);
(current, else_block) =
self.pattern_match(current, None, init_place, *pat)?;
match (else_block, else_branch) {
@ -1760,13 +1774,14 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
}
}
hir_def::hir::Statement::Expr { expr, has_semi: _ } => {
&hir_def::hir::Statement::Expr { expr, has_semi: _ } => {
let scope2 = self.push_drop_scope();
let Some((_, c)) = self.lower_expr_as_place(current, *expr, true)? else {
let Some((p, c)) = self.lower_expr_as_place(current, expr, true)? else {
scope2.pop_assume_dropped(self);
scope.pop_assume_dropped(self);
return Ok(None);
};
self.push_fake_read(c, p, expr.into());
current = scope2.pop_and_drop(self, c);
}
}

View File

@ -284,6 +284,7 @@ impl MirLowerCtx<'_> {
);
(current, current_else) = self.pattern_match_binding(
id,
*slice,
next_place,
(*slice).into(),
current,
@ -395,6 +396,7 @@ impl MirLowerCtx<'_> {
if mode == MatchingMode::Bind {
self.pattern_match_binding(
*id,
pattern,
cond_place,
pattern.into(),
current,
@ -431,13 +433,14 @@ impl MirLowerCtx<'_> {
fn pattern_match_binding(
&mut self,
id: BindingId,
pat: PatId,
cond_place: Place,
span: MirSpan,
current: BasicBlockId,
current_else: Option<BasicBlockId>,
) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
let target_place = self.binding_local(id)?;
let mode = self.infer.binding_modes[id];
let mode = self.infer.binding_modes[pat];
self.push_storage_live(id, current)?;
self.push_assignment(
current,

View File

@ -248,6 +248,7 @@ impl Filler<'_> {
| Rvalue::CopyForDeref(_) => (),
},
StatementKind::Deinit(_)
| StatementKind::FakeRead(_)
| StatementKind::StorageLive(_)
| StatementKind::StorageDead(_)
| StatementKind::Nop => (),

View File

@ -233,6 +233,11 @@ impl<'a> MirPrettyCtx<'a> {
this.place(p);
wln!(this, ");");
}
StatementKind::FakeRead(p) => {
w!(this, "FakeRead(");
this.place(p);
wln!(this, ");");
}
StatementKind::Nop => wln!(this, "Nop;"),
}
}

View File

@ -269,12 +269,7 @@ fn pat_node(
Some(match body_source_map.pat_syntax(pat) {
Ok(sp) => {
let root = db.parse_or_expand(sp.file_id);
sp.map(|ptr| {
ptr.either(
|it| it.to_node(&root).syntax().clone(),
|it| it.to_node(&root).syntax().clone(),
)
})
sp.map(|ptr| ptr.to_node(&root).syntax().clone())
}
Err(SyntheticSyntax) => return None,
})
@ -303,12 +298,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
let syntax_ptr = match body_source_map.pat_syntax(pat) {
Ok(sp) => {
let root = db.parse_or_expand(sp.file_id);
sp.map(|ptr| {
ptr.either(
|it| it.to_node(&root).syntax().clone(),
|it| it.to_node(&root).syntax().clone(),
)
})
sp.map(|ptr| ptr.to_node(&root).syntax().clone())
}
Err(SyntheticSyntax) => continue,
};

View File

@ -66,6 +66,7 @@ diagnostics![
UnresolvedModule,
UnresolvedProcMacro,
UnusedMut,
UnusedVariable,
];
#[derive(Debug)]
@ -173,20 +174,19 @@ pub struct MalformedDerive {
#[derive(Debug)]
pub struct NoSuchField {
pub field: InFile<Either<AstPtr<ast::RecordExprField>, AstPtr<ast::RecordPatField>>>,
pub field: InFile<AstPtr<Either<ast::RecordExprField, ast::RecordPatField>>>,
pub private: bool,
}
#[derive(Debug)]
pub struct PrivateAssocItem {
pub expr_or_pat:
InFile<Either<AstPtr<ast::Expr>, Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>>>,
pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, Either<ast::Pat, ast::SelfParam>>>>,
pub item: AssocItem,
}
#[derive(Debug)]
pub struct MismatchedTupleStructPatArgCount {
pub expr_or_pat: InFile<Either<AstPtr<ast::Expr>, AstPtr<ast::Pat>>>,
pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
pub expected: usize,
pub found: usize,
}
@ -227,7 +227,7 @@ pub struct MissingUnsafe {
#[derive(Debug)]
pub struct MissingFields {
pub file: HirFileId,
pub field_list_parent: Either<AstPtr<ast::RecordExpr>, AstPtr<ast::RecordPat>>,
pub field_list_parent: AstPtr<Either<ast::RecordExpr, ast::RecordPat>>,
pub field_list_parent_path: Option<AstPtr<ast::Path>>,
pub missed_fields: Vec<Name>,
}
@ -254,7 +254,7 @@ pub struct MissingMatchArms {
#[derive(Debug)]
pub struct TypeMismatch {
pub expr_or_pat: Either<InFile<AstPtr<ast::Expr>>, InFile<AstPtr<ast::Pat>>>,
pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
pub expected: Type,
pub actual: Type,
}
@ -270,6 +270,11 @@ pub struct UnusedMut {
pub local: Local,
}
#[derive(Debug)]
pub struct UnusedVariable {
pub local: Local,
}
#[derive(Debug)]
pub struct MovedOutOfRef {
pub ty: Type,

View File

@ -64,7 +64,7 @@ use hir_ty::{
consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt},
diagnostics::BodyValidationDiagnostic,
known_const_to_ast,
layout::{Layout as TyLayout, RustcEnumVariantIdx, TagEncoding},
layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding},
method_resolution::{self, TyFingerprint},
mir::{self, interpret_mir},
primitive::UintTy,
@ -98,7 +98,7 @@ pub use crate::{
ReplaceFilterMapNextWithFindMap, TypeMismatch, TypedHole, UndeclaredLabel,
UnimplementedBuiltinMacro, UnreachableLabel, UnresolvedExternCrate, UnresolvedField,
UnresolvedImport, UnresolvedMacroCall, UnresolvedMethodCall, UnresolvedModule,
UnresolvedProcMacro, UnusedMut,
UnresolvedProcMacro, UnusedMut, UnusedVariable,
},
has_source::HasSource,
semantics::{PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits},
@ -452,15 +452,7 @@ impl HasVisibility for ModuleDef {
impl Module {
/// Name of this module.
pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
let def_map = self.id.def_map(db.upcast());
let parent = def_map[self.id.local_id].parent?;
def_map[parent].children.iter().find_map(|(name, module_id)| {
if *module_id == self.id.local_id {
Some(name.clone())
} else {
None
}
})
self.id.name(db.upcast())
}
/// Returns the crate this module is part of.
@ -571,6 +563,7 @@ impl Module {
if def_map[m.id.local_id].origin.is_inline() {
m.diagnostics(db, acc)
}
acc.extend(def.diagnostics(db))
}
ModuleDef::Trait(t) => {
for diag in db.trait_data_with_diagnostics(t.id).1.iter() {
@ -1509,10 +1502,10 @@ impl DefWithBody {
&hir_ty::InferenceDiagnostic::NoSuchField { field: expr, private } => {
let expr_or_pat = match expr {
ExprOrPatId::ExprId(expr) => {
source_map.field_syntax(expr).map(Either::Left)
source_map.field_syntax(expr).map(AstPtr::wrap_left)
}
ExprOrPatId::PatId(pat) => {
source_map.pat_field_syntax(pat).map(Either::Right)
source_map.pat_field_syntax(pat).map(AstPtr::wrap_right)
}
};
acc.push(NoSuchField { field: expr_or_pat, private }.into())
@ -1530,8 +1523,8 @@ impl DefWithBody {
}
&hir_ty::InferenceDiagnostic::PrivateAssocItem { id, item } => {
let expr_or_pat = match id {
ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(Either::Left),
ExprOrPatId::PatId(pat) => pat_syntax(pat).map(Either::Right),
ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
ExprOrPatId::PatId(pat) => pat_syntax(pat).map(AstPtr::wrap_right),
};
let item = item.into();
acc.push(PrivateAssocItem { expr_or_pat, item }.into())
@ -1609,12 +1602,17 @@ impl DefWithBody {
found,
} => {
let expr_or_pat = match pat {
ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(Either::Left),
ExprOrPatId::PatId(pat) => source_map
.pat_syntax(pat)
.expect("unexpected synthetic")
.map(|it| it.unwrap_left())
.map(Either::Right),
ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(AstPtr::wrap_left),
ExprOrPatId::PatId(pat) => {
let InFile { file_id, value } =
source_map.pat_syntax(pat).expect("unexpected synthetic");
// cast from Either<Pat, SelfParam> -> Either<_, Pat>
let Some(ptr) = AstPtr::try_from_raw(value.syntax_node_ptr()) else {
continue;
};
InFile { file_id, value: ptr }
}
};
acc.push(
MismatchedTupleStructPatArgCount { expr_or_pat, expected, found }.into(),
@ -1628,11 +1626,15 @@ impl DefWithBody {
ExprOrPatId::PatId(pat) => source_map.pat_syntax(pat).map(Either::Right),
};
let expr_or_pat = match expr_or_pat {
Ok(Either::Left(expr)) => Either::Left(expr),
Ok(Either::Right(InFile { file_id, value: Either::Left(pat) })) => {
Either::Right(InFile { file_id, value: pat })
Ok(Either::Left(expr)) => expr.map(AstPtr::wrap_left),
Ok(Either::Right(InFile { file_id, value: pat })) => {
// cast from Either<Pat, SelfParam> -> Either<_, Pat>
let Some(ptr) = AstPtr::try_from_raw(pat.syntax_node_ptr()) else {
continue;
};
InFile { file_id, value: ptr }
}
Ok(Either::Right(_)) | Err(SyntheticSyntax) => continue,
Err(SyntheticSyntax) => continue,
};
acc.push(
@ -1667,10 +1669,7 @@ impl DefWithBody {
Err(_) => continue,
},
mir::MirSpan::PatId(p) => match source_map.pat_syntax(p) {
Ok(s) => s.map(|it| match it {
Either::Left(e) => e.into(),
Either::Right(e) => e.into(),
}),
Ok(s) => s.map(|it| it.into()),
Err(_) => continue,
},
mir::MirSpan::Unknown => continue,
@ -1697,9 +1696,20 @@ impl DefWithBody {
// Skip synthetic bindings
continue;
}
let need_mut = &mol[local];
let mut need_mut = &mol[local];
if body[binding_id].name.as_str() == Some("self")
&& need_mut == &mir::MutabilityReason::Unused
{
need_mut = &mir::MutabilityReason::Not;
}
let local = Local { parent: self.into(), binding_id };
match (need_mut, local.is_mut(db)) {
(mir::MutabilityReason::Unused, _) => {
let should_ignore = matches!(body[binding_id].name.as_str(), Some(it) if it.starts_with("_"));
if !should_ignore {
acc.push(UnusedVariable { local }.into())
}
}
(mir::MutabilityReason::Mut { .. }, true)
| (mir::MutabilityReason::Not, false) => (),
(mir::MutabilityReason::Mut { spans }, false) => {
@ -1710,10 +1720,7 @@ impl DefWithBody {
Err(_) => continue,
},
mir::MirSpan::PatId(p) => match source_map.pat_syntax(*p) {
Ok(s) => s.map(|it| match it {
Either::Left(e) => e.into(),
Either::Right(e) => e.into(),
}),
Ok(s) => s.map(|it| it.into()),
Err(_) => continue,
},
mir::MirSpan::Unknown => continue,
@ -1752,18 +1759,18 @@ impl DefWithBody {
Ok(source_ptr) => {
let root = source_ptr.file_syntax(db.upcast());
if let ast::Expr::RecordExpr(record_expr) =
&source_ptr.value.to_node(&root)
source_ptr.value.to_node(&root)
{
if record_expr.record_expr_field_list().is_some() {
let field_list_parent_path =
record_expr.path().map(|path| AstPtr::new(&path));
acc.push(
MissingFields {
file: source_ptr.file_id,
field_list_parent: Either::Left(AstPtr::new(
field_list_parent: AstPtr::new(&Either::Left(
record_expr,
)),
field_list_parent_path: record_expr
.path()
.map(|path| AstPtr::new(&path)),
field_list_parent_path,
missed_fields,
}
.into(),
@ -1775,24 +1782,24 @@ impl DefWithBody {
},
Either::Right(record_pat) => match source_map.pat_syntax(record_pat) {
Ok(source_ptr) => {
if let Some(expr) = source_ptr.value.as_ref().left() {
if let Some(ptr) = source_ptr.value.clone().cast::<ast::RecordPat>()
{
let root = source_ptr.file_syntax(db.upcast());
if let ast::Pat::RecordPat(record_pat) = expr.to_node(&root) {
if record_pat.record_pat_field_list().is_some() {
acc.push(
MissingFields {
file: source_ptr.file_id,
field_list_parent: Either::Right(AstPtr::new(
&record_pat,
)),
field_list_parent_path: record_pat
.path()
.map(|path| AstPtr::new(&path)),
missed_fields,
}
.into(),
)
}
let record_pat = ptr.to_node(&root);
if record_pat.record_pat_field_list().is_some() {
let field_list_parent_path =
record_pat.path().map(|path| AstPtr::new(&path));
acc.push(
MissingFields {
file: source_ptr.file_id,
field_list_parent: AstPtr::new(&Either::Right(
record_pat,
)),
field_list_parent_path,
missed_fields,
}
.into(),
)
}
}
}
@ -1960,6 +1967,17 @@ impl Function {
db.function_data(self.id).attrs.is_test()
}
    /// Is this a `fn main` or a function with an `export_name` of `main`?
pub fn is_main(self, db: &dyn HirDatabase) -> bool {
if !self.module(db).is_crate_root() {
return false;
}
let data = db.function_data(self.id);
data.name.to_smol_str() == "main"
|| data.attrs.export_name().map(core::ops::Deref::deref) == Some("main")
}
/// Does this function have the ignore attribute?
pub fn is_ignore(self, db: &dyn HirDatabase) -> bool {
db.function_data(self.id).attrs.is_ignore()
@ -2926,10 +2944,10 @@ impl Local {
.map(|&definition| {
let src = source_map.pat_syntax(definition).unwrap(); // Hmm...
let root = src.file_syntax(db.upcast());
src.map(|ast| match ast {
// Suspicious unwrap
Either::Left(it) => Either::Left(it.cast().unwrap().to_node(&root)),
Either::Right(it) => Either::Right(it.to_node(&root)),
src.map(|ast| match ast.to_node(&root) {
Either::Left(ast::Pat::IdentPat(it)) => Either::Left(it),
Either::Left(_) => unreachable!("local with non ident-pattern"),
Either::Right(it) => Either::Right(it),
})
})
.map(move |source| LocalSource { local: self, source })
@ -4515,15 +4533,31 @@ impl Layout {
Some(self.0.largest_niche?.available(&*self.1))
}
pub fn field_offset(&self, idx: usize) -> Option<u64> {
pub fn field_offset(&self, field: Field) -> Option<u64> {
match self.0.fields {
layout::FieldsShape::Primitive => None,
layout::FieldsShape::Union(_) => Some(0),
layout::FieldsShape::Array { stride, count } => {
let i = u64::try_from(idx).ok()?;
let i = u64::try_from(field.index()).ok()?;
(i < count).then_some((stride * i).bytes())
}
layout::FieldsShape::Arbitrary { ref offsets, .. } => Some(offsets.get(idx)?.bytes()),
layout::FieldsShape::Arbitrary { ref offsets, .. } => {
Some(offsets.get(RustcFieldIdx(field.id))?.bytes())
}
}
}
pub fn tuple_field_offset(&self, field: usize) -> Option<u64> {
match self.0.fields {
layout::FieldsShape::Primitive => None,
layout::FieldsShape::Union(_) => Some(0),
layout::FieldsShape::Array { stride, count } => {
let i = u64::try_from(field).ok()?;
(i < count).then_some((stride * i).bytes())
}
layout::FieldsShape::Arbitrary { ref offsets, .. } => {
Some(offsets.get(RustcFieldIdx::new(field))?.bytes())
}
}
}

View File

@ -236,9 +236,9 @@ impl SourceAnalyzer {
_db: &dyn HirDatabase,
pat: &ast::IdentPat,
) -> Option<BindingMode> {
let binding_id = self.binding_id_of_pat(pat)?;
let id = self.pat_id(&pat.clone().into())?;
let infer = self.infer.as_ref()?;
infer.binding_modes.get(binding_id).map(|bm| match bm {
infer.binding_modes.get(id).map(|bm| match bm {
hir_ty::BindingMode::Move => BindingMode::Move,
hir_ty::BindingMode::Ref(hir_ty::Mutability::Mut) => BindingMode::Ref(Mutability::Mut),
hir_ty::BindingMode::Ref(hir_ty::Mutability::Not) => {
@ -888,7 +888,7 @@ fn scope_for_offset(
.scope_by_expr()
.iter()
.filter_map(|(id, scope)| {
let InFile { file_id, value } = source_map.expr_syntax(*id).ok()?;
let InFile { file_id, value } = source_map.expr_syntax(id).ok()?;
if from_file == file_id {
return Some((value.text_range(), scope));
}
@ -923,7 +923,7 @@ fn adjust(
.scope_by_expr()
.iter()
.filter_map(|(id, scope)| {
let source = source_map.expr_syntax(*id).ok()?;
let source = source_map.expr_syntax(id).ok()?;
// FIXME: correctly handle macro expansion
if source.file_id != from_file {
return None;

View File

@ -273,9 +273,10 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
syntax::SyntaxElement::Token(it) => {
// Don't have a way to make tokens mut, so instead make the parent mut
// and find the token again
let parent = edit.make_syntax_mut(it.parent().unwrap());
let parent =
edit.make_syntax_mut(it.parent().expect("Token must have a parent."));
let mut_token =
parent.covering_element(it.text_range()).into_token().unwrap();
parent.covering_element(it.text_range()).into_token().expect("Covering element cannot be found. Range may be beyond the current node's range");
syntax::SyntaxElement::from(mut_token)
}
@ -446,21 +447,23 @@ fn build_pat(
mod_path_to_ast(&module.find_use_path(db, ModuleDef::from(var), prefer_no_std)?);
// FIXME: use HIR for this; it doesn't currently expose struct vs. tuple vs. unit variants though
let pat: ast::Pat = match var.source(db)?.value.kind() {
Some(match var.source(db)?.value.kind() {
ast::StructKind::Tuple(field_list) => {
let pats =
iter::repeat(make::wildcard_pat().into()).take(field_list.fields().count());
make::tuple_struct_pat(path, pats).into()
}
ast::StructKind::Record(field_list) => {
let pats = field_list
.fields()
.map(|f| make::ext::simple_ident_pat(f.name().unwrap()).into());
let pats = field_list.fields().map(|f| {
make::ext::simple_ident_pat(
f.name().expect("Record field must have a name"),
)
.into()
});
make::record_pat(path, pats).into()
}
ast::StructKind::Unit => make::path_pat(path),
};
Some(pat)
})
}
ExtendedVariant::True => Some(ast::Pat::from(make::literal_pat("true"))),
ExtendedVariant::False => Some(ast::Pat::from(make::literal_pat("false"))),
@ -1941,4 +1944,35 @@ fn main() {
"#,
);
}
/// See [`discussion`](https://github.com/rust-lang/rust-analyzer/pull/15594#discussion_r1322960614)
#[test]
fn missing_field_name() {
check_assist(
add_missing_match_arms,
r#"
enum A {
A,
Missing { a: u32, : u32, c: u32 }
}
fn a() {
let b = A::A;
match b$0 {}
}"#,
r#"
enum A {
A,
Missing { a: u32, : u32, c: u32 }
}
fn a() {
let b = A::A;
match b {
$0A::A => todo!(),
A::Missing { a, u32, c } => todo!(),
}
}"#,
)
}
}

View File

@ -1,7 +1,13 @@
use std::collections::VecDeque;
use ide_db::{
assists::GroupLabel,
famous_defs::FamousDefs,
source_change::SourceChangeBuilder,
syntax_helpers::node_ext::{for_each_tail_expr, walk_expr},
};
use syntax::{
ast::{self, AstNode, Expr::BinExpr},
ast::{self, make, AstNode, Expr::BinExpr, HasArgList},
ted::{self, Position},
SyntaxKind,
};
@ -89,7 +95,8 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
let dm_lhs = demorganed.lhs()?;
acc.add(
acc.add_group(
&GroupLabel("Apply De Morgan's law".to_string()),
AssistId("apply_demorgan", AssistKind::RefactorRewrite),
"Apply De Morgan's law",
op_range,
@ -143,6 +150,127 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
)
}
// Assist: apply_demorgan_iterator
//
// Apply https://en.wikipedia.org/wiki/De_Morgan%27s_laws[De Morgan's law] to
// `Iterator::all` and `Iterator::any`.
//
// This transforms expressions of the form `!iter.any(|x| predicate(x))` into
// `iter.all(|x| !predicate(x))` and vice versa, likewise turning `Iterator::all`
// into `Iterator::any`.
//
// ```
// # //- minicore: iterator
// fn main() {
// let arr = [1, 2, 3];
// if !arr.into_iter().$0any(|num| num == 4) {
// println!("foo");
// }
// }
// ```
// ->
// ```
// fn main() {
// let arr = [1, 2, 3];
// if arr.into_iter().all(|num| num != 4) {
// println!("foo");
// }
// }
// ```
pub(crate) fn apply_demorgan_iterator(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let method_call: ast::MethodCallExpr = ctx.find_node_at_offset()?;
let (name, arg_expr) = validate_method_call_expr(ctx, &method_call)?;
let ast::Expr::ClosureExpr(closure_expr) = arg_expr else { return None };
let closure_body = closure_expr.body()?;
let op_range = method_call.syntax().text_range();
let label = format!("Apply De Morgan's law to `Iterator::{}`", name.text().as_str());
acc.add_group(
&GroupLabel("Apply De Morgan's law".to_string()),
AssistId("apply_demorgan_iterator", AssistKind::RefactorRewrite),
label,
op_range,
|edit| {
// replace the method name
let new_name = match name.text().as_str() {
"all" => make::name_ref("any"),
"any" => make::name_ref("all"),
_ => unreachable!(),
}
.clone_for_update();
edit.replace_ast(name, new_name);
// negate all tail expressions in the closure body
let tail_cb = &mut |e: &_| tail_cb_impl(edit, e);
walk_expr(&closure_body, &mut |expr| {
if let ast::Expr::ReturnExpr(ret_expr) = expr {
if let Some(ret_expr_arg) = &ret_expr.expr() {
for_each_tail_expr(ret_expr_arg, tail_cb);
}
}
});
for_each_tail_expr(&closure_body, tail_cb);
// negate the whole method call
if let Some(prefix_expr) = method_call
.syntax()
.parent()
.and_then(ast::PrefixExpr::cast)
.filter(|prefix_expr| matches!(prefix_expr.op_kind(), Some(ast::UnaryOp::Not)))
{
edit.delete(
prefix_expr
.op_token()
.expect("prefix expression always has an operator")
.text_range(),
);
} else {
edit.insert(method_call.syntax().text_range().start(), "!");
}
},
)
}
/// Ensures that the method call is to `Iterator::all` or `Iterator::any`.
fn validate_method_call_expr(
ctx: &AssistContext<'_>,
method_call: &ast::MethodCallExpr,
) -> Option<(ast::NameRef, ast::Expr)> {
let name_ref = method_call.name_ref()?;
if name_ref.text() != "all" && name_ref.text() != "any" {
return None;
}
let arg_expr = method_call.arg_list()?.args().next()?;
let sema = &ctx.sema;
let receiver = method_call.receiver()?;
let it_type = sema.type_of_expr(&receiver)?.adjusted();
let module = sema.scope(receiver.syntax())?.module();
let krate = module.krate();
let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?;
it_type.impls_trait(sema.db, iter_trait, &[]).then_some((name_ref, arg_expr))
}
fn tail_cb_impl(edit: &mut SourceChangeBuilder, e: &ast::Expr) {
match e {
ast::Expr::BreakExpr(break_expr) => {
if let Some(break_expr_arg) = break_expr.expr() {
for_each_tail_expr(&break_expr_arg, &mut |e| tail_cb_impl(edit, e))
}
}
ast::Expr::ReturnExpr(_) => {
// all return expressions have already been handled by the walk loop
}
e => {
let inverted_body = invert_boolean_expression(e.clone());
edit.replace(e.syntax().text_range(), inverted_body.syntax().text());
}
}
}
#[cfg(test)]
mod tests {
use super::*;
@ -255,4 +383,206 @@ fn f() { !(S <= S || S < S) }
"fn() { let x = a && b && c; }",
)
}
#[test]
fn demorgan_iterator_any_all_reverse() {
check_assist(
apply_demorgan_iterator,
r#"
//- minicore: iterator
fn main() {
let arr = [1, 2, 3];
if arr.into_iter().all(|num| num $0!= 4) {
println!("foo");
}
}
"#,
r#"
fn main() {
let arr = [1, 2, 3];
if !arr.into_iter().any(|num| num == 4) {
println!("foo");
}
}
"#,
);
}
#[test]
fn demorgan_iterator_all_any() {
check_assist(
apply_demorgan_iterator,
r#"
//- minicore: iterator
fn main() {
let arr = [1, 2, 3];
if !arr.into_iter().$0all(|num| num > 3) {
println!("foo");
}
}
"#,
r#"
fn main() {
let arr = [1, 2, 3];
if arr.into_iter().any(|num| num <= 3) {
println!("foo");
}
}
"#,
);
}
#[test]
fn demorgan_iterator_multiple_terms() {
check_assist(
apply_demorgan_iterator,
r#"
//- minicore: iterator
fn main() {
let arr = [1, 2, 3];
if !arr.into_iter().$0any(|num| num > 3 && num == 23 && num <= 30) {
println!("foo");
}
}
"#,
r#"
fn main() {
let arr = [1, 2, 3];
if arr.into_iter().all(|num| !(num > 3 && num == 23 && num <= 30)) {
println!("foo");
}
}
"#,
);
}
#[test]
fn demorgan_iterator_double_negation() {
check_assist(
apply_demorgan_iterator,
r#"
//- minicore: iterator
fn main() {
let arr = [1, 2, 3];
if !arr.into_iter().$0all(|num| !(num > 3)) {
println!("foo");
}
}
"#,
r#"
fn main() {
let arr = [1, 2, 3];
if arr.into_iter().any(|num| num > 3) {
println!("foo");
}
}
"#,
);
}
#[test]
fn demorgan_iterator_double_parens() {
check_assist(
apply_demorgan_iterator,
r#"
//- minicore: iterator
fn main() {
let arr = [1, 2, 3];
if !arr.into_iter().$0any(|num| (num > 3 && (num == 1 || num == 2))) {
println!("foo");
}
}
"#,
r#"
fn main() {
let arr = [1, 2, 3];
if arr.into_iter().all(|num| !(num > 3 && (num == 1 || num == 2))) {
println!("foo");
}
}
"#,
);
}
#[test]
fn demorgan_iterator_multiline() {
check_assist(
apply_demorgan_iterator,
r#"
//- minicore: iterator
fn main() {
let arr = [1, 2, 3];
if arr
.into_iter()
.all$0(|num| !num.is_negative())
{
println!("foo");
}
}
"#,
r#"
fn main() {
let arr = [1, 2, 3];
if !arr
.into_iter()
.any(|num| num.is_negative())
{
println!("foo");
}
}
"#,
);
}
#[test]
fn demorgan_iterator_block_closure() {
check_assist(
apply_demorgan_iterator,
r#"
//- minicore: iterator
fn main() {
let arr = [-1, 1, 2, 3];
if arr.into_iter().all(|num: i32| {
$0if num.is_positive() {
num <= 3
} else {
num >= -1
}
}) {
println!("foo");
}
}
"#,
r#"
fn main() {
let arr = [-1, 1, 2, 3];
if !arr.into_iter().any(|num: i32| {
if num.is_positive() {
num > 3
} else {
num < -1
}
}) {
println!("foo");
}
}
"#,
);
}
#[test]
fn demorgan_iterator_wrong_method() {
check_assist_not_applicable(
apply_demorgan_iterator,
r#"
//- minicore: iterator
fn main() {
let arr = [1, 2, 3];
if !arr.into_iter().$0map(|num| num > 3) {
println!("foo");
}
}
"#,
);
}
}

View File

@ -5,7 +5,7 @@ use ide_db::{
helpers::mod_path_to_ast,
imports::{
import_assets::{ImportAssets, ImportCandidate, LocatedImport},
insert_use::{insert_use, ImportScope},
insert_use::{insert_use, insert_use_as_alias, ImportScope},
},
};
use syntax::{ast, AstNode, NodeOrToken, SyntaxElement};
@ -129,10 +129,12 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
for import in proposed_imports {
let import_path = import.import_path;
let (assist_id, import_name) =
(AssistId("auto_import", AssistKind::QuickFix), import_path.display(ctx.db()));
acc.add_group(
&group_label,
AssistId("auto_import", AssistKind::QuickFix),
format!("Import `{}`", import_path.display(ctx.db())),
assist_id,
format!("Import `{}`", import_name),
range,
|builder| {
let scope = match scope.clone() {
@ -143,6 +145,38 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
insert_use(&scope, mod_path_to_ast(&import_path), &ctx.config.insert_use);
},
);
match import_assets.import_candidate() {
ImportCandidate::TraitAssocItem(name) | ImportCandidate::TraitMethod(name) => {
let is_method =
matches!(import_assets.import_candidate(), ImportCandidate::TraitMethod(_));
let type_ = if is_method { "method" } else { "item" };
let group_label = GroupLabel(format!(
"Import a trait for {} {} by alias",
type_,
name.assoc_item_name.text()
));
acc.add_group(
&group_label,
assist_id,
format!("Import `{} as _`", import_name),
range,
|builder| {
let scope = match scope.clone() {
ImportScope::File(it) => ImportScope::File(builder.make_mut(it)),
ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)),
ImportScope::Block(it) => ImportScope::Block(builder.make_mut(it)),
};
insert_use_as_alias(
&scope,
mod_path_to_ast(&import_path),
&ctx.config.insert_use,
);
},
);
}
_ => {}
}
}
Some(())
}
@ -253,7 +287,8 @@ mod tests {
};
use crate::tests::{
check_assist, check_assist_not_applicable, check_assist_target, TEST_CONFIG,
check_assist, check_assist_by_label, check_assist_not_applicable, check_assist_target,
TEST_CONFIG,
};
fn check_auto_import_order(before: &str, order: &[&str]) {
@ -705,7 +740,7 @@ fn main() {
#[test]
fn associated_trait_function() {
check_assist(
check_assist_by_label(
auto_import,
r"
mod test_mod {
@ -739,6 +774,44 @@ fn main() {
test_mod::TestStruct::test_function
}
",
"Import `test_mod::TestTrait`",
);
check_assist_by_label(
auto_import,
r"
mod test_mod {
pub trait TestTrait {
fn test_function();
}
pub struct TestStruct {}
impl TestTrait for TestStruct {
fn test_function() {}
}
}
fn main() {
test_mod::TestStruct::test_function$0
}
",
r"
use test_mod::TestTrait as _;
mod test_mod {
pub trait TestTrait {
fn test_function();
}
pub struct TestStruct {}
impl TestTrait for TestStruct {
fn test_function() {}
}
}
fn main() {
test_mod::TestStruct::test_function
}
",
"Import `test_mod::TestTrait as _`",
);
}
@ -776,7 +849,44 @@ fn main() {
#[test]
fn associated_trait_const() {
check_assist(
check_assist_by_label(
auto_import,
r"
mod test_mod {
pub trait TestTrait {
const TEST_CONST: u8;
}
pub struct TestStruct {}
impl TestTrait for TestStruct {
const TEST_CONST: u8 = 42;
}
}
fn main() {
test_mod::TestStruct::TEST_CONST$0
}
",
r"
use test_mod::TestTrait as _;
mod test_mod {
pub trait TestTrait {
const TEST_CONST: u8;
}
pub struct TestStruct {}
impl TestTrait for TestStruct {
const TEST_CONST: u8 = 42;
}
}
fn main() {
test_mod::TestStruct::TEST_CONST
}
",
"Import `test_mod::TestTrait as _`",
);
check_assist_by_label(
auto_import,
r"
mod test_mod {
@ -810,6 +920,7 @@ fn main() {
test_mod::TestStruct::TEST_CONST
}
",
"Import `test_mod::TestTrait`",
);
}
@ -847,7 +958,46 @@ fn main() {
#[test]
fn trait_method() {
check_assist(
check_assist_by_label(
auto_import,
r"
mod test_mod {
pub trait TestTrait {
fn test_method(&self);
}
pub struct TestStruct {}
impl TestTrait for TestStruct {
fn test_method(&self) {}
}
}
fn main() {
let test_struct = test_mod::TestStruct {};
test_struct.test_meth$0od()
}
",
r"
use test_mod::TestTrait as _;
mod test_mod {
pub trait TestTrait {
fn test_method(&self);
}
pub struct TestStruct {}
impl TestTrait for TestStruct {
fn test_method(&self) {}
}
}
fn main() {
let test_struct = test_mod::TestStruct {};
test_struct.test_method()
}
",
"Import `test_mod::TestTrait as _`",
);
check_assist_by_label(
auto_import,
r"
mod test_mod {
@ -883,12 +1033,43 @@ fn main() {
test_struct.test_method()
}
",
"Import `test_mod::TestTrait`",
);
}
#[test]
fn trait_method_cross_crate() {
check_assist(
check_assist_by_label(
auto_import,
r"
//- /main.rs crate:main deps:dep
fn main() {
let test_struct = dep::test_mod::TestStruct {};
test_struct.test_meth$0od()
}
//- /dep.rs crate:dep
pub mod test_mod {
pub trait TestTrait {
fn test_method(&self);
}
pub struct TestStruct {}
impl TestTrait for TestStruct {
fn test_method(&self) {}
}
}
",
r"
use dep::test_mod::TestTrait as _;
fn main() {
let test_struct = dep::test_mod::TestStruct {};
test_struct.test_method()
}
",
"Import `dep::test_mod::TestTrait as _`",
);
check_assist_by_label(
auto_import,
r"
//- /main.rs crate:main deps:dep
@ -915,12 +1096,41 @@ fn main() {
test_struct.test_method()
}
",
"Import `dep::test_mod::TestTrait`",
);
}
#[test]
fn assoc_fn_cross_crate() {
check_assist(
check_assist_by_label(
auto_import,
r"
//- /main.rs crate:main deps:dep
fn main() {
dep::test_mod::TestStruct::test_func$0tion
}
//- /dep.rs crate:dep
pub mod test_mod {
pub trait TestTrait {
fn test_function();
}
pub struct TestStruct {}
impl TestTrait for TestStruct {
fn test_function() {}
}
}
",
r"
use dep::test_mod::TestTrait as _;
fn main() {
dep::test_mod::TestStruct::test_function
}
",
"Import `dep::test_mod::TestTrait as _`",
);
check_assist_by_label(
auto_import,
r"
//- /main.rs crate:main deps:dep
@ -945,12 +1155,41 @@ fn main() {
dep::test_mod::TestStruct::test_function
}
",
"Import `dep::test_mod::TestTrait`",
);
}
#[test]
fn assoc_const_cross_crate() {
check_assist(
check_assist_by_label(
auto_import,
r"
//- /main.rs crate:main deps:dep
fn main() {
dep::test_mod::TestStruct::CONST$0
}
//- /dep.rs crate:dep
pub mod test_mod {
pub trait TestTrait {
const CONST: bool;
}
pub struct TestStruct {}
impl TestTrait for TestStruct {
const CONST: bool = true;
}
}
",
r"
use dep::test_mod::TestTrait as _;
fn main() {
dep::test_mod::TestStruct::CONST
}
",
"Import `dep::test_mod::TestTrait as _`",
);
check_assist_by_label(
auto_import,
r"
//- /main.rs crate:main deps:dep
@ -975,6 +1214,7 @@ fn main() {
dep::test_mod::TestStruct::CONST
}
",
"Import `dep::test_mod::TestTrait`",
);
}

File diff suppressed because it is too large

View File

@ -25,9 +25,7 @@ pub(crate) fn convert_comment_block(acc: &mut Assists, ctx: &AssistContext<'_>)
let comment = ctx.find_token_at_offset::<ast::Comment>()?;
// Only allow comments which are alone on their line
if let Some(prev) = comment.syntax().prev_token() {
if Whitespace::cast(prev).filter(|w| w.text().contains('\n')).is_none() {
return None;
}
Whitespace::cast(prev).filter(|w| w.text().contains('\n'))?;
}
match comment.kind().shape {
@ -78,7 +76,7 @@ fn line_to_block(acc: &mut Assists, comment: ast::Comment) -> Option<()> {
// Establish the target of our edit based on the comments we found
let target = TextRange::new(
comments[0].syntax().text_range().start(),
comments.last().unwrap().syntax().text_range().end(),
comments.last()?.syntax().text_range().end(),
);
acc.add(
@ -91,8 +89,12 @@ fn line_to_block(acc: &mut Assists, comment: ast::Comment) -> Option<()> {
// contents of each line comment when they're put into the block comment.
let indentation = IndentLevel::from_token(comment.syntax());
let block_comment_body =
comments.into_iter().map(|c| line_comment_text(indentation, c)).join("\n");
let block_comment_body = comments
.into_iter()
.map(|c| line_comment_text(indentation, c))
.collect::<Vec<String>>()
.into_iter()
.join("\n");
let block_prefix =
CommentKind { shape: CommentShape::Block, ..comment.kind() }.prefix();
@ -160,7 +162,8 @@ pub(crate) fn relevant_line_comments(comment: &ast::Comment) -> Vec<Comment> {
//
// But since such comments aren't idiomatic we're okay with this.
pub(crate) fn line_comment_text(indentation: IndentLevel, comm: ast::Comment) -> String {
let contents_without_prefix = comm.text().strip_prefix(comm.prefix()).unwrap();
let text = comm.text();
let contents_without_prefix = text.strip_prefix(comm.prefix()).unwrap_or(text);
let contents = contents_without_prefix.strip_prefix(' ').unwrap_or(contents_without_prefix);
// Don't add the indentation if the line is empty

View File

@ -51,22 +51,7 @@ pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext<'
// Check if there is an IfLet that we can handle.
let (if_let_pat, cond_expr) = if is_pattern_cond(cond.clone()) {
let let_ = single_let(cond)?;
match let_.pat() {
Some(ast::Pat::TupleStructPat(pat)) if pat.fields().count() == 1 => {
let path = pat.path()?;
if path.qualifier().is_some() {
return None;
}
let bound_ident = pat.fields().next()?;
if !ast::IdentPat::can_cast(bound_ident.syntax().kind()) {
return None;
}
(Some((path, bound_ident)), let_.expr()?)
}
_ => return None, // Unsupported IfLet.
}
(Some(let_.pat()?), let_.expr()?)
} else {
(None, cond)
};
@ -136,11 +121,10 @@ pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext<'
};
new_expr.syntax().clone_for_update()
}
Some((path, bound_ident)) => {
Some(pat) => {
// If-let.
let pat = make::tuple_struct_pat(path, once(bound_ident));
let let_else_stmt = make::let_else_stmt(
pat.into(),
pat,
None,
cond_expr,
ast::make::tail_only_block_expr(early_expression),
@ -442,6 +426,60 @@ fn main() {
);
}
#[test]
fn convert_arbitrary_if_let_patterns() {
check_assist(
convert_to_guarded_return,
r#"
fn main() {
$0if let None = Some(92) {
foo();
}
}
"#,
r#"
fn main() {
let None = Some(92) else { return };
foo();
}
"#,
);
check_assist(
convert_to_guarded_return,
r#"
fn main() {
$0if let [1, x] = [1, 92] {
foo(x);
}
}
"#,
r#"
fn main() {
let [1, x] = [1, 92] else { return };
foo(x);
}
"#,
);
check_assist(
convert_to_guarded_return,
r#"
fn main() {
$0if let (Some(x), None) = (Some(92), None) {
foo(x);
}
}
"#,
r#"
fn main() {
let (Some(x), None) = (Some(92), None) else { return };
foo(x);
}
"#,
);
}
#[test]
fn ignore_already_converted_if() {
check_assist_not_applicable(

View File

@ -0,0 +1,886 @@
use either::Either;
use hir::ModuleDef;
use ide_db::{
assists::{AssistId, AssistKind},
defs::Definition,
helpers::mod_path_to_ast,
imports::insert_use::{insert_use, ImportScope},
search::{FileReference, UsageSearchResult},
source_change::SourceChangeBuilder,
syntax_helpers::node_ext::{for_each_tail_expr, walk_expr},
FxHashSet,
};
use syntax::{
ast::{self, edit::IndentLevel, edit_in_place::Indent, make, HasName},
match_ast, ted, AstNode, SyntaxNode,
};
use crate::assist_context::{AssistContext, Assists};
// Assist: convert_tuple_return_type_to_struct
//
// This converts the return type of a function from a tuple type
// into a tuple struct and updates the body accordingly.
//
// ```
// fn bar() {
// let (a, b, c) = foo();
// }
//
// fn foo() -> ($0u32, u32, u32) {
// (1, 2, 3)
// }
// ```
// ->
// ```
// fn bar() {
// let FooResult(a, b, c) = foo();
// }
//
// struct FooResult(u32, u32, u32);
//
// fn foo() -> FooResult {
// FooResult(1, 2, 3)
// }
// ```
pub(crate) fn convert_tuple_return_type_to_struct(
acc: &mut Assists,
ctx: &AssistContext<'_>,
) -> Option<()> {
let ret_type = ctx.find_node_at_offset::<ast::RetType>()?;
let type_ref = ret_type.ty()?;
let ast::Type::TupleType(tuple_ty) = &type_ref else { return None };
if tuple_ty.fields().any(|field| matches!(field, ast::Type::ImplTraitType(_))) {
return None;
}
let fn_ = ret_type.syntax().parent().and_then(ast::Fn::cast)?;
let fn_def = ctx.sema.to_def(&fn_)?;
let fn_name = fn_.name()?;
let target_module = ctx.sema.scope(fn_.syntax())?.module().nearest_non_block_module(ctx.db());
let target = type_ref.syntax().text_range();
acc.add(
AssistId("convert_tuple_return_type_to_struct", AssistKind::RefactorRewrite),
"Convert tuple return type to tuple struct",
target,
move |edit| {
let ret_type = edit.make_mut(ret_type);
let fn_ = edit.make_mut(fn_);
let usages = Definition::Function(fn_def).usages(&ctx.sema).all();
let struct_name = format!("{}Result", stdx::to_camel_case(&fn_name.to_string()));
let parent = fn_.syntax().ancestors().find_map(<Either<ast::Impl, ast::Trait>>::cast);
add_tuple_struct_def(
edit,
ctx,
&usages,
parent.as_ref().map(|it| it.syntax()).unwrap_or(fn_.syntax()),
tuple_ty,
&struct_name,
&target_module,
);
ted::replace(
ret_type.syntax(),
make::ret_type(make::ty(&struct_name)).syntax().clone_for_update(),
);
if let Some(fn_body) = fn_.body() {
replace_body_return_values(ast::Expr::BlockExpr(fn_body), &struct_name);
}
replace_usages(edit, ctx, &usages, &struct_name, &target_module);
},
)
}
/// Replaces tuple usages with the corresponding tuple struct pattern.
fn replace_usages(
edit: &mut SourceChangeBuilder,
ctx: &AssistContext<'_>,
usages: &UsageSearchResult,
struct_name: &str,
target_module: &hir::Module,
) {
for (file_id, references) in usages.iter() {
edit.edit_file(*file_id);
let refs_with_imports =
augment_references_with_imports(edit, ctx, references, struct_name, target_module);
refs_with_imports.into_iter().rev().for_each(|(name, import_data)| {
if let Some(fn_) = name.syntax().parent().and_then(ast::Fn::cast) {
cov_mark::hit!(replace_trait_impl_fns);
if let Some(ret_type) = fn_.ret_type() {
ted::replace(
ret_type.syntax(),
make::ret_type(make::ty(struct_name)).syntax().clone_for_update(),
);
}
if let Some(fn_body) = fn_.body() {
replace_body_return_values(ast::Expr::BlockExpr(fn_body), struct_name);
}
} else {
// replace tuple patterns
let pats = name
.syntax()
.ancestors()
.find(|node| {
ast::CallExpr::can_cast(node.kind())
|| ast::MethodCallExpr::can_cast(node.kind())
})
.and_then(|node| node.parent())
.and_then(node_to_pats)
.unwrap_or(Vec::new());
let tuple_pats = pats.iter().filter_map(|pat| match pat {
ast::Pat::TuplePat(tuple_pat) => Some(tuple_pat),
_ => None,
});
for tuple_pat in tuple_pats {
ted::replace(
tuple_pat.syntax(),
make::tuple_struct_pat(
make::path_from_text(struct_name),
tuple_pat.fields(),
)
.clone_for_update()
.syntax(),
);
}
}
// add imports across modules where needed
if let Some((import_scope, path)) = import_data {
insert_use(&import_scope, path, &ctx.config.insert_use);
}
})
}
}
fn node_to_pats(node: SyntaxNode) -> Option<Vec<ast::Pat>> {
match_ast! {
match node {
ast::LetStmt(it) => it.pat().map(|pat| vec![pat]),
ast::LetExpr(it) => it.pat().map(|pat| vec![pat]),
ast::MatchExpr(it) => it.match_arm_list().map(|arm_list| {
arm_list.arms().filter_map(|arm| arm.pat()).collect()
}),
_ => None,
}
}
}
fn augment_references_with_imports(
edit: &mut SourceChangeBuilder,
ctx: &AssistContext<'_>,
references: &[FileReference],
struct_name: &str,
target_module: &hir::Module,
) -> Vec<(ast::NameLike, Option<(ImportScope, ast::Path)>)> {
let mut visited_modules = FxHashSet::default();
references
.iter()
.filter_map(|FileReference { name, .. }| {
ctx.sema.scope(name.syntax()).map(|scope| (name, scope.module()))
})
.map(|(name, ref_module)| {
let new_name = edit.make_mut(name.clone());
// if the referenced module is not the same as the target one and has not been seen before, add an import
let import_data = if ref_module.nearest_non_block_module(ctx.db()) != *target_module
&& !visited_modules.contains(&ref_module)
{
visited_modules.insert(ref_module);
let import_scope =
ImportScope::find_insert_use_container(new_name.syntax(), &ctx.sema);
let path = ref_module
.find_use_path_prefixed(
ctx.sema.db,
ModuleDef::Module(*target_module),
ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std,
)
.map(|mod_path| {
make::path_concat(
mod_path_to_ast(&mod_path),
make::path_from_text(struct_name),
)
});
import_scope.zip(path)
} else {
None
};
(new_name, import_data)
})
.collect()
}
// Adds the definition of the tuple struct before the parent function.
fn add_tuple_struct_def(
edit: &mut SourceChangeBuilder,
ctx: &AssistContext<'_>,
usages: &UsageSearchResult,
parent: &SyntaxNode,
tuple_ty: &ast::TupleType,
struct_name: &str,
target_module: &hir::Module,
) {
let make_struct_pub = usages
.iter()
.flat_map(|(_, refs)| refs)
.filter_map(|FileReference { name, .. }| {
ctx.sema.scope(name.syntax()).map(|scope| scope.module())
})
.any(|module| module.nearest_non_block_module(ctx.db()) != *target_module);
let visibility = if make_struct_pub { Some(make::visibility_pub()) } else { None };
let field_list = ast::FieldList::TupleFieldList(make::tuple_field_list(
tuple_ty.fields().map(|ty| make::tuple_field(visibility.clone(), ty)),
));
let struct_name = make::name(struct_name);
let struct_def = make::struct_(visibility, struct_name, None, field_list).clone_for_update();
let indent = IndentLevel::from_node(parent);
struct_def.reindent_to(indent);
edit.insert(parent.text_range().start(), format!("{struct_def}\n\n{indent}"));
}
/// Replaces each returned tuple in `body` with the constructor of the tuple struct named `struct_name`.
fn replace_body_return_values(body: ast::Expr, struct_name: &str) {
let mut exprs_to_wrap = Vec::new();
let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_wrap, e);
walk_expr(&body, &mut |expr| {
if let ast::Expr::ReturnExpr(ret_expr) = expr {
if let Some(ret_expr_arg) = &ret_expr.expr() {
for_each_tail_expr(ret_expr_arg, tail_cb);
}
}
});
for_each_tail_expr(&body, tail_cb);
for ret_expr in exprs_to_wrap {
if let ast::Expr::TupleExpr(tuple_expr) = &ret_expr {
let struct_constructor = make::expr_call(
make::expr_path(make::ext::ident_path(struct_name)),
make::arg_list(tuple_expr.fields()),
)
.clone_for_update();
ted::replace(ret_expr.syntax(), struct_constructor.syntax());
}
}
}
fn tail_cb_impl(acc: &mut Vec<ast::Expr>, e: &ast::Expr) {
match e {
ast::Expr::BreakExpr(break_expr) => {
if let Some(break_expr_arg) = break_expr.expr() {
for_each_tail_expr(&break_expr_arg, &mut |e| tail_cb_impl(acc, e))
}
}
ast::Expr::ReturnExpr(_) => {
// all return expressions have already been handled by the walk loop
}
e => acc.push(e.clone()),
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::tests::{check_assist, check_assist_not_applicable};
#[test]
fn function_basic() {
check_assist(
convert_tuple_return_type_to_struct,
r#"
fn bar() -> $0(&'static str, bool) {
("bar", true)
}
"#,
r#"
struct BarResult(&'static str, bool);
fn bar() -> BarResult {
BarResult("bar", true)
}
"#,
)
}
#[test]
fn struct_and_usages_indented() {
check_assist(
convert_tuple_return_type_to_struct,
r#"
mod foo {
pub(crate) fn foo() {
let (bar, baz) = bar();
println!("{bar} {baz}");
}
pub(crate) fn bar() -> $0(usize, bool) {
(42, true)
}
}
"#,
r#"
mod foo {
pub(crate) fn foo() {
let BarResult(bar, baz) = bar();
println!("{bar} {baz}");
}
struct BarResult(usize, bool);
pub(crate) fn bar() -> BarResult {
BarResult(42, true)
}
}
"#,
)
}
#[test]
fn field_usage() {
check_assist(
convert_tuple_return_type_to_struct,
r#"
fn bar() -> $0(usize, bool) {
(42, true)
}
fn main() {
let bar_result = bar();
println!("{} {}", bar_result.1, bar().0);
}
"#,
r#"
struct BarResult(usize, bool);
fn bar() -> BarResult {
BarResult(42, true)
}
fn main() {
let bar_result = bar();
println!("{} {}", bar_result.1, bar().0);
}
"#,
)
}
#[test]
fn method_usage() {
check_assist(
convert_tuple_return_type_to_struct,
r#"
struct Foo;
impl Foo {
fn foo(&self, x: usize) -> $0(usize, usize) {
(x, x)
}
}
fn main() {
let foo = Foo {};
let (x, y) = foo.foo(2);
}
"#,
r#"
struct Foo;
struct FooResult(usize, usize);
impl Foo {
fn foo(&self, x: usize) -> FooResult {
FooResult(x, x)
}
}
fn main() {
let foo = Foo {};
let FooResult(x, y) = foo.foo(2);
}
"#,
)
}
#[test]
fn method_usage_within_same_impl() {
check_assist(
convert_tuple_return_type_to_struct,
r#"
struct Foo;
impl Foo {
fn new() -> $0(usize, usize) {
(0, 0)
}
fn foo() {
let (mut foo1, mut foo2) = Self::new();
}
}
"#,
r#"
struct Foo;
struct NewResult(usize, usize);
impl Foo {
fn new() -> NewResult {
NewResult(0, 0)
}
fn foo() {
let NewResult(mut foo1, mut foo2) = Self::new();
}
}
"#,
)
}
#[test]
fn multiple_usages() {
check_assist(
convert_tuple_return_type_to_struct,
r#"
fn bar() -> $0(usize, usize) {
(42, 24)
}
fn main() {
let bar_result = bar();
let (foo, b) = bar();
let (b, baz) = bar();
if foo == b && b == baz {
println!("{} {}", bar_result.1, bar().0);
}
}
"#,
r#"
struct BarResult(usize, usize);
fn bar() -> BarResult {
BarResult(42, 24)
}
fn main() {
let bar_result = bar();
let BarResult(foo, b) = bar();
let BarResult(b, baz) = bar();
if foo == b && b == baz {
println!("{} {}", bar_result.1, bar().0);
}
}
"#,
)
}
#[test]
fn usage_match_tuple_pat() {
check_assist(
convert_tuple_return_type_to_struct,
r#"
fn bar() -> $0(usize, bool) {
(42, true)
}
fn main() {
match bar() {
x if x.0 == 0 => println!("0"),
(x, false) => println!("{x}"),
(42, true) => println!("bar"),
_ => println!("foo"),
}
}
"#,
r#"
struct BarResult(usize, bool);
fn bar() -> BarResult {
BarResult(42, true)
}
fn main() {
match bar() {
x if x.0 == 0 => println!("0"),
BarResult(x, false) => println!("{x}"),
BarResult(42, true) => println!("bar"),
_ => println!("foo"),
}
}
"#,
)
}
#[test]
fn usage_if_let_tuple_pat() {
check_assist(
convert_tuple_return_type_to_struct,
r#"
fn bar() -> $0(usize, bool) {
(42, true)
}
fn main() {
if let (42, true) = bar() {
println!("bar")
}
}
"#,
r#"
struct BarResult(usize, bool);
fn bar() -> BarResult {
BarResult(42, true)
}
fn main() {
if let BarResult(42, true) = bar() {
println!("bar")
}
}
"#,
)
}
#[test]
fn function_nested_outer() {
check_assist(
convert_tuple_return_type_to_struct,
r#"
fn bar() -> $0(usize, bool) {
fn foo() -> (usize, bool) {
(42, true)
}
foo()
}
"#,
r#"
struct BarResult(usize, bool);
fn bar() -> BarResult {
fn foo() -> (usize, bool) {
(42, true)
}
foo()
}
"#,
)
}
#[test]
fn function_nested_inner() {
check_assist(
convert_tuple_return_type_to_struct,
r#"
fn bar() -> (usize, bool) {
fn foo() -> $0(usize, bool) {
(42, true)
}
foo()
}
"#,
r#"
fn bar() -> (usize, bool) {
struct FooResult(usize, bool);
fn foo() -> FooResult {
FooResult(42, true)
}
foo()
}
"#,
)
}
#[test]
fn trait_impl_and_usage() {
cov_mark::check!(replace_trait_impl_fns);
check_assist(
convert_tuple_return_type_to_struct,
r#"
struct Struct;
trait Foo {
fn foo(&self) -> $0(usize, bool);
}
impl Foo for Struct {
fn foo(&self) -> (usize, bool) {
(0, true)
}
}
fn main() {
let s = Struct {};
let (foo, bar) = s.foo();
let (foo, bar) = Struct::foo(&s);
println!("{foo} {bar}");
}
"#,
r#"
struct Struct;
struct FooResult(usize, bool);
trait Foo {
fn foo(&self) -> FooResult;
}
impl Foo for Struct {
fn foo(&self) -> FooResult {
FooResult(0, true)
}
}
fn main() {
let s = Struct {};
let FooResult(foo, bar) = s.foo();
let FooResult(foo, bar) = Struct::foo(&s);
println!("{foo} {bar}");
}
"#,
)
}
#[test]
fn body_wraps_nested() {
check_assist(
convert_tuple_return_type_to_struct,
r#"
fn foo() -> $0(u8, usize, u32) {
if true {
match 3 {
0 => (1, 2, 3),
_ => return (4, 5, 6),
}
} else {
(2, 1, 3)
}
}
"#,
r#"
struct FooResult(u8, usize, u32);
fn foo() -> FooResult {
if true {
match 3 {
0 => FooResult(1, 2, 3),
_ => return FooResult(4, 5, 6),
}
} else {
FooResult(2, 1, 3)
}
}
"#,
)
}
#[test]
fn body_wraps_break_and_return() {
check_assist(
convert_tuple_return_type_to_struct,
r#"
fn foo(mut i: isize) -> (usize, $0u32, u8) {
if i < 0 {
return (0, 0, 0);
}
loop {
if i == 2 {
println!("foo");
break (1, 2, 3);
}
i += 1;
}
}
"#,
r#"
struct FooResult(usize, u32, u8);
fn foo(mut i: isize) -> FooResult {
if i < 0 {
return FooResult(0, 0, 0);
}
loop {
if i == 2 {
println!("foo");
break FooResult(1, 2, 3);
}
i += 1;
}
}
"#,
)
}
#[test]
fn body_doesnt_wrap_identifier() {
check_assist(
convert_tuple_return_type_to_struct,
r#"
fn foo() -> $0(u8, usize, u32) {
let tuple = (1, 2, 3);
tuple
}
"#,
r#"
struct FooResult(u8, usize, u32);
fn foo() -> FooResult {
let tuple = (1, 2, 3);
tuple
}
"#,
)
}
#[test]
fn body_doesnt_wrap_other_exprs() {
check_assist(
convert_tuple_return_type_to_struct,
r#"
fn bar(num: usize) -> (u8, usize, u32) {
(1, num, 3)
}
fn foo() -> $0(u8, usize, u32) {
bar(2)
}
"#,
r#"
fn bar(num: usize) -> (u8, usize, u32) {
(1, num, 3)
}
struct FooResult(u8, usize, u32);
fn foo() -> FooResult {
bar(2)
}
"#,
)
}
#[test]
fn cross_file_and_module() {
check_assist(
convert_tuple_return_type_to_struct,
r#"
//- /main.rs
mod foo;
fn main() {
use foo::bar;
let (bar, baz) = bar::bar();
println!("{}", bar == baz);
}
//- /foo.rs
pub mod bar {
pub fn bar() -> $0(usize, usize) {
(1, 3)
}
}
"#,
r#"
//- /main.rs
use crate::foo::bar::BarResult;
mod foo;
fn main() {
use foo::bar;
let BarResult(bar, baz) = bar::bar();
println!("{}", bar == baz);
}
//- /foo.rs
pub mod bar {
pub struct BarResult(pub usize, pub usize);
pub fn bar() -> BarResult {
BarResult(1, 3)
}
}
"#,
)
}
#[test]
fn does_not_replace_nested_usage() {
check_assist(
convert_tuple_return_type_to_struct,
r#"
fn bar() -> $0(usize, bool) {
(42, true)
}
fn main() {
let ((bar1, bar2), foo) = (bar(), 3);
println!("{bar1} {bar2} {foo}");
}
"#,
r#"
struct BarResult(usize, bool);
fn bar() -> BarResult {
BarResult(42, true)
}
fn main() {
let ((bar1, bar2), foo) = (bar(), 3);
println!("{bar1} {bar2} {foo}");
}
"#,
)
}
#[test]
fn function_with_non_tuple_return_type() {
check_assist_not_applicable(
convert_tuple_return_type_to_struct,
r#"
fn bar() -> $0usize {
0
}
"#,
)
}
#[test]
fn function_with_impl_type() {
check_assist_not_applicable(
convert_tuple_return_type_to_struct,
r#"
fn bar() -> $0(impl Clone, usize) {
("bar", 0)
}
"#,
)
}
}

View File

@ -33,9 +33,7 @@ pub(crate) fn desugar_doc_comment(acc: &mut Assists, ctx: &AssistContext<'_>) ->
// Only allow comments which are alone on their line
if let Some(prev) = comment.syntax().prev_token() {
if Whitespace::cast(prev).filter(|w| w.text().contains('\n')).is_none() {
return None;
}
Whitespace::cast(prev).filter(|w| w.text().contains('\n'))?;
}
let indentation = IndentLevel::from_token(comment.syntax()).to_string();
@ -50,7 +48,7 @@ pub(crate) fn desugar_doc_comment(acc: &mut Assists, ctx: &AssistContext<'_>) ->
(
TextRange::new(
comments[0].syntax().text_range().start(),
comments.last().unwrap().syntax().text_range().end(),
comments.last()?.syntax().text_range().end(),
),
Either::Right(comments),
)
@ -71,9 +69,11 @@ pub(crate) fn desugar_doc_comment(acc: &mut Assists, ctx: &AssistContext<'_>) ->
.map(|l| l.strip_prefix(&indentation).unwrap_or(l))
.join("\n")
}
Either::Right(comments) => {
comments.into_iter().map(|c| line_comment_text(IndentLevel(0), c)).join("\n")
}
Either::Right(comments) => comments
.into_iter()
.map(|cm| line_comment_text(IndentLevel(0), cm))
.collect::<Vec<_>>()
.join("\n"),
};
let hashes = "#".repeat(required_hashes(&text));

View File

@ -29,22 +29,31 @@ use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
// }
// ```
pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
if ctx.has_empty_selection() {
return None;
}
let node = match ctx.covering_element() {
NodeOrToken::Node(it) => it,
NodeOrToken::Token(it) if it.kind() == COMMENT => {
cov_mark::hit!(extract_var_in_comment_is_not_applicable);
let node = if ctx.has_empty_selection() {
if let Some(expr_stmt) = ctx.find_node_at_offset::<ast::ExprStmt>() {
expr_stmt.syntax().clone()
} else if let Some(expr) = ctx.find_node_at_offset::<ast::Expr>() {
expr.syntax().ancestors().find_map(valid_target_expr)?.syntax().clone()
} else {
return None;
}
NodeOrToken::Token(it) => it.parent()?,
} else {
match ctx.covering_element() {
NodeOrToken::Node(it) => it,
NodeOrToken::Token(it) if it.kind() == COMMENT => {
cov_mark::hit!(extract_var_in_comment_is_not_applicable);
return None;
}
NodeOrToken::Token(it) => it.parent()?,
}
};
let node = node.ancestors().take_while(|anc| anc.text_range() == node.text_range()).last()?;
let range = node.text_range();
let to_extract = node
.descendants()
.take_while(|it| ctx.selection_trimmed().contains_range(it.text_range()))
.take_while(|it| range.contains_range(it.text_range()))
.find_map(valid_target_expr)?;
let ty = ctx.sema.type_of_expr(&to_extract).map(TypeInfo::adjusted);
@ -235,6 +244,138 @@ mod tests {
use super::*;
#[test]
fn test_extract_var_simple_without_select() {
check_assist(
extract_variable,
r#"
fn main() -> i32 {
if true {
1
} else {
2
}$0
}
"#,
r#"
fn main() -> i32 {
let $0var_name = if true {
1
} else {
2
};
var_name
}
"#,
);
check_assist(
extract_variable,
r#"
fn foo() -> i32 { 1 }
fn main() {
foo();$0
}
"#,
r#"
fn foo() -> i32 { 1 }
fn main() {
let $0foo = foo();
}
"#,
);
check_assist(
extract_variable,
r#"
fn main() {
let a = Some(2);
a.is_some();$0
}
"#,
r#"
fn main() {
let a = Some(2);
let $0is_some = a.is_some();
}
"#,
);
check_assist(
extract_variable,
r#"
fn main() {
"hello"$0;
}
"#,
r#"
fn main() {
let $0var_name = "hello";
}
"#,
);
check_assist(
extract_variable,
r#"
fn main() {
1 + 2$0;
}
"#,
r#"
fn main() {
let $0var_name = 1 + 2;
}
"#,
);
check_assist(
extract_variable,
r#"
fn main() {
match () {
() if true => 1,
_ => 2,
};$0
}
"#,
r#"
fn main() {
let $0var_name = match () {
() if true => 1,
_ => 2,
};
}
"#,
);
}
#[test]
fn test_extract_var_unit_expr_without_select_not_applicable() {
check_assist_not_applicable(
extract_variable,
r#"
fn foo() {}
fn main() {
foo()$0;
}
"#,
);
check_assist_not_applicable(
extract_variable,
r#"
fn foo() {
let mut i = 3;
if i >= 0 {
i += 1;
} else {
i -= 1;
}$0
}"#,
);
}
#[test]
fn test_extract_var_simple() {
check_assist(

View File

@ -404,7 +404,11 @@ impl FunctionBuilder {
leading_ws,
ret_type: fn_def.ret_type(),
// PANIC: we guarantee we always create a function body with a tail expr
tail_expr: fn_def.body().unwrap().tail_expr().unwrap(),
tail_expr: fn_def
.body()
.expect("generated function should have a body")
.tail_expr()
.expect("function body should have a tail expression"),
should_focus_return_type: self.should_focus_return_type,
fn_def,
trailing_ws,
@ -683,7 +687,7 @@ where
{
    // This function should only be called with `Impl`, `Trait`, or `Function`, for which it's
    // infallible to get the source AST.
let node = ctx.sema.source(def).unwrap().value;
let node = ctx.sema.source(def).expect("definition's source couldn't be found").value;
let generic_params = node.generic_param_list().into_iter().flat_map(|it| it.generic_params());
let where_clauses = node.where_clause().into_iter().flat_map(|it| it.predicates());
(generic_params, where_clauses)

View File

@ -0,0 +1,202 @@
use ide_db::famous_defs::FamousDefs;
use syntax::{
ast::{self, make},
ted, AstNode,
};
use crate::{AssistContext, AssistId, AssistKind, Assists};
// FIXME: Generating a proper `index_mut` method body that refers to the `index` method body may be impossible due to the unpredictable case [#15581].
// For now, the `index_mut` method body is left the same as the `index` method body; users can modify it manually to meet their needs.
// Assist: generate_mut_trait_impl
//
// Adds an `IndexMut` impl from the `Index` trait.
//
// ```
// # //- minicore: index
// pub enum Axis { X = 0, Y = 1, Z = 2 }
//
// impl<T> core::ops::Index$0<Axis> for [T; 3] {
// type Output = T;
//
// fn index(&self, index: Axis) -> &Self::Output {
// &self[index as usize]
// }
// }
// ```
// ->
// ```
// pub enum Axis { X = 0, Y = 1, Z = 2 }
//
// $0impl<T> core::ops::IndexMut<Axis> for [T; 3] {
// fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
// &self[index as usize]
// }
// }
//
// impl<T> core::ops::Index<Axis> for [T; 3] {
// type Output = T;
//
// fn index(&self, index: Axis) -> &Self::Output {
// &self[index as usize]
// }
// }
// ```
pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let impl_def = ctx.find_node_at_offset::<ast::Impl>()?.clone_for_update();
let trait_ = impl_def.trait_()?;
if let ast::Type::PathType(trait_path) = trait_.clone() {
let trait_type = ctx.sema.resolve_trait(&trait_path.path()?)?;
let scope = ctx.sema.scope(trait_path.syntax())?;
if trait_type != FamousDefs(&ctx.sema, scope.krate()).core_convert_Index()? {
return None;
}
}
// Index -> IndexMut
let index_trait = impl_def
.syntax()
.descendants()
.filter_map(ast::NameRef::cast)
.find(|it| it.text() == "Index")?;
ted::replace(
index_trait.syntax(),
make::path_segment(make::name_ref("IndexMut")).clone_for_update().syntax(),
);
// index -> index_mut
let trait_method_name = impl_def
.syntax()
.descendants()
.filter_map(ast::Name::cast)
.find(|it| it.text() == "index")?;
ted::replace(trait_method_name.syntax(), make::name("index_mut").clone_for_update().syntax());
let type_alias = impl_def.syntax().descendants().find_map(ast::TypeAlias::cast)?;
ted::remove(type_alias.syntax());
// &self -> &mut self
let mut_self_param = make::mut_self_param();
let self_param: ast::SelfParam =
impl_def.syntax().descendants().find_map(ast::SelfParam::cast)?;
ted::replace(self_param.syntax(), mut_self_param.clone_for_update().syntax());
// &Self::Output -> &mut Self::Output
let ret_type = impl_def.syntax().descendants().find_map(ast::RetType::cast)?;
ted::replace(
ret_type.syntax(),
make::ret_type(make::ty("&mut Self::Output")).clone_for_update().syntax(),
);
let fn_ = impl_def.assoc_item_list()?.assoc_items().find_map(|it| match it {
ast::AssocItem::Fn(f) => Some(f),
_ => None,
})?;
let assoc_list = make::assoc_item_list().clone_for_update();
assoc_list.add_item(syntax::ast::AssocItem::Fn(fn_));
ted::replace(impl_def.assoc_item_list()?.syntax(), assoc_list.syntax());
let target = impl_def.syntax().text_range();
acc.add(
AssistId("generate_mut_trait_impl", AssistKind::Generate),
"Generate `IndexMut` impl from this `Index` trait",
target,
|edit| {
edit.insert(target.start(), format!("$0{}\n\n", impl_def.to_string()));
},
)
}
#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_not_applicable};
use super::*;
#[test]
fn test_generate_mut_trait_impl() {
check_assist(
generate_mut_trait_impl,
r#"
//- minicore: index
pub enum Axis { X = 0, Y = 1, Z = 2 }
impl<T> core::ops::Index$0<Axis> for [T; 3] {
type Output = T;
fn index(&self, index: Axis) -> &Self::Output {
&self[index as usize]
}
}
"#,
r#"
pub enum Axis { X = 0, Y = 1, Z = 2 }
$0impl<T> core::ops::IndexMut<Axis> for [T; 3] {
fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
&self[index as usize]
}
}
impl<T> core::ops::Index<Axis> for [T; 3] {
type Output = T;
fn index(&self, index: Axis) -> &Self::Output {
&self[index as usize]
}
}
"#,
);
check_assist(
generate_mut_trait_impl,
r#"
//- minicore: index
pub enum Axis { X = 0, Y = 1, Z = 2 }
impl<T> core::ops::Index$0<Axis> for [T; 3] where T: Copy {
type Output = T;
fn index(&self, index: Axis) -> &Self::Output {
let var_name = &self[index as usize];
var_name
}
}
"#,
r#"
pub enum Axis { X = 0, Y = 1, Z = 2 }
$0impl<T> core::ops::IndexMut<Axis> for [T; 3] where T: Copy {
fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
let var_name = &self[index as usize];
var_name
}
}
impl<T> core::ops::Index<Axis> for [T; 3] where T: Copy {
type Output = T;
fn index(&self, index: Axis) -> &Self::Output {
let var_name = &self[index as usize];
var_name
}
}
"#,
);
}
#[test]
fn test_generate_mut_trait_impl_not_applicable() {
check_assist_not_applicable(
generate_mut_trait_impl,
r#"
pub trait Index<Idx: ?Sized> {}
impl<T> Index$0<i32> for [T; 3] {}
"#,
);
}
}

View File

@ -224,7 +224,6 @@ pub(crate) fn inline_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
syntax.text_range(),
|builder| {
let replacement = inline(&ctx.sema, file_id, function, &fn_body, &params, &call_info);
builder.replace_ast(
match call_info.node {
ast::CallableExpr::Call(it) => ast::Expr::CallExpr(it),
@ -363,16 +362,22 @@ fn inline(
.collect();
if function.self_param(sema.db).is_some() {
let this = || make::name_ref("this").syntax().clone_for_update().first_token().unwrap();
let this = || {
make::name_ref("this")
.syntax()
.clone_for_update()
.first_token()
.expect("NameRef should have had a token.")
};
if let Some(self_local) = params[0].2.as_local(sema.db) {
usages_for_locals(self_local)
.filter_map(|FileReference { name, range, .. }| match name {
ast::NameLike::NameRef(_) => Some(body.syntax().covering_element(range)),
_ => None,
})
.for_each(|it| {
ted::replace(it, &this());
})
.for_each(|usage| {
ted::replace(usage, &this());
});
}
}
@ -470,7 +475,9 @@ fn inline(
}
} else if let Some(stmt_list) = body.stmt_list() {
ted::insert_all(
ted::Position::after(stmt_list.l_curly_token().unwrap()),
ted::Position::after(
stmt_list.l_curly_token().expect("L_CURLY for StatementList is missing."),
),
let_stmts.into_iter().map(|stmt| stmt.syntax().clone().into()).collect(),
);
}
@ -481,8 +488,12 @@ fn inline(
};
body.reindent_to(original_indentation);
let no_stmts = body.statements().next().is_none();
match body.tail_expr() {
Some(expr) if !is_async_fn && body.statements().next().is_none() => expr,
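// a bare closure as the tail expression is wrapped in parentheses so the inlined
// call, e.g. `(|| 2)()`, remains valid syntax (see the `inline_call_closure_body` test below)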
Some(expr) if matches!(expr, ast::Expr::ClosureExpr(_)) && no_stmts => {
make::expr_paren(expr).clone_for_update()
}
Some(expr) if !is_async_fn && no_stmts => expr,
_ => match node
.syntax()
.parent()
@ -1471,6 +1482,31 @@ fn main() {
}
});
}
"#,
);
}
#[test]
fn inline_call_closure_body() {
check_assist(
inline_call,
r#"
fn f() -> impl Fn() -> i32 {
|| 2
}
fn main() {
let _ = $0f()();
}
"#,
r#"
fn f() -> impl Fn() -> i32 {
|| 2
}
fn main() {
let _ = (|| 2)();
}
"#,
);
}

View File

@ -96,8 +96,7 @@ pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext<'_>)
);
let parent = matches!(
usage_parent,
ast::Expr::CallExpr(_)
| ast::Expr::TupleExpr(_)
ast::Expr::TupleExpr(_)
| ast::Expr::ArrayExpr(_)
| ast::Expr::ParenExpr(_)
| ast::Expr::ForExpr(_)
@ -949,6 +948,24 @@ fn f() {
let S$0 = S;
S;
}
"#,
);
}
#[test]
fn test_inline_closure() {
check_assist(
inline_local_variable,
r#"
fn main() {
let $0f = || 2;
let _ = f();
}
"#,
r#"
fn main() {
let _ = (|| 2)();
}
"#,
);
}

View File

@ -52,9 +52,13 @@ pub(crate) fn into_to_qualified_from(acc: &mut Assists, ctx: &AssistContext<'_>)
== FamousDefs(sema, scope.krate()).core_convert_Into()?
{
let type_call = sema.type_of_expr(&method_call.clone().into())?;
let type_call_disp =
type_call.adjusted().display_source_code(db, scope.module().into(), true).ok()?;
let adjusted_tc = type_call.adjusted();
if adjusted_tc.contains_unknown() {
return None;
}
let sc = adjusted_tc.display_source_code(db, scope.module().into(), true).ok()?;
acc.add(
AssistId("into_to_qualified_from", AssistKind::Generate),
"Convert `into` to fully qualified `from`",
@ -62,7 +66,11 @@ pub(crate) fn into_to_qualified_from(acc: &mut Assists, ctx: &AssistContext<'_>)
|edit| {
edit.replace(
method_call.syntax().text_range(),
format!("{}::from({})", type_call_disp, receiver),
if sc.chars().all(|c| c.is_alphanumeric() || c == ':') {
format!("{}::from({})", sc, receiver)
} else {
format!("<{}>::from({})", sc, receiver)
},
);
},
);
@ -199,6 +207,66 @@ mod C {
fn main() -> () {
let a: A = A;
let b: C::B = C::B::from(a);
}"#,
)
}
#[test]
fn preceding_type_qualifier() {
check_assist(
into_to_qualified_from,
r#"
//- minicore: from
impl From<(i32,i32)> for [i32;2] {
fn from(value: (i32,i32)) -> Self {
[value.0, value.1]
}
}
fn tuple_to_array() -> [i32; 2] {
(0,1).in$0to()
}"#,
r#"
impl From<(i32,i32)> for [i32;2] {
fn from(value: (i32,i32)) -> Self {
[value.0, value.1]
}
}
fn tuple_to_array() -> [i32; 2] {
<[i32; 2]>::from((0,1))
}"#,
)
}
#[test]
fn type_with_gens() {
check_assist(
into_to_qualified_from,
r#"
//- minicore: from
struct StructA<Gen>(Gen);
impl From<i32> for StructA<i32> {
fn from(value: i32) -> Self {
StructA(value + 1)
}
}
fn main() -> () {
let a: StructA<i32> = 3.in$0to();
}"#,
r#"
struct StructA<Gen>(Gen);
impl From<i32> for StructA<i32> {
fn from(value: i32) -> Self {
StructA(value + 1)
}
}
fn main() -> () {
let a: StructA<i32> = <StructA<i32>>::from(3);
}"#,
)
}

View File

@ -0,0 +1,172 @@
use syntax::ast::{self, AstNode};
use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
// Assist: replace_is_some_with_if_let_some
//
// Replace `if x.is_some()` with `if let Some(_tmp) = x` or `if x.is_ok()` with `if let Ok(_tmp) = x`.
//
// ```
// fn main() {
// let x = Some(1);
// if x.is_som$0e() {}
// }
// ```
// ->
// ```
// fn main() {
// let x = Some(1);
// if let Some(${0:x}) = x {}
// }
// ```
pub(crate) fn replace_is_method_with_if_let_method(
acc: &mut Assists,
ctx: &AssistContext<'_>,
) -> Option<()> {
let if_expr = ctx.find_node_at_offset::<ast::IfExpr>()?;
let cond = if_expr.condition()?;
let call_expr = match cond {
ast::Expr::MethodCallExpr(call) => call,
_ => return None,
};
let name_ref = call_expr.name_ref()?;
match name_ref.text().as_str() {
"is_some" | "is_ok" => {
let receiver = call_expr.receiver()?;
let var_name = if let ast::Expr::PathExpr(path_expr) = receiver.clone() {
path_expr.path()?.to_string()
} else {
suggest_name::for_variable(&receiver, &ctx.sema)
};
let target = call_expr.syntax().text_range();
let (assist_id, message, text) = if name_ref.text() == "is_some" {
("replace_is_some_with_if_let_some", "Replace `is_some` with `if let Some`", "Some")
} else {
("replace_is_ok_with_if_let_ok", "Replace `is_ok` with `if let Ok`", "Ok")
};
acc.add(AssistId(assist_id, AssistKind::RefactorRewrite), message, target, |edit| {
let var_name = format!("${{0:{}}}", var_name);
let replacement = format!("let {}({}) = {}", text, var_name, receiver);
edit.replace(target, replacement);
})
}
_ => return None,
}
}
#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_not_applicable};
use super::replace_is_method_with_if_let_method;
#[test]
fn replace_is_some_with_if_let_some_works() {
check_assist(
replace_is_method_with_if_let_method,
r#"
fn main() {
let x = Some(1);
if x.is_som$0e() {}
}
"#,
r#"
fn main() {
let x = Some(1);
if let Some(${0:x}) = x {}
}
"#,
);
check_assist(
replace_is_method_with_if_let_method,
r#"
fn test() -> Option<i32> {
Some(1)
}
fn main() {
if test().is_som$0e() {}
}
"#,
r#"
fn test() -> Option<i32> {
Some(1)
}
fn main() {
if let Some(${0:test}) = test() {}
}
"#,
);
}
#[test]
fn replace_is_some_with_if_let_some_not_applicable() {
check_assist_not_applicable(
replace_is_method_with_if_let_method,
r#"
fn main() {
let x = Some(1);
if x.is_non$0e() {}
}
"#,
);
}
#[test]
fn replace_is_ok_with_if_let_ok_works() {
check_assist(
replace_is_method_with_if_let_method,
r#"
fn main() {
let x = Ok(1);
if x.is_o$0k() {}
}
"#,
r#"
fn main() {
let x = Ok(1);
if let Ok(${0:x}) = x {}
}
"#,
);
check_assist(
replace_is_method_with_if_let_method,
r#"
fn test() -> Result<i32> {
Ok(1)
}
fn main() {
if test().is_o$0k() {}
}
"#,
r#"
fn test() -> Result<i32> {
Ok(1)
}
fn main() {
if let Ok(${0:test}) = test() {}
}
"#,
);
}
#[test]
fn replace_is_ok_with_if_let_ok_not_applicable() {
check_assist_not_applicable(
replace_is_method_with_if_let_method,
r#"
fn main() {
let x = Ok(1);
if x.is_e$0rr() {}
}
"#,
);
}
}

View File

@ -36,29 +36,25 @@ pub(crate) fn unmerge_use(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
let old_parent_range = use_.syntax().parent()?.text_range();
let new_parent = use_.syntax().parent()?;
// If possible, explain what is going to be done.
let label = match tree.path().and_then(|path| path.first_segment()) {
Some(name) => format!("Unmerge use of `{name}`"),
None => "Unmerge use".into(),
};
let target = tree.syntax().text_range();
acc.add(
AssistId("unmerge_use", AssistKind::RefactorRewrite),
"Unmerge use",
target,
|builder| {
let new_use = make::use_(
use_.visibility(),
make::use_tree(
path,
tree.use_tree_list(),
tree.rename(),
tree.star_token().is_some(),
),
)
.clone_for_update();
acc.add(AssistId("unmerge_use", AssistKind::RefactorRewrite), label, target, |builder| {
let new_use = make::use_(
use_.visibility(),
make::use_tree(path, tree.use_tree_list(), tree.rename(), tree.star_token().is_some()),
)
.clone_for_update();
tree.remove();
ted::insert(Position::after(use_.syntax()), new_use.syntax());
tree.remove();
ted::insert(Position::after(use_.syntax()), new_use.syntax());
builder.replace(old_parent_range, new_parent.to_string());
},
)
builder.replace(old_parent_range, new_parent.to_string());
})
}
fn resolve_full_path(tree: &ast::UseTree) -> Option<ast::Path> {

View File

@ -1,3 +1,4 @@
use ide_db::imports::insert_use::ImportScope;
use syntax::{
ast::{self, make, AstNode, HasArgList},
TextRange,
@ -17,6 +18,8 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
// ```
// ->
// ```
// use std::ops::Add;
//
// fn main() {
// 1.add(2);
// }
@ -38,7 +41,7 @@ pub(crate) fn unqualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>)
let first_arg = args_iter.next()?;
let second_arg = args_iter.next();
_ = path.qualifier()?;
let qualifier = path.qualifier()?;
let method_name = path.segment()?.name_ref()?;
let res = ctx.sema.resolve_path(&path)?;
@ -76,10 +79,51 @@ pub(crate) fn unqualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>)
edit.insert(close, ")");
}
edit.replace(replace_comma, format!(".{method_name}("));
add_import(qualifier, ctx, edit);
},
)
}
fn add_import(
qualifier: ast::Path,
ctx: &AssistContext<'_>,
edit: &mut ide_db::source_change::SourceChangeBuilder,
) {
if let Some(path_segment) = qualifier.segment() {
// for `<i32 as std::ops::Add>`
let path_type = path_segment.syntax().children().filter_map(ast::PathType::cast).last();
let import = match path_type {
Some(it) => {
if let Some(path) = it.path() {
path
} else {
return;
}
}
None => qualifier,
};
// in the case of `<_>`
if import.coloncolon_token().is_none() {
return;
}
let scope = ide_db::imports::insert_use::ImportScope::find_insert_use_container(
import.syntax(),
&ctx.sema,
);
if let Some(scope) = scope {
let scope = match scope {
ImportScope::File(it) => ImportScope::File(edit.make_mut(it)),
ImportScope::Module(it) => ImportScope::Module(edit.make_mut(it)),
ImportScope::Block(it) => ImportScope::Block(edit.make_mut(it)),
};
ide_db::imports::insert_use::insert_use(&scope, import, &ctx.config.insert_use);
}
}
}
fn needs_parens_as_receiver(expr: &ast::Expr) -> bool {
// Make `(expr).dummy()`
let dummy_call = make::expr_method_call(
@ -127,6 +171,8 @@ fn f() { S.f(S); }"#,
//- minicore: add
fn f() { <u32 as core::ops::Add>::$0add(2, 2); }"#,
r#"
use core::ops::Add;
fn f() { 2.add(2); }"#,
);
@ -136,6 +182,8 @@ fn f() { 2.add(2); }"#,
//- minicore: add
fn f() { core::ops::Add::$0add(2, 2); }"#,
r#"
use core::ops::Add;
fn f() { 2.add(2); }"#,
);
@ -179,6 +227,8 @@ impl core::ops::Deref for S {
}
fn f() { core::ops::Deref::$0deref(&S); }"#,
r#"
use core::ops::Deref;
struct S;
impl core::ops::Deref for S {
type Target = S;

View File

@ -123,10 +123,8 @@ fn tail_cb_impl(acc: &mut Vec<ast::Expr>, e: &ast::Expr) {
for_each_tail_expr(&break_expr_arg, &mut |e| tail_cb_impl(acc, e))
}
}
Expr::ReturnExpr(ret_expr) => {
if let Some(ret_expr_arg) = &ret_expr.expr() {
for_each_tail_expr(ret_expr_arg, &mut |e| tail_cb_impl(acc, e));
}
Expr::ReturnExpr(_) => {
// all return expressions have already been handled by the walk loop
}
e => acc.push(e.clone()),
}
@ -800,6 +798,24 @@ fn foo() -> i32 {
);
}
#[test]
fn wrap_return_in_tail_position() {
check_assist(
unwrap_result_return_type,
r#"
//- minicore: result
fn foo(num: i32) -> $0Result<i32, String> {
return Ok(num)
}
"#,
r#"
fn foo(num: i32) -> i32 {
return num
}
"#,
);
}
#[test]
fn unwrap_result_return_type_simple_with_closure() {
check_assist(

View File

@ -98,10 +98,8 @@ fn tail_cb_impl(acc: &mut Vec<ast::Expr>, e: &ast::Expr) {
for_each_tail_expr(&break_expr_arg, &mut |e| tail_cb_impl(acc, e))
}
}
Expr::ReturnExpr(ret_expr) => {
if let Some(ret_expr_arg) = &ret_expr.expr() {
for_each_tail_expr(ret_expr_arg, &mut |e| tail_cb_impl(acc, e));
}
Expr::ReturnExpr(_) => {
// all return expressions have already been handled by the walk loop
}
e => acc.push(e.clone()),
}
@ -732,6 +730,24 @@ fn foo() -> Result<i32, ${0:_}> {
);
}
#[test]
fn wrap_return_in_tail_position() {
check_assist(
wrap_return_type_in_result,
r#"
//- minicore: result
fn foo(num: i32) -> $0i32 {
return num
}
"#,
r#"
fn foo(num: i32) -> Result<i32, ${0:_}> {
return Ok(num)
}
"#,
);
}
#[test]
fn wrap_return_type_in_result_simple_with_closure() {
check_assist(

View File

@ -115,6 +115,7 @@ mod handlers {
mod apply_demorgan;
mod auto_import;
mod bind_unused_param;
mod bool_to_enum;
mod change_visibility;
mod convert_bool_then;
mod convert_comment_block;
@ -124,6 +125,7 @@ mod handlers {
mod convert_let_else_to_match;
mod convert_match_to_let_else;
mod convert_nested_function_to_closure;
mod convert_tuple_return_type_to_struct;
mod convert_tuple_struct_to_named_struct;
mod convert_named_struct_to_tuple_struct;
mod convert_to_guarded_return;
@ -158,6 +160,7 @@ mod handlers {
mod generate_getter_or_setter;
mod generate_impl;
mod generate_is_empty_from_len;
mod generate_mut_trait_impl;
mod generate_new;
mod generate_delegate_methods;
mod generate_trait_from_impl;
@ -193,6 +196,7 @@ mod handlers {
mod replace_try_expr_with_match;
mod replace_derive_with_manual_impl;
mod replace_if_let_with_match;
mod replace_is_method_with_if_let_method;
mod replace_method_eager_lazy;
mod replace_arith_op;
mod introduce_named_generic;
@ -225,8 +229,10 @@ mod handlers {
add_return_type::add_return_type,
add_turbo_fish::add_turbo_fish,
apply_demorgan::apply_demorgan,
apply_demorgan::apply_demorgan_iterator,
auto_import::auto_import,
bind_unused_param::bind_unused_param,
bool_to_enum::bool_to_enum,
change_visibility::change_visibility,
convert_bool_then::convert_bool_then_to_if,
convert_bool_then::convert_if_to_bool_then,
@ -237,6 +243,7 @@ mod handlers {
convert_iter_for_each_to_for::convert_for_loop_with_for_each,
convert_let_else_to_match::convert_let_else_to_match,
convert_match_to_let_else::convert_match_to_let_else,
convert_tuple_return_type_to_struct::convert_tuple_return_type_to_struct,
convert_named_struct_to_tuple_struct::convert_named_struct_to_tuple_struct,
convert_nested_function_to_closure::convert_nested_function_to_closure,
convert_to_guarded_return::convert_to_guarded_return,
@ -268,6 +275,7 @@ mod handlers {
generate_function::generate_function,
generate_impl::generate_impl,
generate_impl::generate_trait_impl,
generate_mut_trait_impl::generate_mut_trait_impl,
generate_is_empty_from_len::generate_is_empty_from_len,
generate_new::generate_new,
generate_trait_from_impl::generate_trait_from_impl,
@ -308,6 +316,7 @@ mod handlers {
replace_derive_with_manual_impl::replace_derive_with_manual_impl,
replace_if_let_with_match::replace_if_let_with_match,
replace_if_let_with_match::replace_match_with_if_let,
replace_is_method_with_if_let_method::replace_is_method_with_if_let_method,
replace_let_with_if_let::replace_let_with_if_let,
replace_method_eager_lazy::replace_with_eager_method,
replace_method_eager_lazy::replace_with_lazy_method,

View File

@ -244,6 +244,30 @@ fn main() {
)
}
#[test]
fn doctest_apply_demorgan_iterator() {
check_doc_test(
"apply_demorgan_iterator",
r#####"
//- minicore: iterator
fn main() {
let arr = [1, 2, 3];
if !arr.into_iter().$0any(|num| num == 4) {
println!("foo");
}
}
"#####,
r#####"
fn main() {
let arr = [1, 2, 3];
if arr.into_iter().all(|num| num != 4) {
println!("foo");
}
}
"#####,
)
}
#[test]
fn doctest_auto_import() {
check_doc_test(
@ -280,6 +304,34 @@ fn some_function(x: i32) {
)
}
#[test]
fn doctest_bool_to_enum() {
check_doc_test(
"bool_to_enum",
r#####"
fn main() {
let $0bool = true;
if bool {
println!("foo");
}
}
"#####,
r#####"
#[derive(PartialEq, Eq)]
enum Bool { True, False }
fn main() {
let bool = Bool::True;
if bool == Bool::True {
println!("foo");
}
}
"#####,
)
}
#[test]
fn doctest_change_visibility() {
check_doc_test(
@ -558,6 +610,33 @@ fn main() {
)
}
#[test]
fn doctest_convert_tuple_return_type_to_struct() {
check_doc_test(
"convert_tuple_return_type_to_struct",
r#####"
fn bar() {
let (a, b, c) = foo();
}
fn foo() -> ($0u32, u32, u32) {
(1, 2, 3)
}
"#####,
r#####"
fn bar() {
let FooResult(a, b, c) = foo();
}
struct FooResult(u32, u32, u32);
fn foo() -> FooResult {
FooResult(1, 2, 3)
}
"#####,
)
}
#[test]
fn doctest_convert_tuple_struct_to_named_struct() {
check_doc_test(
@ -1459,6 +1538,42 @@ impl MyStruct {
)
}
#[test]
fn doctest_generate_mut_trait_impl() {
check_doc_test(
"generate_mut_trait_impl",
r#####"
//- minicore: index
pub enum Axis { X = 0, Y = 1, Z = 2 }
impl<T> core::ops::Index$0<Axis> for [T; 3] {
type Output = T;
fn index(&self, index: Axis) -> &Self::Output {
&self[index as usize]
}
}
"#####,
r#####"
pub enum Axis { X = 0, Y = 1, Z = 2 }
$0impl<T> core::ops::IndexMut<Axis> for [T; 3] {
fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
&self[index as usize]
}
}
impl<T> core::ops::Index<Axis> for [T; 3] {
type Output = T;
fn index(&self, index: Axis) -> &Self::Output {
&self[index as usize]
}
}
"#####,
)
}
#[test]
fn doctest_generate_new() {
check_doc_test(
@ -2479,6 +2594,25 @@ fn handle(action: Action) {
)
}
#[test]
fn doctest_replace_is_some_with_if_let_some() {
check_doc_test(
"replace_is_some_with_if_let_some",
r#####"
fn main() {
let x = Some(1);
if x.is_som$0e() {}
}
"#####,
r#####"
fn main() {
let x = Some(1);
if let Some(${0:x}) = x {}
}
"#####,
)
}
#[test]
fn doctest_replace_let_with_if_let() {
check_doc_test(
@ -2850,6 +2984,8 @@ fn main() {
mod std { pub mod ops { pub trait Add { fn add(self, _: Self) {} } impl Add for i32 {} } }
"#####,
r#####"
use std::ops::Add;
fn main() {
1.add(2);
}

View File

@ -13,10 +13,9 @@ use crate::{
TypeLocation,
},
render::{render_resolution_with_import, render_resolution_with_import_pat, RenderContext},
Completions,
};
use super::Completions;
// Feature: Completion With Autoimport
//
// When completing names in the current scope, proposes additional imports from other modules or crates,
@ -377,9 +376,12 @@ fn import_assets_for_path(
&ctx.sema,
ctx.token.parent()?,
)?;
if fuzzy_name_length < 3 {
cov_mark::hit!(flyimport_exact_on_short_path);
assets_for_path.path_fuzzy_name_to_exact(false);
if fuzzy_name_length == 0 {
// nothing matches the empty string exactly, but we still compute assoc items in this case
assets_for_path.path_fuzzy_name_to_exact();
} else if fuzzy_name_length < 3 {
cov_mark::hit!(flyimport_prefix_on_short_path);
assets_for_path.path_fuzzy_name_to_prefix();
}
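// names of three or more characters keep the fuzzy search configured above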
Some(assets_for_path)
}

View File

@ -14,6 +14,7 @@ pub struct CompletionConfig {
pub enable_imports_on_the_fly: bool,
pub enable_self_on_the_fly: bool,
pub enable_private_editable: bool,
pub full_function_signatures: bool,
pub callable: Option<CallableSnippets>,
pub snippet_cap: Option<SnippetCap>,
pub insert_use: InsertUseConfig,

View File

@ -89,7 +89,7 @@ impl fmt::Debug for CompletionItem {
let mut s = f.debug_struct("CompletionItem");
s.field("label", &self.label).field("source_range", &self.source_range);
if self.text_edit.len() == 1 {
let atom = &self.text_edit.iter().next().unwrap();
let atom = self.text_edit.iter().next().unwrap();
s.field("delete", &atom.delete);
s.field("insert", &atom.insert);
} else {

View File

@ -169,6 +169,28 @@ pub fn completions(
return Some(completions.into());
}
// when the user types a bare `_` (that is, it does not belong to an identifier)
// the user might just have wanted to type a `_` for type inference or pattern discarding
// so try to suppress completions in those cases
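// e.g. `fn foo(_$0) {}`, `fn foo(_: _$0) {}` and `foo::<_$0>()` all yield no completions,
// while a `_$0` in expression position still completes (the `skips_underscore` test below
// exercises these cases)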
if trigger_character == Some('_') && ctx.original_token.kind() == syntax::SyntaxKind::UNDERSCORE
{
if let CompletionAnalysis::NameRef(NameRefContext {
kind:
NameRefKind::Path(
path_ctx @ PathCompletionCtx {
kind: PathKind::Type { .. } | PathKind::Pat { .. },
..
},
),
..
}) = analysis
{
if path_ctx.is_trivial_path() {
return None;
}
}
}
{
let acc = &mut completions;

View File

@ -98,9 +98,14 @@ fn render(
_ => (),
}
let detail = if ctx.completion.config.full_function_signatures {
detail_full(db, func)
} else {
detail(db, func)
};
item.set_documentation(ctx.docs(func))
.set_deprecated(ctx.is_deprecated(func) || ctx.is_deprecated_assoc_item(func))
.detail(detail(db, func))
.detail(detail)
.lookup_by(name.unescaped().to_smol_str());
match ctx.completion.config.snippet_cap {
@ -263,6 +268,21 @@ fn detail(db: &dyn HirDatabase, func: hir::Function) -> String {
detail
}
fn detail_full(db: &dyn HirDatabase, func: hir::Function) -> String {
let signature = format!("{}", func.display(db));
let mut detail = String::with_capacity(signature.len());
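// join the whitespace-separated segments of the rendered signature with single spaces
// so the completion detail renders as a single line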
for segment in signature.split_whitespace() {
if !detail.is_empty() {
detail.push(' ');
}
detail.push_str(segment);
}
detail
}
fn params_display(db: &dyn HirDatabase, func: hir::Function) -> String {
if let Some(self_param) = func.self_param(db) {
let assoc_fn_params = func.assoc_fn_params(db);

View File

@ -64,6 +64,7 @@ pub(crate) const TEST_CONFIG: CompletionConfig = CompletionConfig {
enable_imports_on_the_fly: true,
enable_self_on_the_fly: true,
enable_private_editable: false,
full_function_signatures: false,
callable: Some(CallableSnippets::FillArguments),
snippet_cap: SnippetCap::new(true),
prefer_no_std: false,

View File

@ -116,19 +116,47 @@ fn main() {
}
#[test]
fn short_paths_are_ignored() {
cov_mark::check!(flyimport_exact_on_short_path);
fn short_paths_are_prefix_matched() {
cov_mark::check!(flyimport_prefix_on_short_path);
check(
r#"
//- /lib.rs crate:dep
pub struct Bar;
pub struct Barc;
pub struct Rcar;
pub struct Rc;
pub const RC: () = ();
pub mod some_module {
pub struct Bar;
pub struct Rcar;
pub struct Rc;
pub const RC: () = ();
}
//- /main.rs crate:main deps:dep
fn main() {
Rc$0
}
"#,
expect![[r#"
st Rc (use dep::Rc)
st Rcar (use dep::Rcar)
st Rc (use dep::some_module::Rc)
st Rcar (use dep::some_module::Rcar)
"#]],
);
check(
r#"
//- /lib.rs crate:dep
pub struct Barc;
pub struct Rcar;
pub struct Rc;
pub const RC: () = ();
pub mod some_module {
pub struct Bar;
pub struct Rcar;
pub struct Rc;
pub const RC: () = ();
}
//- /main.rs crate:main deps:dep
@ -137,8 +165,36 @@ fn main() {
}
"#,
expect![[r#"
ct RC (use dep::RC)
st Rc (use dep::Rc)
st Rcar (use dep::Rcar)
ct RC (use dep::some_module::RC)
st Rc (use dep::some_module::Rc)
st Rcar (use dep::some_module::Rcar)
"#]],
);
check(
r#"
//- /lib.rs crate:dep
pub struct Barc;
pub struct Rcar;
pub struct Rc;
pub const RC: () = ();
pub mod some_module {
pub struct Bar;
pub struct Rcar;
pub struct Rc;
pub const RC: () = ();
}
//- /main.rs crate:main deps:dep
fn main() {
RC$0
}
"#,
expect![[r#"
ct RC (use dep::RC)
ct RC (use dep::some_module::RC)
"#]],
);
}
@ -841,8 +897,8 @@ fn main() {
TES$0
}"#,
expect![[r#"
ct TEST_CONST (use foo::TEST_CONST)
"#]],
ct TEST_CONST (use foo::TEST_CONST)
"#]],
);
check(
@ -858,9 +914,9 @@ fn main() {
tes$0
}"#,
expect![[r#"
ct TEST_CONST (use foo::TEST_CONST)
fn test_function() (use foo::test_function) fn() -> i32
"#]],
ct TEST_CONST (use foo::TEST_CONST)
fn test_function() (use foo::test_function) fn() -> i32
"#]],
);
check(
@ -873,9 +929,9 @@ mod foo {
}
fn main() {
Te$0
Tes$0
}"#,
expect![[]],
expect![""],
);
}

View File

@ -2,10 +2,15 @@
use expect_test::{expect, Expect};
use crate::tests::{
check_edit, completion_list, completion_list_no_kw, completion_list_with_trigger_character,
use crate::{
tests::{
check_edit, completion_list, completion_list_no_kw, completion_list_with_trigger_character,
},
CompletionItemKind,
};
use super::{do_completion_with_config, TEST_CONFIG};
fn check_no_kw(ra_fixture: &str, expect: Expect) {
let actual = completion_list_no_kw(ra_fixture);
expect.assert_eq(&actual)
@ -1303,3 +1308,176 @@ struct Foo<T: PartialOrd
"#,
);
}
fn check_signatures(src: &str, kind: CompletionItemKind, reduced: Expect, full: Expect) {
const FULL_SIGNATURES_CONFIG: crate::CompletionConfig = {
let mut x = TEST_CONFIG;
x.full_function_signatures = true;
x
};
// reduced signature
let completion = do_completion_with_config(TEST_CONFIG, src, kind);
assert!(completion[0].detail.is_some());
reduced.assert_eq(completion[0].detail.as_ref().unwrap());
// full signature
let completion = do_completion_with_config(FULL_SIGNATURES_CONFIG, src, kind);
assert!(completion[0].detail.is_some());
full.assert_eq(completion[0].detail.as_ref().unwrap());
}
#[test]
fn respects_full_function_signatures() {
check_signatures(
r#"
pub fn foo<'x, T>(x: &'x mut T) -> u8 where T: Clone, { 0u8 }
fn main() { fo$0 }
"#,
CompletionItemKind::SymbolKind(ide_db::SymbolKind::Function),
expect!("fn(&mut T) -> u8"),
expect!("pub fn foo<'x, T>(x: &'x mut T) -> u8 where T: Clone,"),
);
check_signatures(
r#"
struct Foo;
struct Bar;
impl Bar {
pub const fn baz(x: Foo) -> ! { loop {} };
}
fn main() { Bar::b$0 }
"#,
CompletionItemKind::SymbolKind(ide_db::SymbolKind::Function),
expect!("const fn(Foo) -> !"),
expect!("pub const fn baz(x: Foo) -> !"),
);
check_signatures(
r#"
struct Foo;
struct Bar;
impl Bar {
pub const fn baz<'foo>(&'foo mut self, x: &'foo Foo) -> ! { loop {} };
}
fn main() {
let mut bar = Bar;
bar.b$0
}
"#,
CompletionItemKind::Method,
expect!("const fn(&'foo mut self, &Foo) -> !"),
expect!("pub const fn baz<'foo>(&'foo mut self, x: &'foo Foo) -> !"),
);
}
#[test]
fn skips_underscore() {
check_with_trigger_character(
r#"
fn foo(_$0) { }
"#,
Some('_'),
expect![[r#""#]],
);
check_with_trigger_character(
r#"
fn foo(_: _$0) { }
"#,
Some('_'),
expect![[r#""#]],
);
check_with_trigger_character(
r#"
fn foo<T>() {
foo::<_$0>();
}
"#,
Some('_'),
expect![[r#""#]],
);
// underscore expressions are not suppressed: a bare `_` is not a valid expression, so the
// user almost certainly meant to type an underscore-prefixed name here
check_with_trigger_character(
r#"
fn foo() {
_$0
}
"#,
Some('_'),
expect![[r#"
fn foo() fn()
bt u32
kw const
kw crate::
kw enum
kw extern
kw false
kw fn
kw for
kw if
kw if let
kw impl
kw let
kw loop
kw match
kw mod
kw return
kw self::
kw static
kw struct
kw trait
kw true
kw type
kw union
kw unsafe
kw use
kw while
kw while let
sn macro_rules
sn pd
sn ppd
"#]],
);
}
#[test]
fn no_skip_underscore_ident() {
check_with_trigger_character(
r#"
fn foo(a_$0) { }
"#,
Some('_'),
expect![[r#"
kw mut
kw ref
"#]],
);
check_with_trigger_character(
r#"
fn foo(_: a_$0) { }
"#,
Some('_'),
expect![[r#"
bt u32
kw crate::
kw self::
"#]],
);
check_with_trigger_character(
r#"
fn foo<T>() {
foo::<a_$0>();
}
"#,
Some('_'),
expect![[r#"
tp T
bt u32
kw crate::
kw self::
"#]],
);
}

View File

@ -161,8 +161,8 @@ impl IdentClass {
ast::AwaitExpr(await_expr) => OperatorClass::classify_await(sema, &await_expr).map(IdentClass::Operator),
ast::BinExpr(bin_expr) => OperatorClass::classify_bin(sema, &bin_expr).map(IdentClass::Operator),
ast::IndexExpr(index_expr) => OperatorClass::classify_index(sema, &index_expr).map(IdentClass::Operator),
ast::PrefixExpr(prefix_expr) => OperatorClass::classify_prefix(sema,&prefix_expr).map(IdentClass::Operator),
ast::TryExpr(try_expr) => OperatorClass::classify_try(sema,&try_expr).map(IdentClass::Operator),
ast::PrefixExpr(prefix_expr) => OperatorClass::classify_prefix(sema, &prefix_expr).map(IdentClass::Operator),
ast::TryExpr(try_expr) => OperatorClass::classify_try(sema, &try_expr).map(IdentClass::Operator),
_ => None,
}
}

View File

@ -54,6 +54,10 @@ impl FamousDefs<'_, '_> {
self.find_trait("core:convert:Into")
}
pub fn core_convert_Index(&self) -> Option<Trait> {
self.find_trait("core:ops:Index")
}
pub fn core_option_Option(&self) -> Option<Enum> {
self.find_enum("core:option:Option")
}

File diff suppressed because it is too large

View File

@ -68,22 +68,29 @@ pub struct FirstSegmentUnresolved {
pub enum NameToImport {
/// Requires items with names that exactly match the given string, bool indicates case-sensitivity.
Exact(String, bool),
/// Requires items with names that case-insensitively contain all letters from the string,
/// Requires items with names that match the given string by prefix, bool indicates case-sensitivity.
Prefix(String, bool),
/// Requires items with names that contain all letters from the string,
/// in the same order, but not necessarily adjacent.
Fuzzy(String),
Fuzzy(String, bool),
}
impl NameToImport {
pub fn exact_case_sensitive(s: String) -> NameToImport {
NameToImport::Exact(s, true)
}
}
impl NameToImport {
pub fn fuzzy(s: String) -> NameToImport {
// unless all chars are lowercase, we do a case sensitive search
let case_sensitive = s.chars().any(|c| c.is_uppercase());
NameToImport::Fuzzy(s, case_sensitive)
}
pub fn text(&self) -> &str {
match self {
NameToImport::Exact(text, _) => text.as_str(),
NameToImport::Fuzzy(text) => text.as_str(),
NameToImport::Prefix(text, _)
| NameToImport::Exact(text, _)
| NameToImport::Fuzzy(text, _) => text.as_str(),
}
}
}
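// A minimal standalone sketch (not the ide-db implementation; helper names are made up
// for illustration) of the three matching modes documented on `NameToImport`, assuming
// ASCII names and ignoring the case-sensitivity flags:
fn fuzzy_matches(name: &str, query: &str) -> bool {
    // every query character must occur in `name`, in order, but not necessarily adjacent
    let mut name_chars = name.chars().map(|c| c.to_ascii_lowercase());
    query.chars().all(|q| name_chars.any(|n| n == q))
}
fn prefix_matches(name: &str, query: &str) -> bool {
    name.starts_with(query)
}
fn exact_matches(name: &str, query: &str) -> bool {
    name == query
}
// e.g. the query "rc" fuzzy-matches "Rcar", "Rc" prefix-matches "Rcar", but only "Rcar"
// exact-matches "Rcar".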
@ -165,7 +172,7 @@ impl ImportAssets {
Some(Self {
import_candidate: ImportCandidate::TraitMethod(TraitImportCandidate {
receiver_ty,
assoc_item_name: NameToImport::Fuzzy(fuzzy_method_name),
assoc_item_name: NameToImport::fuzzy(fuzzy_method_name),
}),
module_with_candidate: module_with_method_call,
candidate_node,
@ -228,12 +235,30 @@ impl ImportAssets {
self.search_for(sema, None, prefer_no_std)
}
pub fn path_fuzzy_name_to_exact(&mut self, case_sensitive: bool) {
/// Requires imports to match by prefix instead of fuzzily.
pub fn path_fuzzy_name_to_prefix(&mut self) {
if let ImportCandidate::Path(PathImportCandidate { name: to_import, .. }) =
&mut self.import_candidate
{
let name = match to_import {
NameToImport::Fuzzy(name) => std::mem::take(name),
let (name, case_sensitive) = match to_import {
NameToImport::Fuzzy(name, case_sensitive) => {
(std::mem::take(name), *case_sensitive)
}
_ => return,
};
*to_import = NameToImport::Prefix(name, case_sensitive);
}
}
/// Requires imports to match exactly instead of fuzzily.
pub fn path_fuzzy_name_to_exact(&mut self) {
if let ImportCandidate::Path(PathImportCandidate { name: to_import, .. }) =
&mut self.import_candidate
{
let (name, case_sensitive) = match to_import {
NameToImport::Fuzzy(name, case_sensitive) => {
(std::mem::take(name), *case_sensitive)
}
_ => return,
};
*to_import = NameToImport::Exact(name, case_sensitive);
@ -623,7 +648,7 @@ impl ImportCandidate {
fuzzy_name: String,
sema: &Semantics<'_, RootDatabase>,
) -> Option<Self> {
path_import_candidate(sema, qualifier, NameToImport::Fuzzy(fuzzy_name))
path_import_candidate(sema, qualifier, NameToImport::fuzzy(fuzzy_name))
}
}

View File

@ -9,7 +9,7 @@ use syntax::{
algo,
ast::{
self, edit_in_place::Removable, make, AstNode, HasAttrs, HasModuleItem, HasVisibility,
PathSegmentKind,
PathSegmentKind, UseTree,
},
ted, Direction, NodeOrToken, SyntaxKind, SyntaxNode,
};
@ -157,6 +157,29 @@ impl ImportScope {
/// Insert an import path into the given file/node. A `merge` value of none indicates that no import merging is allowed to occur.
pub fn insert_use(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) {
insert_use_with_alias_option(scope, path, cfg, None);
}
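// Like `insert_use`, but inserts `path` with an `as _` alias (e.g. `use foo::Bar as _;`).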
pub fn insert_use_as_alias(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) {
let text: &str = "use foo as _";
let parse = syntax::SourceFile::parse(text);
let node = parse
.tree()
.syntax()
.descendants()
.find_map(UseTree::cast)
.expect("Failed to make ast node `Rename`");
let alias = node.rename();
insert_use_with_alias_option(scope, path, cfg, alias);
}
fn insert_use_with_alias_option(
scope: &ImportScope,
path: ast::Path,
cfg: &InsertUseConfig,
alias: Option<ast::Rename>,
) {
let _p = profile::span("insert_use");
let mut mb = match cfg.granularity {
ImportGranularity::Crate => Some(MergeBehavior::Crate),
@ -176,7 +199,8 @@ pub fn insert_use(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) {
}
let use_item =
make::use_(None, make::use_tree(path.clone(), None, None, false)).clone_for_update();
make::use_(None, make::use_tree(path.clone(), None, alias, false)).clone_for_update();
// merge into existing imports if possible
if let Some(mb) = mb {
let filter = |it: &_| !(cfg.skip_glob_imports && ast::Use::is_simple_glob(it));

View File

@ -993,6 +993,46 @@ use foo::bar::qux;
);
}
#[test]
fn insert_with_renamed_import_simple_use() {
check_with_config(
"use self::foo::Foo",
r#"
use self::foo::Foo as _;
"#,
r#"
use self::foo::Foo;
"#,
&InsertUseConfig {
granularity: ImportGranularity::Crate,
prefix_kind: hir::PrefixKind::BySelf,
enforce_granularity: true,
group: true,
skip_glob_imports: true,
},
);
}
#[test]
fn insert_with_renamed_import_complex_use() {
check_with_config(
"use self::foo::Foo;",
r#"
use self::foo::{self, Foo as _, Bar};
"#,
r#"
use self::foo::{self, Foo, Bar};
"#,
&InsertUseConfig {
granularity: ImportGranularity::Crate,
prefix_kind: hir::PrefixKind::BySelf,
enforce_granularity: true,
group: true,
skip_glob_imports: true,
},
);
}
fn check_with_config(
path: &str,
ra_fixture_before: &str,

View File

@ -78,6 +78,10 @@ fn try_merge_trees_mut(lhs: &ast::UseTree, rhs: &ast::UseTree, merge: MergeBehav
{
lhs.split_prefix(&lhs_prefix);
rhs.split_prefix(&rhs_prefix);
} else {
ted::replace(lhs.syntax(), rhs.syntax());
// we can safely return here, in this case `recursive_merge` doesn't do anything
return Some(());
}
recursive_merge(lhs, rhs, merge)
}
@ -123,6 +127,13 @@ fn recursive_merge(lhs: &ast::UseTree, rhs: &ast::UseTree, merge: MergeBehavior)
// so they need to be handled explicitly
.or_else(|| tree.star_token().map(|_| false))
};
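// an existing underscore alias (`use foo::Bar as _;`) is simply replaced by the incoming tree,
// e.g. merging `Foo` into `{self, Foo as _, Bar}` yields `{self, Foo, Bar}`
// (see `insert_with_renamed_import_complex_use`)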
if lhs_t.rename().and_then(|x| x.underscore_token()).is_some() {
ted::replace(lhs_t.syntax(), rhs_t.syntax());
*lhs_t = rhs_t;
continue;
}
match (tree_contains_self(lhs_t), tree_contains_self(&rhs_t)) {
(Some(true), None) => continue,
(None, Some(true)) => {

View File

@ -31,26 +31,34 @@ pub fn items_with_name<'a>(
)
});
let prefix = matches!(name, NameToImport::Prefix(..));
let (mut local_query, mut external_query) = match name {
NameToImport::Exact(exact_name, case_sensitive) => {
NameToImport::Prefix(exact_name, case_sensitive)
| NameToImport::Exact(exact_name, case_sensitive) => {
let mut local_query = symbol_index::Query::new(exact_name.clone());
local_query.exact();
let external_query = import_map::Query::new(exact_name);
(
local_query,
if case_sensitive { external_query.case_sensitive() } else { external_query },
)
let mut external_query = import_map::Query::new(exact_name);
if prefix {
local_query.prefix();
external_query = external_query.prefix();
} else {
local_query.exact();
external_query = external_query.exact();
}
if case_sensitive {
local_query.case_sensitive();
external_query = external_query.case_sensitive();
}
(local_query, external_query)
}
NameToImport::Fuzzy(fuzzy_search_string) => {
NameToImport::Fuzzy(fuzzy_search_string, case_sensitive) => {
let mut local_query = symbol_index::Query::new(fuzzy_search_string.clone());
local_query.fuzzy();
let mut external_query = import_map::Query::new(fuzzy_search_string.clone())
.fuzzy()
.assoc_search_mode(assoc_item_search);
if fuzzy_search_string.to_lowercase() != fuzzy_search_string {
if case_sensitive {
local_query.case_sensitive();
external_query = external_query.case_sensitive();
}

View File

@ -43,13 +43,20 @@ use triomphe::Arc;
use crate::RootDatabase;
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
enum SearchMode {
Fuzzy,
Exact,
Prefix,
}
#[derive(Debug)]
pub struct Query {
query: String,
lowercased: String,
only_types: bool,
libs: bool,
exact: bool,
mode: SearchMode,
case_sensitive: bool,
limit: usize,
}
@ -62,7 +69,7 @@ impl Query {
lowercased,
only_types: false,
libs: false,
exact: false,
mode: SearchMode::Fuzzy,
case_sensitive: false,
limit: usize::max_value(),
}
@ -76,8 +83,16 @@ impl Query {
self.libs = true;
}
pub fn fuzzy(&mut self) {
self.mode = SearchMode::Fuzzy;
}
pub fn exact(&mut self) {
self.exact = true;
self.mode = SearchMode::Exact;
}
pub fn prefix(&mut self) {
self.mode = SearchMode::Prefix;
}
pub fn case_sensitive(&mut self) {
@ -329,13 +344,23 @@ impl Query {
{
continue;
}
if self.exact {
if symbol.name != self.query {
continue;
let skip = match self.mode {
SearchMode::Fuzzy => {
self.case_sensitive
&& self.query.chars().any(|c| !symbol.name.contains(c))
}
} else if self.case_sensitive
&& self.query.chars().any(|c| !symbol.name.contains(c))
{
SearchMode::Exact => symbol.name != self.query,
SearchMode::Prefix if self.case_sensitive => {
!symbol.name.starts_with(&self.query)
}
SearchMode::Prefix => symbol
.name
.chars()
.zip(self.lowercased.chars())
.all(|(n, q)| n.to_lowercase().next() == Some(q)),
};
if skip {
continue;
}

View File

@ -1,4 +1,5 @@
//! Generates descriptors structure for unstable feature from Unstable Book
//! Generates descriptor structures for unstable features from the unstable book
//! and lints from rustc, rustdoc, and clippy.
use std::{borrow::Cow, fs, path::Path};
use itertools::Itertools;
@ -6,6 +7,8 @@ use stdx::format_to;
use test_utils::project_root;
use xshell::{cmd, Shell};
const DESTINATION: &str = "crates/ide-db/src/generated/lints.rs";
/// This clones the rustc repo, and so is not worth keeping up to date. We update it
/// manually by un-ignoring the test from time to time.
#[test]
@ -14,11 +17,21 @@ fn sourcegen_lint_completions() {
let sh = &Shell::new().unwrap();
let rust_repo = project_root().join("./target/rust");
if !rust_repo.exists() {
if rust_repo.exists() {
cmd!(sh, "git -C {rust_repo} pull --rebase").run().unwrap();
} else {
cmd!(sh, "git clone --depth=1 https://github.com/rust-lang/rust {rust_repo}")
.run()
.unwrap();
}
// need submodules for Cargo to parse the workspace correctly
cmd!(
sh,
"git -C {rust_repo} submodule update --init --recursive --depth=1 --
compiler library src/tools"
)
.run()
.unwrap();
let mut contents = String::from(
r"
@ -27,17 +40,28 @@ pub struct Lint {
pub label: &'static str,
pub description: &'static str,
}
pub struct LintGroup {
pub lint: Lint,
pub children: &'static [&'static str],
}
",
);
generate_lint_descriptor(sh, &mut contents);
contents.push('\n');
generate_feature_descriptor(&mut contents, &rust_repo.join("src/doc/unstable-book/src"));
let cargo = std::env::var("CARGO").unwrap_or_else(|_| "cargo".to_string());
let unstable_book = project_root().join("./target/unstable-book-gen");
cmd!(
sh,
"{cargo} run --manifest-path {rust_repo}/src/tools/unstable-book-gen/Cargo.toml --
{rust_repo}/library {rust_repo}/compiler {rust_repo}/src {unstable_book}"
)
.run()
.unwrap();
generate_feature_descriptor(&mut contents, &unstable_book.join("src"));
contents.push('\n');
let lints_json = project_root().join("./target/clippy_lints.json");
@ -51,41 +75,60 @@ pub struct LintGroup {
let contents = sourcegen::add_preamble("sourcegen_lints", sourcegen::reformat(contents));
let destination = project_root().join("crates/ide_db/src/generated/lints.rs");
let destination = project_root().join(DESTINATION);
sourcegen::ensure_file_contents(destination.as_path(), &contents);
}
/// Parses the output of `rustdoc -Whelp` and prints `Lint` and `LintGroup` constants into `buf`.
///
/// As of writing, the output of `rustc -Whelp` (not rustdoc) has the following format:
///
/// ```text
/// Lint checks provided by rustc:
///
/// name default meaning
/// ---- ------- -------
///
/// ...
///
/// Lint groups provided by rustc:
///
/// name sub-lints
/// ---- ---------
///
/// ...
/// ```
///
/// `rustdoc -Whelp` (and any other custom `rustc` driver) adds another two
/// tables after the `rustc` ones, with a different title but the same format.
fn generate_lint_descriptor(sh: &Shell, buf: &mut String) {
// FIXME: rustdoc currently requires an input file for -Whelp cc https://github.com/rust-lang/rust/pull/88831
let file = project_root().join(file!());
let stdout = cmd!(sh, "rustdoc -W help {file}").read().unwrap();
let start_lints = stdout.find("---- ------- -------").unwrap();
let start_lint_groups = stdout.find("---- ---------").unwrap();
let start_lints_rustdoc =
stdout.find("Lint checks provided by plugins loaded by this crate:").unwrap();
let start_lint_groups_rustdoc =
stdout.find("Lint groups provided by plugins loaded by this crate:").unwrap();
let stdout = cmd!(sh, "rustdoc -Whelp").read().unwrap();
let lints_pat = "---- ------- -------\n";
let lint_groups_pat = "---- ---------\n";
let lints = find_and_slice(&stdout, lints_pat);
let lint_groups = find_and_slice(lints, lint_groups_pat);
let lints_rustdoc = find_and_slice(lint_groups, lints_pat);
let lint_groups_rustdoc = find_and_slice(lints_rustdoc, lint_groups_pat);
buf.push_str(r#"pub const DEFAULT_LINTS: &[Lint] = &["#);
buf.push('\n');
let lints = stdout[start_lints..].lines().skip(1).take_while(|l| !l.is_empty()).map(|line| {
let lints = lints.lines().take_while(|l| !l.is_empty()).map(|line| {
let (name, rest) = line.trim().split_once(char::is_whitespace).unwrap();
let (_default_level, description) = rest.trim().split_once(char::is_whitespace).unwrap();
(name.trim(), Cow::Borrowed(description.trim()), vec![])
});
let lint_groups =
stdout[start_lint_groups..].lines().skip(1).take_while(|l| !l.is_empty()).map(|line| {
let (name, lints) = line.trim().split_once(char::is_whitespace).unwrap();
(
name.trim(),
format!("lint group for: {}", lints.trim()).into(),
lints
.split_ascii_whitespace()
.map(|s| s.trim().trim_matches(',').replace('-', "_"))
.collect(),
)
});
let lint_groups = lint_groups.lines().take_while(|l| !l.is_empty()).map(|line| {
let (name, lints) = line.trim().split_once(char::is_whitespace).unwrap();
(
name.trim(),
format!("lint group for: {}", lints.trim()).into(),
lints
.split_ascii_whitespace()
.map(|s| s.trim().trim_matches(',').replace('-', "_"))
.collect(),
)
});
let lints = lints
.chain(lint_groups)
@ -94,7 +137,8 @@ fn generate_lint_descriptor(sh: &Shell, buf: &mut String) {
for (name, description, ..) in &lints {
push_lint_completion(buf, &name.replace('-', "_"), description);
}
buf.push_str("];\n");
buf.push_str("];\n\n");
buf.push_str(r#"pub const DEFAULT_LINT_GROUPS: &[LintGroup] = &["#);
for (name, description, children) in &lints {
if !children.is_empty() {
@ -115,27 +159,23 @@ fn generate_lint_descriptor(sh: &Shell, buf: &mut String) {
buf.push_str(r#"pub const RUSTDOC_LINTS: &[Lint] = &["#);
buf.push('\n');
let lints_rustdoc =
stdout[start_lints_rustdoc..].lines().skip(2).take_while(|l| !l.is_empty()).map(|line| {
let (name, rest) = line.trim().split_once(char::is_whitespace).unwrap();
let (_default_level, description) =
rest.trim().split_once(char::is_whitespace).unwrap();
(name.trim(), Cow::Borrowed(description.trim()), vec![])
});
let lints_rustdoc = lints_rustdoc.lines().take_while(|l| !l.is_empty()).map(|line| {
let (name, rest) = line.trim().split_once(char::is_whitespace).unwrap();
let (_default_level, description) = rest.trim().split_once(char::is_whitespace).unwrap();
(name.trim(), Cow::Borrowed(description.trim()), vec![])
});
let lint_groups_rustdoc =
stdout[start_lint_groups_rustdoc..].lines().skip(2).take_while(|l| !l.is_empty()).map(
|line| {
let (name, lints) = line.trim().split_once(char::is_whitespace).unwrap();
(
name.trim(),
format!("lint group for: {}", lints.trim()).into(),
lints
.split_ascii_whitespace()
.map(|s| s.trim().trim_matches(',').replace('-', "_"))
.collect(),
)
},
);
lint_groups_rustdoc.lines().take_while(|l| !l.is_empty()).map(|line| {
let (name, lints) = line.trim().split_once(char::is_whitespace).unwrap();
(
name.trim(),
format!("lint group for: {}", lints.trim()).into(),
lints
.split_ascii_whitespace()
.map(|s| s.trim().trim_matches(',').replace('-', "_"))
.collect(),
)
});
let lints_rustdoc = lints_rustdoc
.chain(lint_groups_rustdoc)
@ -145,7 +185,7 @@ fn generate_lint_descriptor(sh: &Shell, buf: &mut String) {
for (name, description, ..) in &lints_rustdoc {
push_lint_completion(buf, &name.replace('-', "_"), description)
}
buf.push_str("];\n");
buf.push_str("];\n\n");
buf.push_str(r#"pub const RUSTDOC_LINT_GROUPS: &[LintGroup] = &["#);
for (name, description, children) in &lints_rustdoc {
@ -157,14 +197,24 @@ fn generate_lint_descriptor(sh: &Shell, buf: &mut String) {
buf.push_str("];\n");
}
#[track_caller]
fn find_and_slice<'a>(i: &'a str, p: &str) -> &'a str {
let idx = i.find(p).unwrap();
&i[idx + p.len()..]
}
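// A small illustrative check of `find_and_slice` (a sketch, not part of the sourcegen
// test file; the test name is made up): successive calls walk forward through the help
// output, which is why the rustc tables are consumed before the rustdoc tables even
// though both use the same column headers.
#[test]
fn find_and_slice_walks_forward() {
    let s = "a ---- b ---- c";
    let rest = find_and_slice(s, "---- ");
    assert_eq!(rest, "b ---- c");
    assert_eq!(find_and_slice(rest, "---- "), "c");
}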
/// Parses the unstable book `src_dir` and prints a constant with the list of
/// unstable features into `buf`.
///
/// It does this by looking for all `.md` files in the `language-features` and
/// `library-features` directories, and using the file name as the feature
/// name, and the file contents as the feature description.
fn generate_feature_descriptor(buf: &mut String, src_dir: &Path) {
let mut features = ["language-features", "library-features"]
.into_iter()
.flat_map(|it| sourcegen::list_files(&src_dir.join(it)))
.filter(|path| {
// Get all `.md ` files
path.extension().unwrap_or_default().to_str().unwrap_or_default() == "md"
})
// Get all `.md` files
.filter(|path| path.extension() == Some("md".as_ref()))
.map(|path| {
let feature_ident = path.file_stem().unwrap().to_str().unwrap().replace('-', "_");
let doc = fs::read_to_string(path).unwrap();
@ -196,7 +246,7 @@ fn generate_descriptor_clippy(buf: &mut String, path: &Path) {
let mut clippy_lints: Vec<ClippyLint> = Vec::new();
let mut clippy_groups: std::collections::BTreeMap<String, Vec<String>> = Default::default();
for line in file_content.lines().map(|line| line.trim()) {
for line in file_content.lines().map(str::trim) {
if let Some(line) = line.strip_prefix(r#""id": ""#) {
let clippy_lint = ClippyLint {
id: line.strip_suffix(r#"","#).expect("should be suffixed by comma").into(),
@ -211,12 +261,19 @@ fn generate_descriptor_clippy(buf: &mut String, path: &Path) {
.push(clippy_lints.last().unwrap().id.clone());
}
} else if let Some(line) = line.strip_prefix(r#""docs": ""#) {
let prefix_to_strip = r#" ### What it does"#;
let line = match line.strip_prefix(prefix_to_strip) {
Some(line) => line,
let header = "### What it does";
let line = match line.find(header) {
Some(idx) => &line[idx + header.len()..],
None => {
eprintln!("unexpected clippy prefix for {}", clippy_lints.last().unwrap().id);
continue;
let id = &clippy_lints.last().unwrap().id;
// these just don't have the common header
let allowed = ["allow_attributes", "read_line_without_trim"];
if allowed.contains(&id.as_str()) {
line
} else {
eprintln!("\nunexpected clippy prefix for {id}, line={line:?}\n",);
continue;
}
}
};
// Only take the description, any more than this is a lot of additional data we would embed into the exe

View File

@ -166,7 +166,7 @@ fn main() {
check_diagnostics(
r#"
struct A { a: &'static str }
fn f(a: A) { let A { a: hello } = a; }
fn f(a: A) { let A { a: _hello } = a; }
"#,
);
check_diagnostics(
@ -181,12 +181,14 @@ fn f(a: A) { let A { 0: 0 } = a; }
struct A { a: &'static str }
fn f(a: A) {
let A { a$0: a } = a;
_ = a;
}
"#,
r#"
struct A { a: &'static str }
fn f(a: A) {
let A { a } = a;
_ = a;
}
"#,
);
@ -196,12 +198,14 @@ fn f(a: A) {
struct A { a: &'static str, b: &'static str }
fn f(a: A) {
let A { a$0: a, b } = a;
_ = (a, b);
}
"#,
r#"
struct A { a: &'static str, b: &'static str }
fn f(a: A) {
let A { a, b } = a;
_ = (a, b);
}
"#,
);

View File

@ -111,6 +111,31 @@ fn some_fn() {
let what_aweird_formatting = 10;
another_func(what_aweird_formatting);
}
"#,
);
check_fix(
r#"
static S: i32 = M::A;
mod $0M {
pub const A: i32 = 10;
}
mod other {
use crate::M::A;
}
"#,
r#"
static S: i32 = m::A;
mod m {
pub const A: i32 = 10;
}
mod other {
use crate::m::A;
}
"#,
);
}
@ -175,10 +200,10 @@ fn NonSnakeCaseName() {}
fn incorrect_function_params() {
check_diagnostics(
r#"
fn foo(SomeParam: u8) {}
fn foo(SomeParam: u8) { _ = SomeParam; }
// ^^^^^^^^^ 💡 warn: Parameter `SomeParam` should have snake_case name, e.g. `some_param`
fn foo2(ok_param: &str, CAPS_PARAM: u8) {}
fn foo2(ok_param: &str, CAPS_PARAM: u8) { _ = (ok_param, CAPS_PARAM); }
// ^^^^^^^^^^ 💡 warn: Parameter `CAPS_PARAM` should have snake_case name, e.g. `caps_param`
"#,
);
@ -188,6 +213,7 @@ fn foo2(ok_param: &str, CAPS_PARAM: u8) {}
fn incorrect_variable_names() {
check_diagnostics(
r#"
#[allow(unused)]
fn foo() {
let SOME_VALUE = 10;
// ^^^^^^^^^^ 💡 warn: Variable `SOME_VALUE` should have snake_case name, e.g. `some_value`
@ -294,6 +320,7 @@ impl someStruct {
// ^^^^^^^^ 💡 warn: Function `SomeFunc` should have snake_case name, e.g. `some_func`
let WHY_VAR_IS_CAPS = 10;
// ^^^^^^^^^^^^^^^ 💡 warn: Variable `WHY_VAR_IS_CAPS` should have snake_case name, e.g. `why_var_is_caps`
_ = WHY_VAR_IS_CAPS;
}
}
"#,
@ -306,6 +333,7 @@ impl someStruct {
r#"
enum Option { Some, None }
#[allow(unused)]
fn main() {
match Option::None {
None => (),
@ -322,6 +350,7 @@ fn main() {
r#"
enum Option { Some, None }
#[allow(unused)]
fn main() {
match Option::None {
SOME_VAR @ None => (),
@ -349,7 +378,9 @@ enum E {
}
mod F {
fn CheckItWorksWithCrateAttr(BAD_NAME_HI: u8) {}
fn CheckItWorksWithCrateAttr(BAD_NAME_HI: u8) {
_ = BAD_NAME_HI;
}
}
"#,
);
@ -395,7 +426,7 @@ fn qualify() {
#[test] // Issue #8809.
fn parenthesized_parameter() {
check_diagnostics(r#"fn f((O): _) {}"#)
check_diagnostics(r#"fn f((O): _) { _ = O; }"#)
}
#[test]
@ -472,7 +503,9 @@ mod CheckBadStyle {
mod F {
#![allow(non_snake_case)]
fn CheckItWorksWithModAttr(BAD_NAME_HI: u8) {}
fn CheckItWorksWithModAttr(BAD_NAME_HI: u8) {
_ = BAD_NAME_HI;
}
}
#[allow(non_snake_case, non_camel_case_types)]
@ -510,17 +543,20 @@ fn NonSnakeCaseName(some_var: u8) -> u8 {
#[deny(nonstandard_style)]
mod CheckNonstandardStyle {
//^^^^^^^^^^^^^^^^^^^^^ 💡 error: Module `CheckNonstandardStyle` should have snake_case name, e.g. `check_nonstandard_style`
fn HiImABadFnName() {}
//^^^^^^^^^^^^^^ 💡 error: Function `HiImABadFnName` should have snake_case name, e.g. `hi_im_abad_fn_name`
}
#[deny(warnings)]
mod CheckBadStyle {
//^^^^^^^^^^^^^ 💡 error: Module `CheckBadStyle` should have snake_case name, e.g. `check_bad_style`
struct fooo;
//^^^^ 💡 error: Structure `fooo` should have CamelCase name, e.g. `Fooo`
}
mod F {
//^ 💡 warn: Module `F` should have snake_case name, e.g. `f`
#![deny(non_snake_case)]
fn CheckItWorksWithModAttr() {}
//^^^^^^^^^^^^^^^^^^^^^^^ 💡 error: Function `CheckItWorksWithModAttr` should have snake_case name, e.g. `check_it_works_with_mod_attr`
@ -641,4 +677,30 @@ enum E {
"#,
);
}
#[test]
fn module_name_inline() {
check_diagnostics(
r#"
mod M {
//^ 💡 warn: Module `M` should have snake_case name, e.g. `m`
mod IncorrectCase {}
//^^^^^^^^^^^^^ 💡 warn: Module `IncorrectCase` should have snake_case name, e.g. `incorrect_case`
}
"#,
);
}
#[test]
fn module_name_decl() {
check_diagnostics(
r#"
//- /Foo.rs
//- /main.rs
mod Foo;
//^^^ 💡 warn: Module `Foo` should have snake_case name, e.g. `foo`
"#,
)
}
}

View File

@ -23,12 +23,7 @@ pub(crate) fn mismatched_tuple_struct_pat_arg_count(
Diagnostic::new(
DiagnosticCode::RustcHardError("E0023"),
message,
invalid_args_range(
ctx,
d.expr_or_pat.clone().map(|it| it.either(Into::into, Into::into)),
d.expected,
d.found,
),
invalid_args_range(ctx, d.expr_or_pat.clone().map(Into::into), d.expected, d.found),
)
}
@ -131,7 +126,7 @@ fn f() { zero(); }
fn simple_free_fn_one() {
check_diagnostics(
r#"
fn one(arg: u8) {}
fn one(_arg: u8) {}
fn f() { one(); }
//^^ error: expected 1 argument, found 0
"#,
@ -139,7 +134,7 @@ fn f() { one(); }
check_diagnostics(
r#"
fn one(arg: u8) {}
fn one(_arg: u8) {}
fn f() { one(1); }
"#,
);
@ -176,7 +171,7 @@ fn f() {
check_diagnostics(
r#"
struct S;
impl S { fn method(&self, arg: u8) {} }
impl S { fn method(&self, _arg: u8) {} }
fn f() {
S.method();
@ -187,7 +182,7 @@ impl S { fn method(&self, arg: u8) {} }
check_diagnostics(
r#"
struct S;
impl S { fn method(&self, arg: u8) {} }
impl S { fn method(&self, _arg: u8) {} }
fn f() {
S::method(&S, 0);
@ -335,8 +330,8 @@ struct S;
impl S {
fn method(#[cfg(NEVER)] self) {}
fn method2(#[cfg(NEVER)] self, arg: u8) {}
fn method3(self, #[cfg(NEVER)] arg: u8) {}
fn method2(#[cfg(NEVER)] self, _arg: u8) {}
fn method3(self, #[cfg(NEVER)] _arg: u8) {}
}
extern "C" {
@ -365,8 +360,8 @@ fn main() {
r#"
#[rustc_legacy_const_generics(1, 3)]
fn mixed<const N1: &'static str, const N2: bool>(
a: u8,
b: i8,
_a: u8,
_b: i8,
) {}
fn f() {
@ -376,8 +371,8 @@ fn f() {
#[rustc_legacy_const_generics(1, 3)]
fn b<const N1: u8, const N2: u8>(
a: u8,
b: u8,
_a: u8,
_b: u8,
) {}
fn g() {
@ -403,7 +398,7 @@ fn f(
// ^^ error: this pattern has 0 fields, but the corresponding tuple struct has 2 fields
S(e, f, .., g, d): S
// ^^^^^^^^^ error: this pattern has 4 fields, but the corresponding tuple struct has 2 fields
) {}
) { _ = (a, b, c, d, e, f, g); }
"#,
)
}

View File

@ -39,7 +39,7 @@ pub(crate) fn missing_fields(ctx: &DiagnosticsContext<'_>, d: &hir::MissingField
d.field_list_parent_path
.clone()
.map(SyntaxNodePtr::from)
.unwrap_or_else(|| d.field_list_parent.clone().either(|it| it.into(), |it| it.into())),
.unwrap_or_else(|| d.field_list_parent.clone().into()),
);
Diagnostic::new_with_syntax_node_ptr(ctx, DiagnosticCode::RustcHardError("E0063"), message, ptr)
@ -58,10 +58,8 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
let root = ctx.sema.db.parse_or_expand(d.file);
let current_module = match &d.field_list_parent {
Either::Left(ptr) => ctx.sema.scope(ptr.to_node(&root).syntax()).map(|it| it.module()),
Either::Right(ptr) => ctx.sema.scope(ptr.to_node(&root).syntax()).map(|it| it.module()),
};
let current_module =
ctx.sema.scope(d.field_list_parent.to_node(&root).syntax()).map(|it| it.module());
let build_text_edit = |parent_syntax, new_syntax: &SyntaxNode, old_syntax| {
let edit = {
@ -87,9 +85,8 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
)])
};
match &d.field_list_parent {
Either::Left(record_expr) => {
let field_list_parent = record_expr.to_node(&root);
match &d.field_list_parent.to_node(&root) {
Either::Left(field_list_parent) => {
let missing_fields = ctx.sema.record_literal_missing_fields(&field_list_parent);
let mut locals = FxHashMap::default();
@ -152,8 +149,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option<Vec<Ass
old_field_list.syntax(),
)
}
Either::Right(record_pat) => {
let field_list_parent = record_pat.to_node(&root);
Either::Right(field_list_parent) => {
let missing_fields = ctx.sema.record_pattern_missing_fields(&field_list_parent);
let old_field_list = field_list_parent.record_pat_field_list()?;
@ -290,6 +286,7 @@ fn x(a: S) {
struct S { s: u32 }
fn x(a: S) {
let S { ref s } = a;
_ = s;
}
",
)
@ -626,7 +623,7 @@ struct TestStruct { one: i32, two: i64 }
fn test_fn() {
let one = 1;
let s = TestStruct{ one, two: 2 };
let _s = TestStruct{ one, two: 2 };
}
"#,
);

View File

@ -19,6 +19,7 @@ pub(crate) fn missing_match_arms(
mod tests {
use crate::tests::check_diagnostics;
#[track_caller]
fn check_diagnostics_no_bails(ra_fixture: &str) {
cov_mark::check_count!(validate_match_bailed_out, 0);
crate::tests::check_diagnostics(ra_fixture)
@ -564,6 +565,7 @@ fn bang(never: !) {
r#"
enum Option<T> { Some(T), None }
#[allow(unused)]
fn main() {
// `Never` is deliberately not defined so that it's an uninferred type.
match Option::<Never>::None {
@ -719,7 +721,7 @@ fn main() {
r#"
struct S { a: char}
fn main(v: S) {
match v { S{ a } => {} }
match v { S{ a } => { _ = a; } }
match v { S{ a: _x } => {} }
match v { S{ a: 'a' } => {} }
match v { S{..} => {} }
@ -901,7 +903,7 @@ enum E{ A, B }
fn foo() {
match &E::A {
E::A => {}
x => {}
_x => {}
}
}",
);

View File

@ -100,9 +100,9 @@ mod tests {
r#"
fn main() {
let x = &5 as *const usize;
unsafe { let y = *x; }
let z = *x;
} //^^💡 error: this operation is unsafe and requires an unsafe function or block
unsafe { let _y = *x; }
let _z = *x;
} //^^💡 error: this operation is unsafe and requires an unsafe function or block
"#,
)
}
@ -116,13 +116,13 @@ struct HasUnsafe;
impl HasUnsafe {
unsafe fn unsafe_fn(&self) {
let x = &5 as *const usize;
let y = *x;
let _y = *x;
}
}
unsafe fn unsafe_fn() {
let x = &5 as *const usize;
let y = *x;
let _y = *x;
}
fn main() {
@ -152,10 +152,10 @@ struct Ty {
static mut STATIC_MUT: Ty = Ty { a: 0 };
fn main() {
let x = STATIC_MUT.a;
//^^^^^^^^^^💡 error: this operation is unsafe and requires an unsafe function or block
let _x = STATIC_MUT.a;
//^^^^^^^^^^💡 error: this operation is unsafe and requires an unsafe function or block
unsafe {
let x = STATIC_MUT.a;
let _x = STATIC_MUT.a;
}
}
"#,
@ -187,13 +187,13 @@ fn main() {
r#"
fn main() {
let x = &5 as *const usize;
let z = *x$0;
let _z = *x$0;
}
"#,
r#"
fn main() {
let x = &5 as *const usize;
let z = unsafe { *x };
let _z = unsafe { *x };
}
"#,
);
@@ -231,7 +231,7 @@ struct S(usize);
impl S {
unsafe fn func(&self) {
let x = &self.0 as *const usize;
let z = *x;
let _z = *x;
}
}
fn main() {
@@ -244,7 +244,7 @@ struct S(usize);
impl S {
unsafe fn func(&self) {
let x = &self.0 as *const usize;
let z = *x;
let _z = *x;
}
}
fn main() {
@@ -267,7 +267,7 @@ struct Ty {
static mut STATIC_MUT: Ty = Ty { a: 0 };
fn main() {
let x = STATIC_MUT$0.a;
let _x = STATIC_MUT$0.a;
}
"#,
r#"
@@ -278,7 +278,7 @@ struct Ty {
static mut STATIC_MUT: Ty = Ty { a: 0 };
fn main() {
let x = unsafe { STATIC_MUT.a };
let _x = unsafe { STATIC_MUT.a };
}
"#,
)
@@ -382,16 +382,16 @@ fn main() {
static mut STATIC_MUT: u8 = 0;
fn main() {
let x;
x = STATIC_MUT$0;
let _x;
_x = STATIC_MUT$0;
}
"#,
r#"
static mut STATIC_MUT: u8 = 0;
fn main() {
let x;
x = unsafe { STATIC_MUT };
let _x;
_x = unsafe { STATIC_MUT };
}
"#,
)
@@ -405,14 +405,14 @@ fn main() {
static mut STATIC_MUT: u8 = 0;
fn main() {
let x = STATIC_MUT$0 + 1;
let _x = STATIC_MUT$0 + 1;
}
"#,
r#"
static mut STATIC_MUT: u8 = 0;
fn main() {
let x = unsafe { STATIC_MUT } + 1;
let _x = unsafe { STATIC_MUT } + 1;
}
"#,
)
@@ -425,14 +425,14 @@ fn main() {
static mut STATIC_MUT: u8 = 0;
fn main() {
let x = &STATIC_MUT$0;
let _x = &STATIC_MUT$0;
}
"#,
r#"
static mut STATIC_MUT: u8 = 0;
fn main() {
let x = unsafe { &STATIC_MUT };
let _x = unsafe { &STATIC_MUT };
}
"#,
)
@@ -445,14 +445,14 @@ fn main() {
static mut STATIC_MUT: u8 = 0;
fn main() {
let x = &&STATIC_MUT$0;
let _x = &&STATIC_MUT$0;
}
"#,
r#"
static mut STATIC_MUT: u8 = 0;
fn main() {
let x = unsafe { &&STATIC_MUT };
let _x = unsafe { &&STATIC_MUT };
}
"#,
)
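
For reference, an illustrative snippet (not taken from the commit) of the operations these tests exercise, which require an unsafe block; the second and third lines show the shape produced by the add-unsafe-block quick fix:

static mut STATIC_MUT: u8 = 0;

fn main() {
    let x = &5 as *const usize;
    let _y = *x;                    // error: this operation is unsafe and requires an unsafe function or block
    let _z = unsafe { *x };         // wrapped in an unsafe block, no diagnostic
    let _s = unsafe { STATIC_MUT }; // reading a `static mut` likewise needs `unsafe`
}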

View File

@@ -29,6 +29,7 @@ fn main() {
let a = &X;
let b = *a;
//^ error: cannot move `X` out of reference
_ = b;
}
"#,
);
@@ -46,6 +47,7 @@ fn main() {
let b = a.0;
//^ error: cannot move `X` out of reference
let y = a.1;
_ = (b, y);
}
"#,
);
@@ -59,8 +61,8 @@ fn main() {
struct X;
fn main() {
static S: X = X;
let s = S;
//^ error: cannot move `X` out of reference
let _s = S;
//^^ error: cannot move `X` out of reference
}
"#,
);
@@ -165,7 +167,7 @@ enum X {
fn main() {
let x = &X::Bar;
let c = || match *x {
let _c = || match *x {
X::Foo(t) => t,
_ => 5,
};
@@ -173,4 +175,19 @@ fn main() {
"#,
);
}
#[test]
fn regression_15787() {
check_diagnostics(
r#"
//- minicore: coerce_unsized, slice, copy
fn foo(mut slice: &[u32]) -> usize {
slice = match slice {
[0, rest @ ..] | rest => rest,
};
slice.len()
}
"#,
);
}
}
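
For reference, an illustrative snippet (not taken from the commit) of moving a non-Copy value out from behind a reference, which these tests flag:

struct X;

fn f(a: &X) {
    let _b = *a; // error: cannot move `X` out of reference
}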

View File

@@ -324,6 +324,7 @@ fn main() {
let x_own = 2;
let ref mut x_ref = x_own;
//^^^^^^^^^^^^^ 💡 error: cannot mutate immutable variable `x_own`
_ = x_ref;
}
"#,
);
@@ -331,7 +332,7 @@ fn main() {
r#"
struct Foo;
impl Foo {
fn method(&mut self, x: i32) {}
fn method(&mut self, _x: i32) {}
}
fn main() {
let x = Foo;
@@ -391,6 +392,7 @@ fn main() {
//^^^^^ 💡 warn: variable does not need to be mutable
x = 7;
//^^^^^ 💡 error: cannot mutate immutable variable `x`
_ = y;
}
}
}
@@ -404,12 +406,14 @@ fn main() {
// there would be no mutability error for locals in dead code. Rustc tries to
// not emit `unused_mut` in this case, but since it works without `mut`, and
// special casing it is not trivial, we emit it.
// Update: now MIR based `unused-variable` is taking over `unused-mut` for the same reason.
check_diagnostics(
r#"
fn main() {
return;
let mut x = 2;
//^^^^^ 💡 warn: variable does not need to be mutable
//^^^^^ warn: unused variable
&mut x;
}
"#,
@@ -419,7 +423,7 @@ fn main() {
fn main() {
loop {}
let mut x = 2;
//^^^^^ 💡 warn: variable does not need to be mutable
//^^^^^ warn: unused variable
&mut x;
}
"#,
@@ -440,7 +444,7 @@ fn main(b: bool) {
g();
}
let mut x = 2;
//^^^^^ 💡 warn: variable does not need to be mutable
//^^^^^ warn: unused variable
&mut x;
}
"#,
@@ -454,7 +458,7 @@ fn main(b: bool) {
return;
}
let mut x = 2;
//^^^^^ 💡 warn: variable does not need to be mutable
//^^^^^ warn: unused variable
&mut x;
}
"#,
@@ -536,6 +540,7 @@ fn main() {
(k @ 5, ref mut t) if { continue; } => {
//^^^^^^^^^ 💡 error: cannot mutate immutable variable `z`
*t = 5;
_ = k;
}
_ => {
let y = (1, 2);
@@ -588,6 +593,7 @@ fn main() {
b = 1;
c = (2, 3);
d = 3;
_ = (c, b, d);
}
}
"#,
@@ -600,6 +606,7 @@ fn main() {
r#"
fn f(mut x: i32) {
//^^^^^ 💡 warn: variable does not need to be mutable
f(x + 2);
}
"#,
);
@@ -615,8 +622,11 @@ fn f(x: i32) {
r#"
fn f((x, y): (i32, i32)) {
let t = [0; 2];
x = 5;
//^^^^^ 💡 error: cannot mutate immutable variable `x`
x = 5;
//^^^^^ 💡 error: cannot mutate immutable variable `x`
_ = x;
_ = y;
_ = t;
}
"#,
);
@@ -645,6 +655,7 @@ fn f(x: [(i32, u8); 10]) {
//^^^^^ 💡 warn: variable does not need to be mutable
a = 2;
//^^^^^ 💡 error: cannot mutate immutable variable `a`
_ = b;
}
}
"#,
@@ -666,6 +677,7 @@ fn f(x: [(i32, u8); 10]) {
//^^^^^ 💡 error: cannot mutate immutable variable `a`
c = 2;
//^^^^^ 💡 error: cannot mutate immutable variable `c`
_ = (b, d);
}
}
}
@@ -696,18 +708,18 @@ fn f() {
fn overloaded_index() {
check_diagnostics(
r#"
//- minicore: index
//- minicore: index, copy
use core::ops::{Index, IndexMut};
struct Foo;
impl Index<usize> for Foo {
type Output = (i32, u8);
fn index(&self, index: usize) -> &(i32, u8) {
fn index(&self, _index: usize) -> &(i32, u8) {
&(5, 2)
}
}
impl IndexMut<usize> for Foo {
fn index_mut(&mut self, index: usize) -> &mut (i32, u8) {
fn index_mut(&mut self, _index: usize) -> &mut (i32, u8) {
&mut (5, 2)
}
}
@@ -715,26 +727,32 @@ fn f() {
let mut x = Foo;
//^^^^^ 💡 warn: variable does not need to be mutable
let y = &x[2];
_ = (x, y);
let x = Foo;
let y = &mut x[2];
//^💡 error: cannot mutate immutable variable `x`
_ = (x, y);
let mut x = &mut Foo;
//^^^^^ 💡 warn: variable does not need to be mutable
let y: &mut (i32, u8) = &mut x[2];
_ = (x, y);
let x = Foo;
let ref mut y = x[7];
//^ 💡 error: cannot mutate immutable variable `x`
_ = (x, y);
let (ref mut y, _) = x[3];
//^ 💡 error: cannot mutate immutable variable `x`
_ = y;
match x[10] {
//^ 💡 error: cannot mutate immutable variable `x`
(ref y, _) => (),
(_, ref mut y) => (),
(ref y, 5) => _ = y,
(_, ref mut y) => _ = y,
}
let mut x = Foo;
let mut i = 5;
//^^^^^ 💡 warn: variable does not need to be mutable
let y = &mut x[i];
_ = y;
}
"#,
);
@@ -744,7 +762,7 @@ fn f() {
fn overloaded_deref() {
check_diagnostics(
r#"
//- minicore: deref_mut
//- minicore: deref_mut, copy
use core::ops::{Deref, DerefMut};
struct Foo;
@@ -763,21 +781,27 @@ fn f() {
let mut x = Foo;
//^^^^^ 💡 warn: variable does not need to be mutable
let y = &*x;
_ = (x, y);
let x = Foo;
let y = &mut *x;
//^^ 💡 error: cannot mutate immutable variable `x`
_ = (x, y);
let x = Foo;
//^ warn: unused variable
let x = Foo;
let y: &mut (i32, u8) = &mut x;
//^^^^^^ 💡 error: cannot mutate immutable variable `x`
_ = (x, y);
let ref mut y = *x;
//^^ 💡 error: cannot mutate immutable variable `x`
_ = y;
let (ref mut y, _) = *x;
//^^ 💡 error: cannot mutate immutable variable `x`
_ = y;
match *x {
//^^ 💡 error: cannot mutate immutable variable `x`
(ref y, _) => (),
(_, ref mut y) => (),
(ref y, 5) => _ = y,
(_, ref mut y) => _ = y,
}
}
"#,
@@ -866,6 +890,7 @@ pub fn test() {
data: 0
}
);
_ = tree;
}
"#,
);
@@ -925,6 +950,7 @@ fn fn_once(mut x: impl FnOnce(u8) -> u8) -> u8 {
let x = X;
let closure4 = || { x.mutate(); };
//^ 💡 error: cannot mutate immutable variable `x`
_ = (closure2, closure3, closure4);
}
"#,
);
@@ -941,7 +967,9 @@ fn fn_once(mut x: impl FnOnce(u8) -> u8) -> u8 {
z = 3;
let mut k = z;
//^^^^^ 💡 warn: variable does not need to be mutable
_ = k;
};
_ = (x, closure);
}
"#,
);
@@ -958,6 +986,7 @@ fn f() {
}
}
};
_ = closure;
}
"#,
);
@@ -972,7 +1001,8 @@ fn f() {
let mut x = X;
let c2 = || { x = X; x };
let mut x = X;
let c2 = move || { x = X; };
let c3 = move || { x = X; };
_ = (c1, c2, c3);
}
"#,
);
@@ -1023,7 +1053,7 @@ fn x(t: &[u8]) {
a = 2;
//^^^^^ 💡 error: cannot mutate immutable variable `a`
_ = b;
}
_ => {}
}
@@ -1079,6 +1109,7 @@ fn f() {
let x = Box::new(5);
let closure = || *x = 2;
//^ 💡 error: cannot mutate immutable variable `x`
_ = closure;
}
"#,
);
@@ -1156,6 +1187,7 @@ macro_rules! mac {
fn main2() {
let mut x = mac![];
//^^^^^ 💡 warn: variable does not need to be mutable
_ = x;
}
"#,
);
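
For reference, an illustrative snippet (not taken from the commit) showing the two mutability diagnostics these tests exercise, a missing `mut` and an unnecessary one:

fn f() {
    let x = 2;
    x = 3;         // error: cannot mutate immutable variable `x`

    let mut y = 5; // warn: variable does not need to be mutable
    let _ = (x, y);
}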

View File

@@ -13,7 +13,7 @@ use crate::{fix, Assist, Diagnostic, DiagnosticCode, DiagnosticsContext};
//
// This diagnostic is triggered if created structure does not have field provided in record.
pub(crate) fn no_such_field(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField) -> Diagnostic {
let node = d.field.clone().map(|it| it.either(Into::into, Into::into));
let node = d.field.clone().map(Into::into);
if d.private {
// FIXME: quickfix to add required visibility
Diagnostic::new_with_syntax_node_ptr(
@@ -35,15 +35,13 @@ pub(crate) fn no_such_field(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField)
fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField) -> Option<Vec<Assist>> {
// FIXME: quickfix for pattern
match &d.field.value {
Either::Left(ptr) => {
let root = ctx.sema.db.parse_or_expand(d.field.file_id);
missing_record_expr_field_fixes(
&ctx.sema,
d.field.file_id.original_file(ctx.sema.db),
&ptr.to_node(&root),
)
}
let root = ctx.sema.db.parse_or_expand(d.field.file_id);
match &d.field.value.to_node(&root) {
Either::Left(node) => missing_record_expr_field_fixes(
&ctx.sema,
d.field.file_id.original_file(ctx.sema.db),
node,
),
_ => None,
}
}
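
For reference, an illustrative record literal (not taken from the commit) with an unknown field, which the no-such-field diagnostic above reports and, for record expressions, offers a quick fix for:

struct S { foo: i32 }

fn main() {
    let _ = S { foo: 1, bar: 2 }; // no-such-field: `bar` is not a field of `S`
}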

View File

@@ -1,5 +1,3 @@
use either::Either;
use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: private-assoc-item
@@ -28,13 +26,7 @@ pub(crate) fn private_assoc_item(
},
name,
),
d.expr_or_pat.clone().map(|it| match it {
Either::Left(it) => it.into(),
Either::Right(it) => match it {
Either::Left(it) => it.into(),
Either::Right(it) => it.into(),
},
}),
d.expr_or_pat.clone().map(Into::into),
)
}
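
For reference, a hypothetical example (not taken from the commit) of the kind of code the private-assoc-item diagnostic reports, a use of an associated item that is not visible at the call site:

mod m {
    pub struct S;
    impl S {
        fn private_method(&self) {}
    }
}

fn main() {
    m::S.private_method(); // private-assoc-item: `private_method` is private
}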

View File

@@ -74,8 +74,8 @@ mod tests {
r#"
//- minicore: iterators
fn foo() {
let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 weak: replace filter_map(..).next() with find_map(..)
let _m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 weak: replace filter_map(..).next() with find_map(..)
"#,
);
}
@@ -117,7 +117,7 @@ fn foo() {
fn foo() {
let mut m = core::iter::repeat(())
.filter_map(|()| Some(92));
let n = m.next();
let _n = m.next();
}
"#,
);
@@ -148,22 +148,22 @@ fn foo() {
fn foo() {
#[allow(clippy::filter_map_next)]
let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
let _m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
}
#[deny(clippy::filter_map_next)]
fn foo() {
let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 error: replace filter_map(..).next() with find_map(..)
let _m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 error: replace filter_map(..).next() with find_map(..)
fn foo() {
let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 weak: replace filter_map(..).next() with find_map(..)
let _m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 weak: replace filter_map(..).next() with find_map(..)
#[warn(clippy::filter_map_next)]
fn foo() {
let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 warn: replace filter_map(..).next() with find_map(..)
let _m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 warn: replace filter_map(..).next() with find_map(..)
"#,
);
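
For reference, an illustrative snippet (not taken from the commit) of the rewrite this weak warning suggests:

fn foo() {
    // weak: replace filter_map(..).next() with find_map(..)
    let _m = core::iter::repeat(()).filter_map(|()| Some(92)).next();

    // the suggested equivalent
    let _n = core::iter::repeat(()).find_map(|()| Some(92));
}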

View File

@@ -1,4 +1,3 @@
use either::Either;
use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, InFile, Type};
use ide_db::{famous_defs::FamousDefs, source_change::SourceChange};
use syntax::{
@@ -14,9 +13,11 @@ use crate::{adjusted_display_range, fix, Assist, Diagnostic, DiagnosticCode, Dia
// This diagnostic is triggered when the type of an expression or pattern does not match
// the expected type.
pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) -> Diagnostic {
let display_range = match &d.expr_or_pat {
Either::Left(expr) => {
adjusted_display_range::<ast::Expr>(ctx, expr.clone().map(|it| it.into()), &|expr| {
let display_range = match &d.expr_or_pat.value {
expr if ast::Expr::can_cast(expr.kind()) => adjusted_display_range::<ast::Expr>(
ctx,
InFile { file_id: d.expr_or_pat.file_id, value: expr.syntax_node_ptr() },
&|expr| {
let salient_token_range = match expr {
ast::Expr::IfExpr(it) => it.if_token()?.text_range(),
ast::Expr::LoopExpr(it) => it.loop_token()?.text_range(),
@@ -32,10 +33,15 @@ pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch)
cov_mark::hit!(type_mismatch_range_adjustment);
Some(salient_token_range)
})
}
Either::Right(pat) => {
ctx.sema.diagnostics_display_range(pat.clone().map(|it| it.into())).range
},
),
pat => {
ctx.sema
.diagnostics_display_range(InFile {
file_id: d.expr_or_pat.file_id,
value: pat.syntax_node_ptr(),
})
.range
}
};
let mut diag = Diagnostic::new(
@@ -57,14 +63,12 @@ pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch)
fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) -> Option<Vec<Assist>> {
let mut fixes = Vec::new();
match &d.expr_or_pat {
Either::Left(expr_ptr) => {
add_reference(ctx, d, expr_ptr, &mut fixes);
add_missing_ok_or_some(ctx, d, expr_ptr, &mut fixes);
remove_semicolon(ctx, d, expr_ptr, &mut fixes);
str_ref_to_owned(ctx, d, expr_ptr, &mut fixes);
}
Either::Right(_pat_ptr) => {}
if let Some(expr_ptr) = d.expr_or_pat.value.clone().cast::<ast::Expr>() {
let expr_ptr = &InFile { file_id: d.expr_or_pat.file_id, value: expr_ptr.clone() };
add_reference(ctx, d, expr_ptr, &mut fixes);
add_missing_ok_or_some(ctx, d, expr_ptr, &mut fixes);
remove_semicolon(ctx, d, expr_ptr, &mut fixes);
str_ref_to_owned(ctx, d, expr_ptr, &mut fixes);
}
if fixes.is_empty() {
@@ -205,7 +209,7 @@ fn main() {
test(123);
//^^^ 💡 error: expected &i32, found i32
}
fn test(arg: &i32) {}
fn test(_arg: &i32) {}
"#,
);
}
@@ -217,13 +221,13 @@ fn test(arg: &i32) {}
fn main() {
test(123$0);
}
fn test(arg: &i32) {}
fn test(_arg: &i32) {}
"#,
r#"
fn main() {
test(&123);
}
fn test(arg: &i32) {}
fn test(_arg: &i32) {}
"#,
);
}
@@ -235,13 +239,13 @@ fn test(arg: &i32) {}
fn main() {
test($0123);
}
fn test(arg: &mut i32) {}
fn test(_arg: &mut i32) {}
"#,
r#"
fn main() {
test(&mut 123);
}
fn test(arg: &mut i32) {}
fn test(_arg: &mut i32) {}
"#,
);
}
@@ -254,13 +258,13 @@ fn test(arg: &mut i32) {}
fn main() {
test($0[1, 2, 3]);
}
fn test(arg: &[i32]) {}
fn test(_arg: &[i32]) {}
"#,
r#"
fn main() {
test(&[1, 2, 3]);
}
fn test(arg: &[i32]) {}
fn test(_arg: &[i32]) {}
"#,
);
}
@@ -279,7 +283,7 @@ impl core::ops::Deref for Foo {
fn main() {
test($0Foo);
}
fn test(arg: &Bar) {}
fn test(_arg: &Bar) {}
"#,
r#"
struct Foo;
@@ -291,7 +295,7 @@ impl core::ops::Deref for Foo {
fn main() {
test(&Foo);
}
fn test(arg: &Bar) {}
fn test(_arg: &Bar) {}
"#,
);
}
@@ -305,7 +309,7 @@ fn main() {
}
struct Test;
impl Test {
fn call_by_ref(&self, arg: &i32) {}
fn call_by_ref(&self, _arg: &i32) {}
}
"#,
r#"
@@ -314,7 +318,7 @@ fn main() {
}
struct Test;
impl Test {
fn call_by_ref(&self, arg: &i32) {}
fn call_by_ref(&self, _arg: &i32) {}
}
"#,
);
@@ -345,7 +349,7 @@ macro_rules! thousand {
1000_u64
};
}
fn test(foo: &u64) {}
fn test(_foo: &u64) {}
fn main() {
test($0thousand!());
}
@@ -356,7 +360,7 @@ macro_rules! thousand {
1000_u64
};
}
fn test(foo: &u64) {}
fn test(_foo: &u64) {}
fn main() {
test(&thousand!());
}
@@ -369,12 +373,12 @@ fn main() {
check_fix(
r#"
fn main() {
let test: &mut i32 = $0123;
let _test: &mut i32 = $0123;
}
"#,
r#"
fn main() {
let test: &mut i32 = &mut 123;
let _test: &mut i32 = &mut 123;
}
"#,
);
@@ -411,7 +415,7 @@ fn div(x: i32, y: i32) -> Option<i32> {
fn f<const N: u64>() -> Rate<N> { // FIXME: add some error
loop {}
}
fn run(t: Rate<5>) {
fn run(_t: Rate<5>) {
}
fn main() {
run(f()) // FIXME: remove this error
@@ -426,7 +430,7 @@ fn div(x: i32, y: i32) -> Option<i32> {
check_diagnostics(
r#"
pub struct Rate<T, const NOM: u32, const DENOM: u32>(T);
fn run(t: Rate<u32, 1, 1>) {
fn run(_t: Rate<u32, 1, 1>) {
}
fn main() {
run(Rate::<_, _, _>(5));
@@ -650,7 +654,7 @@ fn h() {
r#"
struct X<T>(T);
fn foo(x: X<Unknown>) {}
fn foo(_x: X<Unknown>) {}
fn test1() {
// Unknown might be `i32`, so we should not emit type mismatch here.
foo(X(42));
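
For reference, an illustrative mismatch (not taken from the commit) of the kind the type-mismatch diagnostic above reports, together with the shape produced by the add-reference quick fix shown in the tests:

fn test(_arg: &i32) {}

fn main() {
    test(123);  // type-mismatch: expected &i32, found i32
    test(&123); // after the add-reference quick fix
}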

View File

@@ -142,8 +142,8 @@ fn t<T>() -> T { loop {} }
check_diagnostics(
r#"
fn main() {
let x = [(); _];
let y: [(); 10] = [(); _];
let _x = [(); _];
let _y: [(); 10] = [(); _];
_ = 0;
(_,) = (1,);
}

View File

@@ -0,0 +1,111 @@
use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
// Diagnostic: unused-variables
//
// This diagnostic is triggered when a local variable is not used.
pub(crate) fn unused_variables(
ctx: &DiagnosticsContext<'_>,
d: &hir::UnusedVariable,
) -> Diagnostic {
let ast = d.local.primary_source(ctx.sema.db).syntax_ptr();
Diagnostic::new_with_syntax_node_ptr(
ctx,
DiagnosticCode::RustcLint("unused_variables"),
"unused variable",
ast,
)
.experimental()
}
#[cfg(test)]
mod tests {
use crate::tests::check_diagnostics;
#[test]
fn unused_variables_simple() {
check_diagnostics(
r#"
//- minicore: fn
struct Foo { f1: i32, f2: i64 }
fn f(kkk: i32) {}
//^^^ warn: unused variable
fn main() {
let a = 2;
//^ warn: unused variable
let b = 5;
// note: `unused variable` implies `unused mut`, so we should not emit both at the same time.
let mut c = f(b);
//^^^^^ warn: unused variable
let (d, e) = (3, 5);
//^ warn: unused variable
let _ = e;
let f1 = 2;
let f2 = 5;
let f = Foo { f1, f2 };
match f {
Foo { f1, f2 } => {
//^^ warn: unused variable
_ = f2;
}
}
let g = false;
if g {}
let h: fn() -> i32 = || 2;
let i = h();
//^ warn: unused variable
}
"#,
);
}
#[test]
fn unused_self() {
check_diagnostics(
r#"
struct S {
}
impl S {
fn owned_self(self, u: i32) {}
//^ warn: unused variable
fn ref_self(&self, u: i32) {}
//^ warn: unused variable
fn ref_mut_self(&mut self, u: i32) {}
//^ warn: unused variable
fn owned_mut_self(mut self) {}
//^^^^^^^^ 💡 warn: variable does not need to be mutable
}
"#,
);
}
#[test]
fn allow_unused_variables_for_identifiers_starting_with_underline() {
check_diagnostics(
r#"
fn main() {
let _x = 2;
}
"#,
);
}
#[test]
fn respect_lint_attributes_for_unused_variables() {
check_diagnostics(
r#"
fn main() {
#[allow(unused_variables)]
let x = 2;
}
#[deny(unused)]
fn main2() {
let x = 2;
//^ error: unused variable
}
"#,
);
}
}
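
For reference, an illustrative snippet (not taken from the commit) of what the new unused-variables lint flags, and the underscore prefix that silences it, as exercised by the tests above:

fn main() {
    let a = 2;  // warn: unused variable
    let _b = 3; // underscore-prefixed bindings are not reported
}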

Some files were not shown because too many files have changed in this diff.