Rollup merge of #137173 - lnicola:sync-from-ra, r=lnicola
Subtree update of `rust-analyzer` r? `@ghost`
Commit: fc829035c1
@@ -64,7 +64,11 @@ jobs:
       run: |
         rustup update --no-self-update ${{ env.RUST_CHANNEL }}
         rustup default ${{ env.RUST_CHANNEL }}
-        rustup component add --toolchain ${{ env.RUST_CHANNEL }} rustfmt rust-src
+        rustup component add --toolchain ${{ env.RUST_CHANNEL }} rust-src
+        # We always use a nightly rustfmt, regardless of channel, because we need
+        # --file-lines.
+        rustup toolchain add nightly --profile minimal
+        rustup component add --toolchain nightly rustfmt
     # https://github.com/actions-rust-lang/setup-rust-toolchain/blob/main/rust.json
     - name: Install Rust Problem Matcher
       if: matrix.os == 'ubuntu-latest'
@@ -1514,9 +1514,9 @@ dependencies = [

 [[package]]
 name = "ra-ap-rustc_abi"
-version = "0.94.0"
+version = "0.95.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2fa4333df7b71217edb44a36702cafd2bcfc9850677bdf78b32c9f2c98e5df40"
+checksum = "b40c4e339b71a8f075a829b1acaf32f870a11b466d9b8623d50b0ce33e65af95"
 dependencies = [
  "bitflags 2.7.0",
  "ra-ap-rustc_index",
@@ -1525,9 +1525,9 @@ dependencies = [

 [[package]]
 name = "ra-ap-rustc_index"
-version = "0.94.0"
+version = "0.95.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d200275ff3d952cc11108f4dc6a692473659758623d63f2bdcea6104a7f1cec8"
+checksum = "872072e2ba11d11147ebe9fde1608fe7f7d9b5c51dac524af28ee07c6dade468"
 dependencies = [
  "ra-ap-rustc_index_macros",
  "smallvec",
@@ -1535,9 +1535,9 @@ dependencies = [

 [[package]]
 name = "ra-ap-rustc_index_macros"
-version = "0.94.0"
+version = "0.95.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "06eb63df8c1ce2dcb07647305bed811c9c5ebd157def01a81c1b9479b8592b3b"
+checksum = "ffcd77debcaf2ad690a57c2d041c11eb33fe66869754b2c5f35c52954b46af0c"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1546,9 +1546,9 @@ dependencies = [

 [[package]]
 name = "ra-ap-rustc_lexer"
-version = "0.94.0"
+version = "0.95.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b7a4d402b2f85650e8c1f78e2e2defc241b03948d6e30d9f5254c9b82755cc4d"
+checksum = "49265cdf8823f8d246e476c79c60bd6e5b551c81ae76e1c8d6a5e0dc73df0bca"
 dependencies = [
  "memchr",
  "unicode-properties",
@@ -1557,9 +1557,9 @@ dependencies = [

 [[package]]
 name = "ra-ap-rustc_parse_format"
-version = "0.94.0"
+version = "0.95.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a23a382dbe392beb26360c1a8ce9193155ef74eeac59bcda0fa0a233e047323a"
+checksum = "b3da239fdc971176de0db45cb631d71475b52033a3d0027d91964da7be89eee6"
 dependencies = [
  "ra-ap-rustc_index",
  "ra-ap-rustc_lexer",
@@ -1567,9 +1567,9 @@ dependencies = [

 [[package]]
 name = "ra-ap-rustc_pattern_analysis"
-version = "0.94.0"
+version = "0.95.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d746955d67f315ab79767f1d0bbc17fee4f0970d4a00b9ad76bf09cc7d3cd17e"
+checksum = "56057d08fdfa0d95494e461bbdd5d4b3fdb349cca6be05ad7759bc964be1b8d4"
 dependencies = [
  "ra-ap-rustc_index",
  "rustc-hash 2.0.0",
@@ -87,11 +87,11 @@ vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
 vfs = { path = "./crates/vfs", version = "0.0.0" }
 edition = { path = "./crates/edition", version = "0.0.0" }

-ra-ap-rustc_lexer = { version = "0.94", default-features = false }
-ra-ap-rustc_parse_format = { version = "0.94", default-features = false }
-ra-ap-rustc_index = { version = "0.94", default-features = false }
-ra-ap-rustc_abi = { version = "0.94", default-features = false }
-ra-ap-rustc_pattern_analysis = { version = "0.94", default-features = false }
+ra-ap-rustc_lexer = { version = "0.95", default-features = false }
+ra-ap-rustc_parse_format = { version = "0.95", default-features = false }
+ra-ap-rustc_index = { version = "0.95", default-features = false }
+ra-ap-rustc_abi = { version = "0.95", default-features = false }
+ra-ap-rustc_pattern_analysis = { version = "0.95", default-features = false }

 # local crates that aren't published to crates.io. These should not have versions.
@@ -250,6 +250,7 @@ bitflags::bitflags! {
         const RUSTC_HAS_INCOHERENT_INHERENT_IMPLS = 1 << 3;
         const SKIP_ARRAY_DURING_METHOD_DISPATCH = 1 << 4;
         const SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH = 1 << 5;
+        const RUSTC_PAREN_SUGAR = 1 << 6;
     }
 }
@@ -294,6 +295,9 @@ impl TraitData {
         if attrs.by_key(&sym::rustc_has_incoherent_inherent_impls).exists() {
             flags |= TraitFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
         }
+        if attrs.by_key(&sym::rustc_paren_sugar).exists() {
+            flags |= TraitFlags::RUSTC_PAREN_SUGAR;
+        }

         let mut skip_array_during_method_dispatch =
             attrs.by_key(&sym::rustc_skip_array_during_method_dispatch).exists();
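For context on the pattern in this hunk: `TraitFlags` packs per-trait boolean attributes into a bitset via the `bitflags` crate, so recording the new `#[rustc_paren_sugar]` attribute only costs the next free bit. A minimal standalone sketch of the same shape (names are illustrative, not rust-analyzer's actual flag set):

```rust
use bitflags::bitflags;

bitflags! {
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct TraitFlags: u16 {
        const IS_AUTO = 1 << 0;
        const IS_UNSAFE = 1 << 1;
        const RUSTC_PAREN_SUGAR = 1 << 6;
    }
}

fn main() {
    let mut flags = TraitFlags::empty();
    // Set a flag when the corresponding attribute is found, as the hunk does.
    flags |= TraitFlags::RUSTC_PAREN_SUGAR;
    assert!(flags.contains(TraitFlags::RUSTC_PAREN_SUGAR));
    assert!(!flags.contains(TraitFlags::IS_AUTO));
}
```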
@@ -31,9 +31,9 @@ pub mod keys {

     use crate::{
         dyn_map::{DynMap, Policy},
-        BlockId, ConstId, EnumId, EnumVariantId, ExternCrateId, FieldId, FunctionId, ImplId,
-        LifetimeParamId, Macro2Id, MacroRulesId, ProcMacroId, StaticId, StructId, TraitAliasId,
-        TraitId, TypeAliasId, TypeOrConstParamId, UnionId, UseId,
+        BlockId, ConstId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId, FieldId, FunctionId,
+        ImplId, LifetimeParamId, Macro2Id, MacroRulesId, ProcMacroId, StaticId, StructId,
+        TraitAliasId, TraitId, TypeAliasId, TypeOrConstParamId, UnionId, UseId,
     };

     pub type Key<K, V> = crate::dyn_map::Key<AstPtr<K>, V, AstPtrPolicy<K, V>>;
@@ -44,6 +44,7 @@ pub mod keys {
     pub const STATIC: Key<ast::Static, StaticId> = Key::new();
     pub const TYPE_ALIAS: Key<ast::TypeAlias, TypeAliasId> = Key::new();
     pub const IMPL: Key<ast::Impl, ImplId> = Key::new();
+    pub const EXTERN_BLOCK: Key<ast::ExternBlock, ExternBlockId> = Key::new();
     pub const TRAIT: Key<ast::Trait, TraitId> = Key::new();
     pub const TRAIT_ALIAS: Key<ast::TraitAlias, TraitAliasId> = Key::new();
     pub const STRUCT: Key<ast::Struct, StructId> = Key::new();
@@ -112,9 +112,9 @@ pub struct ExpressionStoreSourceMap {
     // AST expressions can create patterns in destructuring assignments. Therefore, `ExprSource` can also map
     // to `PatId`, and `PatId` can also map to `ExprSource` (the other way around is unaffected).
     expr_map: FxHashMap<ExprSource, ExprOrPatId>,
-    expr_map_back: ArenaMap<ExprId, ExprSource>,
+    expr_map_back: ArenaMap<ExprId, ExprOrPatSource>,

-    pat_map: FxHashMap<PatSource, PatId>,
+    pat_map: FxHashMap<PatSource, ExprOrPatId>,
     pat_map_back: ArenaMap<PatId, ExprOrPatSource>,

     label_map: FxHashMap<LabelSource, LabelId>,
@@ -606,12 +606,12 @@ impl Index<TypeRefId> for ExpressionStore {
 impl ExpressionStoreSourceMap {
     pub fn expr_or_pat_syntax(&self, id: ExprOrPatId) -> Result<ExprOrPatSource, SyntheticSyntax> {
         match id {
-            ExprOrPatId::ExprId(id) => self.expr_syntax(id).map(|it| it.map(AstPtr::wrap_left)),
+            ExprOrPatId::ExprId(id) => self.expr_syntax(id),
             ExprOrPatId::PatId(id) => self.pat_syntax(id),
         }
     }

-    pub fn expr_syntax(&self, expr: ExprId) -> Result<ExprSource, SyntheticSyntax> {
+    pub fn expr_syntax(&self, expr: ExprId) -> Result<ExprOrPatSource, SyntheticSyntax> {
         self.expr_map_back.get(expr).cloned().ok_or(SyntheticSyntax)
     }

@@ -633,7 +633,7 @@ impl ExpressionStoreSourceMap {
         self.pat_map_back.get(pat).cloned().ok_or(SyntheticSyntax)
     }

-    pub fn node_pat(&self, node: InFile<&ast::Pat>) -> Option<PatId> {
+    pub fn node_pat(&self, node: InFile<&ast::Pat>) -> Option<ExprOrPatId> {
         self.pat_map.get(&node.map(AstPtr::new)).cloned()
     }

@@ -44,8 +44,8 @@ use crate::{
             FormatPlaceholder, FormatSign, FormatTrait,
         },
         Array, Binding, BindingAnnotation, BindingId, BindingProblems, CaptureBy, ClosureKind,
-        Expr, ExprId, Item, Label, LabelId, Literal, LiteralOrConst, MatchArm, Movability,
-        OffsetOf, Pat, PatId, RecordFieldPat, RecordLitField, Statement,
+        Expr, ExprId, Item, Label, LabelId, Literal, MatchArm, Movability, OffsetOf, Pat, PatId,
+        RecordFieldPat, RecordLitField, Statement,
     },
     item_scope::BuiltinShadowMode,
     lang_item::LangItem,
@@ -1784,23 +1784,33 @@ impl ExprCollector<'_> {
                 self.collect_macro_call(call, macro_ptr, true, |this, expanded_pat| {
                     this.collect_pat_opt(expanded_pat, binding_list)
                 });
-                self.source_map.pat_map.insert(src, pat);
+                self.source_map.pat_map.insert(src, pat.into());
                 return pat;
             }
             None => Pat::Missing,
         },
         // FIXME: implement in a way that also builds source map and calculates assoc resolutions in type inference.
         ast::Pat::RangePat(p) => {
-            let mut range_part_lower = |p: Option<ast::Pat>| {
-                p.and_then(|it| match &it {
-                    ast::Pat::LiteralPat(it) => {
-                        Some(Box::new(LiteralOrConst::Literal(pat_literal_to_hir(it)?.0)))
-                    }
-                    pat @ (ast::Pat::IdentPat(_) | ast::Pat::PathPat(_)) => {
-                        let subpat = self.collect_pat(pat.clone(), binding_list);
-                        Some(Box::new(LiteralOrConst::Const(subpat)))
-                    }
-                    _ => None,
+            let mut range_part_lower = |p: Option<ast::Pat>| -> Option<ExprId> {
+                p.and_then(|it| {
+                    let ptr = PatPtr::new(&it);
+                    match &it {
+                        ast::Pat::LiteralPat(it) => Some(self.alloc_expr_from_pat(
+                            Expr::Literal(pat_literal_to_hir(it)?.0),
+                            ptr,
+                        )),
+                        ast::Pat::IdentPat(ident) if ident.is_simple_ident() => ident
+                            .name()
+                            .map(|name| name.as_name())
+                            .map(Path::from)
+                            .map(|path| self.alloc_expr_from_pat(Expr::Path(path), ptr)),
+                        ast::Pat::PathPat(p) => p
+                            .path()
+                            .and_then(|path| self.parse_path(path))
+                            .map(|parsed| self.alloc_expr_from_pat(Expr::Path(parsed), ptr)),
+                        // We only need to handle literal, ident (if bare) and path patterns here,
+                        // as any other pattern as a range pattern operand is semantically invalid.
+                        _ => None,
+                    }
                 })
             };
             let start = range_part_lower(p.start());
@@ -1863,7 +1873,7 @@ impl ExprCollector<'_> {
             }
         });
         if let Some(pat) = pat.left() {
-            self.source_map.pat_map.insert(src, pat);
+            self.source_map.pat_map.insert(src, pat.into());
         }
         pat
     }
@@ -2490,7 +2500,7 @@ impl ExprCollector<'_> {
     fn alloc_expr(&mut self, expr: Expr, ptr: ExprPtr) -> ExprId {
         let src = self.expander.in_file(ptr);
         let id = self.store.exprs.alloc(expr);
-        self.source_map.expr_map_back.insert(id, src);
+        self.source_map.expr_map_back.insert(id, src.map(AstPtr::wrap_left));
         self.source_map.expr_map.insert(src, id.into());
         id
     }
@@ -2502,7 +2512,7 @@ impl ExprCollector<'_> {
     fn alloc_expr_desugared_with_ptr(&mut self, expr: Expr, ptr: ExprPtr) -> ExprId {
         let src = self.expander.in_file(ptr);
         let id = self.store.exprs.alloc(expr);
-        self.source_map.expr_map_back.insert(id, src);
+        self.source_map.expr_map_back.insert(id, src.map(AstPtr::wrap_left));
         // We intentionally don't fill this as it could overwrite a non-desugared entry
         // self.source_map.expr_map.insert(src, id);
         id
@@ -2526,11 +2536,20 @@ impl ExprCollector<'_> {
         self.source_map.pat_map_back.insert(id, src.map(AstPtr::wrap_left));
         id
     }

+    fn alloc_expr_from_pat(&mut self, expr: Expr, ptr: PatPtr) -> ExprId {
+        let src = self.expander.in_file(ptr);
+        let id = self.store.exprs.alloc(expr);
+        self.source_map.pat_map.insert(src, id.into());
+        self.source_map.expr_map_back.insert(id, src.map(AstPtr::wrap_right));
+        id
+    }
+
     fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId {
         let src = self.expander.in_file(ptr);
         let id = self.store.pats.alloc(pat);
         self.source_map.pat_map_back.insert(id, src.map(AstPtr::wrap_right));
-        self.source_map.pat_map.insert(src, id);
+        self.source_map.pat_map.insert(src, id.into());
         id
     }
     // FIXME: desugared pats don't have ptr, that's wrong and should be fixed somehow.
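The new `alloc_expr_from_pat` is the crux of this hunk: a range-pattern bound such as `L..=x::R` is now allocated as a HIR *expression* while its source pointer remains a *pattern* pointer, so the back-map stores an `Either` of the two pointer kinds (`wrap_left`/`wrap_right`). A reduced sketch of that two-sided mapping, with simplified stand-in types rather than rust-analyzer's real ones:

```rust
use either::Either;
use std::collections::HashMap;

type ExprId = u32;

// Stand-ins for AST pointers on the expression and pattern sides.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
struct ExprPtr(u32);
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
struct PatPtr(u32);

#[derive(Default)]
struct SourceMap {
    // An expression's originating syntax can now be either node kind.
    expr_map_back: HashMap<ExprId, Either<ExprPtr, PatPtr>>,
}

impl SourceMap {
    fn alloc_expr(&mut self, id: ExprId, ptr: ExprPtr) {
        self.expr_map_back.insert(id, Either::Left(ptr));
    }

    // Like `alloc_expr_from_pat`: the HIR node is an expression,
    // but it originates from pattern syntax.
    fn alloc_expr_from_pat(&mut self, id: ExprId, ptr: PatPtr) {
        self.expr_map_back.insert(id, Either::Right(ptr));
    }
}

fn main() {
    let mut map = SourceMap::default();
    map.alloc_expr(0, ExprPtr(10));
    map.alloc_expr_from_pat(1, PatPtr(20)); // e.g. a range-pattern bound
    assert_eq!(map.expr_map_back[&1], Either::Right(PatPtr(20)));
}
```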
@@ -6,10 +6,7 @@ use itertools::Itertools;
 use span::Edition;

 use crate::{
-    hir::{
-        Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, LiteralOrConst, Movability,
-        Statement,
-    },
+    hir::{Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, Movability, Statement},
     pretty::{print_generic_args, print_path, print_type_ref},
 };

@@ -656,11 +653,11 @@ impl Printer<'_> {
             }
             Pat::Range { start, end } => {
                 if let Some(start) = start {
-                    self.print_literal_or_const(start);
+                    self.print_expr(*start);
                 }
                 w!(self, "..=");
                 if let Some(end) = end {
-                    self.print_literal_or_const(end);
+                    self.print_expr(*end);
                 }
             }
             Pat::Slice { prefix, slice, suffix } => {
@@ -757,13 +754,6 @@ impl Printer<'_> {
         }
     }

-    fn print_literal_or_const(&mut self, literal_or_const: &LiteralOrConst) {
-        match literal_or_const {
-            LiteralOrConst::Literal(l) => self.print_literal(l),
-            LiteralOrConst::Const(c) => self.print_pat(*c),
-        }
-    }
-
     fn print_literal(&mut self, literal: &Literal) {
         match literal {
             Literal::String(it) => w!(self, "{:?}", it),
@@ -1,11 +1,10 @@
 mod block;

+use crate::{hir::MatchArm, test_db::TestDB, ModuleDefId};
 use expect_test::{expect, Expect};
 use la_arena::RawIdx;
 use test_fixture::WithFixture;

-use crate::{test_db::TestDB, ModuleDefId};
-
 use super::*;

 fn lower(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> (TestDB, Arc<Body>, DefWithBodyId) {
@@ -460,3 +459,45 @@ async fn foo(a: (), b: i32) -> u32 {
     expect!["fn foo(<28>: (), <20>: i32) -> impl ::core::future::Future::<Output = u32> <20>"]
         .assert_eq(&printed);
 }
+
+#[test]
+fn range_bounds_are_hir_exprs() {
+    let (_, body, _) = lower(
+        r#"
+pub const L: i32 = 6;
+mod x {
+    pub const R: i32 = 100;
+}
+const fn f(x: i32) -> i32 {
+    match x {
+        -1..=5 => x * 10,
+        L..=x::R => x * 100,
+        _ => x,
+    }
+}"#,
+    );
+
+    let mtch_arms = body
+        .exprs
+        .iter()
+        .find_map(|(_, expr)| {
+            if let Expr::Match { arms, .. } = expr {
+                return Some(arms);
+            }
+
+            None
+        })
+        .unwrap();
+
+    let MatchArm { pat, .. } = mtch_arms[1];
+    match body.pats[pat] {
+        Pat::Range { start, end } => {
+            let hir_start = &body.exprs[start.unwrap()];
+            let hir_end = &body.exprs[end.unwrap()];
+
+            assert!(matches!(hir_start, Expr::Path { .. }));
+            assert!(matches!(hir_end, Expr::Path { .. }));
+        }
+        _ => {}
+    }
+}
@@ -55,12 +55,20 @@ impl ExprOrPatId {
         }
     }

+    pub fn is_expr(&self) -> bool {
+        matches!(self, Self::ExprId(_))
+    }
+
     pub fn as_pat(self) -> Option<PatId> {
         match self {
             Self::PatId(v) => Some(v),
             _ => None,
         }
     }
+
+    pub fn is_pat(&self) -> bool {
+        matches!(self, Self::PatId(_))
+    }
 }
 stdx::impl_from!(ExprId, PatId for ExprOrPatId);

@@ -571,8 +579,8 @@ pub enum Pat {
         ellipsis: bool,
     },
     Range {
-        start: Option<Box<LiteralOrConst>>,
-        end: Option<Box<LiteralOrConst>>,
+        start: Option<ExprId>,
+        end: Option<ExprId>,
     },
     Slice {
         prefix: Box<[PatId]>,
@@ -320,7 +320,7 @@ impl SearchMode {
             };
             match m {
                 Some((index, _)) => {
-                    name = &name[index + 1..];
+                    name = name[index..].strip_prefix(|_: char| true).unwrap_or_default();
                     true
                 }
                 None => false,
@@ -1039,4 +1039,22 @@ pub mod fmt {
             "#]],
         );
     }
+
+    #[test]
+    fn unicode_fn_name() {
+        let ra_fixture = r#"
+//- /main.rs crate:main deps:dep
+//- /dep.rs crate:dep
+pub fn あい() {}
+"#;
+
+        check_search(
+            ra_fixture,
+            "main",
+            Query::new("あ".to_owned()).fuzzy(),
+            expect![[r#"
+                dep::あい (f)
+            "#]],
+        );
+    }
 }
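The `strip_prefix(|_: char| true)` trick above skips exactly one character rather than one byte: `&name[index + 1..]` assumes the matched character is one byte wide and panics on a multi-byte UTF-8 boundary, which is exactly what the new `unicode_fn_name` test exercises. A standalone sketch of the difference (hypothetical strings, not the real query code):

```rust
fn main() {
    let name = "あい"; // 'あ' is 3 bytes in UTF-8
    let index = 0; // byte index where a match was found

    // Byte-based slicing would panic here: byte 1 is not a char boundary.
    // let rest = &name[index + 1..];

    // Skipping one full character is boundary-safe:
    let rest = name[index..].strip_prefix(|_: char| true).unwrap_or_default();
    assert_eq!(rest, "い");
}
```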
@@ -18,8 +18,8 @@ use crate::{
     db::DefDatabase,
     per_ns::{Item, MacrosItem, PerNs, TypesItem, ValuesItem},
     visibility::{Visibility, VisibilityExplicitness},
-    AdtId, BuiltinType, ConstId, ExternCrateId, FxIndexMap, HasModule, ImplId, LocalModuleId,
-    Lookup, MacroId, ModuleDefId, ModuleId, TraitId, UseId,
+    AdtId, BuiltinType, ConstId, ExternBlockId, ExternCrateId, FxIndexMap, HasModule, ImplId,
+    LocalModuleId, Lookup, MacroId, ModuleDefId, ModuleId, TraitId, UseId,
 };

 #[derive(Debug, Default)]
@@ -158,6 +158,8 @@ pub struct ItemScope {
     declarations: Vec<ModuleDefId>,

     impls: Vec<ImplId>,
+    #[allow(clippy::box_collection)]
+    extern_blocks: Option<Box<Vec<ExternBlockId>>>,
     unnamed_consts: Vec<ConstId>,
     /// Traits imported via `use Trait as _;`.
     unnamed_trait_imports: FxHashMap<TraitId, Item<()>>,
@@ -319,6 +321,10 @@ impl ItemScope {
         self.extern_crate_decls.iter().copied()
     }

+    pub fn extern_blocks(&self) -> impl Iterator<Item = ExternBlockId> + '_ {
+        self.extern_blocks.iter().flat_map(|it| it.iter()).copied()
+    }
+
     pub fn use_decls(&self) -> impl ExactSizeIterator<Item = UseId> + '_ {
         self.use_decls.iter().copied()
     }
@@ -469,6 +475,10 @@ impl ItemScope {
         self.impls.push(imp);
     }

+    pub(crate) fn define_extern_block(&mut self, extern_block: ExternBlockId) {
+        self.extern_blocks.get_or_insert_default().push(extern_block);
+    }
+
     pub(crate) fn define_extern_crate_decl(&mut self, extern_crate: ExternCrateId) {
         self.extern_crate_decls.push(extern_crate);
     }
@@ -806,7 +816,11 @@ impl ItemScope {
             use_imports_types,
             use_imports_macros,
             macro_invocations,
+            extern_blocks,
         } = self;
+        if let Some(it) = extern_blocks {
+            it.shrink_to_fit();
+        }
         types.shrink_to_fit();
         values.shrink_to_fit();
         macros.shrink_to_fit();
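The `Option<Box<Vec<ExternBlockId>>>` shape (with its `clippy::box_collection` allow) keeps the common empty case at a single pointer-sized `None` instead of embedding a three-word `Vec` in every `ItemScope`. A quick standalone sketch of the size difference this buys:

```rust
fn main() {
    // An inline Vec costs three words even when empty (ptr, capacity, length).
    assert_eq!(std::mem::size_of::<Vec<u32>>(), 3 * std::mem::size_of::<usize>());
    // Boxing the Vec and wrapping it in Option shrinks the field to one word;
    // Option<Box<_>> stays pointer-sized thanks to the null-pointer niche.
    assert_eq!(
        std::mem::size_of::<Option<Box<Vec<u32>>>>(),
        std::mem::size_of::<usize>()
    );
}
```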
@@ -5,7 +5,7 @@
 //! in-memory macros.
 use expect_test::expect;

-use crate::macro_expansion_tests::check;
+use crate::macro_expansion_tests::{check, check_errors};

 #[test]
 fn attribute_macro_attr_censoring() {
@@ -216,3 +216,21 @@ struct S;
 #[doc = "doc attr"] struct S;"##]],
     );
 }
+
+#[test]
+fn cfg_evaluated_before_attr_macros() {
+    check_errors(
+        r#"
+//- proc_macros: disallow_cfg
+
+use proc_macros::disallow_cfg;
+
+#[disallow_cfg] #[cfg(false)] fn foo() {}
+// True cfg are kept.
+// #[disallow_cfg] #[cfg(true)] fn bar() {}
+#[disallow_cfg] #[cfg_attr(false, inline)] fn baz() {}
+#[disallow_cfg] #[cfg_attr(true, inline)] fn qux() {}
+    "#,
+        expect![[r#""#]],
+    );
+}
@@ -1759,16 +1759,20 @@ impl ModCollector<'_, '_> {
                     );
                 }
             }
-            ModItem::ExternBlock(block) => self.collect(
-                &self.item_tree[block].children,
-                ItemContainerId::ExternBlockId(
-                    ExternBlockLoc {
-                        container: module,
-                        id: ItemTreeId::new(self.tree_id, block),
-                    }
-                    .intern(db),
-                ),
-            ),
+            ModItem::ExternBlock(block) => {
+                let extern_block_id = ExternBlockLoc {
+                    container: module,
+                    id: ItemTreeId::new(self.tree_id, block),
+                }
+                .intern(db);
+                self.def_collector.def_map.modules[self.module_id]
+                    .scope
+                    .define_extern_block(extern_block_id);
+                self.collect(
+                    &self.item_tree[block].children,
+                    ItemContainerId::ExternBlockId(extern_block_id),
+                )
+            }
             ModItem::MacroCall(mac) => self.collect_macro_call(&self.item_tree[mac], container),
             ModItem::MacroRules(id) => self.collect_macro_rules(id, module),
             ModItem::Macro2(id) => self.collect_macro_def(id, module),
@@ -173,10 +173,7 @@ impl Path {
                 segments: path.mod_path().segments(),
                 generic_args: Some(path.generic_args()),
             },
-            Path::LangItem(_, seg) => PathSegments {
-                segments: seg.as_ref().map_or(&[], |seg| std::slice::from_ref(seg)),
-                generic_args: None,
-            },
+            Path::LangItem(_, seg) => PathSegments { segments: seg.as_slice(), generic_args: None },
         }
     }

@@ -240,6 +237,11 @@ pub struct PathSegment<'a> {
     pub args_and_bindings: Option<&'a GenericArgs>,
 }

+impl PathSegment<'_> {
+    pub const MISSING: PathSegment<'static> =
+        PathSegment { name: &Name::missing(), args_and_bindings: None };
+}
+
 #[derive(Debug, Clone, Copy)]
 pub struct PathSegments<'a> {
     segments: &'a [Name],
@@ -327,8 +327,9 @@ impl Resolver {
             | LangItemTarget::ImplDef(_)
             | LangItemTarget::Static(_) => return None,
         };
+        // Remaining segments start from 0 because lang paths have no segments other than the remaining.
         return Some((
-            ResolveValueResult::Partial(type_ns, 1, None),
+            ResolveValueResult::Partial(type_ns, 0, None),
             ResolvePathResultPrefixInfo::default(),
         ));
     }
@@ -201,9 +201,6 @@ pub(crate) fn process_cfg_attrs(
         MacroDefKind::BuiltInAttr(_, expander) => expander.is_derive(),
         _ => false,
     };
-    if !is_derive {
-        return None;
-    }
     let mut remove = FxHashSet::default();

     let item = ast::Item::cast(node.clone())?;
@@ -220,28 +217,43 @@ pub(crate) fn process_cfg_attrs(
             }
         }
     }
-    match item {
-        ast::Item::Struct(it) => match it.field_list()? {
-            ast::FieldList::RecordFieldList(fields) => {
-                process_has_attrs_with_possible_comma(db, fields.fields(), loc.krate, &mut remove)?;
-            }
-            ast::FieldList::TupleFieldList(fields) => {
-                process_has_attrs_with_possible_comma(db, fields.fields(), loc.krate, &mut remove)?;
-            }
-        },
-        ast::Item::Enum(it) => {
-            process_enum(db, it.variant_list()?, loc.krate, &mut remove)?;
-        }
-        ast::Item::Union(it) => {
-            process_has_attrs_with_possible_comma(
-                db,
-                it.record_field_list()?.fields(),
-                loc.krate,
-                &mut remove,
-            )?;
-        }
-        // FIXME: Implement for other items if necessary. As we do not support #[cfg_eval] yet, we do not need to implement it for now
-        _ => {}
-    }
+    if is_derive {
+        // Only derives get their code cfg-clean, normal attribute macros process only the cfg at their level
+        // (cfg_attr is handled above, cfg is handled in the def map).
+        match item {
+            ast::Item::Struct(it) => match it.field_list()? {
+                ast::FieldList::RecordFieldList(fields) => {
+                    process_has_attrs_with_possible_comma(
+                        db,
+                        fields.fields(),
+                        loc.krate,
+                        &mut remove,
+                    )?;
+                }
+                ast::FieldList::TupleFieldList(fields) => {
+                    process_has_attrs_with_possible_comma(
+                        db,
+                        fields.fields(),
+                        loc.krate,
+                        &mut remove,
+                    )?;
+                }
+            },
+            ast::Item::Enum(it) => {
+                process_enum(db, it.variant_list()?, loc.krate, &mut remove)?;
+            }
+            ast::Item::Union(it) => {
+                process_has_attrs_with_possible_comma(
+                    db,
+                    it.record_field_list()?.fields(),
+                    loc.krate,
+                    &mut remove,
+                )?;
+            }
+            // FIXME: Implement for other items if necessary. As we do not support #[cfg_eval] yet, we do not need to implement it for now
+            _ => {}
+        }
+    }
     Some(remove)
 }
@@ -142,8 +142,8 @@ impl Name {
     /// Ideally, we want a `gensym` semantics for missing names -- each missing
     /// name is equal only to itself. It's not clear how to implement this in
     /// salsa though, so we punt on that bit for a moment.
-    pub fn missing() -> Name {
-        Name { symbol: sym::MISSING_NAME.clone(), ctx: () }
+    pub const fn missing() -> Name {
+        Name { symbol: sym::consts::MISSING_NAME, ctx: () }
     }

     /// Returns true if this is a fake name for things missing in the source code. See
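Making `Name::missing()` a `const fn` (backed by a pre-interned symbol constant rather than a runtime `clone()`) is what enables the `PathSegment::MISSING` constant earlier in this diff to hold `&Name::missing()`. A reduced sketch of the same pattern, with the interned symbol type simplified to a plain wrapper:

```rust
#[derive(Debug, PartialEq, Eq)]
struct Symbol(&'static str); // stand-in for the interned symbol type

#[derive(Debug, PartialEq, Eq)]
struct Name {
    symbol: Symbol,
}

impl Name {
    // A const fn can run at compile time, so its result can back a constant...
    const fn missing() -> Name {
        Name { symbol: Symbol("[missing name]") }
    }
}

struct PathSegment<'a> {
    name: &'a Name,
}

impl PathSegment<'_> {
    // ...and a const item may hold a `'static` reference to that value.
    const MISSING: PathSegment<'static> = PathSegment { name: &Name::missing() };
}

fn main() {
    assert_eq!(PathSegment::MISSING.name, &Name::missing());
}
```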
@@ -768,23 +768,21 @@ pub(crate) fn adt_datum_query(
         phantom_data,
     };

-    // this slows down rust-analyzer by quite a bit unfortunately, so enabling this is currently not worth it
-    let _variant_id_to_fields = |id: VariantId| {
+    let variant_id_to_fields = |id: VariantId| {
         let variant_data = &id.variant_data(db.upcast());
-        let fields = if variant_data.fields().is_empty() {
+        let fields = if variant_data.fields().is_empty() || bound_vars_subst.is_empty(Interner) {
             vec![]
         } else {
-            let field_types = db.field_types(id);
-            variant_data
-                .fields()
-                .iter()
-                .map(|(idx, _)| field_types[idx].clone().substitute(Interner, &bound_vars_subst))
-                .filter(|it| !it.contains_unknown())
-                .collect()
+            // HACK: provide full struct type info slows down rust-analyzer by quite a bit unfortunately,
+            // so we trick chalk into thinking that our struct impl Unsize
+            if let Some(ty) = bound_vars_subst.at(Interner, 0).ty(Interner) {
+                vec![ty.clone()]
+            } else {
+                vec![]
+            }
         };
         rust_ir::AdtVariantDatum { fields }
     };
-    let variant_id_to_fields = |_: VariantId| rust_ir::AdtVariantDatum { fields: vec![] };

     let (kind, variants) = match adt_id {
         hir_def::AdtId::StructId(id) => {
@@ -440,7 +440,9 @@ impl ExprValidator {
             return;
         };
         let root = source_ptr.file_syntax(db.upcast());
-        let ast::Expr::IfExpr(if_expr) = source_ptr.value.to_node(&root) else {
+        let either::Left(ast::Expr::IfExpr(if_expr)) =
+            source_ptr.value.to_node(&root)
+        else {
             return;
         };
         let mut top_if_expr = if_expr;
@@ -16,7 +16,7 @@
 pub(crate) mod cast;
 pub(crate) mod closure;
 mod coerce;
-mod diagnostics;
+pub(crate) mod diagnostics;
 mod expr;
 mod mutability;
 mod pat;
@@ -236,7 +236,7 @@ pub enum InferenceDiagnostic {
         name: Name,
         /// Contains the type the field resolves to
         field_with_same_name: Option<Ty>,
-        assoc_func_with_same_name: Option<AssocItemId>,
+        assoc_func_with_same_name: Option<FunctionId>,
     },
     UnresolvedAssocItem {
         id: ExprOrPatId,
|
||||
}
|
||||
|
||||
fn write_expr_adj(&mut self, expr: ExprId, adjustments: Vec<Adjustment>) {
|
||||
self.result.expr_adjustments.insert(expr, adjustments);
|
||||
if adjustments.is_empty() {
|
||||
return;
|
||||
}
|
||||
match self.result.expr_adjustments.entry(expr) {
|
||||
std::collections::hash_map::Entry::Occupied(mut entry) => {
|
||||
match (&mut entry.get_mut()[..], &adjustments[..]) {
|
||||
(
|
||||
[Adjustment { kind: Adjust::NeverToAny, target }],
|
||||
[.., Adjustment { target: new_target, .. }],
|
||||
) => {
|
||||
// NeverToAny coercion can target any type, so instead of adding a new
|
||||
// adjustment on top we can change the target.
|
||||
*target = new_target.clone();
|
||||
}
|
||||
_ => {
|
||||
*entry.get_mut() = adjustments;
|
||||
}
|
||||
}
|
||||
}
|
||||
std::collections::hash_map::Entry::Vacant(entry) => {
|
||||
entry.insert(adjustments);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn write_method_resolution(&mut self, expr: ExprId, func: FunctionId, subst: Substitution) {
|
||||
@ -1480,21 +1502,22 @@ impl<'a> InferenceContext<'a> {
|
||||
&self.diagnostics,
|
||||
InferenceTyDiagnosticSource::Body,
|
||||
);
|
||||
let mut path_ctx = ctx.at_path(path, node);
|
||||
let (resolution, unresolved) = if value_ns {
|
||||
let Some(res) = ctx.resolve_path_in_value_ns(path, node, HygieneId::ROOT) else {
|
||||
let Some(res) = path_ctx.resolve_path_in_value_ns(HygieneId::ROOT) else {
|
||||
return (self.err_ty(), None);
|
||||
};
|
||||
match res {
|
||||
ResolveValueResult::ValueNs(value, _) => match value {
|
||||
ValueNs::EnumVariantId(var) => {
|
||||
let substs = ctx.substs_from_path(path, var.into(), true);
|
||||
let substs = path_ctx.substs_from_path(var.into(), true);
|
||||
drop(ctx);
|
||||
let ty = self.db.ty(var.lookup(self.db.upcast()).parent.into());
|
||||
let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
|
||||
return (ty, Some(var.into()));
|
||||
}
|
||||
ValueNs::StructId(strukt) => {
|
||||
let substs = ctx.substs_from_path(path, strukt.into(), true);
|
||||
let substs = path_ctx.substs_from_path(strukt.into(), true);
|
||||
drop(ctx);
|
||||
let ty = self.db.ty(strukt.into());
|
||||
let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
|
||||
@ -1509,7 +1532,7 @@ impl<'a> InferenceContext<'a> {
|
||||
ResolveValueResult::Partial(typens, unresolved, _) => (typens, Some(unresolved)),
|
||||
}
|
||||
} else {
|
||||
match ctx.resolve_path_in_type_ns(path, node) {
|
||||
match path_ctx.resolve_path_in_type_ns() {
|
||||
Some((it, idx)) => (it, idx),
|
||||
None => return (self.err_ty(), None),
|
||||
}
|
||||
@ -1520,21 +1543,21 @@ impl<'a> InferenceContext<'a> {
|
||||
};
|
||||
return match resolution {
|
||||
TypeNs::AdtId(AdtId::StructId(strukt)) => {
|
||||
let substs = ctx.substs_from_path(path, strukt.into(), true);
|
||||
let substs = path_ctx.substs_from_path(strukt.into(), true);
|
||||
drop(ctx);
|
||||
let ty = self.db.ty(strukt.into());
|
||||
let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
|
||||
forbid_unresolved_segments((ty, Some(strukt.into())), unresolved)
|
||||
}
|
||||
TypeNs::AdtId(AdtId::UnionId(u)) => {
|
||||
let substs = ctx.substs_from_path(path, u.into(), true);
|
||||
let substs = path_ctx.substs_from_path(u.into(), true);
|
||||
drop(ctx);
|
||||
let ty = self.db.ty(u.into());
|
||||
let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
|
||||
forbid_unresolved_segments((ty, Some(u.into())), unresolved)
|
||||
}
|
||||
TypeNs::EnumVariantId(var) => {
|
||||
let substs = ctx.substs_from_path(path, var.into(), true);
|
||||
let substs = path_ctx.substs_from_path(var.into(), true);
|
||||
drop(ctx);
|
||||
let ty = self.db.ty(var.lookup(self.db.upcast()).parent.into());
|
||||
let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
|
||||
@ -1545,31 +1568,32 @@ impl<'a> InferenceContext<'a> {
|
||||
let substs = generics.placeholder_subst(self.db);
|
||||
let mut ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs);
|
||||
|
||||
let Some(mut remaining_idx) = unresolved else {
|
||||
let Some(remaining_idx) = unresolved else {
|
||||
drop(ctx);
|
||||
return self.resolve_variant_on_alias(ty, None, mod_path);
|
||||
};
|
||||
|
||||
let mut remaining_segments = path.segments().skip(remaining_idx);
|
||||
|
||||
if remaining_segments.len() >= 2 {
|
||||
path_ctx.ignore_last_segment();
|
||||
}
|
||||
|
||||
// We need to try resolving unresolved segments one by one because each may resolve
|
||||
// to a projection, which `TyLoweringContext` cannot handle on its own.
|
||||
let mut tried_resolving_once = false;
|
||||
while !remaining_segments.is_empty() {
|
||||
let resolved_segment = path.segments().get(remaining_idx - 1).unwrap();
|
||||
let current_segment = remaining_segments.take(1);
|
||||
|
||||
while let Some(current_segment) = remaining_segments.first() {
|
||||
// If we can resolve to an enum variant, it takes priority over associated type
|
||||
// of the same name.
|
||||
if let Some((AdtId::EnumId(id), _)) = ty.as_adt() {
|
||||
let enum_data = self.db.enum_data(id);
|
||||
let name = current_segment.first().unwrap().name;
|
||||
if let Some(variant) = enum_data.variant(name) {
|
||||
if let Some(variant) = enum_data.variant(current_segment.name) {
|
||||
return if remaining_segments.len() == 1 {
|
||||
(ty, Some(variant.into()))
|
||||
} else {
|
||||
// We still have unresolved paths, but enum variants never have
|
||||
// associated types!
|
||||
// FIXME: Report an error.
|
||||
(self.err_ty(), None)
|
||||
};
|
||||
}
|
||||
@ -1578,23 +1602,13 @@ impl<'a> InferenceContext<'a> {
|
||||
if tried_resolving_once {
|
||||
// FIXME: with `inherent_associated_types` this is allowed, but our `lower_partly_resolved_path()`
|
||||
// will need to be updated to err at the correct segment.
|
||||
//
|
||||
// We need to stop here because otherwise the segment index passed to `lower_partly_resolved_path()`
|
||||
// will be incorrect, and that can mess up error reporting.
|
||||
break;
|
||||
}
|
||||
|
||||
// `lower_partly_resolved_path()` returns `None` as type namespace unless
|
||||
// `remaining_segments` is empty, which is never the case here. We don't know
|
||||
// which namespace the new `ty` is in until normalized anyway.
|
||||
(ty, _) = ctx.lower_partly_resolved_path(
|
||||
node,
|
||||
resolution,
|
||||
resolved_segment,
|
||||
current_segment,
|
||||
(remaining_idx - 1) as u32,
|
||||
false,
|
||||
);
|
||||
(ty, _) = path_ctx.lower_partly_resolved_path(resolution, false);
|
||||
tried_resolving_once = true;
|
||||
|
||||
ty = self.table.insert_type_vars(ty);
|
||||
@ -1604,8 +1618,6 @@ impl<'a> InferenceContext<'a> {
|
||||
return (self.err_ty(), None);
|
||||
}
|
||||
|
||||
// FIXME(inherent_associated_types): update `resolution` based on `ty` here.
|
||||
remaining_idx += 1;
|
||||
remaining_segments = remaining_segments.skip(1);
|
||||
}
|
||||
drop(ctx);
|
||||
@ -1621,12 +1633,7 @@ impl<'a> InferenceContext<'a> {
|
||||
(ty, variant)
|
||||
}
|
||||
TypeNs::TypeAliasId(it) => {
|
||||
let resolved_seg = match unresolved {
|
||||
None => path.segments().last().unwrap(),
|
||||
Some(n) => path.segments().get(path.segments().len() - n - 1).unwrap(),
|
||||
};
|
||||
let substs =
|
||||
ctx.substs_from_path_segment(resolved_seg, Some(it.into()), true, None);
|
||||
let substs = path_ctx.substs_from_path_segment(it.into(), true, None);
|
||||
drop(ctx);
|
||||
let ty = self.db.ty(it.into());
|
||||
let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
|
||||
|
@@ -163,10 +163,27 @@ impl CoerceMany {
         // type is a type variable and the new one is `!`, trying it the other
         // way around first would mean we make the type variable `!`, instead of
         // just marking it as possibly diverging.
-        if let Ok(res) = ctx.coerce(expr, &expr_ty, &self.merged_ty(), CoerceNever::Yes) {
-            self.final_ty = Some(res);
-        } else if let Ok(res) = ctx.coerce(expr, &self.merged_ty(), &expr_ty, CoerceNever::Yes) {
+        //
+        // - [Comment from rustc](https://github.com/rust-lang/rust/blob/5ff18d0eaefd1bd9ab8ec33dab2404a44e7631ed/compiler/rustc_hir_typeck/src/coercion.rs#L1334-L1335)
+        // First try to coerce the new expression to the type of the previous ones,
+        // but only if the new expression has no coercion already applied to it.
+        if expr.is_none_or(|expr| !ctx.result.expr_adjustments.contains_key(&expr)) {
+            if let Ok(res) = ctx.coerce(expr, &expr_ty, &self.merged_ty(), CoerceNever::Yes) {
+                self.final_ty = Some(res);
+                if let Some(expr) = expr {
+                    self.expressions.push(expr);
+                }
+                return;
+            }
+        }
+
+        if let Ok((adjustments, res)) =
+            ctx.coerce_inner(&self.merged_ty(), &expr_ty, CoerceNever::Yes)
+        {
             self.final_ty = Some(res);
+            for &e in &self.expressions {
+                ctx.write_expr_adj(e, adjustments.clone());
+            }
         } else {
             match cause {
                 CoercionCause::Expr(id) => {
@@ -244,14 +261,23 @@ impl InferenceContext<'_> {
         // between places and values.
         coerce_never: CoerceNever,
     ) -> Result<Ty, TypeError> {
-        let from_ty = self.resolve_ty_shallow(from_ty);
-        let to_ty = self.resolve_ty_shallow(to_ty);
-        let (adjustments, ty) = self.table.coerce(&from_ty, &to_ty, coerce_never)?;
+        let (adjustments, ty) = self.coerce_inner(from_ty, to_ty, coerce_never)?;
         if let Some(expr) = expr {
             self.write_expr_adj(expr, adjustments);
         }
         Ok(ty)
     }

+    fn coerce_inner(
+        &mut self,
+        from_ty: &Ty,
+        to_ty: &Ty,
+        coerce_never: CoerceNever,
+    ) -> Result<(Vec<Adjustment>, Ty), TypeError> {
+        let from_ty = self.resolve_ty_shallow(from_ty);
+        let to_ty = self.resolve_ty_shallow(to_ty);
+        self.table.coerce(&from_ty, &to_ty, coerce_never)
+    }
 }

 impl InferenceTable<'_> {
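For context on what `CoerceMany` models: when several branches must agree on one type, coercions can apply in either direction, and expressions coerced earlier may need their adjustments rewritten once a later branch changes the merged type (hence the `write_expr_adj` loop above). In plain Rust, the situation looks like this, nothing analyzer-specific:

```rust
fn pick(flag: bool, a: &[i32; 3], b: &[i32]) -> &[i32] {
    // Both arms must coerce to one type: `&[i32; 3]` unsizes to `&[i32]`.
    // The merged ("LUB") type of the `if` is `&[i32]`, so the first arm
    // receives an unsizing adjustment even though it was checked first.
    if flag { a } else { b }
}

fn main() {
    let arr = [1, 2, 3];
    let slice: &[i32] = &[4, 5];
    assert_eq!(pick(true, &arr, slice), &[1, 2, 3][..]);
}
```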
@@ -5,16 +5,14 @@
 use std::cell::RefCell;
 use std::ops::{Deref, DerefMut};

-use hir_def::expr_store::HygieneId;
-use hir_def::hir::ExprOrPatId;
-use hir_def::path::{Path, PathSegment, PathSegments};
-use hir_def::resolver::{ResolveValueResult, Resolver, TypeNs};
-use hir_def::type_ref::TypesMap;
-use hir_def::TypeOwnerId;
+use either::Either;
+use hir_def::{hir::ExprOrPatId, path::Path, resolver::Resolver, type_ref::TypesMap, TypeOwnerId};
 use la_arena::{Idx, RawIdx};

-use crate::db::HirDatabase;
 use crate::{
-    InferenceDiagnostic, InferenceTyDiagnosticSource, Ty, TyLoweringContext, TyLoweringDiagnostic,
+    db::HirDatabase,
+    lower::path::{PathDiagnosticCallback, PathLoweringContext},
+    InferenceDiagnostic, InferenceTyDiagnosticSource, TyLoweringContext, TyLoweringDiagnostic,
 };

 // Unfortunately, this struct needs to use interior mutability (but we encapsulate it)
@@ -44,6 +42,11 @@ impl Diagnostics {
     }
 }

+pub(crate) struct PathDiagnosticCallbackData<'a> {
+    node: ExprOrPatId,
+    diagnostics: &'a Diagnostics,
+}
+
 pub(super) struct InferenceTyLoweringContext<'a> {
     ctx: TyLoweringContext<'a>,
     diagnostics: &'a Diagnostics,
@@ -51,6 +54,7 @@ pub(super) struct InferenceTyLoweringContext<'a> {
 }

 impl<'a> InferenceTyLoweringContext<'a> {
+    #[inline]
     pub(super) fn new(
         db: &'a dyn HirDatabase,
         resolver: &'a Resolver,
@@ -62,65 +66,62 @@ impl<'a> InferenceTyLoweringContext<'a> {
         Self { ctx: TyLoweringContext::new(db, resolver, types_map, owner), diagnostics, source }
     }

-    pub(super) fn resolve_path_in_type_ns(
-        &mut self,
-        path: &Path,
+    #[inline]
+    pub(super) fn at_path<'b>(
+        &'b mut self,
+        path: &'b Path,
         node: ExprOrPatId,
-    ) -> Option<(TypeNs, Option<usize>)> {
-        let diagnostics = self.diagnostics;
-        self.ctx.resolve_path_in_type_ns(path, &mut |_, diag| {
-            diagnostics.push(InferenceDiagnostic::PathDiagnostic { node, diag })
-        })
+    ) -> PathLoweringContext<'b, 'a> {
+        let on_diagnostic = PathDiagnosticCallback {
+            data: Either::Right(PathDiagnosticCallbackData { diagnostics: self.diagnostics, node }),
+            callback: |data, _, diag| {
+                let data = data.as_ref().right().unwrap();
+                data.diagnostics
+                    .push(InferenceDiagnostic::PathDiagnostic { node: data.node, diag });
+            },
+        };
+        PathLoweringContext::new(&mut self.ctx, on_diagnostic, path)
     }

-    pub(super) fn resolve_path_in_value_ns(
-        &mut self,
-        path: &Path,
-        node: ExprOrPatId,
-        hygiene_id: HygieneId,
-    ) -> Option<ResolveValueResult> {
-        let diagnostics = self.diagnostics;
-        self.ctx.resolve_path_in_value_ns(path, hygiene_id, &mut |_, diag| {
-            diagnostics.push(InferenceDiagnostic::PathDiagnostic { node, diag })
-        })
+    #[inline]
+    pub(super) fn at_path_forget_diagnostics<'b>(
+        &'b mut self,
+        path: &'b Path,
+    ) -> PathLoweringContext<'b, 'a> {
+        let on_diagnostic = PathDiagnosticCallback {
+            data: Either::Right(PathDiagnosticCallbackData {
+                diagnostics: self.diagnostics,
+                node: ExprOrPatId::ExprId(Idx::from_raw(RawIdx::from_u32(0))),
+            }),
+            callback: |_data, _, _diag| {},
+        };
+        PathLoweringContext::new(&mut self.ctx, on_diagnostic, path)
     }

-    pub(super) fn lower_partly_resolved_path(
-        &mut self,
-        node: ExprOrPatId,
-        resolution: TypeNs,
-        resolved_segment: PathSegment<'_>,
-        remaining_segments: PathSegments<'_>,
-        resolved_segment_idx: u32,
-        infer_args: bool,
-    ) -> (Ty, Option<TypeNs>) {
-        let diagnostics = self.diagnostics;
-        self.ctx.lower_partly_resolved_path(
-            resolution,
-            resolved_segment,
-            remaining_segments,
-            resolved_segment_idx,
-            infer_args,
-            &mut |_, diag| diagnostics.push(InferenceDiagnostic::PathDiagnostic { node, diag }),
-        )
+    #[inline]
+    pub(super) fn forget_diagnostics(&mut self) {
+        self.ctx.diagnostics.clear();
     }
 }

 impl<'a> Deref for InferenceTyLoweringContext<'a> {
     type Target = TyLoweringContext<'a>;

     #[inline]
     fn deref(&self) -> &Self::Target {
         &self.ctx
     }
 }

 impl DerefMut for InferenceTyLoweringContext<'_> {
     #[inline]
     fn deref_mut(&mut self) -> &mut Self::Target {
         &mut self.ctx
     }
 }

 impl Drop for InferenceTyLoweringContext<'_> {
     #[inline]
     fn drop(&mut self) {
         self.diagnostics
             .push_ty_diagnostics(self.source, std::mem::take(&mut self.ctx.diagnostics));
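The `Drop` impl at the end of this hunk carries the design: the wrapper buffers lowering diagnostics and flushes them into the shared sink when the context goes out of scope, so no call site can forget to, while `forget_diagnostics()` discards the buffer for lookups that must stay silent. A minimal sketch of that RAII shape, with illustrative names and `&mut` in place of the real interior-mutability sink:

```rust
struct Sink {
    diagnostics: Vec<String>,
}

struct Ctx<'a> {
    sink: &'a mut Sink,
    pending: Vec<String>,
}

impl Ctx<'_> {
    fn report(&mut self, msg: &str) {
        self.pending.push(msg.to_owned());
    }

    // Mirrors `forget_diagnostics()`: discard whatever was buffered so far.
    fn forget(&mut self) {
        self.pending.clear();
    }
}

impl Drop for Ctx<'_> {
    fn drop(&mut self) {
        // Flush on scope exit, like InferenceTyLoweringContext's Drop impl.
        self.sink.diagnostics.append(&mut self.pending);
    }
}

fn main() {
    let mut sink = Sink { diagnostics: Vec::new() };
    {
        let mut ctx = Ctx { sink: &mut sink, pending: Vec::new() };
        ctx.report("unresolved path");
    } // flushed here
    assert_eq!(sink.diagnostics, ["unresolved path"]);
}
```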
@@ -489,78 +489,7 @@ impl InferenceContext<'_> {

                 ty
             }
-            Expr::Call { callee, args, .. } => {
-                let callee_ty = self.infer_expr(*callee, &Expectation::none(), ExprIsRead::Yes);
-                let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone(), false, true);
-                let (res, derefed_callee) = loop {
-                    let Some((callee_deref_ty, _)) = derefs.next() else {
-                        break (None, callee_ty.clone());
-                    };
-                    if let Some(res) = derefs.table.callable_sig(&callee_deref_ty, args.len()) {
-                        break (Some(res), callee_deref_ty);
-                    }
-                };
-                // if the function is unresolved, we use is_varargs=true to
-                // suppress the arg count diagnostic here
-                let is_varargs =
-                    derefed_callee.callable_sig(self.db).is_some_and(|sig| sig.is_varargs)
-                        || res.is_none();
-                let (param_tys, ret_ty) = match res {
-                    Some((func, params, ret_ty)) => {
-                        let mut adjustments = auto_deref_adjust_steps(&derefs);
-                        if let TyKind::Closure(c, _) =
-                            self.table.resolve_completely(callee_ty.clone()).kind(Interner)
-                        {
-                            if let Some(par) = self.current_closure {
-                                self.closure_dependencies.entry(par).or_default().push(*c);
-                            }
-                            self.deferred_closures.entry(*c).or_default().push((
-                                derefed_callee.clone(),
-                                callee_ty.clone(),
-                                params.clone(),
-                                tgt_expr,
-                            ));
-                        }
-                        if let Some(fn_x) = func {
-                            self.write_fn_trait_method_resolution(
-                                fn_x,
-                                &derefed_callee,
-                                &mut adjustments,
-                                &callee_ty,
-                                &params,
-                                tgt_expr,
-                            );
-                        }
-                        self.write_expr_adj(*callee, adjustments);
-                        (params, ret_ty)
-                    }
-                    None => {
-                        self.push_diagnostic(InferenceDiagnostic::ExpectedFunction {
-                            call_expr: tgt_expr,
-                            found: callee_ty.clone(),
-                        });
-                        (Vec::new(), self.err_ty())
-                    }
-                };
-                let indices_to_skip = self.check_legacy_const_generics(derefed_callee, args);
-                self.register_obligations_for_call(&callee_ty);
-
-                let expected_inputs = self.expected_inputs_for_expected_output(
-                    expected,
-                    ret_ty.clone(),
-                    param_tys.clone(),
-                );
-
-                self.check_call_arguments(
-                    tgt_expr,
-                    args,
-                    &expected_inputs,
-                    &param_tys,
-                    &indices_to_skip,
-                    is_varargs,
-                );
-                self.normalize_associated_types_in(ret_ty)
-            }
+            Expr::Call { callee, args, .. } => self.infer_call(tgt_expr, *callee, args, expected),
             Expr::MethodCall { receiver, args, method_name, generic_args } => self
                 .infer_method_call(
                     tgt_expr,
@@ -1872,6 +1801,107 @@ impl InferenceContext<'_> {
         }
     }

+    fn infer_call(
+        &mut self,
+        tgt_expr: ExprId,
+        callee: ExprId,
+        args: &[ExprId],
+        expected: &Expectation,
+    ) -> Ty {
+        let callee_ty = self.infer_expr(callee, &Expectation::none(), ExprIsRead::Yes);
+        let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone(), false, true);
+        let (res, derefed_callee) = loop {
+            let Some((callee_deref_ty, _)) = derefs.next() else {
+                break (None, callee_ty.clone());
+            };
+            if let Some(res) = derefs.table.callable_sig(&callee_deref_ty, args.len()) {
+                break (Some(res), callee_deref_ty);
+            }
+        };
+        // if the function is unresolved, we use is_varargs=true to
+        // suppress the arg count diagnostic here
+        let is_varargs =
+            derefed_callee.callable_sig(self.db).is_some_and(|sig| sig.is_varargs) || res.is_none();
+        let (param_tys, ret_ty) = match res {
+            Some((func, params, ret_ty)) => {
+                let mut adjustments = auto_deref_adjust_steps(&derefs);
+                if let TyKind::Closure(c, _) =
+                    self.table.resolve_completely(callee_ty.clone()).kind(Interner)
+                {
+                    if let Some(par) = self.current_closure {
+                        self.closure_dependencies.entry(par).or_default().push(*c);
+                    }
+                    self.deferred_closures.entry(*c).or_default().push((
+                        derefed_callee.clone(),
+                        callee_ty.clone(),
+                        params.clone(),
+                        tgt_expr,
+                    ));
+                }
+                if let Some(fn_x) = func {
+                    self.write_fn_trait_method_resolution(
+                        fn_x,
+                        &derefed_callee,
+                        &mut adjustments,
+                        &callee_ty,
+                        &params,
+                        tgt_expr,
+                    );
+                }
+                self.write_expr_adj(callee, adjustments);
+                (params, ret_ty)
+            }
+            None => {
+                self.push_diagnostic(InferenceDiagnostic::ExpectedFunction {
+                    call_expr: tgt_expr,
+                    found: callee_ty.clone(),
+                });
+                (Vec::new(), self.err_ty())
+            }
+        };
+        let indices_to_skip = self.check_legacy_const_generics(derefed_callee, args);
+        self.check_call(
+            tgt_expr,
+            args,
+            callee_ty,
+            &param_tys,
+            ret_ty,
+            &indices_to_skip,
+            is_varargs,
+            expected,
+        )
+    }
+
+    fn check_call(
+        &mut self,
+        tgt_expr: ExprId,
+        args: &[ExprId],
+        callee_ty: Ty,
+        param_tys: &[Ty],
+        ret_ty: Ty,
+        indices_to_skip: &[u32],
+        is_varargs: bool,
+        expected: &Expectation,
+    ) -> Ty {
+        self.register_obligations_for_call(&callee_ty);
+
+        let expected_inputs = self.expected_inputs_for_expected_output(
+            expected,
+            ret_ty.clone(),
+            param_tys.to_owned(),
+        );
+
+        self.check_call_arguments(
+            tgt_expr,
+            args,
+            &expected_inputs,
+            param_tys,
+            indices_to_skip,
+            is_varargs,
+        );
+        self.normalize_associated_types_in(ret_ty)
+    }
+
     fn infer_method_call(
         &mut self,
         tgt_expr: ExprId,
@@ -1892,21 +1922,32 @@ impl InferenceContext<'_> {
             VisibleFromModule::Filter(self.resolver.module()),
             method_name,
         );
-        let (receiver_ty, method_ty, substs) = match resolved {
+        match resolved {
             Some((adjust, func, visible)) => {
-                let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty);
-                let generics = generics(self.db.upcast(), func.into());
-                let substs = self.substs_for_method_call(generics, generic_args);
-                self.write_expr_adj(receiver, adjustments);
-                self.write_method_resolution(tgt_expr, func, substs.clone());
                 if !visible {
                     self.push_diagnostic(InferenceDiagnostic::PrivateAssocItem {
                         id: tgt_expr.into(),
                         item: func.into(),
                     })
                 }
-                (ty, self.db.value_ty(func.into()).unwrap(), substs)
+
+                let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty);
+                self.write_expr_adj(receiver, adjustments);
+
+                let generics = generics(self.db.upcast(), func.into());
+                let substs = self.substs_for_method_call(generics, generic_args);
+                self.write_method_resolution(tgt_expr, func, substs.clone());
+                self.check_method_call(
+                    tgt_expr,
+                    args,
+                    self.db.value_ty(func.into()).expect("we have a function def"),
+                    substs,
+                    ty,
+                    expected,
+                )
+            }
             // Failed to resolve, report diagnostic and try to resolve as call to field access or
             // assoc function
             None => {
                 let field_with_same_name_exists = match self.lookup_field(&receiver_ty, method_name)
                 {
@@ -1926,12 +1967,11 @@ impl InferenceContext<'_> {
                     VisibleFromModule::Filter(self.resolver.module()),
                     Some(method_name),
                     method_resolution::LookupMode::Path,
-                    |_ty, item, visible| {
-                        if visible {
-                            Some(item)
-                        } else {
-                            None
+                    |_ty, item, visible| match item {
+                        hir_def::AssocItemId::FunctionId(function_id) if visible => {
+                            Some(function_id)
                         }
+                        _ => None,
                     },
                 );
@@ -1939,17 +1979,45 @@ impl InferenceContext<'_> {
                     expr: tgt_expr,
                     receiver: receiver_ty.clone(),
                     name: method_name.clone(),
-                    field_with_same_name: field_with_same_name_exists,
+                    field_with_same_name: field_with_same_name_exists.clone(),
                     assoc_func_with_same_name,
                 });
-                (
-                    receiver_ty,
-                    Binders::empty(Interner, self.err_ty()),
-                    Substitution::empty(Interner),
-                )
+
+                let recovered = match assoc_func_with_same_name {
+                    Some(f) => {
+                        let generics = generics(self.db.upcast(), f.into());
+                        let substs = self.substs_for_method_call(generics, generic_args);
+                        let f = self
+                            .db
+                            .value_ty(f.into())
+                            .expect("we have a function def")
+                            .substitute(Interner, &substs);
+                        let sig = f.callable_sig(self.db).expect("we have a function def");
+                        Some((f, sig, true))
+                    }
+                    None => field_with_same_name_exists.and_then(|field_ty| {
+                        let callable_sig = field_ty.callable_sig(self.db)?;
+                        Some((field_ty, callable_sig, false))
+                    }),
+                };
+                match recovered {
+                    Some((callee_ty, sig, strip_first)) => self.check_call(
+                        tgt_expr,
+                        args,
+                        callee_ty,
+                        sig.params().get(strip_first as usize..).unwrap_or(&[]),
+                        sig.ret().clone(),
+                        &[],
+                        true,
+                        expected,
+                    ),
+                    None => {
+                        self.check_call_arguments(tgt_expr, args, &[], &[], &[], true);
+                        self.err_ty()
+                    }
+                }
             }
-        };
-        self.check_method_call(tgt_expr, args, method_ty, substs, receiver_ty, expected)
+        }
     }

     fn check_method_call(
@@ -2019,9 +2087,10 @@ impl InferenceContext<'_> {
         expected_inputs: &[Ty],
         param_tys: &[Ty],
         skip_indices: &[u32],
-        is_varargs: bool,
+        ignore_arg_param_mismatch: bool,
     ) {
-        let arg_count_mismatch = args.len() != param_tys.len() + skip_indices.len() && !is_varargs;
+        let arg_count_mismatch =
+            !ignore_arg_param_mismatch && args.len() != param_tys.len() + skip_indices.len();
         if arg_count_mismatch {
             self.push_diagnostic(InferenceDiagnostic::MismatchedArgCount {
                 call_expr: expr,
@@ -2050,7 +2119,7 @@ impl InferenceContext<'_> {
                 continue;
             }

-            while skip_indices.peek().is_some_and(|i| *i < idx as u32) {
+            while skip_indices.peek().is_some_and(|&i| i < idx as u32) {
                 skip_indices.next();
             }
             if skip_indices.peek().copied() == Some(idx as u32) {
@@ -2132,8 +2201,8 @@ impl InferenceContext<'_> {
         for kind_id in def_generics.iter_self_id().take(self_params) {
             let arg = args.peek();
             let arg = match (kind_id, arg) {
-                // Lifetimes can be elided.
-                // Once we have implemented lifetime elision correctly,
+                // Lifetimes can be inferred.
+                // Once we have implemented lifetime inference correctly,
                 // this should be handled in a proper way.
                 (
                     GenericParamId::LifetimeParamId(_),
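The recovery path added above lets inference keep checking arguments when `x.f()` fails to resolve as a method but `f` exists as a same-named callable field or associated function. The language situation it models is plain Rust, nothing analyzer-specific:

```rust
struct S {
    f: fn(i32) -> i32,
}

impl S {
    fn assoc(x: i32) -> i32 {
        x + 1
    }
}

fn main() {
    let s = S { f: |x| x * 2 };
    // `s.f(3)` would be a method-call resolution error; a callable field
    // must be invoked as `(s.f)(3)`. The recovery above still infers the
    // argument types of the erroneous form instead of giving up entirely.
    assert_eq!((s.f)(3), 6);
    assert_eq!(S::assoc(3), 4);
}
```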
@@ -564,9 +564,10 @@ impl InferenceContext<'_> {
             | Pat::Range { .. }
             | Pat::Slice { .. } => true,
             Pat::Or(pats) => pats.iter().all(|p| self.is_non_ref_pat(body, *p)),
-            Pat::Path(p) => {
-                let v = self.resolve_value_path_inner(p, pat.into());
-                v.is_some_and(|x| !matches!(x.0, hir_def::resolver::ValueNs::ConstId(_)))
+            Pat::Path(path) => {
+                // A const is a reference pattern, but other value ns things aren't (see #16131).
+                let resolved = self.resolve_value_path_inner(path, pat.into(), true);
+                resolved.is_some_and(|it| !matches!(it.0, hir_def::resolver::ValueNs::ConstId(_)))
             }
             Pat::ConstBlock(..) => false,
             Pat::Lit(expr) => !matches!(
|
||||
AdtId, AssocItemId, GenericDefId, ItemContainerId, Lookup,
|
||||
};
|
||||
use hir_expand::name::Name;
|
||||
use intern::sym;
|
||||
use stdx::never;
|
||||
|
||||
use crate::{
|
||||
@ -41,7 +40,7 @@ impl InferenceContext<'_> {
|
||||
}
|
||||
|
||||
fn resolve_value_path(&mut self, path: &Path, id: ExprOrPatId) -> Option<ValuePathResolution> {
|
||||
let (value, self_subst) = self.resolve_value_path_inner(path, id)?;
|
||||
let (value, self_subst) = self.resolve_value_path_inner(path, id, false)?;
|
||||
|
||||
let value_def: ValueTyDefId = match value {
|
||||
ValueNs::FunctionId(it) => it.into(),
|
||||
@ -94,7 +93,14 @@ impl InferenceContext<'_> {
|
||||
return Some(ValuePathResolution::NonGeneric(ty));
|
||||
};
|
||||
|
||||
let substs = self.with_body_ty_lowering(|ctx| ctx.substs_from_path(path, value_def, true));
|
||||
let substs = self.with_body_ty_lowering(|ctx| {
|
||||
let mut path_ctx = ctx.at_path(path, id);
|
||||
let last_segment = path.segments().len().checked_sub(1);
|
||||
if let Some(last_segment) = last_segment {
|
||||
path_ctx.set_current_segment(last_segment)
|
||||
}
|
||||
path_ctx.substs_from_path(value_def, true)
|
||||
});
|
||||
let substs = substs.as_slice(Interner);
|
||||
|
||||
if let ValueNs::EnumVariantId(_) = value {
|
||||
@ -146,6 +152,7 @@ impl InferenceContext<'_> {
|
||||
&mut self,
|
||||
path: &Path,
|
||||
id: ExprOrPatId,
|
||||
no_diagnostics: bool,
|
||||
) -> Option<(ValueNs, Option<chalk_ir::Substitution<Interner>>)> {
|
||||
// Don't use `self.make_ty()` here as we need `orig_ns`.
|
||||
let mut ctx = TyLoweringContext::new(
|
||||
@ -156,33 +163,83 @@ impl InferenceContext<'_> {
|
||||
&self.diagnostics,
|
||||
InferenceTyDiagnosticSource::Body,
|
||||
);
|
||||
let mut path_ctx = if no_diagnostics {
|
||||
ctx.at_path_forget_diagnostics(path)
|
||||
} else {
|
||||
ctx.at_path(path, id)
|
||||
};
|
||||
let (value, self_subst) = if let Some(type_ref) = path.type_anchor() {
|
||||
let last = path.segments().last()?;
|
||||
|
||||
let (ty, orig_ns) = ctx.lower_ty_ext(type_ref);
|
||||
let (ty, orig_ns) = path_ctx.ty_ctx().lower_ty_ext(type_ref);
|
||||
let ty = self.table.insert_type_vars(ty);
|
||||
let ty = self.table.normalize_associated_types_in(ty);
|
||||
|
||||
let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
|
||||
let (ty, _) = ctx.lower_ty_relative_path(ty, orig_ns, remaining_segments_for_ty);
|
||||
drop(ctx);
|
||||
path_ctx.ignore_last_segment();
|
||||
let (ty, _) = path_ctx.lower_ty_relative_path(ty, orig_ns);
|
||||
drop_ctx(ctx, no_diagnostics);
|
||||
let ty = self.table.insert_type_vars(ty);
|
||||
let ty = self.table.normalize_associated_types_in(ty);
|
||||
self.resolve_ty_assoc_item(ty, last.name, id).map(|(it, substs)| (it, Some(substs)))?
|
||||
} else {
|
||||
let hygiene = self.body.expr_or_pat_path_hygiene(id);
|
||||
// FIXME: report error, unresolved first path segment
|
||||
let value_or_partial = ctx.resolve_path_in_value_ns(path, id, hygiene)?;
|
||||
drop(ctx);
|
||||
let value_or_partial = path_ctx.resolve_path_in_value_ns(hygiene)?;
|
||||
|
||||
match value_or_partial {
|
||||
ResolveValueResult::ValueNs(it, _) => (it, None),
|
||||
ResolveValueResult::Partial(def, remaining_index, _) => self
|
||||
.resolve_assoc_item(id, def, path, remaining_index, id)
|
||||
.map(|(it, substs)| (it, Some(substs)))?,
|
||||
ResolveValueResult::ValueNs(it, _) => {
|
||||
drop_ctx(ctx, no_diagnostics);
|
||||
(it, None)
|
||||
}
|
||||
ResolveValueResult::Partial(def, remaining_index, _) => {
|
||||
// there may be more intermediate segments between the resolved one and
|
||||
// the end. Only the last segment needs to be resolved to a value; from
|
||||
// the segments before that, we need to get either a type or a trait ref.
|
||||
|
||||
let remaining_segments = path.segments().skip(remaining_index);
|
||||
let is_before_last = remaining_segments.len() == 1;
|
||||
let last_segment = remaining_segments
|
||||
.last()
|
||||
.expect("there should be at least one segment here");
|
||||
|
||||
let (resolution, substs) = match (def, is_before_last) {
|
||||
(TypeNs::TraitId(trait_), true) => {
|
||||
let self_ty = self.table.new_type_var();
|
||||
let trait_ref =
|
||||
path_ctx.lower_trait_ref_from_resolved_path(trait_, self_ty);
|
||||
drop_ctx(ctx, no_diagnostics);
|
||||
self.resolve_trait_assoc_item(trait_ref, last_segment, id)
|
||||
}
|
||||
(def, _) => {
|
||||
// Either we already have a type (e.g. `Vec::new`), or we have a
|
||||
// trait but it's not the last segment, so the next segment
|
||||
// should resolve to an associated type of that trait (e.g. `<T
|
||||
// as Iterator>::Item::default`)
|
||||
path_ctx.ignore_last_segment();
|
||||
let (ty, _) = path_ctx.lower_partly_resolved_path(def, true);
|
||||
drop_ctx(ctx, no_diagnostics);
|
||||
if ty.is_unknown() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let ty = self.insert_type_vars(ty);
|
||||
let ty = self.normalize_associated_types_in(ty);
|
||||
|
||||
self.resolve_ty_assoc_item(ty, last_segment.name, id)
|
||||
}
|
||||
}?;
|
||||
(resolution, Some(substs))
|
||||
}
|
||||
}
|
||||
};
|
||||
Some((value, self_subst))
|
||||
return Some((value, self_subst));
|
||||
|
||||
#[inline]
|
||||
fn drop_ctx(mut ctx: TyLoweringContext<'_>, no_diagnostics: bool) {
|
||||
if no_diagnostics {
|
||||
ctx.forget_diagnostics();
|
||||
}
|
||||
}
|
||||
}
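An editor's aside: the new `no_diagnostics` flag controls whether diagnostics accumulated during lowering are kept or discarded via `drop_ctx`. A minimal, self-contained sketch of that pattern, with hypothetical stand-in types (not the rust-analyzer API):

// Hypothetical stand-in for TyLoweringContext and its diagnostics sink.
struct Ctx {
    diagnostics: Vec<String>,
}

impl Ctx {
    fn forget_diagnostics(&mut self) {
        self.diagnostics.clear();
    }
}

// Mirrors the shape of `drop_ctx` above: consume the context and, when the
// caller asked for no diagnostics, discard whatever was collected.
fn drop_ctx(mut ctx: Ctx, no_diagnostics: bool) -> Vec<String> {
    if no_diagnostics {
        ctx.forget_diagnostics();
    }
    ctx.diagnostics
}

fn main() {
    let ctx = Ctx { diagnostics: vec!["unresolved path".to_owned()] };
    assert!(drop_ctx(ctx, true).is_empty());
}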

fn add_required_obligations_for_value_path(&mut self, def: GenericDefId, subst: &Substitution) {
@ -212,89 +269,6 @@ impl InferenceContext<'_> {
}
}

fn resolve_assoc_item(
&mut self,
node: ExprOrPatId,
def: TypeNs,
path: &Path,
remaining_index: usize,
id: ExprOrPatId,
) -> Option<(ValueNs, Substitution)> {
// there may be more intermediate segments between the resolved one and
// the end. Only the last segment needs to be resolved to a value; from
// the segments before that, we need to get either a type or a trait ref.

let _d;
let (resolved_segment, remaining_segments) = match path {
Path::Normal { .. } | Path::BarePath(_) => {
assert!(remaining_index < path.segments().len());
(
path.segments().get(remaining_index - 1).unwrap(),
path.segments().skip(remaining_index),
)
}
Path::LangItem(..) => (
PathSegment {
name: {
_d = Name::new_symbol_root(sym::Unknown.clone());
&_d
},
args_and_bindings: None,
},
path.segments(),
),
};
let is_before_last = remaining_segments.len() == 1;

match (def, is_before_last) {
(TypeNs::TraitId(trait_), true) => {
let segment =
remaining_segments.last().expect("there should be at least one segment here");
let self_ty = self.table.new_type_var();
let trait_ref = self.with_body_ty_lowering(|ctx| {
ctx.lower_trait_ref_from_resolved_path(trait_, resolved_segment, self_ty)
});
self.resolve_trait_assoc_item(trait_ref, segment, id)
}
(def, _) => {
// Either we already have a type (e.g. `Vec::new`), or we have a
// trait but it's not the last segment, so the next segment
// should resolve to an associated type of that trait (e.g. `<T
// as Iterator>::Item::default`)
let remaining_segments_for_ty =
remaining_segments.take(remaining_segments.len() - 1);
let mut ctx = TyLoweringContext::new(
self.db,
&self.resolver,
&self.body.types,
self.owner.into(),
&self.diagnostics,
InferenceTyDiagnosticSource::Body,
);
let (ty, _) = ctx.lower_partly_resolved_path(
node,
def,
resolved_segment,
remaining_segments_for_ty,
(remaining_index - 1) as u32,
true,
);
drop(ctx);
if ty.is_unknown() {
return None;
}

let ty = self.insert_type_vars(ty);
let ty = self.normalize_associated_types_in(ty);

let segment =
remaining_segments.last().expect("there should be at least one segment here");

self.resolve_ty_assoc_item(ty, segment.name, id)
}
}
}

fn resolve_trait_assoc_item(
&mut self,
trait_ref: TraitRef,

File diff suppressed because it is too large
@ -26,11 +26,11 @@ pub enum GenericArgsProhibitedReason {
Static,
/// When there is a generic enum, within the expression `Enum::Variant`,
/// either `Enum` or `Variant` is allowed to have generic arguments, but not both.
// FIXME: This is not used now but it should be.
EnumVariant,
}
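For readers unfamiliar with the rule this variant encodes, a small illustration in plain Rust (standard library types only):

fn main() {
    // Generic args may sit on the enum segment...
    let a = Option::<i32>::None;
    // ...or on the variant segment...
    let b = Option::None::<i32>;
    // ...but not on both: `Option::<i32>::None::<i32>` is rejected.
    assert_eq!(a, b);
}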

#[derive(Debug, PartialEq, Eq, Clone)]
pub enum PathLoweringDiagnostic {
GenericArgsProhibited { segment: u32, reason: GenericArgsProhibitedReason },
ParenthesizedGenericArgsWithoutFnTrait { segment: u32 },
}

src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs (new file, 911 lines)
@ -0,0 +1,911 @@
//! A wrapper around [`TyLoweringContext`] specifically for lowering paths.

use std::iter;

use chalk_ir::{cast::Cast, fold::Shift, BoundVar};
use either::Either;
use hir_def::{
data::TraitFlags,
expr_store::HygieneId,
generics::{TypeParamProvenance, WherePredicate, WherePredicateTypeTarget},
path::{GenericArg, GenericArgs, Path, PathSegment, PathSegments},
resolver::{ResolveValueResult, TypeNs, ValueNs},
type_ref::{TypeBound, TypeRef},
GenericDefId, GenericParamId, ItemContainerId, Lookup, TraitId,
};
use smallvec::SmallVec;
use stdx::never;

use crate::{
consteval::unknown_const_as_generic,
error_lifetime,
generics::generics,
lower::{
generic_arg_to_chalk, named_associated_type_shorthand_candidates, ImplTraitLoweringState,
},
to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx,
utils::associated_type_by_name_including_super_traits,
AliasEq, AliasTy, GenericArgsProhibitedReason, ImplTraitLoweringMode, Interner,
ParamLoweringMode, PathLoweringDiagnostic, ProjectionTy, QuantifiedWhereClause, Substitution,
TraitRef, Ty, TyBuilder, TyDefId, TyKind, TyLoweringContext, ValueTyDefId, WhereClause,
};

type CallbackData<'a> = Either<
super::PathDiagnosticCallbackData,
crate::infer::diagnostics::PathDiagnosticCallbackData<'a>,
>;

// We cannot use `&mut dyn FnMut()` because of lifetime issues, and we don't want to use `Box<dyn FnMut()>`
// because of the allocation, so we create a lifetime-less callback, tailored for our needs.
pub(crate) struct PathDiagnosticCallback<'a> {
pub(crate) data: CallbackData<'a>,
pub(crate) callback: fn(&CallbackData<'_>, &mut TyLoweringContext<'_>, PathLoweringDiagnostic),
}
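A minimal sketch of the data-plus-fn-pointer pattern the comment describes, with hypothetical names: a non-capturing closure coerces to a plain `fn` pointer, so no allocation and no `&mut dyn FnMut` borrow is needed.

// The state travels in `data`; `callback` is a plain fn pointer over it.
struct Callback<D> {
    data: D,
    callback: fn(&D, &str),
}

fn main() {
    let cb = Callback {
        data: 41,
        // Non-capturing closures coerce to fn pointers.
        callback: |data: &i32, msg: &str| println!("{msg}: {data}"),
    };
    (cb.callback)(&cb.data, "value");
}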

pub(crate) struct PathLoweringContext<'a, 'b> {
ctx: &'a mut TyLoweringContext<'b>,
on_diagnostic: PathDiagnosticCallback<'a>,
path: &'a Path,
segments: PathSegments<'a>,
current_segment_idx: usize,
/// Contains the previous segment if `current_segment_idx == segments.len()`
current_or_prev_segment: PathSegment<'a>,
}

impl<'a, 'b> PathLoweringContext<'a, 'b> {
#[inline]
pub(crate) fn new(
ctx: &'a mut TyLoweringContext<'b>,
on_diagnostic: PathDiagnosticCallback<'a>,
path: &'a Path,
) -> Self {
let segments = path.segments();
let first_segment = segments.first().unwrap_or(PathSegment::MISSING);
Self {
ctx,
on_diagnostic,
path,
segments,
current_segment_idx: 0,
current_or_prev_segment: first_segment,
}
}

#[inline]
#[cold]
fn on_diagnostic(&mut self, diag: PathLoweringDiagnostic) {
(self.on_diagnostic.callback)(&self.on_diagnostic.data, self.ctx, diag);
}

#[inline]
pub(crate) fn ty_ctx(&mut self) -> &mut TyLoweringContext<'b> {
self.ctx
}

#[inline]
fn current_segment_u32(&self) -> u32 {
self.current_segment_idx as u32
}

#[inline]
fn skip_resolved_segment(&mut self) {
if !matches!(self.path, Path::LangItem(..)) {
// In lang items, the resolved "segment" is not one of the segments. Perhaps we should've
// made it point at -1, but I don't feel this is clearer.
self.current_segment_idx += 1;
}
self.update_current_segment();
}

#[inline]
fn update_current_segment(&mut self) {
self.current_or_prev_segment =
self.segments.get(self.current_segment_idx).unwrap_or(self.current_or_prev_segment);
}

#[inline]
pub(crate) fn ignore_last_segment(&mut self) {
self.segments = self.segments.strip_last();
}

#[inline]
pub(crate) fn set_current_segment(&mut self, segment: usize) {
self.current_segment_idx = segment;
self.current_or_prev_segment = self
.segments
.get(segment)
.expect("invalid segment passed to PathLoweringContext::set_current_segment()");
}

pub(crate) fn lower_ty_relative_path(
&mut self,
ty: Ty,
// We need the original resolution to lower `Self::AssocTy` correctly
res: Option<TypeNs>,
) -> (Ty, Option<TypeNs>) {
match self.segments.len() - self.current_segment_idx {
0 => (ty, res),
1 => {
// resolve unselected assoc types
(self.select_associated_type(res), None)
}
_ => {
// FIXME report error (ambiguous associated type)
(TyKind::Error.intern(Interner), None)
}
}
}
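The `1 =>` arm above is the `T::Assoc` shorthand case. In surface Rust, what gets resolved there looks like this (standard traits only):

// `T::Item` is shorthand for `<T as Iterator>::Item`; the resolution `res`
// (here, the `Iterator` bound on `T`) is what disambiguates it.
fn first<T: Iterator>(mut iter: T) -> Option<T::Item> {
    iter.next()
}

fn main() {
    assert_eq!(first(1..3), Some(1));
}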

fn prohibit_parenthesized_generic_args(&mut self) -> bool {
if let Some(generic_args) = self.current_or_prev_segment.args_and_bindings {
if generic_args.desugared_from_fn {
let segment = self.current_segment_u32();
self.on_diagnostic(
PathLoweringDiagnostic::ParenthesizedGenericArgsWithoutFnTrait { segment },
);
return true;
}
}
false
}

// When calling this, the current segment is the resolved segment (we don't advance it yet).
pub(crate) fn lower_partly_resolved_path(
&mut self,
resolution: TypeNs,
infer_args: bool,
) -> (Ty, Option<TypeNs>) {
let remaining_segments = self.segments.skip(self.current_segment_idx + 1);

let ty = match resolution {
TypeNs::TraitId(trait_) => {
let ty = match remaining_segments.len() {
1 => {
let trait_ref = self.lower_trait_ref_from_resolved_path(
trait_,
TyKind::Error.intern(Interner),
);

self.skip_resolved_segment();
let segment = self.current_or_prev_segment;
let found =
self.ctx.db.trait_data(trait_).associated_type_by_name(segment.name);

match found {
Some(associated_ty) => {
// FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent
// generic params. It's inefficient to splice the `Substitution`s, so we may want
// that method to optionally take parent `Substitution` as we already know them at
// this point (`trait_ref.substitution`).
let substitution = self.substs_from_path_segment(
associated_ty.into(),
false,
None,
);
let len_self =
generics(self.ctx.db.upcast(), associated_ty.into()).len_self();
let substitution = Substitution::from_iter(
Interner,
substitution
.iter(Interner)
.take(len_self)
.chain(trait_ref.substitution.iter(Interner)),
);
TyKind::Alias(AliasTy::Projection(ProjectionTy {
associated_ty_id: to_assoc_type_id(associated_ty),
substitution,
}))
.intern(Interner)
}
None => {
// FIXME: report error (associated type not found)
TyKind::Error.intern(Interner)
}
}
}
0 => {
// Trait object type without dyn; this should be handled upstream. See
// `lower_path()`.
stdx::never!("unexpected fully resolved trait path");
TyKind::Error.intern(Interner)
}
_ => {
// FIXME report error (ambiguous associated type)
TyKind::Error.intern(Interner)
}
};
return (ty, None);
}
TypeNs::TraitAliasId(_) => {
// FIXME(trait_alias): Implement trait alias.
return (TyKind::Error.intern(Interner), None);
}
TypeNs::GenericParam(param_id) => match self.ctx.type_param_mode {
ParamLoweringMode::Placeholder => {
TyKind::Placeholder(to_placeholder_idx(self.ctx.db, param_id.into()))
}
ParamLoweringMode::Variable => {
let idx = match self
.ctx
.generics()
.expect("generics in scope")
.type_or_const_param_idx(param_id.into())
{
None => {
never!("no matching generics");
return (TyKind::Error.intern(Interner), None);
}
Some(idx) => idx,
};

TyKind::BoundVar(BoundVar::new(self.ctx.in_binders, idx))
}
}
.intern(Interner),
TypeNs::SelfType(impl_id) => {
let generics = self.ctx.generics().expect("impl should have generic param scope");

match self.ctx.type_param_mode {
ParamLoweringMode::Placeholder => {
// `def` can be either impl itself or item within, and we need impl itself
// now.
let generics = generics.parent_or_self();
let subst = generics.placeholder_subst(self.ctx.db);
self.ctx.db.impl_self_ty(impl_id).substitute(Interner, &subst)
}
ParamLoweringMode::Variable => {
let starting_from = match generics.def() {
GenericDefId::ImplId(_) => 0,
// `def` is an item within impl. We need to substitute `BoundVar`s but
// remember that they are for parent (i.e. impl) generic params so they
// come after our own params.
_ => generics.len_self(),
};
TyBuilder::impl_self_ty(self.ctx.db, impl_id)
.fill_with_bound_vars(self.ctx.in_binders, starting_from)
.build()
}
}
}
TypeNs::AdtSelfType(adt) => {
let generics = generics(self.ctx.db.upcast(), adt.into());
let substs = match self.ctx.type_param_mode {
ParamLoweringMode::Placeholder => generics.placeholder_subst(self.ctx.db),
ParamLoweringMode::Variable => {
generics.bound_vars_subst(self.ctx.db, self.ctx.in_binders)
}
};
self.ctx.db.ty(adt.into()).substitute(Interner, &substs)
}

TypeNs::AdtId(it) => self.lower_path_inner(it.into(), infer_args),
TypeNs::BuiltinType(it) => self.lower_path_inner(it.into(), infer_args),
TypeNs::TypeAliasId(it) => self.lower_path_inner(it.into(), infer_args),
// FIXME: report error
TypeNs::EnumVariantId(_) => return (TyKind::Error.intern(Interner), None),
};

self.skip_resolved_segment();
self.lower_ty_relative_path(ty, Some(resolution))
}

fn handle_type_ns_resolution(&mut self, resolution: &TypeNs) {
let mut prohibit_generics_on_resolved = |reason| {
if self.current_or_prev_segment.args_and_bindings.is_some() {
let segment = self.current_segment_u32();
self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited {
segment,
reason,
});
}
};

match resolution {
TypeNs::SelfType(_) => {
prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy)
}
TypeNs::GenericParam(_) => {
prohibit_generics_on_resolved(GenericArgsProhibitedReason::TyParam)
}
TypeNs::AdtSelfType(_) => {
prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy)
}
TypeNs::BuiltinType(_) => {
prohibit_generics_on_resolved(GenericArgsProhibitedReason::PrimitiveTy)
}
TypeNs::AdtId(_)
| TypeNs::EnumVariantId(_)
| TypeNs::TypeAliasId(_)
| TypeNs::TraitId(_)
| TypeNs::TraitAliasId(_) => {}
}
}

pub(crate) fn resolve_path_in_type_ns_fully(&mut self) -> Option<TypeNs> {
let (res, unresolved) = self.resolve_path_in_type_ns()?;
if unresolved.is_some() {
return None;
}
Some(res)
}

pub(crate) fn resolve_path_in_type_ns(&mut self) -> Option<(TypeNs, Option<usize>)> {
let (resolution, remaining_index, _, prefix_info) = self
.ctx
.resolver
.resolve_path_in_type_ns_with_prefix_info(self.ctx.db.upcast(), self.path)?;

let segments = self.segments;
if segments.is_empty() || matches!(self.path, Path::LangItem(..)) {
// `segments.is_empty()` can occur with `self`.
return Some((resolution, remaining_index));
}

let (module_segments, resolved_segment_idx, enum_segment) = match remaining_index {
None if prefix_info.enum_variant => {
(segments.strip_last_two(), segments.len() - 1, Some(segments.len() - 2))
}
None => (segments.strip_last(), segments.len() - 1, None),
Some(i) => (segments.take(i - 1), i - 1, None),
};

self.current_segment_idx = resolved_segment_idx;
self.current_or_prev_segment =
segments.get(resolved_segment_idx).expect("should have resolved segment");

if matches!(self.path, Path::BarePath(..)) {
// Bare paths cannot have generics, so skip them as an optimization.
return Some((resolution, remaining_index));
}

for (i, mod_segment) in module_segments.iter().enumerate() {
if mod_segment.args_and_bindings.is_some() {
self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited {
segment: i as u32,
reason: GenericArgsProhibitedReason::Module,
});
}
}

if let Some(enum_segment) = enum_segment {
if segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some())
&& segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some())
{
self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited {
segment: (enum_segment + 1) as u32,
reason: GenericArgsProhibitedReason::EnumVariant,
});
}
}

self.handle_type_ns_resolution(&resolution);

Some((resolution, remaining_index))
}

pub(crate) fn resolve_path_in_value_ns(
&mut self,
hygiene_id: HygieneId,
) -> Option<ResolveValueResult> {
let (res, prefix_info) = self.ctx.resolver.resolve_path_in_value_ns_with_prefix_info(
self.ctx.db.upcast(),
self.path,
hygiene_id,
)?;

let segments = self.segments;
if segments.is_empty() || matches!(self.path, Path::LangItem(..)) {
// `segments.is_empty()` can occur with `self`.
return Some(res);
}

let (mod_segments, enum_segment, resolved_segment_idx) = match res {
ResolveValueResult::Partial(_, unresolved_segment, _) => {
(segments.take(unresolved_segment - 1), None, unresolved_segment - 1)
}
ResolveValueResult::ValueNs(ValueNs::EnumVariantId(_), _)
if prefix_info.enum_variant =>
{
(segments.strip_last_two(), segments.len().checked_sub(2), segments.len() - 1)
}
ResolveValueResult::ValueNs(..) => (segments.strip_last(), None, segments.len() - 1),
};

self.current_segment_idx = resolved_segment_idx;
self.current_or_prev_segment =
segments.get(resolved_segment_idx).expect("should have resolved segment");

for (i, mod_segment) in mod_segments.iter().enumerate() {
if mod_segment.args_and_bindings.is_some() {
self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited {
segment: i as u32,
reason: GenericArgsProhibitedReason::Module,
});
}
}

if let Some(enum_segment) = enum_segment {
if segments.get(enum_segment).is_some_and(|it| it.args_and_bindings.is_some())
&& segments.get(enum_segment + 1).is_some_and(|it| it.args_and_bindings.is_some())
{
self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited {
segment: (enum_segment + 1) as u32,
reason: GenericArgsProhibitedReason::EnumVariant,
});
}
}

match &res {
ResolveValueResult::ValueNs(resolution, _) => {
let resolved_segment_idx = self.current_segment_u32();
let resolved_segment = self.current_or_prev_segment;

let mut prohibit_generics_on_resolved = |reason| {
if resolved_segment.args_and_bindings.is_some() {
self.on_diagnostic(PathLoweringDiagnostic::GenericArgsProhibited {
segment: resolved_segment_idx,
reason,
});
}
};

match resolution {
ValueNs::ImplSelf(_) => {
prohibit_generics_on_resolved(GenericArgsProhibitedReason::SelfTy)
}
// FIXME: rustc generates E0107 (incorrect number of generic arguments) and not
// E0109 (generic arguments provided for a type that doesn't accept them) for
// consts and statics, presumably as a defense against a future in which consts
// and statics can be generic, or just because it was easier for rustc implementors.
// That means we'll show the wrong error code. Because for us it's easier to do it
// this way :)
ValueNs::GenericParam(_) | ValueNs::ConstId(_) => {
prohibit_generics_on_resolved(GenericArgsProhibitedReason::Const)
}
ValueNs::StaticId(_) => {
prohibit_generics_on_resolved(GenericArgsProhibitedReason::Static)
}
ValueNs::FunctionId(_) | ValueNs::StructId(_) | ValueNs::EnumVariantId(_) => {}
ValueNs::LocalBinding(_) => {}
}
}
ResolveValueResult::Partial(resolution, _, _) => {
self.handle_type_ns_resolution(resolution);
}
};
Some(res)
}

fn select_associated_type(&mut self, res: Option<TypeNs>) -> Ty {
let Some((generics, res)) = self.ctx.generics().zip(res) else {
return TyKind::Error.intern(Interner);
};
let segment = self.current_or_prev_segment;
let ty = named_associated_type_shorthand_candidates(
self.ctx.db,
generics.def(),
res,
Some(segment.name.clone()),
move |name, t, associated_ty| {
let generics = self.ctx.generics().unwrap();

if name != segment.name {
return None;
}

let parent_subst = t.substitution.clone();
let parent_subst = match self.ctx.type_param_mode {
ParamLoweringMode::Placeholder => {
// if we're lowering to placeholders, we have to put them in now.
let s = generics.placeholder_subst(self.ctx.db);
s.apply(parent_subst, Interner)
}
ParamLoweringMode::Variable => {
// We need to shift in the bound vars, since
// `named_associated_type_shorthand_candidates` does not do that.
parent_subst.shifted_in_from(Interner, self.ctx.in_binders)
}
};

// FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent
// generic params. It's inefficient to splice the `Substitution`s, so we may want
// that method to optionally take parent `Substitution` as we already know them at
// this point (`t.substitution`).
let substs = self.substs_from_path_segment(associated_ty.into(), false, None);

let len_self =
crate::generics::generics(self.ctx.db.upcast(), associated_ty.into())
.len_self();

let substs = Substitution::from_iter(
Interner,
substs.iter(Interner).take(len_self).chain(parent_subst.iter(Interner)),
);

Some(
TyKind::Alias(AliasTy::Projection(ProjectionTy {
associated_ty_id: to_assoc_type_id(associated_ty),
substitution: substs,
}))
.intern(Interner),
)
},
);

ty.unwrap_or_else(|| TyKind::Error.intern(Interner))
}

fn lower_path_inner(&mut self, typeable: TyDefId, infer_args: bool) -> Ty {
let generic_def = match typeable {
TyDefId::BuiltinType(builtin) => return TyBuilder::builtin(builtin),
TyDefId::AdtId(it) => it.into(),
TyDefId::TypeAliasId(it) => it.into(),
};
let substs = self.substs_from_path_segment(generic_def, infer_args, None);
self.ctx.db.ty(typeable).substitute(Interner, &substs)
}

/// Collect generic arguments from a path into a `Substs`. See also
/// `create_substs_for_ast_path` and `def_to_ty` in rustc.
pub(crate) fn substs_from_path(
&mut self,
// Note that we don't call `db.value_type(resolved)` here,
// `ValueTyDefId` is just a convenient way to pass generics and
// special-case enum variants
resolved: ValueTyDefId,
infer_args: bool,
) -> Substitution {
let prev_current_segment_idx = self.current_segment_idx;
let prev_current_segment = self.current_or_prev_segment;

let generic_def = match resolved {
ValueTyDefId::FunctionId(it) => it.into(),
ValueTyDefId::StructId(it) => it.into(),
ValueTyDefId::UnionId(it) => it.into(),
ValueTyDefId::ConstId(it) => it.into(),
ValueTyDefId::StaticId(_) => return Substitution::empty(Interner),
ValueTyDefId::EnumVariantId(var) => {
// the generic args for an enum variant may be either specified
// on the segment referring to the enum, or on the segment
// referring to the variant. So `Option::<T>::None` and
// `Option::None::<T>` are both allowed (though the former is
// preferred). See also `def_ids_for_path_segments` in rustc.
//
// FIXME: This isn't strictly correct, enum variants may be used not through the enum
// (via `use Enum::Variant`). The resolver returns whether they were, but we don't have its result
// available here. The worst that can happen is that we will show some confusing diagnostics to the user,
// if generics exist on the module and they don't match with the variant.
//
// `wrapping_sub(1)` will return a number which `get` will return None for if current_segment_idx is 0.
// This simplifies the code a bit.
let penultimate_idx = self.current_segment_idx.wrapping_sub(1);
let penultimate = self.segments.get(penultimate_idx);
if let Some(penultimate) = penultimate {
if self.current_or_prev_segment.args_and_bindings.is_none()
&& penultimate.args_and_bindings.is_some()
{
self.current_segment_idx = penultimate_idx;
self.current_or_prev_segment = penultimate;
}
}
var.lookup(self.ctx.db.upcast()).parent.into()
}
};
let result = self.substs_from_path_segment(generic_def, infer_args, None);
self.current_segment_idx = prev_current_segment_idx;
self.current_or_prev_segment = prev_current_segment;
result
}
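The `wrapping_sub(1)` trick above is worth spelling out; a standalone demonstration:

fn main() {
    let segments = ["Enum", "Variant"];

    // Normal case: index 1 has a valid penultimate segment at index 0.
    assert_eq!(segments.get(1usize.wrapping_sub(1)), Some(&"Enum"));

    // Index 0 wraps to usize::MAX, and `get` simply returns None,
    // so no separate `idx == 0` check is needed.
    assert_eq!(segments.get(0usize.wrapping_sub(1)), None);
}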

pub(crate) fn substs_from_path_segment(
&mut self,
def: GenericDefId,
infer_args: bool,
explicit_self_ty: Option<Ty>,
) -> Substitution {
let prohibit_parens = match def {
GenericDefId::TraitId(trait_) => {
let trait_data = self.ctx.db.trait_data(trait_);
!trait_data.flags.contains(TraitFlags::RUSTC_PAREN_SUGAR)
}
_ => true,
};
if prohibit_parens && self.prohibit_parenthesized_generic_args() {
return TyBuilder::unknown_subst(self.ctx.db, def);
}

self.substs_from_args_and_bindings(
self.current_or_prev_segment.args_and_bindings,
def,
infer_args,
explicit_self_ty,
)
}
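`RUSTC_PAREN_SUGAR` marks the `Fn` family of traits, the only ones where parenthesized generic arguments are accepted; in surface syntax:

// `Fn(i32) -> i32` is sugar for `Fn<(i32,), Output = i32>` and is only legal
// because the Fn traits carry the paren-sugar flag; writing, say,
// `impl Iterator(i32)` would trigger the diagnostic emitted above.
fn apply(f: impl Fn(i32) -> i32) -> i32 {
    f(1)
}

fn main() {
    assert_eq!(apply(|x| x + 1), 2);
}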

pub(super) fn substs_from_args_and_bindings(
&mut self,
args_and_bindings: Option<&GenericArgs>,
def: GenericDefId,
infer_args: bool,
explicit_self_ty: Option<Ty>,
) -> Substitution {
// Order is
// - Optional Self parameter
// - Lifetime parameters
// - Type or Const parameters
// - Parent parameters
let def_generics = generics(self.ctx.db.upcast(), def);
let (
parent_params,
self_param,
type_params,
const_params,
impl_trait_params,
lifetime_params,
) = def_generics.provenance_split();
let item_len =
self_param as usize + type_params + const_params + impl_trait_params + lifetime_params;
let total_len = parent_params + item_len;

let mut substs = Vec::new();

// we need to iterate the lifetime and type/const params separately as our order of them
// differs from the supplied syntax

let ty_error = || TyKind::Error.intern(Interner).cast(Interner);
let mut def_toc_iter = def_generics.iter_self_type_or_consts_id();
let fill_self_param = || {
if self_param {
let self_ty = explicit_self_ty.map(|x| x.cast(Interner)).unwrap_or_else(ty_error);

if let Some(id) = def_toc_iter.next() {
assert!(matches!(id, GenericParamId::TypeParamId(_)));
substs.push(self_ty);
}
}
};
let mut had_explicit_args = false;

if let Some(&GenericArgs { ref args, has_self_type, .. }) = args_and_bindings {
// Fill in the self param first
if has_self_type && self_param {
had_explicit_args = true;
if let Some(id) = def_toc_iter.next() {
assert!(matches!(id, GenericParamId::TypeParamId(_)));
had_explicit_args = true;
if let GenericArg::Type(ty) = &args[0] {
substs.push(self.ctx.lower_ty(*ty).cast(Interner));
}
}
} else {
fill_self_param()
};

// Then fill in the supplied lifetime args, or error lifetimes if there are too few
// (default lifetimes aren't a thing)
for arg in args
.iter()
.filter_map(|arg| match arg {
GenericArg::Lifetime(arg) => Some(self.ctx.lower_lifetime(arg)),
_ => None,
})
.chain(iter::repeat(error_lifetime()))
.take(lifetime_params)
{
substs.push(arg.cast(Interner));
}

let skip = if has_self_type { 1 } else { 0 };
// Fill in supplied type and const args
// Note if non-lifetime args are provided, it should be all of them, but we can't rely on that
for (arg, id) in args
.iter()
.filter(|arg| !matches!(arg, GenericArg::Lifetime(_)))
.skip(skip)
.take(type_params + const_params)
.zip(def_toc_iter)
{
had_explicit_args = true;
let arg = generic_arg_to_chalk(
self.ctx.db,
id,
arg,
self.ctx,
self.ctx.types_map,
|ctx, type_ref| ctx.lower_ty(type_ref),
|ctx, const_ref, ty| ctx.lower_const(const_ref, ty),
|ctx, lifetime_ref| ctx.lower_lifetime(lifetime_ref),
);
substs.push(arg);
}
} else {
fill_self_param();
}

let param_to_err = |id| match id {
GenericParamId::ConstParamId(x) => {
unknown_const_as_generic(self.ctx.db.const_param_ty(x))
}
GenericParamId::TypeParamId(_) => ty_error(),
GenericParamId::LifetimeParamId(_) => error_lifetime().cast(Interner),
};
// handle defaults. In expression or pattern path segments without
// explicitly specified type arguments, missing type arguments are inferred
// (i.e. defaults aren't used).
// Generic parameters for associated types are not supposed to have defaults, so we just
// ignore them.
let is_assoc_ty = || match def {
GenericDefId::TypeAliasId(id) => {
matches!(id.lookup(self.ctx.db.upcast()).container, ItemContainerId::TraitId(_))
}
_ => false,
};
let fill_defaults = (!infer_args || had_explicit_args) && !is_assoc_ty();
if fill_defaults {
let defaults = &*self.ctx.db.generic_defaults(def);
let (item, _parent) = defaults.split_at(item_len);
let parent_from = item_len - substs.len();

let mut rem =
def_generics.iter_id().skip(substs.len()).map(param_to_err).collect::<Vec<_>>();
// Fill in defaults for type/const params
for (idx, default_ty) in item[substs.len()..].iter().enumerate() {
// each default can depend on the previous parameters
let substs_so_far = Substitution::from_iter(
Interner,
substs.iter().cloned().chain(rem[idx..].iter().cloned()),
);
substs.push(default_ty.clone().substitute(Interner, &substs_so_far));
}
// Fill in remaining parent params
substs.extend(rem.drain(parent_from..));
} else {
// Fill in remaining def params and parent params
substs.extend(def_generics.iter_id().skip(substs.len()).map(param_to_err));
}

assert_eq!(substs.len(), total_len, "expected {} substs, got {}", total_len, substs.len());
Substitution::from_iter(Interner, substs)
}
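The left-to-right substitution in the defaults loop exists because a default may mention earlier parameters; a small illustration:

// `U`'s default mentions `T`, so `T` must already be known (substituted)
// before the default for `U` can be instantiated.
struct Pair<T, U = Vec<T>> {
    first: T,
    rest: U,
}

fn main() {
    // Only `T` is supplied; `U` defaults to `Vec<i32>`.
    let p: Pair<i32> = Pair { first: 1, rest: vec![2, 3] };
    assert_eq!(p.rest.len(), 2);
}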

pub(crate) fn lower_trait_ref_from_resolved_path(
&mut self,
resolved: TraitId,
explicit_self_ty: Ty,
) -> TraitRef {
let substs = self.trait_ref_substs_from_path(resolved, explicit_self_ty);
TraitRef { trait_id: to_chalk_trait_id(resolved), substitution: substs }
}

fn trait_ref_substs_from_path(
&mut self,
resolved: TraitId,
explicit_self_ty: Ty,
) -> Substitution {
self.substs_from_path_segment(resolved.into(), false, Some(explicit_self_ty))
}

pub(super) fn assoc_type_bindings_from_type_bound<'c>(
mut self,
bound: &'c TypeBound,
trait_ref: TraitRef,
) -> Option<impl Iterator<Item = QuantifiedWhereClause> + use<'a, 'b, 'c>> {
self.current_or_prev_segment.args_and_bindings.map(|args_and_bindings| {
args_and_bindings.bindings.iter().flat_map(move |binding| {
let found = associated_type_by_name_including_super_traits(
self.ctx.db,
trait_ref.clone(),
&binding.name,
);
let (super_trait_ref, associated_ty) = match found {
None => return SmallVec::new(),
Some(t) => t,
};
// FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent
// generic params. It's inefficient to splice the `Substitution`s, so we may want
// that method to optionally take parent `Substitution` as we already know them at
// this point (`super_trait_ref.substitution`).
let substitution = self.substs_from_args_and_bindings(
binding.args.as_ref(),
associated_ty.into(),
false, // this is not relevant
Some(super_trait_ref.self_type_parameter(Interner)),
);
let self_params = generics(self.ctx.db.upcast(), associated_ty.into()).len_self();
let substitution = Substitution::from_iter(
Interner,
substitution
.iter(Interner)
.take(self_params)
.chain(super_trait_ref.substitution.iter(Interner)),
);
let projection_ty = ProjectionTy {
associated_ty_id: to_assoc_type_id(associated_ty),
substitution,
};
let mut predicates: SmallVec<[_; 1]> = SmallVec::with_capacity(
binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(),
);
if let Some(type_ref) = binding.type_ref {
match (&self.ctx.types_map[type_ref], self.ctx.impl_trait_mode.mode) {
(TypeRef::ImplTrait(_), ImplTraitLoweringMode::Disallowed) => (),
(_, ImplTraitLoweringMode::Disallowed | ImplTraitLoweringMode::Opaque) => {
let ty = self.ctx.lower_ty(type_ref);
let alias_eq =
AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty };
predicates
.push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq)));
}
(_, ImplTraitLoweringMode::Param | ImplTraitLoweringMode::Variable) => {
// Find the generic index for the target of our `bound`
let target_param_idx =
self.ctx.resolver.where_predicates_in_scope().find_map(|(p, _)| {
match p {
WherePredicate::TypeBound {
target: WherePredicateTypeTarget::TypeOrConstParam(idx),
bound: b,
} if b == bound => Some(idx),
_ => None,
}
});
let ty = if let Some(target_param_idx) = target_param_idx {
let mut counter = 0;
let generics = self.ctx.generics().expect("generics in scope");
for (idx, data) in generics.iter_self_type_or_consts() {
// Count the number of `impl Trait` things that appear before
// the target of our `bound`.
// Our counter within `impl_trait_mode` should be that number
// to properly lower each type within `type_ref`
if data.type_param().is_some_and(|p| {
p.provenance == TypeParamProvenance::ArgumentImplTrait
}) {
counter += 1;
}
if idx == *target_param_idx {
break;
}
}
let mut ext = TyLoweringContext::new_maybe_unowned(
self.ctx.db,
self.ctx.resolver,
self.ctx.types_map,
self.ctx.types_source_map,
self.ctx.owner,
)
.with_type_param_mode(self.ctx.type_param_mode);
match self.ctx.impl_trait_mode.mode {
ImplTraitLoweringMode::Param => {
ext.impl_trait_mode =
ImplTraitLoweringState::param(counter);
}
ImplTraitLoweringMode::Variable => {
ext.impl_trait_mode =
ImplTraitLoweringState::variable(counter);
}
_ => unreachable!(),
}
let ty = ext.lower_ty(type_ref);
self.ctx.diagnostics.extend(ext.diagnostics);
ty
} else {
self.ctx.lower_ty(type_ref)
};

let alias_eq =
AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty };
predicates
.push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq)));
}
}
}
for bound in binding.bounds.iter() {
predicates.extend(self.ctx.lower_type_bound(
bound,
TyKind::Alias(AliasTy::Projection(projection_ty.clone())).intern(Interner),
false,
));
}
predicates
})
})
}
}
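What that binding lowering corresponds to in surface syntax: an associated type binding such as `Item = u32` becomes an `AliasEq` constraint on the projection. For example:

// `dyn Iterator<Item = u32>` constrains the projection `<_ as Iterator>::Item`
// to `u32`, which is exactly the `AliasEq` clause built above.
fn total(iter: Box<dyn Iterator<Item = u32>>) -> u32 {
    iter.sum()
}

fn main() {
    assert_eq!(total(Box::new([1u32, 2, 3].into_iter())), 6);
}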

@ -10,7 +10,7 @@ use crate::{
lang_items::is_box,
mapping::ToChalk,
CallableDefId, ClosureId, Const, ConstScalar, InferenceResult, Interner, MemoryMap,
Substitution, TraitEnvironment, Ty, TyKind,
Substitution, TraitEnvironment, Ty, TyExt, TyKind,
};
use base_db::CrateId;
use chalk_ir::Mutability;
@ -144,6 +144,13 @@ impl<V, T> ProjectionElem<V, T> {
closure_field: impl FnOnce(ClosureId, &Substitution, usize) -> Ty,
krate: CrateId,
) -> Ty {
// we only bail on mir building when there are type mismatches
// but error types may pop up resulting in us still attempting to build the mir
// so just propagate the error type
if base.is_unknown() {
return TyKind::Error.intern(Interner);
}

if matches!(base.kind(Interner), TyKind::Alias(_) | TyKind::AssociatedType(..)) {
base = normalize(
db,
@ -166,7 +173,7 @@ impl<V, T> ProjectionElem<V, T> {
TyKind::Error.intern(Interner)
}
},
ProjectionElem::Field(Either::Left(f)) => match &base.kind(Interner) {
ProjectionElem::Field(Either::Left(f)) => match base.kind(Interner) {
TyKind::Adt(_, subst) => {
db.field_types(f.parent)[f.local_id].clone().substitute(Interner, subst)
}

@ -912,3 +912,36 @@ fn main() {
"",
);
}

#[test]
fn regression_19021() {
check_pass(
r#"
//- minicore: deref
use core::ops::Deref;

#[lang = "owned_box"]
struct Box<T>(T);

impl<T> Deref for Box<T> {
type Target = T;

fn deref(&self) -> &Self::Target {
&self.0
}
}

struct Foo;

fn main() {
let x = Box(Foo);
let y = &Foo;

|| match x {
ref x => x,
_ => y,
};
}
"#,
);
}

@ -8,8 +8,8 @@ use hir_def::{
data::adt::{StructKind, VariantData},
expr_store::{Body, HygieneId},
hir::{
ArithOp, Array, BinaryOp, BindingAnnotation, BindingId, ExprId, LabelId, Literal,
LiteralOrConst, MatchArm, Pat, PatId, RecordFieldPat, RecordLitField,
ArithOp, Array, BinaryOp, BindingAnnotation, BindingId, ExprId, LabelId, Literal, MatchArm,
Pat, PatId, RecordFieldPat, RecordLitField,
},
lang_item::{LangItem, LangItemTarget},
path::Path,
@ -1358,20 +1358,10 @@ impl<'ctx> MirLowerCtx<'ctx> {
Ok(())
}

fn lower_literal_or_const_to_operand(
&mut self,
ty: Ty,
loc: &LiteralOrConst,
) -> Result<Operand> {
match loc {
LiteralOrConst::Literal(l) => self.lower_literal_to_operand(ty, l),
LiteralOrConst::Const(c) => {
let c = match &self.body.pats[*c] {
Pat::Path(p) => p,
_ => not_supported!(
"only `char` and numeric types are allowed in range patterns"
),
};
fn lower_literal_or_const_to_operand(&mut self, ty: Ty, loc: &ExprId) -> Result<Operand> {
match &self.body.exprs[*loc] {
Expr::Literal(l) => self.lower_literal_to_operand(ty, l),
Expr::Path(c) => {
let edition = self.edition();
let unresolved_name =
|| MirLowerError::unresolved_path(self.db, c, edition, &self.body.types);
@ -1392,6 +1382,9 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
}
}
_ => {
not_supported!("only `char` and numeric types are allowed in range patterns");
}
}
}
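The `not_supported!` message reflects a language rule: range-pattern endpoints must be chars or numbers, though they may be spelled as paths to constants (which is why the lowering accepts a path expression). For instance:

const HIGH: i32 = 10;

fn classify(x: i32) -> &'static str {
    match x {
        0..=4 => "low",
        // A path to a numeric const is a valid endpoint.
        5..=HIGH => "high",
        _ => "out of range",
    }
}

fn main() {
    assert_eq!(classify(7), "high");
}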

@ -1,6 +1,6 @@
//! MIR lowering for patterns

use hir_def::{hir::LiteralOrConst, AssocItemId};
use hir_def::{hir::ExprId, AssocItemId};

use crate::{
mir::{
@ -207,7 +207,7 @@ impl MirLowerCtx<'_> {
)?
}
Pat::Range { start, end } => {
let mut add_check = |l: &LiteralOrConst, binop| -> Result<()> {
let mut add_check = |l: &ExprId, binop| -> Result<()> {
let lv =
self.lower_literal_or_const_to_operand(self.infer[pattern].clone(), l)?;
let else_target = *current_else.get_or_insert_with(|| self.new_basic_block());

@ -117,7 +117,7 @@ fn check_impl(
expected.trim_start_matches("adjustments:").trim().to_owned(),
);
} else {
panic!("unexpected annotation: {expected}");
panic!("unexpected annotation: {expected} @ {range:?}");
}
had_annotations = true;
}

@ -185,11 +185,10 @@ fn test() {
let t = &mut 1;
let x = match 1 {
1 => t as *mut i32,
//^^^^^^^^^^^^^ adjustments: Pointer(MutToConstPointer)
2 => t as &i32,
//^^^^^^^^^ expected *mut i32, got &'? i32
_ => t as *const i32,
// ^^^^^^^^^^^^^^^ adjustments: Pointer(MutToConstPointer)

};
x;
//^ type: *const i32
@ -536,7 +535,7 @@ fn test() {

#[test]
fn coerce_unsize_generic() {
check(
check_no_mismatches(
r#"
//- minicore: coerce_unsized
struct Foo<T> { t: T };
@ -544,9 +543,7 @@ struct Bar<T>(Foo<T>);

fn test() {
let _: &Foo<[usize]> = &Foo { t: [1, 2, 3] };
//^^^^^^^^^^^^^^^^^^^^^ expected &'? Foo<[usize]>, got &'? Foo<[i32; 3]>
let _: &Bar<[usize]> = &Bar(Foo { t: [1, 2, 3] });
//^^^^^^^^^^^^^^^^^^^^^^^^^^ expected &'? Bar<[usize]>, got &'? Bar<[i32; 3]>
}
"#,
);
@ -958,3 +955,24 @@ fn f() {
"#,
);
}

#[test]
fn coerce_nested_unsized_struct() {
check_types(
r#"
//- minicore: fn, coerce_unsized, dispatch_from_dyn, sized
use core::marker::Unsize;

struct Foo<T: ?Sized>(T);

fn need(_: &Foo<dyn Fn(i32) -> i32>) {
}

fn test() {
let callback = |x| x;
//^ i32
need(&Foo(callback));
}
"#,
)
}

@ -153,3 +153,53 @@ fn consume() -> Option<()> {
"#,
);
}

#[test]
fn method_call_on_field() {
check(
r#"
struct S {
field: fn(f32) -> u32,
field2: u32
}

fn main() {
let s = S { field: |_| 0, field2: 0 };
s.field(0);
// ^ expected f32, got i32
// ^^^^^^^^^^ type: u32
s.field2(0);
// ^ type: i32
// ^^^^^^^^^^^ type: {unknown}
s.not_a_field(0);
// ^ type: i32
// ^^^^^^^^^^^^^^^^ type: {unknown}
}
"#,
);
}
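The ambiguity this test pins down is a classic one: `expr.field(..)` always parses as a method call, so invoking a function-typed field takes parentheses.

struct S {
    field: fn(f32) -> u32,
}

fn main() {
    let s = S { field: |x| x as u32 };
    // `s.field(1.5)` would look for a *method* named `field`;
    // parenthesizing selects the field and then calls it.
    let n = (s.field)(1.5);
    assert_eq!(n, 1);
}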

#[test]
fn method_call_on_assoc() {
check(
r#"
struct S;

impl S {
fn not_a_method() -> f32 { 0.0 }
fn not_a_method2(this: Self, param: f32) -> Self { this }
fn not_a_method3(param: f32) -> Self { S }
}

fn main() {
S.not_a_method(0);
// ^^^^^^^^^^^^^^^^^ type: f32
S.not_a_method2(0);
// ^ expected f32, got i32
// ^^^^^^^^^^^^^^^^^^ type: S
S.not_a_method3(0);
// ^^^^^^^^^^^^^^^^^^ type: S
}
"#,
);
}

@ -1210,7 +1210,7 @@ impl<T> Slice<T> {
fn main() {
let foo: Slice<u32>;
foo.into_vec(); // we shouldn't crash on this at least
} //^^^^^^^^^^^^^^ {unknown}
} //^^^^^^^^^^^^^^ ()
"#,
);
}

@ -4694,21 +4694,21 @@ fn f<T: Send, U>() {
Struct::<T>::IS_SEND;
//^^^^^^^^^^^^^^^^^^^^Yes
Struct::<U>::IS_SEND;
//^^^^^^^^^^^^^^^^^^^^Yes
//^^^^^^^^^^^^^^^^^^^^{unknown}
Struct::<*const T>::IS_SEND;
//^^^^^^^^^^^^^^^^^^^^^^^^^^^Yes
//^^^^^^^^^^^^^^^^^^^^^^^^^^^{unknown}
Enum::<T>::IS_SEND;
//^^^^^^^^^^^^^^^^^^Yes
Enum::<U>::IS_SEND;
//^^^^^^^^^^^^^^^^^^Yes
//^^^^^^^^^^^^^^^^^^{unknown}
Enum::<*const T>::IS_SEND;
//^^^^^^^^^^^^^^^^^^^^^^^^^Yes
//^^^^^^^^^^^^^^^^^^^^^^^^^{unknown}
Union::<T>::IS_SEND;
//^^^^^^^^^^^^^^^^^^^Yes
Union::<U>::IS_SEND;
//^^^^^^^^^^^^^^^^^^^Yes
//^^^^^^^^^^^^^^^^^^^{unknown}
Union::<*const T>::IS_SEND;
//^^^^^^^^^^^^^^^^^^^^^^^^^^Yes
//^^^^^^^^^^^^^^^^^^^^^^^^^^{unknown}
PhantomData::<T>::IS_SEND;
//^^^^^^^^^^^^^^^^^^^^^^^^^Yes
PhantomData::<U>::IS_SEND;
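The annotations flip from `Yes` to `{unknown}` for `U` presumably because the associated const only exists under a `Send` bound; a sketch of the shape such a test exercises (assumed, simplified definitions, not the test's actual fixture):

struct Struct<T>(T);

impl<T: Send> Struct<T> {
    // Only nameable when `T: Send` can be proven.
    const IS_SEND: () = ();
}

fn f<T: Send, U>() {
    let _ = Struct::<T>::IS_SEND; // resolves: `T: Send` is in scope
    // `Struct::<U>::IS_SEND` would not resolve: nothing proves `U: Send`.
}

fn main() {
    f::<i32, *const i32>();
}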
|
||||
|
@ -6,10 +6,11 @@
|
||||
use cfg::{CfgExpr, CfgOptions};
|
||||
use either::Either;
|
||||
use hir_def::{
|
||||
expr_store::ExprOrPatPtr,
|
||||
hir::ExprOrPatId,
|
||||
path::{hir_segment_to_ast_segment, ModPath},
|
||||
type_ref::TypesSourceMap,
|
||||
AssocItemId, DefWithBodyId, SyntheticSyntax,
|
||||
DefWithBodyId, SyntheticSyntax,
|
||||
};
|
||||
use hir_expand::{name::Name, HirFileId, InFile};
|
||||
use hir_ty::{
|
||||
@ -24,7 +25,7 @@ use syntax::{
|
||||
};
|
||||
use triomphe::Arc;
|
||||
|
||||
use crate::{AssocItem, Field, Local, Trait, Type};
|
||||
use crate::{AssocItem, Field, Function, Local, Trait, Type};
|
||||
|
||||
pub use hir_def::VariantId;
|
||||
pub use hir_ty::{
|
||||
@ -111,18 +112,19 @@ diagnostics![
|
||||
UnusedMut,
|
||||
UnusedVariable,
|
||||
GenericArgsProhibited,
|
||||
ParenthesizedGenericArgsWithoutFnTrait,
|
||||
];
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct BreakOutsideOfLoop {
|
||||
pub expr: InFile<AstPtr<ast::Expr>>,
|
||||
pub expr: InFile<ExprOrPatPtr>,
|
||||
pub is_break: bool,
|
||||
pub bad_value_break: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct TypedHole {
|
||||
pub expr: InFile<AstPtr<ast::Expr>>,
|
||||
pub expr: InFile<ExprOrPatPtr>,
|
||||
pub expected: Type,
|
||||
}
|
||||
|
||||
@ -221,26 +223,26 @@ pub struct NoSuchField {
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct PrivateAssocItem {
|
||||
pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
|
||||
pub expr_or_pat: InFile<ExprOrPatPtr>,
|
||||
pub item: AssocItem,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct MismatchedTupleStructPatArgCount {
|
||||
pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
|
||||
pub expr_or_pat: InFile<ExprOrPatPtr>,
|
||||
pub expected: usize,
|
||||
pub found: usize,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ExpectedFunction {
|
||||
pub call: InFile<AstPtr<ast::Expr>>,
|
||||
pub call: InFile<ExprOrPatPtr>,
|
||||
pub found: Type,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct UnresolvedField {
|
||||
pub expr: InFile<AstPtr<ast::Expr>>,
|
||||
pub expr: InFile<ExprOrPatPtr>,
|
||||
pub receiver: Type,
|
||||
pub name: Name,
|
||||
pub method_with_same_name_exists: bool,
|
||||
@ -248,26 +250,26 @@ pub struct UnresolvedField {
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct UnresolvedMethodCall {
|
||||
pub expr: InFile<AstPtr<ast::Expr>>,
|
||||
pub expr: InFile<ExprOrPatPtr>,
|
||||
pub receiver: Type,
|
||||
pub name: Name,
|
||||
pub field_with_same_name: Option<Type>,
|
||||
pub assoc_func_with_same_name: Option<AssocItemId>,
|
||||
pub assoc_func_with_same_name: Option<Function>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct UnresolvedAssocItem {
|
||||
pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
|
||||
pub expr_or_pat: InFile<ExprOrPatPtr>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct UnresolvedIdent {
|
||||
pub node: InFile<(AstPtr<Either<ast::Expr, ast::Pat>>, Option<TextRange>)>,
|
||||
pub node: InFile<(ExprOrPatPtr, Option<TextRange>)>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct PrivateField {
|
||||
pub expr: InFile<AstPtr<ast::Expr>>,
|
||||
pub expr: InFile<ExprOrPatPtr>,
|
||||
pub field: Field,
|
||||
}
|
||||
|
||||
@ -280,7 +282,7 @@ pub enum UnsafeLint {
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct MissingUnsafe {
|
||||
pub node: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
|
||||
pub node: InFile<ExprOrPatPtr>,
|
||||
pub lint: UnsafeLint,
|
||||
pub reason: UnsafetyReason,
|
||||
}
|
||||
@ -302,7 +304,7 @@ pub struct ReplaceFilterMapNextWithFindMap {
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct MismatchedArgCount {
|
||||
pub call_expr: InFile<AstPtr<ast::Expr>>,
|
||||
pub call_expr: InFile<ExprOrPatPtr>,
|
||||
pub expected: usize,
|
||||
pub found: usize,
|
||||
}
|
||||
@ -321,7 +323,7 @@ pub struct NonExhaustiveLet {
|
||||
|
||||
#[derive(Debug)]
pub struct TypeMismatch {
pub expr_or_pat: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
pub expr_or_pat: InFile<ExprOrPatPtr>,
pub expected: Type,
pub actual: Type,
}
@ -395,13 +397,13 @@ pub struct RemoveUnnecessaryElse

#[derive(Debug)]
pub struct CastToUnsized {
pub expr: InFile<AstPtr<ast::Expr>>,
pub expr: InFile<ExprOrPatPtr>,
pub cast_ty: Type,
}

#[derive(Debug)]
pub struct InvalidCast {
pub expr: InFile<AstPtr<ast::Expr>>,
pub expr: InFile<ExprOrPatPtr>,
pub error: CastError,
pub expr_ty: Type,
pub cast_ty: Type,
@ -413,6 +415,11 @@ pub struct GenericArgsProhibited {
pub reason: GenericArgsProhibitedReason,
}

#[derive(Debug)]
pub struct ParenthesizedGenericArgsWithoutFnTrait {
pub args: InFile<AstPtr<ast::ParenthesizedArgList>>,
}

impl AnyDiagnostic {
pub(crate) fn body_validation_diagnostic(
db: &dyn HirDatabase,
@ -428,9 +435,7 @@ impl AnyDiagnostic {
.collect();

let record = match record {
Either::Left(record_expr) => {
source_map.expr_syntax(record_expr).ok()?.map(AstPtr::wrap_left)
}
Either::Left(record_expr) => source_map.expr_syntax(record_expr).ok()?,
Either::Right(record_pat) => source_map.pat_syntax(record_pat).ok()?,
};
let file = record.file_id;
@ -474,7 +479,7 @@ impl AnyDiagnostic {
return Some(
ReplaceFilterMapNextWithFindMap {
file: next_source_ptr.file_id,
next_expr: next_source_ptr.value,
next_expr: next_source_ptr.value.cast()?,
}
.into(),
);
@ -484,7 +489,9 @@ impl AnyDiagnostic {
match source_map.expr_syntax(match_expr) {
Ok(source_ptr) => {
let root = source_ptr.file_syntax(db.upcast());
if let ast::Expr::MatchExpr(match_expr) = &source_ptr.value.to_node(&root) {
if let Either::Left(ast::Expr::MatchExpr(match_expr)) =
&source_ptr.value.to_node(&root)
{
match match_expr.expr() {
Some(scrut_expr) if match_expr.match_arm_list().is_some() => {
return Some(
@ -561,7 +568,7 @@ impl AnyDiagnostic {
let pat_syntax =
|pat| source_map.pat_syntax(pat).inspect_err(|_| stdx::never!("synthetic syntax")).ok();
let expr_or_pat_syntax = |id| match id {
ExprOrPatId::ExprId(expr) => expr_syntax(expr).map(|it| it.map(AstPtr::wrap_left)),
ExprOrPatId::ExprId(expr) => expr_syntax(expr),
ExprOrPatId::PatId(pat) => pat_syntax(pat),
};
Some(match d {
@ -622,7 +629,7 @@ impl AnyDiagnostic {
field_with_same_name: field_with_same_name
.clone()
.map(|ty| Type::new(db, def, ty)),
assoc_func_with_same_name: *assoc_func_with_same_name,
assoc_func_with_same_name: assoc_func_with_same_name.map(Into::into),
}
.into()
}
@ -633,7 +640,7 @@ impl AnyDiagnostic {
&InferenceDiagnostic::UnresolvedIdent { id } => {
let node = match id {
ExprOrPatId::ExprId(id) => match source_map.expr_syntax(id) {
Ok(syntax) => syntax.map(|it| (it.wrap_left(), None)),
Ok(syntax) => syntax.map(|it| (it, None)),
Err(SyntheticSyntax) => source_map
.format_args_implicit_capture(id)?
.map(|(node, range)| (node.wrap_left(), Some(range))),
@ -652,7 +659,7 @@ impl AnyDiagnostic {
}
&InferenceDiagnostic::MismatchedTupleStructPatArgCount { pat, expected, found } => {
let expr_or_pat = match pat {
ExprOrPatId::ExprId(expr) => expr_syntax(expr)?.map(AstPtr::wrap_left),
ExprOrPatId::ExprId(expr) => expr_syntax(expr)?,
ExprOrPatId::PatId(pat) => {
let InFile { file_id, value } = pat_syntax(pat)?;

@ -702,8 +709,8 @@ impl AnyDiagnostic {
diag: &PathLoweringDiagnostic,
path: InFile<ast::Path>,
) -> Option<AnyDiagnostic> {
Some(match diag {
&PathLoweringDiagnostic::GenericArgsProhibited { segment, reason } => {
Some(match *diag {
PathLoweringDiagnostic::GenericArgsProhibited { segment, reason } => {
let segment = hir_segment_to_ast_segment(&path.value, segment)?;
let args = if let Some(generics) = segment.generic_arg_list() {
AstPtr::new(&generics).wrap_left()
@ -713,6 +720,12 @@ impl AnyDiagnostic {
let args = path.with_value(args);
GenericArgsProhibited { args, reason }.into()
}
PathLoweringDiagnostic::ParenthesizedGenericArgsWithoutFnTrait { segment } => {
let segment = hir_segment_to_ast_segment(&path.value, segment)?;
let args = AstPtr::new(&segment.parenthesized_arg_list()?);
let args = path.with_value(args);
ParenthesizedGenericArgsWithoutFnTrait { args }.into()
}
})
}

@ -49,6 +49,7 @@ from_id![
(hir_def::LifetimeParamId, crate::LifetimeParam),
(hir_def::MacroId, crate::Macro),
(hir_def::ExternCrateId, crate::ExternCrateDecl),
(hir_def::ExternBlockId, crate::ExternBlock),
];

impl From<AdtId> for Adt {

@ -248,7 +248,7 @@ impl HasSource for Param {
let ast @ InFile { file_id, value } = source_map.expr_syntax(expr_id).ok()?;
let root = db.parse_or_expand(file_id);
match value.to_node(&root) {
ast::Expr::ClosureExpr(it) => it
Either::Left(ast::Expr::ClosureExpr(it)) => it
.param_list()?
.params()
.nth(self.idx)
@ -301,7 +301,7 @@ impl HasSource for InlineAsmOperand {
let root = src.file_syntax(db.upcast());
return src
.map(|ast| match ast.to_node(&root) {
ast::Expr::AsmExpr(asm) => asm
Either::Left(ast::Expr::AsmExpr(asm)) => asm
.asm_pieces()
.filter_map(|it| match it {
ast::AsmPiece::AsmOperandNamed(it) => Some(it),

@ -55,8 +55,8 @@ use hir_def::{
resolver::{HasResolver, Resolver},
type_ref::TypesSourceMap,
AdtId, AssocItemId, AssocItemLoc, AttrDefId, CallableDefId, ConstId, ConstParamId,
CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId, ExternCrateId, FunctionId,
GenericDefId, GenericParamId, HasModule, ImplId, InTypeConstId, ItemContainerId,
CrateRootModuleId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, ExternCrateId,
FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, InTypeConstId, ItemContainerId,
LifetimeParamId, LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId,
SyntheticSyntax, TraitAliasId, TupleId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
};
@ -1957,7 +1957,7 @@ impl DefWithBody {
ExprOrPatId::PatId(pat) => source_map.pat_syntax(pat).map(Either::Right),
};
let expr_or_pat = match expr_or_pat {
Ok(Either::Left(expr)) => expr.map(AstPtr::wrap_left),
Ok(Either::Left(expr)) => expr,
Ok(Either::Right(InFile { file_id, value: pat })) => {
// cast from Either<Pat, SelfParam> -> Either<_, Pat>
let Some(ptr) = AstPtr::try_from_raw(pat.syntax_node_ptr()) else {
@ -2003,7 +2003,7 @@ impl DefWithBody {
match source_map.expr_syntax(node) {
Ok(node) => acc.push(
MissingUnsafe {
node: node.map(|it| it.wrap_left()),
node,
lint: UnsafeLint::DeprecatedSafe2024,
reason: UnsafetyReason::UnsafeFnCall,
}
@ -2327,6 +2327,13 @@ impl Function {
db.function_data(self.id).is_async()
}

pub fn extern_block(self, db: &dyn HirDatabase) -> Option<ExternBlock> {
match self.id.lookup(db.upcast()).container {
ItemContainerId::ExternBlockId(id) => Some(ExternBlock { id }),
_ => None,
}
}

pub fn returns_impl_future(self, db: &dyn HirDatabase) -> bool {
if self.is_async(db) {
return true;
@ -2761,6 +2768,13 @@ impl Static {
Type::from_value_def(db, self.id)
}

pub fn extern_block(self, db: &dyn HirDatabase) -> Option<ExternBlock> {
match self.id.lookup(db.upcast()).container {
ItemContainerId::ExternBlockId(id) => Some(ExternBlock { id }),
_ => None,
}
}

/// Evaluate the static initializer.
pub fn eval(self, db: &dyn HirDatabase) -> Result<EvaluatedConst, ConstEvalError> {
db.const_eval(self.id.into(), Substitution::empty(Interner), None)
@ -2928,6 +2942,17 @@ impl HasVisibility for TypeAlias {
}
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ExternBlock {
pub(crate) id: ExternBlockId,
}

impl ExternBlock {
pub fn module(self, db: &dyn HirDatabase) -> Module {
Module { id: self.id.module(db.upcast()) }
}
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StaticLifetime;

@ -4592,10 +4617,7 @@ impl CaptureUsages {
match span {
mir::MirSpan::ExprId(expr) => {
if let Ok(expr) = source_map.expr_syntax(expr) {
result.push(CaptureUsageSource {
is_ref,
source: expr.map(AstPtr::wrap_left),
})
result.push(CaptureUsageSource { is_ref, source: expr })
}
}
mir::MirSpan::PatId(pat) => {
@ -6180,9 +6202,15 @@ impl HasContainer for TraitAlias {
}
}

impl HasContainer for ExternBlock {
fn container(&self, db: &dyn HirDatabase) -> ItemContainer {
ItemContainer::Module(Module { id: self.id.lookup(db.upcast()).container })
}
}

fn container_id_to_hir(c: ItemContainerId) -> ItemContainer {
match c {
ItemContainerId::ExternBlockId(_id) => ItemContainer::ExternBlock(),
ItemContainerId::ExternBlockId(id) => ItemContainer::ExternBlock(ExternBlock { id }),
ItemContainerId::ModuleId(id) => ItemContainer::Module(Module { id }),
ItemContainerId::ImplId(id) => ItemContainer::Impl(Impl { id }),
ItemContainerId::TraitId(id) => ItemContainer::Trait(Trait { id }),
@ -6194,7 +6222,7 @@ pub enum ItemContainer {
Trait(Trait),
Impl(Impl),
Module(Module),
ExternBlock(),
ExternBlock(ExternBlock),
Crate(CrateId),
}

@ -1998,6 +1998,7 @@ to_def_impls![
(crate::Adt, ast::Adt, adt_to_def),
(crate::ExternCrateDecl, ast::ExternCrate, extern_crate_to_def),
(crate::InlineAsmOperand, ast::AsmOperandNamed, asm_operand_to_def),
(crate::ExternBlock, ast::ExternBlock, extern_block_to_def),
(MacroCallId, ast::MacroCall, macro_call_to_macro_call),
];

@ -74,6 +74,9 @@ impl ChildBySource for ItemScope {
fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
self.declarations().for_each(|item| add_module_def(db, res, file_id, item));
self.impls().for_each(|imp| insert_item_loc(db, res, file_id, imp, keys::IMPL));
self.extern_blocks().for_each(|extern_block| {
insert_item_loc(db, res, file_id, extern_block, keys::EXTERN_BLOCK)
});
self.extern_crate_decls()
.for_each(|ext| insert_item_loc(db, res, file_id, ext, keys::EXTERN_CRATE));
self.use_decls().for_each(|ext| insert_item_loc(db, res, file_id, ext, keys::USE));

@ -92,10 +92,10 @@ use hir_def::{
DynMap,
},
hir::{BindingId, Expr, LabelId},
AdtId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternCrateId,
FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, Lookup, MacroId,
ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId,
VariantId,
AdtId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId,
ExternCrateId, FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId,
Lookup, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId,
UnionId, UseId, VariantId,
};
use hir_expand::{
attrs::AttrId, name::AsName, ExpansionInfo, HirFileId, HirFileIdExt, InMacroFile, MacroCallId,
@ -308,6 +308,12 @@ impl SourceToDefCtx<'_, '_> {
) -> Option<ExternCrateId> {
self.to_def(src, keys::EXTERN_CRATE)
}
pub(super) fn extern_block_to_def(
&mut self,
src: InFile<&ast::ExternBlock>,
) -> Option<ExternBlockId> {
self.to_def(src, keys::EXTERN_BLOCK)
}
#[allow(dead_code)]
pub(super) fn use_to_def(&mut self, src: InFile<&ast::Use>) -> Option<UseId> {
self.to_def(src, keys::USE)
@ -352,7 +358,7 @@ impl SourceToDefCtx<'_, '_> {
let src = src.cloned().map(ast::Pat::from);
let pat_id = source_map.node_pat(src.as_ref())?;
// the pattern could resolve to a constant, verify that this is not the case
if let crate::Pat::Bind { id, .. } = body[pat_id] {
if let crate::Pat::Bind { id, .. } = body[pat_id.as_pat()?] {
Some((container, id))
} else {
None

@ -18,7 +18,7 @@ use hir_def::{
scope::{ExprScopes, ScopeId},
Body, BodySourceMap, HygieneId,
},
hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat, PatId},
hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat},
lang_item::LangItem,
lower::LowerCtx,
nameres::MacroSubNs,
@ -139,7 +139,7 @@ impl SourceAnalyzer {
sm.node_expr(src.as_ref())
}

fn pat_id(&self, pat: &ast::Pat) -> Option<PatId> {
fn pat_id(&self, pat: &ast::Pat) -> Option<ExprOrPatId> {
// FIXME: macros, see `expr_id`
let src = InFile { file_id: self.file_id, value: pat };
self.body_source_map()?.node_pat(src)
@ -147,7 +147,7 @@ impl SourceAnalyzer {

fn binding_id_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingId> {
let pat_id = self.pat_id(&pat.clone().into())?;
if let Pat::Bind { id, .. } = self.body()?.pats[pat_id] {
if let Pat::Bind { id, .. } = self.body()?.pats[pat_id.as_pat()?] {
Some(id)
} else {
None
@ -210,11 +210,20 @@ impl SourceAnalyzer {
db: &dyn HirDatabase,
pat: &ast::Pat,
) -> Option<(Type, Option<Type>)> {
let pat_id = self.pat_id(pat)?;
let expr_or_pat_id = self.pat_id(pat)?;
let infer = self.infer.as_ref()?;
let coerced =
infer.pat_adjustments.get(&pat_id).and_then(|adjusts| adjusts.last().cloned());
let ty = infer[pat_id].clone();
let coerced = match expr_or_pat_id {
ExprOrPatId::ExprId(idx) => infer
.expr_adjustments
.get(&idx)
.and_then(|adjusts| adjusts.last().cloned())
.map(|adjust| adjust.target),
ExprOrPatId::PatId(idx) => {
infer.pat_adjustments.get(&idx).and_then(|adjusts| adjusts.last().cloned())
}
};

let ty = infer[expr_or_pat_id].clone();
let mk_ty = |ty| Type::new_with_resolver(db, &self.resolver, ty);
Some((mk_ty(ty), coerced.map(mk_ty)))
}
@ -248,7 +257,7 @@ impl SourceAnalyzer {
) -> Option<BindingMode> {
let id = self.pat_id(&pat.clone().into())?;
let infer = self.infer.as_ref()?;
infer.binding_modes.get(id).map(|bm| match bm {
infer.binding_modes.get(id.as_pat()?).map(|bm| match bm {
hir_ty::BindingMode::Move => BindingMode::Move,
hir_ty::BindingMode::Ref(hir_ty::Mutability::Mut) => BindingMode::Ref(Mutability::Mut),
hir_ty::BindingMode::Ref(hir_ty::Mutability::Not) => {
@ -266,7 +275,7 @@ impl SourceAnalyzer {
Some(
infer
.pat_adjustments
.get(&pat_id)?
.get(&pat_id.as_pat()?)?
.iter()
.map(|ty| Type::new_with_resolver(db, &self.resolver, ty.clone()))
.collect(),
@ -649,10 +658,10 @@ impl SourceAnalyzer {
let field_name = field.field_name()?.as_name();
let record_pat = ast::RecordPat::cast(field.syntax().parent().and_then(|p| p.parent())?)?;
let pat_id = self.pat_id(&record_pat.into())?;
let variant = self.infer.as_ref()?.variant_resolution_for_pat(pat_id)?;
let variant = self.infer.as_ref()?.variant_resolution_for_pat(pat_id.as_pat()?)?;
let variant_data = variant.variant_data(db.upcast());
let field = FieldId { parent: variant, local_id: variant_data.field(&field_name)? };
let (adt, subst) = self.infer.as_ref()?.type_of_pat.get(pat_id)?.as_adt()?;
let (adt, subst) = self.infer.as_ref()?.type_of_pat.get(pat_id.as_pat()?)?.as_adt()?;
let field_ty =
db.field_types(variant).get(field.local_id)?.clone().substitute(Interner, subst);
Some((
@ -682,12 +691,20 @@ impl SourceAnalyzer {
db: &dyn HirDatabase,
pat: &ast::IdentPat,
) -> Option<ModuleDef> {
let pat_id = self.pat_id(&pat.clone().into())?;
let expr_or_pat_id = self.pat_id(&pat.clone().into())?;
let body = self.body()?;
let path = match &body[pat_id] {
Pat::Path(path) => path,
_ => return None,

let path = match expr_or_pat_id {
ExprOrPatId::ExprId(idx) => match &body[idx] {
Expr::Path(path) => path,
_ => return None,
},
ExprOrPatId::PatId(idx) => match &body[idx] {
Pat::Path(path) => path,
_ => return None,
},
};

let res = resolve_hir_path(db, &self.resolver, path, HygieneId::ROOT, TypesMap::EMPTY)?;
match res {
PathResolution::Def(def) => Some(def),
@ -782,8 +799,9 @@ impl SourceAnalyzer {
}
prefer_value_ns = true;
} else if let Some(path_pat) = parent().and_then(ast::PathPat::cast) {
let pat_id = self.pat_id(&path_pat.into())?;
if let Some((assoc, subs)) = infer.assoc_resolutions_for_pat(pat_id) {
let expr_or_pat_id = self.pat_id(&path_pat.into())?;
if let Some((assoc, subs)) = infer.assoc_resolutions_for_expr_or_pat(expr_or_pat_id)
{
let (assoc, subst) = match assoc {
AssocItemId::ConstId(const_id) => {
let (konst, subst) =
@ -807,7 +825,7 @@ impl SourceAnalyzer {
return Some((PathResolution::Def(AssocItem::from(assoc).into()), Some(subst)));
}
if let Some(VariantId::EnumVariantId(variant)) =
infer.variant_resolution_for_pat(pat_id)
infer.variant_resolution_for_expr_or_pat(expr_or_pat_id)
{
return Some((PathResolution::Def(ModuleDef::Variant(variant.into())), None));
}
@ -824,7 +842,7 @@ impl SourceAnalyzer {
|| parent().and_then(ast::TupleStructPat::cast).map(ast::Pat::from);
if let Some(pat) = record_pat.or_else(tuple_struct_pat) {
let pat_id = self.pat_id(&pat)?;
let variant_res_for_pat = infer.variant_resolution_for_pat(pat_id);
let variant_res_for_pat = infer.variant_resolution_for_pat(pat_id.as_pat()?);
if let Some(VariantId::EnumVariantId(variant)) = variant_res_for_pat {
return Some((
PathResolution::Def(ModuleDef::Variant(variant.into())),
@ -866,7 +884,8 @@ impl SourceAnalyzer {

// Case where path is a qualifier of another path, e.g. foo::bar::Baz where we are
// trying to resolve foo::bar.
if path.parent_path().is_some() {
if let Some(parent_path) = path.parent_path() {
let parent_hir_path = Path::from_src(&mut ctx, parent_path);
return match resolve_hir_path_qualifier(db, &self.resolver, &hir_path, &types_map) {
None if meta_path.is_some() => path
.first_segment()
@ -876,6 +895,42 @@ impl SourceAnalyzer {
.map(PathResolution::ToolModule)
})
.map(|it| (it, None)),
// Case where the type name conflicts with a module brought into scope by `use`,
// e.g.
// ```
// use std::str;
// fn main() {
// str::from_utf8(); // as module std::str
// str::len(); // as primitive type str
// str::no_exist_item(); // as primitive type str
// }
// ```
Some(it) if matches!(it, PathResolution::Def(ModuleDef::BuiltinType(_))) => {
if let (Some(mod_path), Some(parent_hir_path)) =
(hir_path.mod_path(), parent_hir_path)
{
if let Some(ModuleDefId::ModuleId(id)) = self
.resolver
.resolve_module_path_in_items(db.upcast(), mod_path)
.take_types()
{
let parent_hir_name =
parent_hir_path.segments().get(1).map(|it| it.name);
let module = crate::Module { id };
if module
.scope(db, None)
.into_iter()
.any(|(name, _)| Some(&name) == parent_hir_name)
{
return Some((
PathResolution::Def(ModuleDef::Module(module)),
None,
));
};
}
}
Some((it, None))
}
// FIXME: We do not show substitutions for parts of path, because this is really complex
// due to the interactions with associated items of `impl`s and associated items of associated
// types.
@ -1043,7 +1098,7 @@ impl SourceAnalyzer {
let body = self.body()?;
let infer = self.infer.as_ref()?;

let pat_id = self.pat_id(&pattern.clone().into())?;
let pat_id = self.pat_id(&pattern.clone().into())?.as_pat()?;
let substs = infer.type_of_pat[pat_id].as_adt()?.1;

let (variant, missing_fields, _exhaustive) =

@ -6,7 +6,9 @@ use ide_db::syntax_helpers::suggest_name;
use ide_db::RootDatabase;
use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast};
use itertools::Itertools;
use syntax::ast::edit_in_place::Removable;
use syntax::ast::edit::IndentLevel;
use syntax::ast::edit_in_place::Indent;
use syntax::ast::syntax_factory::SyntaxFactory;
use syntax::ast::{self, make, AstNode, MatchArmList, MatchExpr, Pat};

use crate::{utils, AssistContext, AssistId, AssistKind, Assists};
@ -200,8 +202,8 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
AssistId("add_missing_match_arms", AssistKind::QuickFix),
"Fill match arms",
ctx.sema.original_range(match_expr.syntax()).range,
|edit| {
let new_match_arm_list = match_arm_list.clone_for_update();
|builder| {
let make = SyntaxFactory::new();

// having any hidden variants means that we need a catch-all arm
needs_catch_all_arm |= has_hidden_variants;
@ -211,89 +213,85 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
// filter out hidden patterns because they're handled by the catch-all arm
!hidden
})
.map(|(pat, _)| {
make::match_arm(pat, None, make::ext::expr_todo()).clone_for_update()
});
.map(|(pat, _)| make.match_arm(pat, None, make::ext::expr_todo()));

let catch_all_arm = new_match_arm_list
let mut arms: Vec<_> = match_arm_list
.arms()
.find(|arm| matches!(arm.pat(), Some(ast::Pat::WildcardPat(_))));
if let Some(arm) = catch_all_arm {
let is_empty_expr = arm.expr().is_none_or(|e| match e {
ast::Expr::BlockExpr(b) => {
b.statements().next().is_none() && b.tail_expr().is_none()
.filter(|arm| {
if matches!(arm.pat(), Some(ast::Pat::WildcardPat(_))) {
let is_empty_expr = arm.expr().is_none_or(|e| match e {
ast::Expr::BlockExpr(b) => {
b.statements().next().is_none() && b.tail_expr().is_none()
}
ast::Expr::TupleExpr(t) => t.fields().next().is_none(),
_ => false,
});
if is_empty_expr {
false
} else {
cov_mark::hit!(add_missing_match_arms_empty_expr);
true
}
} else {
true
}
ast::Expr::TupleExpr(t) => t.fields().next().is_none(),
_ => false,
});
if is_empty_expr {
arm.remove();
} else {
cov_mark::hit!(add_missing_match_arms_empty_expr);
}
}
})
.collect();

let mut added_arms = Vec::new();
let mut todo_placeholders = Vec::new();
for arm in missing_arms {
todo_placeholders.push(arm.expr().unwrap());
added_arms.push(arm);
}
let first_new_arm_idx = arms.len();
arms.extend(missing_arms);

if needs_catch_all_arm && !has_catch_all_arm {
cov_mark::hit!(added_wildcard_pattern);
let arm =
make::match_arm(make::wildcard_pat().into(), None, make::ext::expr_todo())
.clone_for_update();
todo_placeholders.push(arm.expr().unwrap());
added_arms.push(arm);
let arm = make.match_arm(make::wildcard_pat().into(), None, make::ext::expr_todo());
arms.push(arm);
}

let first_new_arm = added_arms.first().cloned();
let last_new_arm = added_arms.last().cloned();
let new_match_arm_list = make.match_arm_list(arms);

for arm in added_arms {
new_match_arm_list.add_arm(arm);
}

if let Some(cap) = ctx.config.snippet_cap {
if let Some(it) = first_new_arm
.and_then(|arm| arm.syntax().descendants().find_map(ast::WildcardPat::cast))
{
edit.add_placeholder_snippet(cap, it);
}

for placeholder in todo_placeholders {
edit.add_placeholder_snippet(cap, placeholder);
}

if let Some(arm) = last_new_arm {
edit.add_tabstop_after(cap, arm);
}
}

// FIXME: Hack for mutable syntax trees not having great support for macros
// FIXME: Hack for syntax trees not having great support for macros
// Just replace the element that the original range came from
let old_place = {
// Find the original element
let file = ctx.sema.parse(arm_list_range.file_id);
let old_place = file.syntax().covering_element(arm_list_range.range);

// Make `old_place` mut
match old_place {
syntax::SyntaxElement::Node(it) => {
syntax::SyntaxElement::from(edit.make_syntax_mut(it))
}
syntax::SyntaxElement::Node(it) => it,
syntax::SyntaxElement::Token(it) => {
// If a token is found, it is '{' or '}'
// The parent is `{ ... }`
let parent = it.parent().expect("Token must have a parent.");
syntax::SyntaxElement::from(edit.make_syntax_mut(parent))
it.parent().expect("Token must have a parent.")
}
}
};

syntax::ted::replace(old_place, new_match_arm_list.syntax());
let mut editor = builder.make_editor(&old_place);
new_match_arm_list.indent(IndentLevel::from_node(&old_place));
editor.replace(old_place, new_match_arm_list.syntax());

if let Some(cap) = ctx.config.snippet_cap {
if let Some(it) = new_match_arm_list
.arms()
.nth(first_new_arm_idx)
.and_then(|arm| arm.syntax().descendants().find_map(ast::WildcardPat::cast))
{
editor.add_annotation(it.syntax(), builder.make_placeholder_snippet(cap));
}

for arm in new_match_arm_list.arms().skip(first_new_arm_idx) {
if let Some(expr) = arm.expr() {
editor.add_annotation(expr.syntax(), builder.make_placeholder_snippet(cap));
}
}

if let Some(arm) = new_match_arm_list.arms().skip(first_new_arm_idx).last() {
editor.add_annotation(arm.syntax(), builder.make_tabstop_after(cap));
}
}

editor.add_mappings(make.finish_with_mappings());
builder.add_file_edits(ctx.file_id(), editor);
},
)
}
@ -1377,6 +1375,9 @@ fn main() {
);
}

// FIXME: Preserving comments is quite hard in the current transitional syntax editing model.
// Once we migrate to new trivia model addressed in #6854, remove the ignore attribute.
#[ignore]
#[test]
fn add_missing_match_arms_preserves_comments() {
check_assist(
@ -1405,6 +1406,9 @@ fn foo(a: A) {
);
}

// FIXME: Preserving comments is quite hard in the current transitional syntax editing model.
// Once we migrate to new trivia model addressed in #6854, remove the ignore attribute.
#[ignore]
#[test]
fn add_missing_match_arms_preserves_comments_empty() {
check_assist(
@ -1502,10 +1506,10 @@ enum Test {

fn foo(t: Test) {
m!(match t {
Test::A => ${1:todo!()},
Test::B => ${2:todo!()},
Test::C => ${3:todo!()},$0
});
Test::A => ${1:todo!()},
Test::B => ${2:todo!()},
Test::C => ${3:todo!()},$0
});
}"#,
);
}

@ -3,10 +3,11 @@ use hir::{AssocItem, Enum, HasVisibility, Module, ModuleDef, Name, PathResolutio
use ide_db::{
defs::{Definition, NameRefClass},
search::SearchScope,
source_change::SourceChangeBuilder,
};
use stdx::never;
use syntax::{
ast::{self, make},
ast::{self, make, Use, UseTree, VisibilityKind},
ted, AstNode, Direction, SyntaxNode, SyntaxToken, T,
};

@ -43,6 +44,7 @@ use crate::{
pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let star = ctx.find_token_syntax_at_offset(T![*])?;
let use_tree = star.parent().and_then(ast::UseTree::cast)?;
let use_item = star.parent_ancestors().find_map(ast::Use::cast)?;
let (parent, mod_path) = find_parent_and_path(&star)?;
let target_module = match ctx.sema.resolve_path(&mod_path)? {
PathResolution::Def(ModuleDef::Module(it)) => Expandable::Module(it),
@ -53,8 +55,9 @@ pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let current_scope = ctx.sema.scope(&star.parent()?)?;
let current_module = current_scope.module();

let refs_in_target = find_refs_in_mod(ctx, target_module, current_module)?;
let imported_defs = find_imported_defs(ctx, star)?;
if !is_visible_from(ctx, &target_module, current_module) {
return None;
}

let target = parent.either(|n| n.syntax().clone(), |n| n.syntax().clone());
acc.add(
@ -62,37 +65,149 @@ pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext<'_>) ->
"Expand glob import",
target.text_range(),
|builder| {
let use_tree = builder.make_mut(use_tree);

let names_to_import = find_names_to_import(ctx, refs_in_target, imported_defs);
let expanded = make::use_tree_list(names_to_import.iter().map(|n| {
let path = make::ext::ident_path(
&n.display(ctx.db(), current_module.krate().edition(ctx.db())).to_string(),
);
make::use_tree(path, None, None, false)
}))
.clone_for_update();

match use_tree.star_token() {
Some(star) => {
let needs_braces = use_tree.path().is_some() && names_to_import.len() != 1;
if needs_braces {
ted::replace(star, expanded.syntax())
} else {
let without_braces = expanded
.syntax()
.children_with_tokens()
.filter(|child| !matches!(child.kind(), T!['{'] | T!['}']))
.collect();
ted::replace_with_many(star, without_braces)
}
}
None => never!(),
}
build_expanded_import(
ctx,
builder,
use_tree,
use_item,
target_module,
current_module,
false,
)
},
)
}

// Assist: expand_glob_reexport
//
// Expands non-private glob imports.
//
// ```
// mod foo {
// pub struct Bar;
// pub struct Baz;
// }
//
// pub use foo::*$0;
// ```
// ->
// ```
// mod foo {
// pub struct Bar;
// pub struct Baz;
// }
//
// pub use foo::{Bar, Baz};
// ```
pub(crate) fn expand_glob_reexport(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let star = ctx.find_token_syntax_at_offset(T![*])?;
let use_tree = star.parent().and_then(ast::UseTree::cast)?;
let use_item = star.parent_ancestors().find_map(ast::Use::cast)?;
let (parent, mod_path) = find_parent_and_path(&star)?;
let target_module = match ctx.sema.resolve_path(&mod_path)? {
PathResolution::Def(ModuleDef::Module(it)) => Expandable::Module(it),
PathResolution::Def(ModuleDef::Adt(hir::Adt::Enum(e))) => Expandable::Enum(e),
_ => return None,
};

let current_scope = ctx.sema.scope(&star.parent()?)?;
let current_module = current_scope.module();

if let VisibilityKind::PubSelf = get_export_visibility_kind(&use_item) {
return None;
}
if !is_visible_from(ctx, &target_module, current_module) {
return None;
}

let target = parent.either(|n| n.syntax().clone(), |n| n.syntax().clone());
acc.add(
AssistId("expand_glob_reexport", AssistKind::RefactorRewrite),
"Expand glob reexport",
target.text_range(),
|builder| {
build_expanded_import(
ctx,
builder,
use_tree,
use_item,
target_module,
current_module,
true,
)
},
)
}

fn build_expanded_import(
ctx: &AssistContext<'_>,
builder: &mut SourceChangeBuilder,
use_tree: UseTree,
use_item: Use,
target_module: Expandable,
current_module: Module,
reexport_public_items: bool,
) {
let (must_be_pub, visible_from) = if !reexport_public_items {
(false, current_module)
} else {
match get_export_visibility_kind(&use_item) {
VisibilityKind::Pub => (true, current_module.krate().root_module()),
VisibilityKind::PubCrate => (false, current_module.krate().root_module()),
_ => (false, current_module),
}
};

let refs_in_target = find_refs_in_mod(ctx, target_module, visible_from, must_be_pub);
let imported_defs = find_imported_defs(ctx, use_item);

let filtered_defs =
if reexport_public_items { refs_in_target } else { refs_in_target.used_refs(ctx) };

let use_tree = builder.make_mut(use_tree);

let names_to_import = find_names_to_import(filtered_defs, imported_defs);
let expanded = make::use_tree_list(names_to_import.iter().map(|n| {
let path = make::ext::ident_path(
&n.display(ctx.db(), current_module.krate().edition(ctx.db())).to_string(),
);
make::use_tree(path, None, None, false)
}))
.clone_for_update();

match use_tree.star_token() {
Some(star) => {
let needs_braces = use_tree.path().is_some() && names_to_import.len() != 1;
if needs_braces {
ted::replace(star, expanded.syntax())
} else {
let without_braces = expanded
.syntax()
.children_with_tokens()
.filter(|child| !matches!(child.kind(), T!['{'] | T!['}']))
.collect();
ted::replace_with_many(star, without_braces)
}
}
None => never!(),
}
}

fn get_export_visibility_kind(use_item: &Use) -> VisibilityKind {
use syntax::ast::HasVisibility as _;
match use_item.visibility() {
Some(vis) => match vis.kind() {
VisibilityKind::PubCrate => VisibilityKind::PubCrate,
VisibilityKind::Pub => VisibilityKind::Pub,
VisibilityKind::PubSelf => VisibilityKind::PubSelf,
// We don't handle pub(in ...) and pub(super) yet
VisibilityKind::In(_) => VisibilityKind::PubSelf,
VisibilityKind::PubSuper => VisibilityKind::PubSelf,
},
None => VisibilityKind::PubSelf,
}
}

enum Expandable {
Module(Module),
Enum(Enum),
@ -130,14 +245,17 @@ struct Ref {
// could be alias
visible_name: Name,
def: Definition,
is_pub: bool,
}

impl Ref {
fn from_scope_def(name: Name, scope_def: ScopeDef) -> Option<Self> {
fn from_scope_def(ctx: &AssistContext<'_>, name: Name, scope_def: ScopeDef) -> Option<Self> {
match scope_def {
ScopeDef::ModuleDef(def) => {
Some(Ref { visible_name: name, def: Definition::from(def) })
}
ScopeDef::ModuleDef(def) => Some(Ref {
visible_name: name,
def: Definition::from(def),
is_pub: matches!(def.visibility(ctx.db()), hir::Visibility::Public),
}),
_ => None,
}
}
@ -180,32 +298,32 @@ fn find_refs_in_mod(
ctx: &AssistContext<'_>,
expandable: Expandable,
visible_from: Module,
) -> Option<Refs> {
if !is_expandable_visible_from(ctx, &expandable, visible_from) {
return None;
}

must_be_pub: bool,
) -> Refs {
match expandable {
Expandable::Module(module) => {
let module_scope = module.scope(ctx.db(), Some(visible_from));
let refs =
module_scope.into_iter().filter_map(|(n, d)| Ref::from_scope_def(n, d)).collect();
Some(Refs(refs))
let refs = module_scope
.into_iter()
.filter_map(|(n, d)| Ref::from_scope_def(ctx, n, d))
.filter(|r| !must_be_pub || r.is_pub)
.collect();
Refs(refs)
}
Expandable::Enum(enm) => Some(Refs(
Expandable::Enum(enm) => Refs(
enm.variants(ctx.db())
.into_iter()
.map(|v| Ref { visible_name: v.name(ctx.db()), def: Definition::Variant(v) })
.map(|v| Ref {
visible_name: v.name(ctx.db()),
def: Definition::Variant(v),
is_pub: true,
})
.collect(),
)),
),
}
}

fn is_expandable_visible_from(
ctx: &AssistContext<'_>,
expandable: &Expandable,
from: Module,
) -> bool {
fn is_visible_from(ctx: &AssistContext<'_>, expandable: &Expandable, from: Module) -> bool {
fn is_mod_visible_from(ctx: &AssistContext<'_>, module: Module, from: Module) -> bool {
match module.parent(ctx.db()) {
Some(parent) => {
@ -246,50 +364,34 @@ fn is_expandable_visible_from(
// use foo::*$0;
// use baz::Baz;
// ↑ ---------------
fn find_imported_defs(ctx: &AssistContext<'_>, star: SyntaxToken) -> Option<Vec<Definition>> {
let parent_use_item_syntax = star.parent_ancestors().find_map(|n| {
if ast::Use::can_cast(n.kind()) {
Some(n)
} else {
None
}
})?;

Some(
[Direction::Prev, Direction::Next]
.into_iter()
.flat_map(|dir| {
parent_use_item_syntax
.siblings(dir.to_owned())
.filter(|n| ast::Use::can_cast(n.kind()))
})
.flat_map(|n| n.descendants().filter_map(ast::NameRef::cast))
.filter_map(|r| match NameRefClass::classify(&ctx.sema, &r)? {
NameRefClass::Definition(
def @ (Definition::Macro(_)
| Definition::Module(_)
| Definition::Function(_)
| Definition::Adt(_)
| Definition::Variant(_)
| Definition::Const(_)
| Definition::Static(_)
| Definition::Trait(_)
| Definition::TypeAlias(_)),
_,
) => Some(def),
_ => None,
})
.collect(),
)
fn find_imported_defs(ctx: &AssistContext<'_>, use_item: Use) -> Vec<Definition> {
[Direction::Prev, Direction::Next]
.into_iter()
.flat_map(|dir| {
use_item.syntax().siblings(dir.to_owned()).filter(|n| ast::Use::can_cast(n.kind()))
})
.flat_map(|n| n.descendants().filter_map(ast::NameRef::cast))
.filter_map(|r| match NameRefClass::classify(&ctx.sema, &r)? {
NameRefClass::Definition(
def @ (Definition::Macro(_)
| Definition::Module(_)
| Definition::Function(_)
| Definition::Adt(_)
| Definition::Variant(_)
| Definition::Const(_)
| Definition::Static(_)
| Definition::Trait(_)
| Definition::TypeAlias(_)),
_,
) => Some(def),
_ => None,
})
.collect()
}

fn find_names_to_import(
ctx: &AssistContext<'_>,
refs_in_target: Refs,
imported_defs: Vec<Definition>,
) -> Vec<Name> {
let used_refs = refs_in_target.used_refs(ctx).filter_out_by_defs(imported_defs);
used_refs.0.iter().map(|r| r.visible_name.clone()).collect()
fn find_names_to_import(refs_in_target: Refs, imported_defs: Vec<Definition>) -> Vec<Name> {
let final_refs = refs_in_target.filter_out_by_defs(imported_defs);
final_refs.0.iter().map(|r| r.visible_name.clone()).collect()
}

#[cfg(test)]
@ -1036,4 +1138,83 @@ mod abc {
}"#,
)
}

#[test]
fn expanding_glob_reexport() {
check_assist(
expand_glob_reexport,
r"
mod foo {
pub struct Bar;
pub struct Baz;
struct Qux;

pub fn f() {}

pub(crate) fn g() {}
pub(self) fn h() {}
}

pub use foo::*$0;
",
r"
mod foo {
pub struct Bar;
pub struct Baz;
struct Qux;

pub fn f() {}

pub(crate) fn g() {}
pub(self) fn h() {}
}

pub use foo::{Bar, Baz, f};
",
)
}

#[test]
fn expanding_recursive_glob_reexport() {
check_assist(
expand_glob_reexport,
r"
mod foo {
pub use bar::*;
mod bar {
pub struct Bar;
pub struct Baz;
}
}

pub use foo::*$0;
",
r"
mod foo {
pub use bar::*;
mod bar {
pub struct Bar;
pub struct Baz;
}
}

pub use foo::{Bar, Baz};
",
)
}

#[test]
fn expanding_reexport_is_not_applicable_for_private_import() {
check_assist_not_applicable(
expand_glob_reexport,
r"
mod foo {
pub struct Bar;
pub struct Baz;
}

use foo::*$0;
",
);
}
}

@ -270,6 +270,7 @@ mod handlers {
destructure_tuple_binding::destructure_tuple_binding,
destructure_struct_binding::destructure_struct_binding,
expand_glob_import::expand_glob_import,
expand_glob_import::expand_glob_reexport,
explicit_enum_discriminant::explicit_enum_discriminant,
extract_expressions_from_format_string::extract_expressions_from_format_string,
extract_struct_from_enum_variant::extract_struct_from_enum_variant,

@ -909,6 +909,29 @@ fn qux(bar: Bar, baz: Baz) {}
)
}

#[test]
fn doctest_expand_glob_reexport() {
check_doc_test(
"expand_glob_reexport",
r#####"
mod foo {
pub struct Bar;
pub struct Baz;
}

pub use foo::*$0;
"#####,
r#####"
mod foo {
pub struct Bar;
pub struct Baz;
}

pub use foo::{Bar, Baz};
"#####,
)
}

#[test]
fn doctest_explicit_enum_discriminant() {
check_doc_test(

@ -2,17 +2,18 @@

mod format_like;

use hir::ItemInNs;
use ide_db::text_edit::TextEdit;
use base_db::SourceDatabase;
use hir::{ItemInNs, Semantics};
use ide_db::{
documentation::{Documentation, HasDocs},
imports::insert_use::ImportScope,
text_edit::TextEdit,
ty_filter::TryEnum,
SnippetCap,
RootDatabase, SnippetCap,
};
use stdx::never;
use syntax::{
ast::{self, make, AstNode, AstToken},
ast::{self, AstNode, AstToken},
SyntaxKind::{BLOCK_EXPR, EXPR_STMT, FOR_EXPR, IF_EXPR, LOOP_EXPR, STMT_LIST, WHILE_EXPR},
TextRange, TextSize,
};
@ -48,7 +49,8 @@ pub(crate) fn complete_postfix(
};
let expr_ctx = &dot_access.ctx;

let receiver_text = get_receiver_text(dot_receiver, receiver_is_ambiguous_float_literal);
let receiver_text =
get_receiver_text(&ctx.sema, dot_receiver, receiver_is_ambiguous_float_literal);

let cap = match ctx.config.snippet_cap {
Some(it) => it,
@ -172,13 +174,15 @@ pub(crate) fn complete_postfix(
// The rest of the postfix completions create an expression that moves an argument,
// so it's better to consider references now to avoid breaking the compilation

let (dot_receiver, node_to_replace_with) = include_references(dot_receiver);
let receiver_text =
get_receiver_text(&node_to_replace_with, receiver_is_ambiguous_float_literal);
let postfix_snippet = match build_postfix_snippet_builder(ctx, cap, &dot_receiver) {
Some(it) => it,
None => return,
};
let (dot_receiver_including_refs, prefix) = include_references(dot_receiver);
let mut receiver_text =
get_receiver_text(&ctx.sema, dot_receiver, receiver_is_ambiguous_float_literal);
receiver_text.insert_str(0, &prefix);
let postfix_snippet =
match build_postfix_snippet_builder(ctx, cap, &dot_receiver_including_refs) {
Some(it) => it,
None => return,
};

if !ctx.config.snippets.is_empty() {
add_custom_postfix_completions(acc, ctx, &postfix_snippet, &receiver_text);
@ -222,7 +226,7 @@ pub(crate) fn complete_postfix(
postfix_snippet("call", "function(expr)", &format!("${{1}}({receiver_text})"))
.add_to(acc, ctx.db);

if let Some(parent) = dot_receiver.syntax().parent().and_then(|p| p.parent()) {
if let Some(parent) = dot_receiver_including_refs.syntax().parent().and_then(|p| p.parent()) {
if matches!(parent.kind(), STMT_LIST | EXPR_STMT) {
postfix_snippet("let", "let", &format!("let $0 = {receiver_text};"))
.add_to(acc, ctx.db);
@ -231,9 +235,9 @@ pub(crate) fn complete_postfix(
}
}

if let ast::Expr::Literal(literal) = dot_receiver.clone() {
if let ast::Expr::Literal(literal) = dot_receiver_including_refs.clone() {
if let Some(literal_text) = ast::String::cast(literal.token()) {
add_format_like_completions(acc, ctx, &dot_receiver, cap, &literal_text);
add_format_like_completions(acc, ctx, &dot_receiver_including_refs, cap, &literal_text);
}
}

@ -260,14 +264,20 @@ pub(crate) fn complete_postfix(
}
}

fn get_receiver_text(receiver: &ast::Expr, receiver_is_ambiguous_float_literal: bool) -> String {
let mut text = if receiver_is_ambiguous_float_literal {
let text = receiver.syntax().text();
let without_dot = ..text.len() - TextSize::of('.');
text.slice(without_dot).to_string()
} else {
receiver.to_string()
fn get_receiver_text(
sema: &Semantics<'_, RootDatabase>,
receiver: &ast::Expr,
receiver_is_ambiguous_float_literal: bool,
) -> String {
// Do not just call `receiver.to_string()`, as that will mess up whitespaces inside macros.
let Some(mut range) = sema.original_range_opt(receiver.syntax()) else {
return receiver.to_string();
};
if receiver_is_ambiguous_float_literal {
range.range = TextRange::at(range.range.start(), range.range.len() - TextSize::of('.'))
}
let file_text = sema.db.file_text(range.file_id.file_id());
let mut text = file_text[range.range].to_owned();

// The receiver texts should be interpreted as-is, as they are expected to be
// normal Rust expressions.
@ -284,7 +294,7 @@ fn escape_snippet_bits(text: &mut String) {
stdx::replace(text, '$', "\\$");
}

fn include_references(initial_element: &ast::Expr) -> (ast::Expr, ast::Expr) {
fn include_references(initial_element: &ast::Expr) -> (ast::Expr, String) {
let mut resulting_element = initial_element.clone();

while let Some(field_expr) = resulting_element.syntax().parent().and_then(ast::FieldExpr::cast)
@ -292,7 +302,7 @@ fn include_references(initial_element: &ast::Expr) -> (ast::Expr, ast::Expr) {
resulting_element = ast::Expr::from(field_expr);
}

let mut new_element_opt = initial_element.clone();
let mut prefix = String::new();

while let Some(parent_deref_element) =
resulting_element.syntax().parent().and_then(ast::PrefixExpr::cast)
@ -303,7 +313,7 @@ fn include_references(initial_element: &ast::Expr) -> (ast::Expr, ast::Expr) {

resulting_element = ast::Expr::from(parent_deref_element);

new_element_opt = make::expr_prefix(syntax::T![*], new_element_opt).into();
prefix.insert(0, '*');
}

if let Some(first_ref_expr) = resulting_element.syntax().parent().and_then(ast::RefExpr::cast) {
@ -317,7 +327,7 @@ fn include_references(initial_element: &ast::Expr) -> (ast::Expr, ast::Expr) {
let exclusive = parent_ref_element.mut_token().is_some();
resulting_element = ast::Expr::from(parent_ref_element);

new_element_opt = make::expr_ref(new_element_opt, exclusive);
prefix.insert_str(0, if exclusive { "&mut " } else { "&" });
}
} else {
// If we do not find any ref expressions, restore
@ -325,7 +335,7 @@ fn include_references(initial_element: &ast::Expr) -> (ast::Expr, ast::Expr) {
resulting_element = initial_element.clone();
}

(resulting_element, new_element_opt)
(resulting_element, prefix)
}

fn build_postfix_snippet_builder<'ctx>(
@ -901,4 +911,31 @@ fn main() {
"#,
);
}

#[test]
fn inside_macro() {
check_edit(
"box",
r#"
macro_rules! assert {
( $it:expr $(,)? ) => { $it };
}

fn foo() {
let a = true;
assert!(if a == false { true } else { false }.$0);
}
"#,
r#"
macro_rules! assert {
( $it:expr $(,)? ) => { $it };
}

fn foo() {
let a = true;
assert!(Box::new(if a == false { true } else { false }));
}
"#,
);
}
}

@ -108,7 +108,7 @@ impl Definition {
ItemContainer::Trait(it) => Some(it.into()),
ItemContainer::Impl(it) => Some(it.into()),
ItemContainer::Module(it) => Some(it.into()),
ItemContainer::ExternBlock() | ItemContainer::Crate(_) => None,
ItemContainer::ExternBlock(_) | ItemContainer::Crate(_) => None,
}
}
match self {

@ -40,7 +40,7 @@ pub(crate) fn mismatched_arg_count(
Diagnostic::new(
DiagnosticCode::RustcHardError("E0107"),
message,
invalid_args_range(ctx, d.call_expr.map(AstPtr::wrap_left), d.expected, d.found),
invalid_args_range(ctx, d.call_expr, d.expected, d.found),
)
}

@ -0,0 +1,59 @@
use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};

// Diagnostic: parenthesized-generic-args-without-fn-trait
//
// This diagnostic is shown when `Fn`-trait-style generic arguments (`Trait(A, B) -> C`)
// are used on a non-`Fn` trait or type.
pub(crate) fn parenthesized_generic_args_without_fn_trait(
ctx: &DiagnosticsContext<'_>,
d: &hir::ParenthesizedGenericArgsWithoutFnTrait,
) -> Diagnostic {
Diagnostic::new_with_syntax_node_ptr(
ctx,
DiagnosticCode::RustcHardError("E0214"),
"parenthesized type parameters may only be used with a `Fn` trait",
d.args.map(Into::into),
)
}

#[cfg(test)]
mod tests {
use crate::tests::check_diagnostics;

#[test]
fn fn_traits_work() {
check_diagnostics(
r#"
//- minicore: async_fn, fn
fn foo<
A: Fn(),
B: FnMut() -> i32,
C: FnOnce(&str, bool),
D: AsyncFn::(u32) -> u32,
E: AsyncFnMut(),
F: AsyncFnOnce() -> bool,
>() {}
"#,
);
}

#[test]
fn non_fn_trait() {
check_diagnostics(
r#"
struct Struct<T>(T);
enum Enum<T> { EnumVariant(T) }
type TypeAlias<T> = bool;

type Foo = TypeAlias() -> bool;
// ^^ error: parenthesized type parameters may only be used with a `Fn` trait

fn foo(_a: Struct(i32)) {
// ^^^^^ error: parenthesized type parameters may only be used with a `Fn` trait
let _ = <Enum::(u32)>::EnumVariant(0);
// ^^^^^^^ error: parenthesized type parameters may only be used with a `Fn` trait
}
"#,
);
}
}

@ -1235,4 +1235,25 @@ fn f() {
"#,
);
}

#[test]
fn complex_enum_variant_non_ref_pat() {
check_diagnostics(
r#"
enum Enum { Variant }

trait Trait {
type Assoc;
}
impl Trait for () {
type Assoc = Enum;
}

fn foo(v: &Enum) {
let <Enum>::Variant = v;
let <() as Trait>::Assoc::Variant = v;
}
"#,
);
}
}

@ -1,5 +1,6 @@
use std::iter;

use either::Either;
use hir::{db::ExpandDatabase, Adt, FileRange, HasSource, HirDisplay, InFile, Struct, Union};
use ide_db::text_edit::TextEdit;
use ide_db::{
@ -41,7 +42,7 @@ pub(crate) fn unresolved_field(
),
adjusted_display_range(ctx, d.expr, &|expr| {
Some(
match expr {
match expr.left()? {
ast::Expr::MethodCallExpr(it) => it.name_ref(),
ast::Expr::FieldExpr(it) => it.name_ref(),
_ => None,
@ -72,7 +73,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField) -> Option<Vec<A
fn field_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField) -> Option<Assist> {
// Get the FileRange of the invalid field access
let root = ctx.sema.db.parse_or_expand(d.expr.file_id);
let expr = d.expr.value.to_node(&root);
let expr = d.expr.value.to_node(&root).left()?;

let error_range = ctx.sema.original_range_opt(expr.syntax())?;
let field_name = d.name.as_str();
@ -263,7 +264,7 @@ fn record_field_layout(
// FIXME: We should fill out the call here, move the cursor and trigger signature help
fn method_fix(
ctx: &DiagnosticsContext<'_>,
expr_ptr: &InFile<AstPtr<ast::Expr>>,
expr_ptr: &InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
) -> Option<Assist> {
let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id);
let expr = expr_ptr.value.to_node(&root);

@ -1,4 +1,4 @@
use hir::{db::ExpandDatabase, AssocItem, FileRange, HirDisplay, InFile};
use hir::{db::ExpandDatabase, FileRange, HirDisplay, InFile};
use ide_db::text_edit::TextEdit;
use ide_db::{
assists::{Assist, AssistId, AssistKind},
@ -35,7 +35,7 @@ pub(crate) fn unresolved_method(
),
adjusted_display_range(ctx, d.expr, &|expr| {
Some(
match expr {
match expr.left()? {
ast::Expr::MethodCallExpr(it) => it.name_ref(),
ast::Expr::FieldExpr(it) => it.name_ref(),
_ => None,
@ -85,7 +85,7 @@ fn field_fix(
let expr_ptr = &d.expr;
let root = ctx.sema.db.parse_or_expand(expr_ptr.file_id);
let expr = expr_ptr.value.to_node(&root);
let (file_id, range) = match expr {
let (file_id, range) = match expr.left()? {
ast::Expr::MethodCallExpr(mcall) => {
let FileRange { range, file_id } =
ctx.sema.original_range_opt(mcall.receiver()?.syntax())?;
@ -112,12 +112,12 @@ fn field_fix(
}

fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -> Option<Assist> {
if let Some(assoc_item_id) = d.assoc_func_with_same_name {
if let Some(f) = d.assoc_func_with_same_name {
let db = ctx.sema.db;

let expr_ptr = &d.expr;
let root = db.parse_or_expand(expr_ptr.file_id);
let expr: ast::Expr = expr_ptr.value.to_node(&root);
let expr: ast::Expr = expr_ptr.value.to_node(&root).left()?;

let call = ast::MethodCallExpr::cast(expr.syntax().clone())?;
let range = InFile::new(expr_ptr.file_id, call.syntax().text_range())
@ -127,30 +127,25 @@ fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -
let receiver = call.receiver()?;
let receiver_type = &ctx.sema.type_of_expr(&receiver)?.original;

let need_to_take_receiver_as_first_arg = match hir::AssocItem::from(assoc_item_id) {
AssocItem::Function(f) => {
let assoc_fn_params = f.assoc_fn_params(db);
if assoc_fn_params.is_empty() {
false
} else {
assoc_fn_params
.first()
.map(|first_arg| {
// For generic type, say `Box`, take `Box::into_raw(b: Self)` as example,
// type of `b` is `Self`, which is `Box<T, A>`, containing unspecified generics.
// However, type of `receiver` is specified, it could be `Box<i32, Global>` or something like that,
// so `first_arg.ty() == receiver_type` evaluate to `false` here.
// Here add `first_arg.ty().as_adt() == receiver_type.as_adt()` as guard,
// apply `.as_adt()` over `Box<T, A>` or `Box<i32, Global>` gets `Box`, so we get `true` here.
let assoc_fn_params = f.assoc_fn_params(db);
let need_to_take_receiver_as_first_arg = if assoc_fn_params.is_empty() {
false
} else {
assoc_fn_params
.first()
.map(|first_arg| {
// For generic type, say `Box`, take `Box::into_raw(b: Self)` as example,
// type of `b` is `Self`, which is `Box<T, A>`, containing unspecified generics.
// However, type of `receiver` is specified, it could be `Box<i32, Global>` or something like that,
// so `first_arg.ty() == receiver_type` evaluate to `false` here.
// Here add `first_arg.ty().as_adt() == receiver_type.as_adt()` as guard,
// apply `.as_adt()` over `Box<T, A>` or `Box<i32, Global>` gets `Box`, so we get `true` here.

// FIXME: it fails when type of `b` is `Box` with other generic param different from `receiver`
first_arg.ty() == receiver_type
|| first_arg.ty().as_adt() == receiver_type.as_adt()
})
.unwrap_or(false)
}
}
_ => false,
// FIXME: it fails when type of `b` is `Box` with other generic param different from `receiver`
first_arg.ty() == receiver_type
|| first_arg.ty().as_adt() == receiver_type.as_adt()
})
.unwrap_or(false)
};

let mut receiver_type_adt_name =

@ -260,6 +260,19 @@ fn main() {
let arr = [1, 2, 3, 4, 5];
let [_x, _y @ ..] = arr;
}
"#,
);
}

// regression test as we used to panic in this scenario
#[test]
fn unknown_struct_pattern_param_type() {
check_diagnostics(
r#"
struct S { field : u32 }
fn f(S { field }: error) {
// ^^^^^ 💡 warn: unused variable
}
"#,
);
}

@ -43,6 +43,7 @@ mod handlers {
pub(crate) mod mutability_errors;
pub(crate) mod no_such_field;
pub(crate) mod non_exhaustive_let;
pub(crate) mod parenthesized_generic_args_without_fn_trait;
pub(crate) mod private_assoc_item;
pub(crate) mod private_field;
pub(crate) mod remove_trailing_return;
@ -466,7 +467,12 @@ pub fn semantic_diagnostics(
Some(it) => it,
None => continue,
},
AnyDiagnostic::GenericArgsProhibited(d) => handlers::generic_args_prohibited::generic_args_prohibited(&ctx, &d)
AnyDiagnostic::GenericArgsProhibited(d) => {
handlers::generic_args_prohibited::generic_args_prohibited(&ctx, &d)
}
AnyDiagnostic::ParenthesizedGenericArgsWithoutFnTrait(d) => {
handlers::parenthesized_generic_args_without_fn_trait::parenthesized_generic_args_without_fn_trait(&ctx, &d)
}
};
res.push(d)
}

@ -1,6 +1,6 @@
|
||||
use hir::{HasSource, InFile, InRealFile, Semantics};
|
||||
use ide_db::{
|
||||
defs::Definition, helpers::visit_file_defs, FileId, FilePosition, FileRange, FxHashSet,
|
||||
defs::Definition, helpers::visit_file_defs, FileId, FilePosition, FileRange, FxIndexSet,
|
||||
RootDatabase,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
@ -55,7 +55,7 @@ pub(crate) fn annotations(
|
||||
config: &AnnotationConfig,
|
||||
file_id: FileId,
|
||||
) -> Vec<Annotation> {
|
||||
let mut annotations = FxHashSet::default();
|
||||
let mut annotations = FxIndexSet::default();
|
||||
|
||||
if config.annotate_runnables {
|
||||
for runnable in runnables(db, file_id) {
|
||||
@ -170,7 +170,12 @@ pub(crate) fn annotations(
|
||||
}));
|
||||
}
|
||||
|
||||
annotations.into_iter().sorted_by_key(|a| (a.range.start(), a.range.end())).collect()
|
||||
annotations
|
||||
.into_iter()
|
||||
.sorted_by_key(|a| {
|
||||
(a.range.start(), a.range.end(), matches!(a.kind, AnnotationKind::Runnable(..)))
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
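The new sort key appends a `matches!` bool so that, for identical ranges, runnable annotations order after the others: tuple keys compare lexicographically and `false < true`. A minimal sketch with plain tuples standing in for `Annotation`:

fn main() {
    let mut anns = vec![
        (10u32, 14u32, true, "runnable"),
        (10, 14, false, "has-references"),
        (2, 5, false, "impl"),
    ];
    // The trailing bool breaks ties between equal ranges deterministically.
    anns.sort_by_key(|&(start, end, runnable, _)| (start, end, runnable));
    let order: Vec<_> = anns.iter().map(|a| a.3).collect();
    assert_eq!(order, ["impl", "has-references", "runnable"]);
}
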
pub(crate) fn resolve_annotation(db: &RootDatabase, mut annotation: Annotation) -> Annotation {
@ -535,6 +540,20 @@ fn main() {
),
},
},
Annotation {
range: 69..73,
kind: HasReferences {
pos: FilePositionWrapper {
file_id: FileId(
0,
),
offset: 69,
},
data: Some(
[],
),
},
},
Annotation {
range: 69..73,
kind: Runnable(
@ -559,20 +578,6 @@ fn main() {
},
),
},
Annotation {
range: 69..73,
kind: HasReferences {
pos: FilePositionWrapper {
file_id: FileId(
0,
),
offset: 69,
},
data: Some(
[],
),
},
},
]
"#]],
);
@ -717,6 +722,20 @@ fn main() {
),
},
},
Annotation {
range: 61..65,
kind: HasReferences {
pos: FilePositionWrapper {
file_id: FileId(
0,
),
offset: 61,
},
data: Some(
[],
),
},
},
Annotation {
range: 61..65,
kind: Runnable(
@ -741,20 +760,6 @@ fn main() {
},
),
},
Annotation {
range: 61..65,
kind: HasReferences {
pos: FilePositionWrapper {
file_id: FileId(
0,
),
offset: 61,
},
data: Some(
[],
),
},
},
]
"#]],
);

@ -3290,4 +3290,38 @@ fn main() {
"#,
);
}

#[test]
fn shadow_builtin_type_by_module() {
check(
r#"
mod Foo{
pub mod str {
// ^^^
pub fn foo() {}
}
}

fn main() {
use Foo::str;
let s = st$0r::foo();
}
"#,
);
}

#[test]
fn not_goto_module_because_str_is_builtin_type() {
check(
r#"
mod str {
pub fn foo() {}
}

fn main() {
let s = st$0r::f();
}
"#,
);
}
}

@ -7,7 +7,7 @@ use crate::{InlayHint, InlayHintsConfig};

pub(super) fn extern_block_hints(
acc: &mut Vec<InlayHint>,
FamousDefs(_sema, _): &FamousDefs<'_, '_>,
FamousDefs(sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
_file_id: EditionedFileId,
extern_block: ast::ExternBlock,
@ -16,6 +16,7 @@ pub(super) fn extern_block_hints(
return None;
}
let abi = extern_block.abi()?;
sema.to_def(&extern_block)?;
acc.push(InlayHint {
range: abi.syntax().text_range(),
position: crate::InlayHintPosition::Before,
@ -33,7 +34,7 @@ pub(super) fn extern_block_hints(

pub(super) fn fn_hints(
acc: &mut Vec<InlayHint>,
FamousDefs(_sema, _): &FamousDefs<'_, '_>,
FamousDefs(sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
_file_id: EditionedFileId,
fn_: &ast::Fn,
@ -43,14 +44,16 @@ pub(super) fn fn_hints(
if !implicit_unsafe {
return None;
}
let fn_ = fn_.fn_token()?;
acc.push(item_hint(config, extern_block, fn_));
let fn_token = fn_.fn_token()?;
if sema.to_def(fn_).is_some_and(|def| def.extern_block(sema.db).is_some()) {
acc.push(item_hint(config, extern_block, fn_token));
}
Some(())
}

pub(super) fn static_hints(
acc: &mut Vec<InlayHint>,
FamousDefs(_sema, _): &FamousDefs<'_, '_>,
FamousDefs(sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
_file_id: EditionedFileId,
static_: &ast::Static,
@ -60,8 +63,10 @@ pub(super) fn static_hints(
if !implicit_unsafe {
return None;
}
let static_ = static_.static_token()?;
acc.push(item_hint(config, extern_block, static_));
let static_token = static_.static_token()?;
if sema.to_def(static_).is_some_and(|def| def.extern_block(sema.db).is_some()) {
acc.push(item_hint(config, extern_block, static_token));
}
Some(())
}

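These hints now fire only when the item resolves and the resolved definition sits in an extern block. A small sketch of that `Option::is_some_and` gating, with hypothetical `lookup`/`Item` stand-ins for `sema.to_def` and the `hir` types:

struct Item { in_extern_block: bool }

fn lookup(name: &str) -> Option<Item> {
    // Stand-in for semantic resolution; only "known" resolves here.
    (name == "known").then_some(Item { in_extern_block: true })
}

fn main() {
    let mut hints: Vec<&str> = Vec::new();
    // Push a hint only when resolution succeeds *and* the property holds.
    if lookup("known").is_some_and(|item| item.in_extern_block) {
        hints.push("unsafe");
    }
    assert_eq!(hints, ["unsafe"]);
}
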
@ -54,7 +54,8 @@ pub(super) fn hints(
};
let range = match terminator.span {
MirSpan::ExprId(e) => match source_map.expr_syntax(e) {
Ok(s) => {
// don't show inlay hint for macro
Ok(s) if !s.file_id.is_macro() => {
let root = &s.file_syntax(sema.db);
let expr = s.value.to_node(root);
let expr = expr.syntax();
@ -69,11 +70,11 @@ pub(super) fn hints(
}
}
}
Err(_) => continue,
_ => continue,
},
MirSpan::PatId(p) => match source_map.pat_syntax(p) {
Ok(s) => s.value.text_range(),
Err(_) => continue,
Ok(s) if !s.file_id.is_macro() => s.value.text_range(),
_ => continue,
},
MirSpan::BindingId(b) => {
match source_map
@ -81,13 +82,13 @@ pub(super) fn hints(
.iter()
.find_map(|p| source_map.pat_syntax(*p).ok())
{
Some(s) => s.value.text_range(),
None => continue,
Some(s) if !s.file_id.is_macro() => s.value.text_range(),
_ => continue,
}
}
MirSpan::SelfParam => match source_map.self_param_syntax() {
Some(s) => s.value.text_range(),
None => continue,
Some(s) if !s.file_id.is_macro() => s.value.text_range(),
_ => continue,
},
MirSpan::Unknown => continue,
};
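Each arm gains an `if !s.file_id.is_macro()` guard, and the former `Err(_)`/`None` arms widen to `_` so guarded-out `Ok`/`Some` values also fall through. A sketch of that match-guard shape under simplified types:

fn classify(res: Result<(u32, bool), ()>) -> Option<u32> {
    match res {
        // Accept only spans that did not originate in a macro expansion.
        Ok((range, is_macro)) if !is_macro => Some(range),
        // `Ok` values from macros and all `Err`s are skipped alike.
        _ => None,
    }
}

fn main() {
    assert_eq!(classify(Ok((7, false))), Some(7));
    assert_eq!(classify(Ok((7, true))), None);
    assert_eq!(classify(Err(())), None);
}
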
@ -228,6 +229,27 @@ mod tests {
//^ drop(y)
}
//^ drop(x)
"#,
);
}

#[test]
fn ignore_inlay_hint_for_macro_call() {
check_with_config(
ONLY_DROP_CONFIG,
r#"
struct X;

macro_rules! my_macro {
() => {{
let bbb = X;
bbb
}};
}

fn test() -> X {
my_macro!()
}
"#,
);
}

@ -4,8 +4,8 @@ use arrayvec::ArrayVec;
use ast::HasName;
use cfg::{CfgAtom, CfgExpr};
use hir::{
db::HirDatabase, sym, AsAssocItem, AttrsWithOwner, HasAttrs, HasCrate, HasSource, HirFileIdExt,
ModPath, Name, PathKind, Semantics, Symbol,
db::HirDatabase, sym, symbols::FxIndexSet, AsAssocItem, AttrsWithOwner, HasAttrs, HasCrate,
HasSource, HirFileIdExt, ModPath, Name, PathKind, Semantics, Symbol,
};
use ide_assists::utils::{has_test_related_attribute, test_related_attribute_syn};
use ide_db::{
@ -13,7 +13,7 @@ use ide_db::{
documentation::docs_from_attrs,
helpers::visit_file_defs,
search::{FileReferenceNode, SearchScope},
FilePosition, FxHashMap, FxHashSet, RootDatabase, SymbolKind,
FilePosition, FxHashMap, FxIndexMap, RootDatabase, SymbolKind,
};
use itertools::Itertools;
use smallvec::SmallVec;
@ -61,8 +61,8 @@ pub enum RunnableKind {

#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
enum RunnableDiscKind {
Test,
TestMod,
Test,
DocTest,
Bench,
Bin,
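Moving `TestMod` above `Test` is a behavioral change, not a cosmetic one: `#[derive(PartialOrd, Ord)]` on an enum compares variants by declaration order. A sketch:

#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
enum DiscKind {
    // Declaration order *is* the ordering for a derived `Ord`.
    TestMod,
    Test,
    DocTest,
}

fn main() {
    // A test module now sorts before the tests it contains.
    assert!(DiscKind::TestMod < DiscKind::Test);
    assert!(DiscKind::Test < DiscKind::DocTest);
}
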
@ -130,7 +130,7 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
let mut res = Vec::new();
// Record all runnables that come from macro expansions here instead.
// In case an expansion creates multiple runnables we want to name them to avoid emitting a bunch of equally named runnables.
let mut in_macro_expansion = FxHashMap::<hir::HirFileId, Vec<Runnable>>::default();
let mut in_macro_expansion = FxIndexMap::<hir::HirFileId, Vec<Runnable>>::default();
let mut add_opt = |runnable: Option<Runnable>, def| {
if let Some(runnable) = runnable.filter(|runnable| runnable.nav.file_id == file_id) {
if let Some(def) = def {
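A `HashMap` iterates in an unspecified order, so collecting macro-expansion runnables into one made the emitted order nondeterministic; an insertion-ordered map replays insertion order instead. A sketch assuming the `indexmap` crate (which backs `FxIndexMap`):

use indexmap::IndexMap;

fn main() {
    let mut by_file: IndexMap<&str, Vec<&str>> = IndexMap::new();
    by_file.entry("expansion_a.rs").or_insert_with(Vec::new).push("test_one");
    by_file.entry("expansion_b.rs").or_insert_with(Vec::new).push("test_two");
    // Iteration order matches insertion order, run after run.
    let files: Vec<_> = by_file.keys().copied().collect();
    assert_eq!(files, ["expansion_a.rs", "expansion_b.rs"]);
}
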
@ -182,20 +182,7 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
r
})
}));
res.sort_by(|Runnable { nav, kind, .. }, Runnable { nav: nav_b, kind: kind_b, .. }| {
// full_range.start < focus_range.start < name should give us a decent unique ordering
nav.full_range
.start()
.cmp(&nav_b.full_range.start())
.then_with(|| {
let t_0 = || TextSize::from(0);
nav.focus_range
.map_or_else(t_0, |it| it.start())
.cmp(&nav_b.focus_range.map_or_else(t_0, |it| it.start()))
})
.then_with(|| kind.disc().cmp(&kind_b.disc()))
.then_with(|| nav.name.cmp(&nav_b.name))
});
res.sort_by(cmp_runnables);
res
}

@ -215,12 +202,30 @@ pub(crate) fn related_tests(
search_scope: Option<SearchScope>,
) -> Vec<Runnable> {
let sema = Semantics::new(db);
let mut res: FxHashSet<Runnable> = FxHashSet::default();
let mut res: FxIndexSet<Runnable> = FxIndexSet::default();
let syntax = sema.parse_guess_edition(position.file_id).syntax().clone();

find_related_tests(&sema, &syntax, position, search_scope, &mut res);

res.into_iter().collect()
res.into_iter().sorted_by(cmp_runnables).collect()
}

fn cmp_runnables(
Runnable { nav, kind, .. }: &Runnable,
Runnable { nav: nav_b, kind: kind_b, .. }: &Runnable,
) -> std::cmp::Ordering {
// full_range.start < focus_range.start < name should give us a decent unique ordering
nav.full_range
.start()
.cmp(&nav_b.full_range.start())
.then_with(|| {
let t_0 = || TextSize::from(0);
nav.focus_range
.map_or_else(t_0, |it| it.start())
.cmp(&nav_b.focus_range.map_or_else(t_0, |it| it.start()))
})
.then_with(|| kind.disc().cmp(&kind_b.disc()))
.then_with(|| nav.name.cmp(&nav_b.name))
}

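Hoisting the closure into a named `cmp_runnables` lets `runnables` and `related_tests` share one ordering. A sketch of such a comparator built from `Ordering::then_with` tie-breakers, with a pared-down `Runnable`:

use std::cmp::Ordering;

struct Runnable { start: u32, name: &'static str }

fn cmp_runnables(a: &Runnable, b: &Runnable) -> Ordering {
    // Primary key: position; tie-breaker: name.
    a.start.cmp(&b.start).then_with(|| a.name.cmp(b.name))
}

fn main() {
    let mut v = vec![
        Runnable { start: 5, name: "b" },
        Runnable { start: 5, name: "a" },
        Runnable { start: 1, name: "z" },
    ];
    // The same `fn` item can be passed to `sort_by` at every call site.
    v.sort_by(cmp_runnables);
    let names: Vec<_> = v.iter().map(|r| r.name).collect();
    assert_eq!(names, ["z", "a", "b"]);
}
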
fn find_related_tests(
@ -228,7 +233,7 @@ fn find_related_tests(
syntax: &SyntaxNode,
position: FilePosition,
search_scope: Option<SearchScope>,
tests: &mut FxHashSet<Runnable>,
tests: &mut FxIndexSet<Runnable>,
) {
// FIXME: why is this using references::find_defs, this should use ide_db::search
let defs = match references::find_defs(sema, syntax, position.offset) {
@ -268,7 +273,7 @@ fn find_related_tests_in_module(
syntax: &SyntaxNode,
fn_def: &ast::Fn,
parent_module: &hir::Module,
tests: &mut FxHashSet<Runnable>,
tests: &mut FxIndexSet<Runnable>,
) {
let fn_name = match fn_def.name() {
Some(it) => it,
@ -1228,8 +1233,8 @@ gen_main!();
"(TestMod, NavigationTarget { file_id: FileId(0), full_range: 0..315, name: \"\", kind: Module })",
"(TestMod, NavigationTarget { file_id: FileId(0), full_range: 267..292, focus_range: 271..276, name: \"tests\", kind: Module, description: \"mod tests\" })",
"(Test, NavigationTarget { file_id: FileId(0), full_range: 283..290, name: \"foo_test\", kind: Function })",
"(Test, NavigationTarget { file_id: FileId(0), full_range: 293..301, name: \"foo_test2\", kind: Function }, true)",
"(TestMod, NavigationTarget { file_id: FileId(0), full_range: 293..301, name: \"tests2\", kind: Module, description: \"mod tests2\" }, true)",
"(Test, NavigationTarget { file_id: FileId(0), full_range: 293..301, name: \"foo_test2\", kind: Function }, true)",
"(Bin, NavigationTarget { file_id: FileId(0), full_range: 302..314, name: \"main\", kind: Function })",
]
"#]],
@ -1258,10 +1263,10 @@ foo!();
"#,
expect![[r#"
[
"(TestMod, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo_tests\", kind: Module, description: \"mod foo_tests\" }, true)",
"(Test, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo0\", kind: Function }, true)",
"(Test, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo1\", kind: Function }, true)",
"(Test, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo2\", kind: Function }, true)",
"(TestMod, NavigationTarget { file_id: FileId(0), full_range: 210..217, name: \"foo_tests\", kind: Module, description: \"mod foo_tests\" }, true)",
]
"#]],
);
@ -1501,18 +1506,18 @@ mod tests {
file_id: FileId(
0,
),
full_range: 121..185,
focus_range: 136..145,
name: "foo2_test",
full_range: 52..115,
focus_range: 67..75,
name: "foo_test",
kind: Function,
},
NavigationTarget {
file_id: FileId(
0,
),
full_range: 52..115,
focus_range: 67..75,
name: "foo_test",
full_range: 121..185,
focus_range: 136..145,
name: "foo2_test",
kind: Function,
},
]

@ -703,6 +703,7 @@ fn highlight_name_ref_by_syntax(
};

match parent.kind() {
EXTERN_CRATE => HlTag::Symbol(SymbolKind::Module) | HlMod::CrateRoot,
METHOD_CALL_EXPR => ast::MethodCallExpr::cast(parent)
.and_then(|it| highlight_method_call(sema, krate, &it, edition))
.unwrap_or_else(|| SymbolKind::Method.into()),

@ -50,6 +50,15 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">test</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span>
<span class="comment documentation">//! ```</span>

<span class="comment documentation">//! ```rust</span>
<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="self_keyword crate_root injected">self</span><span class="semicolon injected">;</span>
<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">std</span><span class="semicolon injected">;</span>
<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">core</span><span class="semicolon injected">;</span>
<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">alloc</span><span class="semicolon injected">;</span>
<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">proc_macro</span><span class="semicolon injected">;</span>
<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">test</span><span class="semicolon injected">;</span>
<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">extern</span><span class="none injected"> </span><span class="keyword injected">crate</span><span class="none injected"> </span><span class="module crate_root injected">Krate</span><span class="semicolon injected">;</span>
<span class="comment documentation">//! ```</span>
<span class="keyword">mod</span> <span class="module declaration">outline_module</span><span class="semicolon">;</span>

<span class="comment documentation">/// ```</span>

@ -48,17 +48,6 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<pre><code><span class="keyword">use</span> <span class="module">inner</span><span class="operator">::</span><span class="brace">{</span><span class="self_keyword">self</span> <span class="keyword">as</span> <span class="module declaration">inner_mod</span><span class="brace">}</span><span class="semicolon">;</span>
<span class="keyword">mod</span> <span class="module declaration">inner</span> <span class="brace">{</span><span class="brace">}</span>

<span class="keyword">pub</span> <span class="keyword">mod</span> <span class="module declaration public">ops</span> <span class="brace">{</span>
<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">lang</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"fn_once"</span><span class="attribute_bracket attribute">]</span>
<span class="keyword">pub</span> <span class="keyword">trait</span> <span class="trait declaration public">FnOnce</span><span class="angle"><</span><span class="type_param declaration">Args</span><span class="angle">></span> <span class="brace">{</span><span class="brace">}</span>

<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">lang</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"fn_mut"</span><span class="attribute_bracket attribute">]</span>
<span class="keyword">pub</span> <span class="keyword">trait</span> <span class="trait declaration public">FnMut</span><span class="angle"><</span><span class="type_param declaration">Args</span><span class="angle">></span><span class="colon">:</span> <span class="trait public">FnOnce</span><span class="angle"><</span><span class="type_param">Args</span><span class="angle">></span> <span class="brace">{</span><span class="brace">}</span>

<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">lang</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"fn"</span><span class="attribute_bracket attribute">]</span>
<span class="keyword">pub</span> <span class="keyword">trait</span> <span class="trait declaration public">Fn</span><span class="angle"><</span><span class="type_param declaration">Args</span><span class="angle">></span><span class="colon">:</span> <span class="trait public">FnMut</span><span class="angle"><</span><span class="type_param">Args</span><span class="angle">></span> <span class="brace">{</span><span class="brace">}</span>
<span class="brace">}</span>

<span class="keyword">struct</span> <span class="struct declaration">Foo</span> <span class="brace">{</span>
<span class="field declaration">x</span><span class="colon">:</span> <span class="builtin_type">u32</span><span class="comma">,</span>
<span class="brace">}</span>
@ -125,8 +114,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="const_param const">FOO</span>
<span class="brace">}</span>

<span class="keyword">use</span> <span class="module public">ops</span><span class="operator">::</span><span class="trait public">Fn</span><span class="semicolon">;</span>
<span class="keyword">fn</span> <span class="function declaration">baz</span><span class="angle"><</span><span class="type_param declaration">F</span><span class="colon">:</span> <span class="trait public">Fn</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">-></span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="angle">></span><span class="parenthesis">(</span><span class="value_param callable declaration">f</span><span class="colon">:</span> <span class="type_param">F</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="keyword">use</span> <span class="module crate_root default_library library">core</span><span class="operator">::</span><span class="module default_library library">ops</span><span class="operator">::</span><span class="trait default_library library">Fn</span><span class="semicolon">;</span>
<span class="keyword">fn</span> <span class="function declaration">baz</span><span class="angle"><</span><span class="type_param declaration">F</span><span class="colon">:</span> <span class="trait default_library library">Fn</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">-></span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="angle">></span><span class="parenthesis">(</span><span class="value_param callable declaration">f</span><span class="colon">:</span> <span class="type_param">F</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="value_param callable">f</span><span class="parenthesis">(</span><span class="parenthesis">)</span>
<span class="brace">}</span>

@ -136,22 +136,11 @@ use self::foo as bar;
fn test_highlighting() {
check_highlighting(
r#"
//- minicore: derive, copy
//- minicore: derive, copy, fn
//- /main.rs crate:main deps:foo
use inner::{self as inner_mod};
mod inner {}

pub mod ops {
#[lang = "fn_once"]
pub trait FnOnce<Args> {}

#[lang = "fn_mut"]
pub trait FnMut<Args>: FnOnce<Args> {}

#[lang = "fn"]
pub trait Fn<Args>: FnMut<Args> {}
}

struct Foo {
x: u32,
}
@ -218,7 +207,7 @@ fn const_param<const FOO: usize>() -> usize {
FOO
}

use ops::Fn;
use core::ops::Fn;
fn baz<F: Fn() -> ()>(f: F) {
f()
}
@ -722,6 +711,15 @@ fn test_highlight_doc_comment() {
//! fn test() {}
//! ```

//! ```rust
//! extern crate self;
//! extern crate std;
//! extern crate core;
//! extern crate alloc;
//! extern crate proc_macro;
//! extern crate test;
//! extern crate Krate;
//! ```
mod outline_module;

/// ```
@ -1084,6 +1082,9 @@ pub struct Struct;
);
}

// Rainbow highlighting uses a deterministic hash (fxhash), but the hashing does differ
// depending on the pointer width, so we only run this on 64-bit targets.
#[cfg(target_pointer_width = "64")]
#[test]
fn test_rainbow_highlighting() {
check_highlighting(

@ -13,15 +13,35 @@ use crate::{

macro_rules! define_symbols {
(@WITH_NAME: $($alias:ident = $value:literal,)* @PLAIN: $($name:ident,)*) => {
// Ideally we would be emitting `const` here, but then we no longer have stable addresses
// which is what we are relying on for equality! In the future if consts can refer to
// statics we should swap these for `const`s and have the string literal being pointed
// to be statics to refer to such that their address is stable.
// We define symbols as both `const`s and `static`s because some const code requires const symbols,
// but code from before the transition relies on the lifetime of the predefined symbols and making them
// `const`s makes it error (because now they're temporaries). In the future we probably should only
// use consts.

/// Predefined symbols as `const`s (instead of the default `static`s).
pub mod consts {
use super::{Symbol, TaggedArcPtr};

// The strings should be in `static`s so that symbol equality holds.
$(
pub const $name: Symbol = {
static SYMBOL_STR: &str = stringify!($name);
Symbol { repr: TaggedArcPtr::non_arc(&SYMBOL_STR) }
};
)*
$(
pub const $alias: Symbol = {
static SYMBOL_STR: &str = $value;
Symbol { repr: TaggedArcPtr::non_arc(&SYMBOL_STR) }
};
)*
}

$(
pub static $name: Symbol = Symbol { repr: TaggedArcPtr::non_arc(&stringify!($name)) };
pub static $name: Symbol = consts::$name;
)*
$(
pub static $alias: Symbol = Symbol { repr: TaggedArcPtr::non_arc(&$value) };
pub static $alias: Symbol = consts::$alias;
)*

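The comment above hinges on address stability: every use of a `static` denotes one fixed location, while a `const` is materialized anew at each use site, so pointer identity is only guaranteed for the `static` form. A sketch:

static STABLE: &str = "sym";
const INLINED: &str = "sym";

fn main() {
    // Two uses of a `static` always observe the same address.
    assert!(std::ptr::eq(STABLE as *const str, STABLE as *const str));
    // Two uses of a `const` may or may not share storage; the language
    // does not guarantee it, which is why the symbols stay `static`s.
    let a: *const str = INLINED;
    let b: *const str = INLINED;
    let _ = a == b; // unspecified
}
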
@ -428,6 +448,7 @@ define_symbols! {
rustc_layout_scalar_valid_range_start,
rustc_legacy_const_generics,
rustc_macro_transparency,
rustc_paren_sugar,
rustc_reallocator,
rustc_reservation_impl,
rustc_safe_intrinsic,

@ -94,7 +94,9 @@ pub fn load_workspace(
let contents = loader.load_sync(path);
let path = vfs::VfsPath::from(path.to_path_buf());
vfs.set_file_contents(path.clone(), contents);
vfs.file_id(&path)
vfs.file_id(&path).and_then(|(file_id, excluded)| {
(excluded == vfs::FileExcluded::No).then_some(file_id)
})
},
extra_env,
);
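`vfs.file_id` now also reports whether the file is excluded, and the caller keeps the id only for non-excluded files via `and_then` plus `bool::then_some`. A sketch with stand-in VFS types (this `FileExcluded` is hypothetical, mirroring the shape used above):

#[derive(PartialEq)]
enum FileExcluded { Yes, No }

fn lookup(path: &str) -> Option<(u32, FileExcluded)> {
    match path {
        "kept.rs" => Some((1, FileExcluded::No)),
        "ignored.rs" => Some((2, FileExcluded::Yes)),
        _ => None,
    }
}

fn file_id(path: &str) -> Option<u32> {
    // Keep the id only when the file exists *and* is not excluded.
    lookup(path).and_then(|(id, excluded)| (excluded == FileExcluded::No).then_some(id))
}

fn main() {
    assert_eq!(file_id("kept.rs"), Some(1));
    assert_eq!(file_id("ignored.rs"), None);
    assert_eq!(file_id("missing.rs"), None);
}
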
@ -277,6 +277,9 @@ impl CargoWorkspace {
/// Fetches the metadata for the given `cargo_toml` manifest.
/// A successful result may contain another metadata error if the initial fetching failed but
/// the `--no-deps` retry succeeded.
///
/// The sysroot is used to set the `RUSTUP_TOOLCHAIN` env var when invoking cargo
/// to ensure that the rustup proxy uses the correct toolchain.
pub fn fetch_metadata(
cargo_toml: &ManifestPath,
current_dir: &AbsPath,

@ -260,19 +260,19 @@ fn parse_cfg(s: &str) -> Result<cfg::CfgAtom, String> {
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum SysrootSourceWorkspaceConfig {
pub enum RustSourceWorkspaceConfig {
CargoMetadata(CargoMetadataConfig),
Stitched,
}

impl Default for SysrootSourceWorkspaceConfig {
impl Default for RustSourceWorkspaceConfig {
fn default() -> Self {
SysrootSourceWorkspaceConfig::default_cargo()
RustSourceWorkspaceConfig::default_cargo()
}
}

impl SysrootSourceWorkspaceConfig {
impl RustSourceWorkspaceConfig {
pub fn default_cargo() -> Self {
SysrootSourceWorkspaceConfig::CargoMetadata(Default::default())
RustSourceWorkspaceConfig::CargoMetadata(Default::default())
}
}

@ -22,38 +22,40 @@ use toolchain::{probe_for_binary, Tool};

use crate::{
cargo_workspace::CargoMetadataConfig, utf8_stdout, CargoWorkspace, ManifestPath,
SysrootSourceWorkspaceConfig,
RustSourceWorkspaceConfig,
};

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Sysroot {
root: Option<AbsPathBuf>,
src_root: Option<AbsPathBuf>,
workspace: SysrootWorkspace,
rust_lib_src_root: Option<AbsPathBuf>,
workspace: RustLibSrcWorkspace,
error: Option<String>,
}

#[derive(Debug, Clone, Eq, PartialEq)]
pub(crate) enum SysrootWorkspace {
pub enum RustLibSrcWorkspace {
Workspace(CargoWorkspace),
Stitched(Stitched),
Empty,
}

#[derive(Debug, Clone, Eq, PartialEq)]
pub(crate) struct Stitched {
crates: Arena<SysrootCrateData>,
pub struct Stitched {
crates: Arena<RustLibSrcCrateData>,
}

impl ops::Index<SysrootCrate> for Stitched {
type Output = SysrootCrateData;
fn index(&self, index: SysrootCrate) -> &SysrootCrateData {
impl ops::Index<RustLibSrcCrate> for Stitched {
type Output = RustLibSrcCrateData;
fn index(&self, index: RustLibSrcCrate) -> &RustLibSrcCrateData {
&self.crates[index]
}
}

impl Stitched {
pub(crate) fn public_deps(&self) -> impl Iterator<Item = (CrateName, SysrootCrate, bool)> + '_ {
pub(crate) fn public_deps(
&self,
) -> impl Iterator<Item = (CrateName, RustLibSrcCrate, bool)> + '_ {
// core is added as a dependency before std in order to
// mimic rustc's dependency order
[("core", true), ("alloc", false), ("std", true), ("test", false)].into_iter().filter_map(
@ -63,32 +65,37 @@ impl Stitched {
)
}

pub(crate) fn proc_macro(&self) -> Option<SysrootCrate> {
pub(crate) fn proc_macro(&self) -> Option<RustLibSrcCrate> {
self.by_name("proc_macro")
}

pub(crate) fn crates(&self) -> impl ExactSizeIterator<Item = SysrootCrate> + '_ {
pub(crate) fn crates(&self) -> impl ExactSizeIterator<Item = RustLibSrcCrate> + '_ {
self.crates.iter().map(|(id, _data)| id)
}

fn by_name(&self, name: &str) -> Option<SysrootCrate> {
fn by_name(&self, name: &str) -> Option<RustLibSrcCrate> {
let (id, _data) = self.crates.iter().find(|(_id, data)| data.name == name)?;
Some(id)
}
}

pub(crate) type SysrootCrate = Idx<SysrootCrateData>;
pub(crate) type RustLibSrcCrate = Idx<RustLibSrcCrateData>;

#[derive(Debug, Clone, Eq, PartialEq)]
pub(crate) struct SysrootCrateData {
pub(crate) struct RustLibSrcCrateData {
pub(crate) name: String,
pub(crate) root: ManifestPath,
pub(crate) deps: Vec<SysrootCrate>,
pub(crate) deps: Vec<RustLibSrcCrate>,
}

impl Sysroot {
pub const fn empty() -> Sysroot {
Sysroot { root: None, src_root: None, workspace: SysrootWorkspace::Empty, error: None }
Sysroot {
root: None,
rust_lib_src_root: None,
workspace: RustLibSrcWorkspace::Empty,
error: None,
}
}

/// Returns sysroot "root" directory, where `bin/`, `etc/`, `lib/`, `libexec/`
@ -100,15 +107,15 @@ impl Sysroot {

/// Returns the sysroot "source" directory, where stdlib sources are located, like:
/// `$HOME/.rustup/toolchains/nightly-2022-07-23-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/library`
pub fn src_root(&self) -> Option<&AbsPath> {
self.src_root.as_deref()
pub fn rust_lib_src_root(&self) -> Option<&AbsPath> {
self.rust_lib_src_root.as_deref()
}

pub fn is_empty(&self) -> bool {
pub fn is_rust_lib_src_empty(&self) -> bool {
match &self.workspace {
SysrootWorkspace::Workspace(ws) => ws.packages().next().is_none(),
SysrootWorkspace::Stitched(stitched) => stitched.crates.is_empty(),
SysrootWorkspace::Empty => true,
RustLibSrcWorkspace::Workspace(ws) => ws.packages().next().is_none(),
RustLibSrcWorkspace::Stitched(stitched) => stitched.crates.is_empty(),
RustLibSrcWorkspace::Empty => true,
}
}

@ -118,13 +125,13 @@ impl Sysroot {

pub fn num_packages(&self) -> usize {
match &self.workspace {
SysrootWorkspace::Workspace(ws) => ws.packages().count(),
SysrootWorkspace::Stitched(c) => c.crates().count(),
SysrootWorkspace::Empty => 0,
RustLibSrcWorkspace::Workspace(ws) => ws.packages().count(),
RustLibSrcWorkspace::Stitched(c) => c.crates().count(),
RustLibSrcWorkspace::Empty => 0,
}
}

pub(crate) fn workspace(&self) -> &SysrootWorkspace {
pub(crate) fn workspace(&self) -> &RustLibSrcWorkspace {
&self.workspace
}
}
@ -133,33 +140,33 @@ impl Sysroot {
/// Attempts to discover the toolchain's sysroot from the given `dir`.
pub fn discover(dir: &AbsPath, extra_env: &FxHashMap<String, String>) -> Sysroot {
let sysroot_dir = discover_sysroot_dir(dir, extra_env);
let sysroot_src_dir = sysroot_dir.as_ref().ok().map(|sysroot_dir| {
discover_sysroot_src_dir_or_add_component(sysroot_dir, dir, extra_env)
let rust_lib_src_dir = sysroot_dir.as_ref().ok().map(|sysroot_dir| {
discover_rust_lib_src_dir_or_add_component(sysroot_dir, dir, extra_env)
});
Sysroot::assemble(Some(sysroot_dir), sysroot_src_dir)
Sysroot::assemble(Some(sysroot_dir), rust_lib_src_dir)
}

pub fn discover_with_src_override(
current_dir: &AbsPath,
extra_env: &FxHashMap<String, String>,
sysroot_src_dir: AbsPathBuf,
rust_lib_src_dir: AbsPathBuf,
) -> Sysroot {
let sysroot_dir = discover_sysroot_dir(current_dir, extra_env);
Sysroot::assemble(Some(sysroot_dir), Some(Ok(sysroot_src_dir)))
Sysroot::assemble(Some(sysroot_dir), Some(Ok(rust_lib_src_dir)))
}

pub fn discover_sysroot_src_dir(sysroot_dir: AbsPathBuf) -> Sysroot {
let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir)
pub fn discover_rust_lib_src_dir(sysroot_dir: AbsPathBuf) -> Sysroot {
let rust_lib_src_dir = discover_rust_lib_src_dir(&sysroot_dir)
.ok_or_else(|| format_err!("can't find standard library sources in {sysroot_dir}"));
Sysroot::assemble(Some(Ok(sysroot_dir)), Some(sysroot_src_dir))
Sysroot::assemble(Some(Ok(sysroot_dir)), Some(rust_lib_src_dir))
}

pub fn discover_rustc_src(&self) -> Option<ManifestPath> {
get_rustc_src(self.root()?)
}

pub fn new(sysroot_dir: Option<AbsPathBuf>, sysroot_src_dir: Option<AbsPathBuf>) -> Sysroot {
Self::assemble(sysroot_dir.map(Ok), sysroot_src_dir.map(Ok))
pub fn new(sysroot_dir: Option<AbsPathBuf>, rust_lib_src_dir: Option<AbsPathBuf>) -> Sysroot {
Self::assemble(sysroot_dir.map(Ok), rust_lib_src_dir.map(Ok))
}

/// Returns a command to run a tool preferring the cargo proxies if the sysroot exists.
@ -200,7 +207,7 @@ impl Sysroot {

fn assemble(
sysroot_dir: Option<Result<AbsPathBuf, anyhow::Error>>,
sysroot_src_dir: Option<Result<AbsPathBuf, anyhow::Error>>,
rust_lib_src_dir: Option<Result<AbsPathBuf, anyhow::Error>>,
) -> Sysroot {
let mut errors = String::new();
let root = match sysroot_dir {
@ -211,8 +218,8 @@ impl Sysroot {
}
None => None,
};
let src_root = match sysroot_src_dir {
Some(Ok(sysroot_src_dir)) => Some(sysroot_src_dir),
let rust_lib_src_root = match rust_lib_src_dir {
Some(Ok(rust_lib_src_dir)) => Some(rust_lib_src_dir),
Some(Err(e)) => {
format_to!(errors, "{e}\n");
None
@ -221,24 +228,28 @@ impl Sysroot {
};
Sysroot {
root,
src_root,
workspace: SysrootWorkspace::Empty,
rust_lib_src_root,
workspace: RustLibSrcWorkspace::Empty,
error: errors.is_empty().not().then_some(errors),
}
}

pub fn load_workspace(&mut self, sysroot_source_config: &SysrootSourceWorkspaceConfig) {
assert!(matches!(self.workspace, SysrootWorkspace::Empty), "workspace already loaded");
let Self { root: _, src_root: Some(src_root), workspace, error: _ } = self else { return };
if let SysrootSourceWorkspaceConfig::CargoMetadata(cargo_config) = sysroot_source_config {
pub fn load_workspace(
&self,
sysroot_source_config: &RustSourceWorkspaceConfig,
) -> Option<RustLibSrcWorkspace> {
assert!(matches!(self.workspace, RustLibSrcWorkspace::Empty), "workspace already loaded");
let Self { root: _, rust_lib_src_root: Some(src_root), workspace: _, error: _ } = self
else {
return None;
};
if let RustSourceWorkspaceConfig::CargoMetadata(cargo_config) = sysroot_source_config {
let library_manifest = ManifestPath::try_from(src_root.join("Cargo.toml")).unwrap();
if fs::metadata(&library_manifest).is_ok() {
if let Some(loaded) =
Self::load_library_via_cargo(library_manifest, src_root, cargo_config)
self.load_library_via_cargo(library_manifest, src_root, cargo_config)
{
*workspace = loaded;
self.load_core_check();
return;
return Some(loaded);
}
}
}
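The refactor splits loading from committing: `load_workspace` becomes a `&self` query that returns the loaded workspace, and a new `set_workspace` performs the only mutation. A pared-down sketch of that two-phase shape (stand-in types, not the real `Sysroot`):

#[derive(Default)]
struct Sysroot { workspace: Option<String> }

impl Sysroot {
    // Pure query: builds a workspace without touching `self`, so it can
    // run on a shared reference (e.g. from another thread).
    fn load_workspace(&self) -> Option<String> {
        Some("stitched".to_owned())
    }
    // Separate commit step; the only place that mutates.
    fn set_workspace(&mut self, ws: String) {
        self.workspace = Some(ws);
    }
}

fn main() {
    let mut sysroot = Sysroot::default();
    if let Some(loaded) = sysroot.load_workspace() {
        sysroot.set_workspace(loaded);
    }
    assert_eq!(sysroot.workspace.as_deref(), Some("stitched"));
}
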
@ -255,7 +266,7 @@ impl Sysroot {
|
||||
.find(|it| fs::metadata(it).is_ok());
|
||||
|
||||
if let Some(root) = root {
|
||||
stitched.crates.alloc(SysrootCrateData {
|
||||
stitched.crates.alloc(RustLibSrcCrateData {
|
||||
name: name.into(),
|
||||
root,
|
||||
deps: Vec::new(),
|
||||
@ -286,21 +297,23 @@ impl Sysroot {
|
||||
}
|
||||
}
|
||||
}
|
||||
*workspace = SysrootWorkspace::Stitched(stitched);
|
||||
self.load_core_check();
|
||||
Some(RustLibSrcWorkspace::Stitched(stitched))
|
||||
}
|
||||
|
||||
fn load_core_check(&mut self) {
|
||||
pub fn set_workspace(&mut self, workspace: RustLibSrcWorkspace) {
|
||||
self.workspace = workspace;
|
||||
if self.error.is_none() {
|
||||
if let Some(src_root) = &self.src_root {
|
||||
if let Some(src_root) = &self.rust_lib_src_root {
|
||||
let has_core = match &self.workspace {
|
||||
SysrootWorkspace::Workspace(ws) => ws.packages().any(|p| ws[p].name == "core"),
|
||||
SysrootWorkspace::Stitched(stitched) => stitched.by_name("core").is_some(),
|
||||
SysrootWorkspace::Empty => true,
|
||||
RustLibSrcWorkspace::Workspace(ws) => {
|
||||
ws.packages().any(|p| ws[p].name == "core")
|
||||
}
|
||||
RustLibSrcWorkspace::Stitched(stitched) => stitched.by_name("core").is_some(),
|
||||
RustLibSrcWorkspace::Empty => true,
|
||||
};
|
||||
if !has_core {
|
||||
let var_note = if env::var_os("RUST_SRC_PATH").is_some() {
|
||||
" (env var `RUST_SRC_PATH` is set and may be incorrect, try unsetting it)"
|
||||
let var_note = if env::var_os("rust_lib_src_PATH").is_some() {
|
||||
" (env var `rust_lib_src_PATH` is set and may be incorrect, try unsetting it)"
|
||||
} else {
|
||||
", try running `rustup component add rust-src` to possibly fix this"
|
||||
};
|
||||
@ -313,10 +326,11 @@ impl Sysroot {
|
||||
}
|
||||
|
||||
fn load_library_via_cargo(
|
||||
&self,
|
||||
library_manifest: ManifestPath,
|
||||
sysroot_src_dir: &AbsPathBuf,
|
||||
rust_lib_src_dir: &AbsPathBuf,
|
||||
cargo_config: &CargoMetadataConfig,
|
||||
) -> Option<SysrootWorkspace> {
|
||||
) -> Option<RustLibSrcWorkspace> {
|
||||
tracing::debug!("Loading library metadata: {library_manifest}");
|
||||
let mut cargo_config = cargo_config.clone();
|
||||
// the sysroot uses `public-dependency`, so we make cargo think it's a nightly
|
||||
@ -327,9 +341,9 @@ impl Sysroot {
|
||||
|
||||
let (mut res, _) = match CargoWorkspace::fetch_metadata(
|
||||
&library_manifest,
|
||||
sysroot_src_dir,
|
||||
rust_lib_src_dir,
|
||||
&cargo_config,
|
||||
&Sysroot::empty(),
|
||||
self,
|
||||
// Make sure we never attempt to write to the sysroot
|
||||
true,
|
||||
&|_| (),
|
||||
@ -391,7 +405,7 @@ impl Sysroot {
|
||||
});
|
||||
|
||||
let cargo_workspace = CargoWorkspace::new(res, library_manifest, Default::default());
|
||||
Some(SysrootWorkspace::Workspace(cargo_workspace))
|
||||
Some(RustLibSrcWorkspace::Workspace(cargo_workspace))
|
||||
}
|
||||
}
|
||||
|
||||
@ -407,36 +421,38 @@ fn discover_sysroot_dir(
|
||||
Ok(AbsPathBuf::assert(Utf8PathBuf::from(stdout)))
|
||||
}
|
||||
|
||||
fn discover_sysroot_src_dir(sysroot_path: &AbsPathBuf) -> Option<AbsPathBuf> {
|
||||
if let Ok(path) = env::var("RUST_SRC_PATH") {
|
||||
fn discover_rust_lib_src_dir(sysroot_path: &AbsPathBuf) -> Option<AbsPathBuf> {
|
||||
if let Ok(path) = env::var("rust_lib_src_PATH") {
|
||||
if let Ok(path) = AbsPathBuf::try_from(path.as_str()) {
|
||||
let core = path.join("core");
|
||||
if fs::metadata(&core).is_ok() {
|
||||
tracing::debug!("Discovered sysroot by RUST_SRC_PATH: {path}");
|
||||
tracing::debug!("Discovered sysroot by rust_lib_src_PATH: {path}");
|
||||
return Some(path);
|
||||
}
|
||||
tracing::debug!("RUST_SRC_PATH is set, but is invalid (no core: {core:?}), ignoring");
|
||||
tracing::debug!(
|
||||
"rust_lib_src_PATH is set, but is invalid (no core: {core:?}), ignoring"
|
||||
);
|
||||
} else {
|
||||
tracing::debug!("RUST_SRC_PATH is set, but is invalid, ignoring");
|
||||
tracing::debug!("rust_lib_src_PATH is set, but is invalid, ignoring");
|
||||
}
|
||||
}
|
||||
|
||||
get_rust_src(sysroot_path)
|
||||
get_rust_lib_src(sysroot_path)
|
||||
}
|
||||
|
||||
fn discover_sysroot_src_dir_or_add_component(
|
||||
fn discover_rust_lib_src_dir_or_add_component(
|
||||
sysroot_path: &AbsPathBuf,
|
||||
current_dir: &AbsPath,
|
||||
extra_env: &FxHashMap<String, String>,
|
||||
) -> Result<AbsPathBuf> {
|
||||
discover_sysroot_src_dir(sysroot_path)
|
||||
discover_rust_lib_src_dir(sysroot_path)
|
||||
.or_else(|| {
|
||||
let mut rustup = toolchain::command(Tool::Rustup.prefer_proxy(), current_dir);
|
||||
rustup.envs(extra_env);
|
||||
rustup.args(["component", "add", "rust-src"]);
|
||||
tracing::info!("adding rust-src component by {:?}", rustup);
|
||||
utf8_stdout(&mut rustup).ok()?;
|
||||
get_rust_src(sysroot_path)
|
||||
get_rust_lib_src(sysroot_path)
|
||||
})
|
||||
.ok_or_else(|| {
|
||||
tracing::error!(%sysroot_path, "can't load standard library, try installing `rust-src`");
|
||||
@ -461,11 +477,11 @@ fn get_rustc_src(sysroot_path: &AbsPath) -> Option<ManifestPath> {
|
||||
}
|
||||
}
|
||||
|
||||
fn get_rust_src(sysroot_path: &AbsPath) -> Option<AbsPathBuf> {
|
||||
let rust_src = sysroot_path.join("lib/rustlib/src/rust/library");
|
||||
tracing::debug!("checking sysroot library: {rust_src}");
|
||||
if fs::metadata(&rust_src).is_ok() {
|
||||
Some(rust_src)
|
||||
fn get_rust_lib_src(sysroot_path: &AbsPath) -> Option<AbsPathBuf> {
|
||||
let rust_lib_src = sysroot_path.join("lib/rustlib/src/rust/library");
|
||||
tracing::debug!("checking sysroot library: {rust_lib_src}");
|
||||
if fs::metadata(&rust_lib_src).is_ok() {
|
||||
Some(rust_lib_src)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
@ -12,9 +12,9 @@ use span::FileId;
|
||||
use triomphe::Arc;
|
||||
|
||||
use crate::{
|
||||
sysroot::SysrootWorkspace, workspace::ProjectWorkspaceKind, CargoWorkspace, CfgOverrides,
|
||||
ManifestPath, ProjectJson, ProjectJsonData, ProjectWorkspace, Sysroot,
|
||||
SysrootSourceWorkspaceConfig, WorkspaceBuildScripts,
|
||||
sysroot::RustLibSrcWorkspace, workspace::ProjectWorkspaceKind, CargoWorkspace, CfgOverrides,
|
||||
ManifestPath, ProjectJson, ProjectJsonData, ProjectWorkspace, RustSourceWorkspaceConfig,
|
||||
Sysroot, WorkspaceBuildScripts,
|
||||
};
|
||||
|
||||
fn load_cargo(file: &str) -> (CrateGraph, ProcMacroPaths) {
|
||||
@ -42,7 +42,6 @@ fn load_workspace_from_metadata(file: &str) -> ProjectWorkspace {
|
||||
build_scripts: WorkspaceBuildScripts::default(),
|
||||
rustc: Err(None),
|
||||
error: None,
|
||||
set_test: true,
|
||||
},
|
||||
cfg_overrides: Default::default(),
|
||||
sysroot: Sysroot::empty(),
|
||||
@ -50,6 +49,7 @@ fn load_workspace_from_metadata(file: &str) -> ProjectWorkspace {
|
||||
toolchain: None,
|
||||
target_layout: Err("target_data_layout not loaded".into()),
|
||||
extra_includes: Vec::new(),
|
||||
set_test: true,
|
||||
}
|
||||
}
|
||||
|
||||
@ -65,6 +65,7 @@ fn load_rust_project(file: &str) -> (CrateGraph, ProcMacroPaths) {
|
||||
target_layout: Err(Arc::from("test has no data layout")),
|
||||
cfg_overrides: Default::default(),
|
||||
extra_includes: Vec::new(),
|
||||
set_test: true,
|
||||
};
|
||||
to_crate_graph(project_workspace, &mut Default::default())
|
||||
}
|
||||
@ -125,7 +126,10 @@ fn get_fake_sysroot() -> Sysroot {
|
||||
let sysroot_dir = AbsPathBuf::assert(sysroot_path);
|
||||
let sysroot_src_dir = sysroot_dir.clone();
|
||||
let mut sysroot = Sysroot::new(Some(sysroot_dir), Some(sysroot_src_dir));
|
||||
sysroot.load_workspace(&SysrootSourceWorkspaceConfig::default_cargo());
|
||||
let loaded_sysroot = sysroot.load_workspace(&RustSourceWorkspaceConfig::default_cargo());
|
||||
if let Some(loaded_sysroot) = loaded_sysroot {
|
||||
sysroot.set_workspace(loaded_sysroot);
|
||||
}
|
||||
sysroot
|
||||
}
|
||||
|
||||
@ -271,15 +275,17 @@ fn smoke_test_real_sysroot_cargo() {
|
||||
AbsPath::assert(Utf8Path::new(env!("CARGO_MANIFEST_DIR"))),
|
||||
&Default::default(),
|
||||
);
|
||||
sysroot.load_workspace(&SysrootSourceWorkspaceConfig::default_cargo());
|
||||
assert!(matches!(sysroot.workspace(), SysrootWorkspace::Workspace(_)));
|
||||
let loaded_sysroot = sysroot.load_workspace(&RustSourceWorkspaceConfig::default_cargo());
|
||||
if let Some(loaded_sysroot) = loaded_sysroot {
|
||||
sysroot.set_workspace(loaded_sysroot);
|
||||
}
|
||||
assert!(matches!(sysroot.workspace(), RustLibSrcWorkspace::Workspace(_)));
|
||||
let project_workspace = ProjectWorkspace {
|
||||
kind: ProjectWorkspaceKind::Cargo {
|
||||
cargo: cargo_workspace,
|
||||
build_scripts: WorkspaceBuildScripts::default(),
|
||||
rustc: Err(None),
|
||||
error: None,
|
||||
set_test: true,
|
||||
},
|
||||
sysroot,
|
||||
rustc_cfg: Vec::new(),
|
||||
@ -287,6 +293,7 @@ fn smoke_test_real_sysroot_cargo() {
|
||||
toolchain: None,
|
||||
target_layout: Err("target_data_layout not loaded".into()),
|
||||
extra_includes: Vec::new(),
|
||||
set_test: true,
|
||||
};
|
||||
project_workspace.to_crate_graph(
|
||||
&mut {
|
||||
|
@ -2,7 +2,7 @@
|
||||
//! metadata` or `rust-project.json`) into representation stored in the salsa
|
||||
//! database -- `CrateGraph`.
|
||||
|
||||
use std::{collections::VecDeque, fmt, fs, iter, ops::Deref, sync};
|
||||
use std::{collections::VecDeque, fmt, fs, iter, ops::Deref, sync, thread};
|
||||
|
||||
use anyhow::Context;
|
||||
use base_db::{
|
||||
@ -23,10 +23,10 @@ use crate::{
|
||||
cargo_workspace::{CargoMetadataConfig, DepKind, PackageData, RustLibSource},
|
||||
env::{cargo_config_env, inject_cargo_env, inject_cargo_package_env, inject_rustc_tool_env},
|
||||
project_json::{Crate, CrateArrayIdx},
|
||||
sysroot::{SysrootCrate, SysrootWorkspace},
|
||||
sysroot::{RustLibSrcCrate, RustLibSrcWorkspace},
|
||||
toolchain_info::{rustc_cfg, target_data_layout, target_tuple, version, QueryConfig},
|
||||
CargoConfig, CargoWorkspace, CfgOverrides, InvocationStrategy, ManifestPath, Package,
|
||||
ProjectJson, ProjectManifest, Sysroot, SysrootSourceWorkspaceConfig, TargetData, TargetKind,
|
||||
ProjectJson, ProjectManifest, RustSourceWorkspaceConfig, Sysroot, TargetData, TargetKind,
|
||||
WorkspaceBuildScripts,
|
||||
};
|
||||
use tracing::{debug, error, info};
|
||||
@ -64,6 +64,8 @@ pub struct ProjectWorkspace {
|
||||
pub cfg_overrides: CfgOverrides,
|
||||
/// Additional includes to add for the VFS.
|
||||
pub extra_includes: Vec<AbsPathBuf>,
|
||||
/// Set `cfg(test)` for local crates
|
||||
pub set_test: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
@ -79,7 +81,6 @@ pub enum ProjectWorkspaceKind {
|
||||
/// The rustc workspace loaded for this workspace. An `Err(None)` means loading has been
|
||||
/// disabled or was otherwise not requested.
|
||||
rustc: Result<Box<(CargoWorkspace, WorkspaceBuildScripts)>, Option<String>>,
|
||||
set_test: bool,
|
||||
},
|
||||
/// Project workspace was specified using a `rust-project.json` file.
|
||||
Json(ProjectJson),
|
||||
@ -98,7 +99,6 @@ pub enum ProjectWorkspaceKind {
|
||||
file: ManifestPath,
|
||||
/// Is this file a cargo script file?
|
||||
cargo: Option<(CargoWorkspace, WorkspaceBuildScripts, Option<Arc<anyhow::Error>>)>,
|
||||
set_test: bool,
|
||||
},
|
||||
}
|
||||
|
||||
@ -113,9 +113,10 @@ impl fmt::Debug for ProjectWorkspace {
|
||||
target_layout,
|
||||
cfg_overrides,
|
||||
extra_includes,
|
||||
set_test,
|
||||
} = self;
|
||||
match kind {
|
||||
ProjectWorkspaceKind::Cargo { cargo, error: _, build_scripts, rustc, set_test } => f
|
||||
ProjectWorkspaceKind::Cargo { cargo, error: _, build_scripts, rustc } => f
|
||||
.debug_struct("Cargo")
|
||||
.field("root", &cargo.workspace_root().file_name())
|
||||
.field("n_packages", &cargo.packages().len())
|
||||
@ -141,11 +142,12 @@ impl fmt::Debug for ProjectWorkspace {
|
||||
.field("toolchain", &toolchain)
|
||||
.field("data_layout", &target_layout)
|
||||
.field("n_cfg_overrides", &cfg_overrides.len())
|
||||
.field("n_extra_includes", &extra_includes.len());
|
||||
.field("n_extra_includes", &extra_includes.len())
|
||||
.field("set_test", set_test);
|
||||
|
||||
debug_struct.finish()
|
||||
}
|
||||
ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script, set_test } => f
|
||||
ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script } => f
|
||||
.debug_struct("DetachedFiles")
|
||||
.field("file", &file)
|
||||
.field("cargo_script", &cargo_script.is_some())
|
||||
@ -186,7 +188,7 @@ impl ProjectWorkspace {
|
||||
let project_location = project_json.parent().to_path_buf();
|
||||
let project_json: ProjectJson =
|
||||
ProjectJson::new(Some(project_json.clone()), &project_location, data);
|
||||
ProjectWorkspace::load_inline(project_json, config)
|
||||
ProjectWorkspace::load_inline(project_json, config, progress)
|
||||
}
|
||||
ProjectManifest::CargoScript(rust_file) => {
|
||||
ProjectWorkspace::load_detached_file(rust_file, config)?
|
||||
@ -204,19 +206,33 @@ impl ProjectWorkspace {
|
||||
config: &CargoConfig,
|
||||
progress: &dyn Fn(String),
|
||||
) -> Result<ProjectWorkspace, anyhow::Error> {
|
||||
let mut sysroot = match (&config.sysroot, &config.sysroot_src) {
|
||||
progress("Discovering sysroot".to_owned());
|
||||
let CargoConfig {
|
||||
features,
|
||||
rustc_source,
|
||||
extra_args,
|
||||
extra_env,
|
||||
set_test,
|
||||
cfg_overrides,
|
||||
extra_includes,
|
||||
sysroot,
|
||||
sysroot_src,
|
||||
target,
|
||||
..
|
||||
} = config;
|
||||
let mut sysroot = match (sysroot, sysroot_src) {
|
||||
(Some(RustLibSource::Discover), None) => {
|
||||
Sysroot::discover(cargo_toml.parent(), &config.extra_env)
|
||||
Sysroot::discover(cargo_toml.parent(), extra_env)
|
||||
}
|
||||
(Some(RustLibSource::Discover), Some(sysroot_src)) => {
|
||||
Sysroot::discover_with_src_override(
|
||||
cargo_toml.parent(),
|
||||
&config.extra_env,
|
||||
extra_env,
|
||||
sysroot_src.clone(),
|
||||
)
|
||||
}
|
||||
(Some(RustLibSource::Path(path)), None) => {
|
||||
Sysroot::discover_sysroot_src_dir(path.clone())
|
||||
Sysroot::discover_rust_lib_src_dir(path.clone())
|
||||
}
|
||||
(Some(RustLibSource::Path(sysroot)), Some(sysroot_src)) => {
|
||||
Sysroot::new(Some(sysroot.clone()), Some(sysroot_src.clone()))
|
||||
@ -224,100 +240,147 @@ impl ProjectWorkspace {
|
||||
(None, _) => Sysroot::empty(),
|
||||
};
|
||||
|
||||
let rustc_dir = match &config.rustc_source {
Some(RustLibSource::Path(path)) => ManifestPath::try_from(path.clone())
.map_err(|p| Some(format!("rustc source path is not absolute: {p}"))),
Some(RustLibSource::Discover) => sysroot
.discover_rustc_src()
.ok_or_else(|| Some("Failed to discover rustc source for sysroot.".to_owned())),
None => Err(None),
};

tracing::info!(workspace = %cargo_toml, src_root = ?sysroot.src_root(), root = ?sysroot.root(), "Using sysroot");
tracing::info!(workspace = %cargo_toml, src_root = ?sysroot.rust_lib_src_root(), root = ?sysroot.root(), "Using sysroot");
progress("Querying project metadata".to_owned());
let toolchain_config = QueryConfig::Cargo(&sysroot, cargo_toml);
let targets =
target_tuple::get(toolchain_config, config.target.as_deref(), &config.extra_env)
.unwrap_or_default();
let toolchain = version::get(toolchain_config, &config.extra_env)
.inspect_err(|e| {
tracing::error!(%e,
"failed fetching toolchain version for {cargo_toml:?} workspace"
)
})
.ok()
.flatten();
let rustc_cfg =
rustc_cfg::get(toolchain_config, targets.first().map(Deref::deref), &config.extra_env);
let cfg_overrides = config.cfg_overrides.clone();
let data_layout = target_data_layout::get(
toolchain_config,
targets.first().map(Deref::deref),
&config.extra_env,
);
if let Err(e) = &data_layout {
tracing::error!(%e, "failed fetching data layout for {cargo_toml:?} workspace");
}
sysroot.load_workspace(&SysrootSourceWorkspaceConfig::CargoMetadata(
sysroot_metadata_config(&config.extra_env, &targets),
));
target_tuple::get(toolchain_config, target.as_deref(), extra_env).unwrap_or_default();

let rustc = rustc_dir.and_then(|rustc_dir| {
info!(workspace = %cargo_toml, rustc_dir = %rustc_dir, "Using rustc source");
match CargoWorkspace::fetch_metadata(
&rustc_dir,
cargo_toml.parent(),
&CargoMetadataConfig {
features: crate::CargoFeatures::default(),
targets: targets.clone(),
extra_args: config.extra_args.clone(),
extra_env: config.extra_env.clone(),
},
&sysroot,
false,
progress,
) {
Ok((meta, _error)) => {
let workspace = CargoWorkspace::new(meta, cargo_toml.clone(), Env::default());
let build_scripts = WorkspaceBuildScripts::rustc_crates(
&workspace,
cargo_toml.parent(),
&config.extra_env,
// We spawn a bunch of processes to query various information about the workspace's
// toolchain and sysroot
// We can speed up loading a bit by spawning all of these processes in parallel (especially
// on systems where process spawning is delayed)
let join = thread::scope(|s| {
let workspace_dir = cargo_toml.parent();
let toolchain = s.spawn(|| {
version::get(toolchain_config, extra_env)
.inspect_err(|e| {
tracing::error!(%e,
"failed fetching toolchain version for {cargo_toml:?} workspace"
)
})
.ok()
.flatten()
});

let rustc_cfg = s.spawn(|| {
rustc_cfg::get(toolchain_config, targets.first().map(Deref::deref), extra_env)
});
let data_layout = s.spawn(|| {
target_data_layout::get(
toolchain_config,
targets.first().map(Deref::deref),
extra_env,
).inspect_err(|e| {
tracing::error!(%e, "failed fetching data layout for {cargo_toml:?} workspace")
})
});

let rustc_dir = s.spawn(|| {
let rustc_dir = match rustc_source {
Some(RustLibSource::Path(path)) => ManifestPath::try_from(path.clone())
.map_err(|p| Some(format!("rustc source path is not absolute: {p}"))),
Some(RustLibSource::Discover) => {
sysroot.discover_rustc_src().ok_or_else(|| {
Some("Failed to discover rustc source for sysroot.".to_owned())
})
}
None => Err(None),
};
rustc_dir.and_then(|rustc_dir| {
info!(workspace = %cargo_toml, rustc_dir = %rustc_dir, "Using rustc source");
match CargoWorkspace::fetch_metadata(
&rustc_dir,
workspace_dir,
&CargoMetadataConfig {
features: crate::CargoFeatures::default(),
targets: targets.clone(),
extra_args: extra_args.clone(),
extra_env: extra_env.clone(),
},
&sysroot,
);
Ok(Box::new((workspace, build_scripts)))
}
Err(e) => {
tracing::error!(
%e,
"Failed to read Cargo metadata from rustc source at {rustc_dir}",
);
Err(Some(format!(
"Failed to read Cargo metadata from rustc source at {rustc_dir}: {e}"
)))
}
}
false,
&|_| (),
) {
Ok((meta, _error)) => {
let workspace =
CargoWorkspace::new(meta, cargo_toml.clone(), Env::default());
let build_scripts = WorkspaceBuildScripts::rustc_crates(
&workspace,
workspace_dir,
extra_env,
&sysroot,
);
Ok(Box::new((workspace, build_scripts)))
}
Err(e) => {
tracing::error!(
%e,
"Failed to read Cargo metadata from rustc source at {rustc_dir}",
);
Err(Some(format!(
"Failed to read Cargo metadata from rustc source at {rustc_dir}: {e}"
)))
}
}
})
});

let cargo_metadata = s.spawn(|| {
CargoWorkspace::fetch_metadata(
cargo_toml,
workspace_dir,
&CargoMetadataConfig {
features: features.clone(),
targets: targets.clone(),
extra_args: extra_args.clone(),
extra_env: extra_env.clone(),
},
&sysroot,
false,
&|_| (),
)
});
let loaded_sysroot = s.spawn(|| {
sysroot.load_workspace(&RustSourceWorkspaceConfig::CargoMetadata(
sysroot_metadata_config(extra_env, &targets),
))
});
let cargo_config_extra_env =
s.spawn(|| cargo_config_env(cargo_toml, extra_env, &sysroot));
thread::Result::Ok((
toolchain.join()?,
rustc_cfg.join()?,
data_layout.join()?,
rustc_dir.join()?,
loaded_sysroot.join()?,
cargo_metadata.join()?,
cargo_config_extra_env.join()?,
))
});
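// A minimal, self-contained sketch of the `thread::scope` pattern used above,
// with made-up names and plain std in place of the rust-analyzer queries.
// Scoped threads may borrow locals such as `config`, so the queries can run
// in parallel without `Arc` or cloning, and `join` surfaces a panic in any
// spawned closure as an `Err` instead of tearing down the process.
fn parallel_queries_sketch(config: &str) -> std::thread::Result<(String, String, String)> {
    std::thread::scope(|s| {
        let toolchain = s.spawn(|| format!("toolchain for {config}"));
        let rustc_cfg = s.spawn(|| format!("cfgs for {config}"));
        let data_layout = s.spawn(|| format!("layout for {config}"));
        // Joining inside the scope collects all three results before the
        // scope ends; the closure's return value is what `scope` returns.
        Ok((toolchain.join()?, rustc_cfg.join()?, data_layout.join()?))
    })
}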

let (meta, error) = CargoWorkspace::fetch_metadata(
cargo_toml,
cargo_toml.parent(),
&CargoMetadataConfig {
features: config.features.clone(),
targets,
extra_args: config.extra_args.clone(),
extra_env: config.extra_env.clone(),
},
&sysroot,
false,
progress,
)
.with_context(|| {
let (
toolchain,
rustc_cfg,
data_layout,
rustc,
loaded_sysroot,
cargo_metadata,
cargo_config_extra_env,
) = match join {
Ok(it) => it,
Err(e) => std::panic::resume_unwind(e),
};

let (meta, error) = cargo_metadata.with_context(|| {
format!(
"Failed to read Cargo metadata from Cargo.toml file {cargo_toml}, {toolchain:?}",
)
})?;
let cargo_config_extra_env = cargo_config_env(cargo_toml, &config.extra_env, &sysroot);
let cargo = CargoWorkspace::new(meta, cargo_toml.clone(), cargo_config_extra_env);
if let Some(loaded_sysroot) = loaded_sysroot {
sysroot.set_workspace(loaded_sysroot);
}

Ok(ProjectWorkspace {
kind: ProjectWorkspaceKind::Cargo {
@ -325,35 +388,70 @@ impl ProjectWorkspace {
build_scripts: WorkspaceBuildScripts::default(),
rustc,
error: error.map(Arc::new),
set_test: config.set_test,
},
sysroot,
rustc_cfg,
cfg_overrides,
cfg_overrides: cfg_overrides.clone(),
toolchain,
target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())),
extra_includes: config.extra_includes.clone(),
extra_includes: extra_includes.clone(),
set_test: *set_test,
})
}

pub fn load_inline(project_json: ProjectJson, config: &CargoConfig) -> ProjectWorkspace {
pub fn load_inline(
project_json: ProjectJson,
config: &CargoConfig,
progress: &dyn Fn(String),
) -> ProjectWorkspace {
progress("Discovering sysroot".to_owned());
let mut sysroot =
Sysroot::new(project_json.sysroot.clone(), project_json.sysroot_src.clone());
sysroot.load_workspace(&SysrootSourceWorkspaceConfig::Stitched);
let query_config = QueryConfig::Rustc(&sysroot, project_json.path().as_ref());
let toolchain = version::get(query_config, &config.extra_env).ok().flatten();
let loaded_sysroot = sysroot.load_workspace(&RustSourceWorkspaceConfig::Stitched);
if let Some(loaded_sysroot) = loaded_sysroot {
sysroot.set_workspace(loaded_sysroot);
}

tracing::info!(workspace = %project_json.manifest_or_root(), src_root = ?sysroot.rust_lib_src_root(), root = ?sysroot.root(), "Using sysroot");
progress("Querying project metadata".to_owned());
let query_config = QueryConfig::Rustc(&sysroot, project_json.path().as_ref());
let targets = target_tuple::get(query_config, config.target.as_deref(), &config.extra_env)
.unwrap_or_default();

// We spawn a bunch of processes to query various information about the workspace's
// toolchain and sysroot
// We can speed up loading a bit by spawning all of these processes in parallel (especially
// on systems where process spawning is delayed)
let join = thread::scope(|s| {
let toolchain =
s.spawn(|| version::get(query_config, &config.extra_env).ok().flatten());
let rustc_cfg = s.spawn(|| {
rustc_cfg::get(query_config, targets.first().map(Deref::deref), &config.extra_env)
});
let data_layout = s.spawn(|| {
target_data_layout::get(
query_config,
targets.first().map(Deref::deref),
&config.extra_env,
)
});
thread::Result::Ok((toolchain.join()?, rustc_cfg.join()?, data_layout.join()?))
});

let (toolchain, rustc_cfg, target_layout) = match join {
Ok(it) => it,
Err(e) => std::panic::resume_unwind(e),
};

let target = config.target.as_deref();
let rustc_cfg = rustc_cfg::get(query_config, target, &config.extra_env);
let data_layout = target_data_layout::get(query_config, target, &config.extra_env);
ProjectWorkspace {
kind: ProjectWorkspaceKind::Json(project_json),
sysroot,
rustc_cfg,
toolchain,
target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())),
target_layout: target_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())),
cfg_overrides: config.cfg_overrides.clone(),
extra_includes: config.extra_includes.clone(),
set_test: config.set_test,
}
}

@ -363,7 +461,7 @@ impl ProjectWorkspace {
) -> anyhow::Result<ProjectWorkspace> {
let dir = detached_file.parent();
let mut sysroot = match &config.sysroot {
Some(RustLibSource::Path(path)) => Sysroot::discover_sysroot_src_dir(path.clone()),
Some(RustLibSource::Path(path)) => Sysroot::discover_rust_lib_src_dir(path.clone()),
Some(RustLibSource::Discover) => Sysroot::discover(dir, &config.extra_env),
None => Sysroot::empty(),
};
@ -374,9 +472,12 @@ impl ProjectWorkspace {
.unwrap_or_default();
let rustc_cfg = rustc_cfg::get(query_config, None, &config.extra_env);
let data_layout = target_data_layout::get(query_config, None, &config.extra_env);
sysroot.load_workspace(&SysrootSourceWorkspaceConfig::CargoMetadata(
let loaded_sysroot = sysroot.load_workspace(&RustSourceWorkspaceConfig::CargoMetadata(
sysroot_metadata_config(&config.extra_env, &targets),
));
if let Some(loaded_sysroot) = loaded_sysroot {
sysroot.set_workspace(loaded_sysroot);
}

let cargo_script = CargoWorkspace::fetch_metadata(
detached_file,
@ -406,7 +507,6 @@ impl ProjectWorkspace {
kind: ProjectWorkspaceKind::DetachedFile {
file: detached_file.to_owned(),
cargo: cargo_script,
set_test: config.set_test,
},
sysroot,
rustc_cfg,
@ -414,6 +514,7 @@ impl ProjectWorkspace {
target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())),
cfg_overrides: config.cfg_overrides.clone(),
extra_includes: config.extra_includes.clone(),
set_test: config.set_test,
})
}

@ -545,7 +646,7 @@ impl ProjectWorkspace {
pub fn to_roots(&self) -> Vec<PackageRoot> {
let mk_sysroot = || {
let mut r = match self.sysroot.workspace() {
SysrootWorkspace::Workspace(ws) => ws
RustLibSrcWorkspace::Workspace(ws) => ws
.packages()
.filter_map(|pkg| {
if ws[pkg].is_local {
@ -566,12 +667,17 @@ impl ProjectWorkspace {
Some(PackageRoot { is_local: false, include, exclude })
})
.collect(),
SysrootWorkspace::Stitched(_) | SysrootWorkspace::Empty => vec![],
RustLibSrcWorkspace::Stitched(_) | RustLibSrcWorkspace::Empty => vec![],
};

r.push(PackageRoot {
is_local: false,
include: self.sysroot.src_root().map(|it| it.to_path_buf()).into_iter().collect(),
include: self
.sysroot
.rust_lib_src_root()
.map(|it| it.to_path_buf())
.into_iter()
.collect(),
exclude: Vec::new(),
});
r
@ -593,7 +699,7 @@ impl ProjectWorkspace {
.into_iter()
.chain(mk_sysroot())
.collect::<Vec<_>>(),
ProjectWorkspaceKind::Cargo { cargo, rustc, build_scripts, error: _, set_test: _ } => {
ProjectWorkspaceKind::Cargo { cargo, rustc, build_scripts, error: _ } => {
cargo
.packages()
.map(|pkg| {
@ -728,8 +834,9 @@ impl ProjectWorkspace {
sysroot,
extra_env,
cfg_overrides,
self.set_test,
),
ProjectWorkspaceKind::Cargo { cargo, rustc, build_scripts, error: _, set_test } => {
ProjectWorkspaceKind::Cargo { cargo, rustc, build_scripts, error: _ } => {
cargo_to_crate_graph(
load,
rustc.as_ref().map(|a| a.as_ref()).ok(),
@ -738,10 +845,10 @@ impl ProjectWorkspace {
rustc_cfg.clone(),
cfg_overrides,
build_scripts,
*set_test,
self.set_test,
)
}
ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script, set_test, .. } => {
ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script, .. } => {
if let Some((cargo, build_scripts, _)) = cargo_script {
cargo_to_crate_graph(
&mut |path| load(path),
@ -751,7 +858,7 @@ impl ProjectWorkspace {
rustc_cfg.clone(),
cfg_overrides,
build_scripts,
*set_test,
self.set_test,
)
} else {
detached_file_to_crate_graph(
@ -760,7 +867,7 @@ impl ProjectWorkspace {
file,
sysroot,
cfg_overrides,
*set_test,
self.set_test,
)
}
}
@ -782,34 +889,22 @@ impl ProjectWorkspace {
} = other;
(match (kind, o_kind) {
(
ProjectWorkspaceKind::Cargo {
cargo,
rustc,
build_scripts: _,
error: _,
set_test: _,
},
ProjectWorkspaceKind::Cargo { cargo, rustc, build_scripts: _, error: _ },
ProjectWorkspaceKind::Cargo {
cargo: o_cargo,
rustc: o_rustc,
build_scripts: _,
error: _,
set_test: _,
},
) => cargo == o_cargo && rustc == o_rustc,
(ProjectWorkspaceKind::Json(project), ProjectWorkspaceKind::Json(o_project)) => {
project == o_project
}
(
ProjectWorkspaceKind::DetachedFile {
file,
cargo: Some((cargo_script, _, _)),
set_test: _,
},
ProjectWorkspaceKind::DetachedFile { file, cargo: Some((cargo_script, _, _)) },
ProjectWorkspaceKind::DetachedFile {
file: o_file,
cargo: Some((o_cargo_script, _, _)),
set_test: _,
},
) => file == o_file && cargo_script == o_cargo_script,
_ => return false,
@ -837,13 +932,13 @@ fn project_json_to_crate_graph(
sysroot: &Sysroot,
extra_env: &FxHashMap<String, String>,
override_cfg: &CfgOverrides,
set_test: bool,
) -> (CrateGraph, ProcMacroPaths) {
let mut res = (CrateGraph::default(), ProcMacroPaths::default());
let (crate_graph, proc_macros) = &mut res;
let (public_deps, libproc_macro) =
sysroot_to_crate_graph(crate_graph, sysroot, rustc_cfg.clone(), load);

let r_a_cfg_flag = CfgAtom::Flag(sym::rust_analyzer.clone());
let mut cfg_cache: FxHashMap<&str, Vec<CfgAtom>> = FxHashMap::default();

let idx_to_crate_id: FxHashMap<CrateArrayIdx, CrateId> = project
@ -862,6 +957,7 @@ fn project_json_to_crate_graph(
proc_macro_dylib_path,
is_proc_macro,
repository,
is_workspace_member,
..
},
file_id,
@ -879,19 +975,28 @@ fn project_json_to_crate_graph(
None => &rustc_cfg,
};

let mut cfg_options = target_cfgs
.iter()
.chain(cfg.iter())
.chain(iter::once(&r_a_cfg_flag))
.cloned()
.collect();
override_cfg.apply(
&mut cfg_options,
display_name
.as_ref()
.map(|it| it.canonical_name().as_str())
.unwrap_or_default(),
);
let cfg_options = {
let mut cfg_options: CfgOptions =
target_cfgs.iter().chain(cfg.iter()).cloned().collect();

if *is_workspace_member {
if set_test {
// Add test cfg for local crates
cfg_options.insert_atom(sym::test.clone());
}
cfg_options.insert_atom(sym::rust_analyzer.clone());
}

override_cfg.apply(
&mut cfg_options,
display_name
.as_ref()
.map(|it| it.canonical_name().as_str())
.unwrap_or_default(),
);
cfg_options
};

let crate_graph_crate_id = crate_graph.add_crate_root(
file_id,
*edition,
@ -1385,7 +1490,7 @@ fn sysroot_to_crate_graph(
) -> (SysrootPublicDeps, Option<CrateId>) {
let _p = tracing::info_span!("sysroot_to_crate_graph").entered();
match sysroot.workspace() {
SysrootWorkspace::Workspace(cargo) => {
RustLibSrcWorkspace::Workspace(cargo) => {
let (mut cg, mut pm) = cargo_to_crate_graph(
load,
None,
@ -1460,7 +1565,7 @@ fn sysroot_to_crate_graph(

(SysrootPublicDeps { deps: pub_deps }, libproc_macro)
}
SysrootWorkspace::Stitched(stitched) => {
RustLibSrcWorkspace::Stitched(stitched) => {
let cfg_options = Arc::new({
let mut cfg_options = CfgOptions::default();
cfg_options.extend(rustc_cfg);
@ -1468,7 +1573,7 @@ fn sysroot_to_crate_graph(
cfg_options.insert_atom(sym::miri.clone());
cfg_options
});
let sysroot_crates: FxHashMap<SysrootCrate, CrateId> = stitched
let sysroot_crates: FxHashMap<RustLibSrcCrate, CrateId> = stitched
.crates()
.filter_map(|krate| {
let file_id = load(&stitched[krate].root)?;
@ -1513,7 +1618,7 @@ fn sysroot_to_crate_graph(
stitched.proc_macro().and_then(|it| sysroot_crates.get(&it).copied());
(public_deps, libproc_macro)
}
SysrootWorkspace::Empty => (SysrootPublicDeps { deps: vec![] }, None),
RustLibSrcWorkspace::Empty => (SysrootPublicDeps { deps: vec![] }, None),
}
}

|
@ -420,6 +420,7 @@
"group1_other_cfg=other_config",
"group2_cfg=yet_another_config",
"rust_analyzer",
"test",
"true",
],
),
@ -496,6 +497,7 @@
"group2_cfg=fourth_config",
"group2_cfg=yet_another_config",
"rust_analyzer",
"test",
"true",
"unrelated_cfg",
],
|
@ -417,6 +417,7 @@
cfg_options: CfgOptions(
[
"rust_analyzer",
"test",
"true",
],
),
|
@ -12,8 +12,8 @@ use paths::Utf8PathBuf;
use profile::StopWatch;
use project_model::toolchain_info::{target_data_layout, QueryConfig};
use project_model::{
CargoConfig, ManifestPath, ProjectWorkspace, ProjectWorkspaceKind, RustLibSource, Sysroot,
SysrootSourceWorkspaceConfig,
CargoConfig, ManifestPath, ProjectWorkspace, ProjectWorkspaceKind, RustLibSource,
RustSourceWorkspaceConfig, Sysroot,
};

use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
@ -75,7 +75,11 @@ impl Tester {
};

let mut sysroot = Sysroot::discover(tmp_file.parent().unwrap(), &cargo_config.extra_env);
sysroot.load_workspace(&SysrootSourceWorkspaceConfig::default_cargo());
let loaded_sysroot = sysroot.load_workspace(&RustSourceWorkspaceConfig::default_cargo());
if let Some(loaded_sysroot) = loaded_sysroot {
sysroot.set_workspace(loaded_sysroot);
}

let data_layout = target_data_layout::get(
QueryConfig::Rustc(&sysroot, tmp_file.parent().unwrap().as_ref()),
None,
@ -86,7 +90,6 @@ impl Tester {
kind: ProjectWorkspaceKind::DetachedFile {
file: ManifestPath::try_from(tmp_file).unwrap(),
cargo: None,
set_test: true,
},
sysroot,
rustc_cfg: vec![],
@ -94,6 +97,7 @@ impl Tester {
target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())),
cfg_overrides: Default::default(),
extra_includes: vec![],
set_test: true,
};
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: false,
|
@ -84,10 +84,10 @@ config_data! {
completion_snippets_custom: FxHashMap<String, SnippetDef> = Config::completion_snippets_default(),


/// These directories will be ignored by rust-analyzer. They are
/// These paths (file/directories) will be ignored by rust-analyzer. They are
/// relative to the workspace root, and globs are not supported. You may
/// also need to add the folders to Code's `files.watcherExclude`.
files_excludeDirs: Vec<Utf8PathBuf> = vec![],
files_exclude | files_excludeDirs: Vec<Utf8PathBuf> = vec![],



@ -1792,7 +1792,7 @@ impl Config {

fn discovered_projects(&self) -> Vec<ManifestOrProjectJson> {
let exclude_dirs: Vec<_> =
self.files_excludeDirs().iter().map(|p| self.root_path.join(p)).collect();
self.files_exclude().iter().map(|p| self.root_path.join(p)).collect();

let mut projects = vec![];
for fs_proj in &self.discovered_projects_from_filesystem {
@ -1914,10 +1914,14 @@ impl Config {
}
_ => FilesWatcher::Server,
},
exclude: self.files_excludeDirs().iter().map(|it| self.root_path.join(it)).collect(),
exclude: self.excluded().collect(),
}
}

pub fn excluded(&self) -> impl Iterator<Item = AbsPathBuf> + use<'_> {
self.files_exclude().iter().map(|it| self.root_path.join(it))
}
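// A standalone sketch of the `+ use<'_>` precise-capture syntax in `excluded`
// above (the `ConfigSketch` type is made up; precise capturing is stable since
// Rust 1.82). `use<'_>` declares that the returned iterator captures only the
// borrow of `self`, making the hidden lifetime explicit in the signature.
struct ConfigSketch {
    roots: Vec<String>,
}

impl ConfigSketch {
    // Mirrors the shape above: owned items, iterator borrowing `self`.
    fn excluded(&self) -> impl Iterator<Item = String> + use<'_> {
        self.roots.iter().cloned()
    }
}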

pub fn notifications(&self) -> NotificationsConfig {
NotificationsConfig {
cargo_toml_not_found: self.notifications_cargoTomlNotFound().to_owned(),
@ -3798,8 +3802,10 @@ mod tests {
(config, _, _) = config.apply_change(change);

assert_eq!(config.cargo_targetDir(None), &Some(TargetDirectory::UseSubdirectory(true)));
let target =
Utf8PathBuf::from(std::env::var("CARGO_TARGET_DIR").unwrap_or("target".to_owned()));
assert!(
matches!(config.flycheck(None), FlycheckConfig::CargoCommand { options, .. } if options.target_dir == Some(Utf8PathBuf::from("target/rust-analyzer")))
matches!(config.flycheck(None), FlycheckConfig::CargoCommand { options, .. } if options.target_dir == Some(target.join("rust-analyzer")))
);
}
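// A sketch of the target-directory resolution this test now accounts for (the
// helper name is made up, and the exact lookup order is an assumption): honor
// CARGO_TARGET_DIR when set, fall back to "target", and nest rust-analyzer's
// own subdirectory beneath it.
use std::path::PathBuf;

fn rust_analyzer_target_dir_sketch() -> PathBuf {
    let base = std::env::var("CARGO_TARGET_DIR").unwrap_or_else(|_| "target".to_owned());
    PathBuf::from(base).join("rust-analyzer")
}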
|
@ -650,7 +650,8 @@ impl GlobalStateSnapshot {
RwLockReadGuard::map(self.vfs.read(), |(it, _)| it)
}

pub(crate) fn url_to_file_id(&self, url: &Url) -> anyhow::Result<FileId> {
/// Returns `None` if the file was excluded.
pub(crate) fn url_to_file_id(&self, url: &Url) -> anyhow::Result<Option<FileId>> {
url_to_file_id(&self.vfs_read(), url)
}

@ -658,7 +659,8 @@ impl GlobalStateSnapshot {
file_id_to_url(&self.vfs_read(), id)
}

pub(crate) fn vfs_path_to_file_id(&self, vfs_path: &VfsPath) -> anyhow::Result<FileId> {
/// Returns `None` if the file was excluded.
pub(crate) fn vfs_path_to_file_id(&self, vfs_path: &VfsPath) -> anyhow::Result<Option<FileId>> {
vfs_path_to_file_id(&self.vfs_read(), vfs_path)
}

@ -750,14 +752,21 @@ pub(crate) fn file_id_to_url(vfs: &vfs::Vfs, id: FileId) -> Url {
url_from_abs_path(path)
}

pub(crate) fn url_to_file_id(vfs: &vfs::Vfs, url: &Url) -> anyhow::Result<FileId> {
/// Returns `None` if the file was excluded.
pub(crate) fn url_to_file_id(vfs: &vfs::Vfs, url: &Url) -> anyhow::Result<Option<FileId>> {
let path = from_proto::vfs_path(url)?;
let res = vfs.file_id(&path).ok_or_else(|| anyhow::format_err!("file not found: {path}"))?;
Ok(res)
vfs_path_to_file_id(vfs, &path)
}

pub(crate) fn vfs_path_to_file_id(vfs: &vfs::Vfs, vfs_path: &VfsPath) -> anyhow::Result<FileId> {
let res =
/// Returns `None` if the file was excluded.
pub(crate) fn vfs_path_to_file_id(
vfs: &vfs::Vfs,
vfs_path: &VfsPath,
) -> anyhow::Result<Option<FileId>> {
let (file_id, excluded) =
vfs.file_id(vfs_path).ok_or_else(|| anyhow::format_err!("file not found: {vfs_path}"))?;
Ok(res)
match excluded {
vfs::FileExcluded::Yes => Ok(None),
vfs::FileExcluded::No => Ok(Some(file_id)),
}
}
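// A reduced sketch of the new vfs lookup contract (the enum shape follows the
// `vfs::FileExcluded` match above; the other types are made up): `file_id` now
// yields the id together with an exclusion flag, and callers map excluded
// files to `None` instead of treating them as missing.
enum FileExcludedSketch {
    Yes,
    No,
}

fn to_known_file(lookup: Option<(u32, FileExcludedSketch)>) -> Option<u32> {
    match lookup? {
        (id, FileExcludedSketch::No) => Some(id),
        (_, FileExcludedSketch::Yes) => None,
    }
}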
|
@ -22,6 +22,7 @@ use crate::{
mem_docs::DocumentData,
reload,
target_spec::TargetSpec,
try_default,
};

pub(crate) fn handle_cancel(state: &mut GlobalState, params: CancelParams) -> anyhow::Result<()> {
@ -74,6 +75,14 @@ pub(crate) fn handle_did_open_text_document(
tracing::error!("duplicate DidOpenTextDocument: {}", path);
}

if let Some(abs_path) = path.as_path() {
if state.config.excluded().any(|excluded| abs_path.starts_with(&excluded)) {
tracing::trace!("opened excluded file {abs_path}");
state.vfs.write().0.insert_excluded_file(path);
return Ok(());
}
}
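// A sketch of the exclusion test above, using std paths in place of
// rust-analyzer's AbsPath (the function name is made up): a document counts
// as excluded when any configured exclude root is a path prefix of it.
use std::path::Path;

fn is_excluded_sketch<'a>(file: &Path, mut excluded: impl Iterator<Item = &'a Path>) -> bool {
    excluded.any(|root| file.starts_with(root))
}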

let contents = params.text_document.text.into_bytes();
state.vfs.write().0.set_file_contents(path, Some(contents));
if state.config.discover_workspace_config().is_some() {
@ -127,7 +136,8 @@ pub(crate) fn handle_did_close_text_document(
tracing::error!("orphan DidCloseTextDocument: {}", path);
}

if let Some(file_id) = state.vfs.read().0.file_id(&path) {
// Clear diagnostics also for excluded files, just in case.
if let Some((file_id, _)) = state.vfs.read().0.file_id(&path) {
state.diagnostics.clear_native_for(file_id);
}

@ -146,7 +156,7 @@ pub(crate) fn handle_did_save_text_document(
) -> anyhow::Result<()> {
if let Ok(vfs_path) = from_proto::vfs_path(&params.text_document.uri) {
let snap = state.snapshot();
let file_id = snap.vfs_path_to_file_id(&vfs_path)?;
let file_id = try_default!(snap.vfs_path_to_file_id(&vfs_path)?);
let sr = snap.analysis.source_root_id(file_id)?;

if state.config.script_rebuild_on_save(Some(sr)) && state.build_deps_changed {
@ -290,7 +300,7 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
let _p = tracing::info_span!("run_flycheck").entered();

let file_id = state.vfs.read().0.file_id(&vfs_path);
if let Some(file_id) = file_id {
if let Some((file_id, vfs::FileExcluded::No)) = file_id {
let world = state.snapshot();
let invocation_strategy_once = state.config.flycheck(None).invocation_strategy_once();
let may_flycheck_workspace = state.config.flycheck_workspace(None);
|
@ -53,6 +53,7 @@ use crate::{
},
target_spec::{CargoTargetSpec, TargetSpec},
test_runner::{CargoTestHandle, TestTarget},
try_default,
};

pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
@ -83,7 +84,8 @@ pub(crate) fn handle_analyzer_status(
let mut file_id = None;
if let Some(tdi) = params.text_document {
match from_proto::file_id(&snap, &tdi.uri) {
Ok(it) => file_id = Some(it),
Ok(Some(it)) => file_id = Some(it),
Ok(None) => {}
Err(_) => format_to!(buf, "file {} not found in vfs", tdi.uri),
}
}
@ -141,7 +143,7 @@ pub(crate) fn handle_view_syntax_tree(
params: lsp_ext::ViewSyntaxTreeParams,
) -> anyhow::Result<String> {
let _p = tracing::info_span!("handle_view_syntax_tree").entered();
let id = from_proto::file_id(&snap, &params.text_document.uri)?;
let id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let res = snap.analysis.view_syntax_tree(id)?;
Ok(res)
}
@ -151,7 +153,7 @@ pub(crate) fn handle_view_hir(
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<String> {
let _p = tracing::info_span!("handle_view_hir").entered();
let position = from_proto::file_position(&snap, params)?;
let position = try_default!(from_proto::file_position(&snap, params)?);
let res = snap.analysis.view_hir(position)?;
Ok(res)
}
@ -161,7 +163,7 @@ pub(crate) fn handle_view_mir(
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<String> {
let _p = tracing::info_span!("handle_view_mir").entered();
let position = from_proto::file_position(&snap, params)?;
let position = try_default!(from_proto::file_position(&snap, params)?);
let res = snap.analysis.view_mir(position)?;
Ok(res)
}
@ -171,7 +173,7 @@ pub(crate) fn handle_interpret_function(
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<String> {
let _p = tracing::info_span!("handle_interpret_function").entered();
let position = from_proto::file_position(&snap, params)?;
let position = try_default!(from_proto::file_position(&snap, params)?);
let res = snap.analysis.interpret_function(position)?;
Ok(res)
}
@ -180,7 +182,7 @@ pub(crate) fn handle_view_file_text(
snap: GlobalStateSnapshot,
params: lsp_types::TextDocumentIdentifier,
) -> anyhow::Result<String> {
let file_id = from_proto::file_id(&snap, &params.uri)?;
let file_id = try_default!(from_proto::file_id(&snap, &params.uri)?);
Ok(snap.analysis.file_text(file_id)?.to_string())
}

@ -189,7 +191,7 @@ pub(crate) fn handle_view_item_tree(
params: lsp_ext::ViewItemTreeParams,
) -> anyhow::Result<String> {
let _p = tracing::info_span!("handle_view_item_tree").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let res = snap.analysis.view_item_tree(file_id)?;
Ok(res)
}
@ -315,7 +317,7 @@ pub(crate) fn handle_expand_macro(
params: lsp_ext::ExpandMacroParams,
) -> anyhow::Result<Option<lsp_ext::ExpandedMacro>> {
let _p = tracing::info_span!("handle_expand_macro").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let line_index = snap.file_line_index(file_id)?;
let offset = from_proto::offset(&line_index, params.position)?;

@ -328,7 +330,7 @@ pub(crate) fn handle_selection_range(
params: lsp_types::SelectionRangeParams,
) -> anyhow::Result<Option<Vec<lsp_types::SelectionRange>>> {
let _p = tracing::info_span!("handle_selection_range").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let line_index = snap.file_line_index(file_id)?;
let res: anyhow::Result<Vec<lsp_types::SelectionRange>> = params
.positions
@ -371,7 +373,7 @@ pub(crate) fn handle_matching_brace(
params: lsp_ext::MatchingBraceParams,
) -> anyhow::Result<Vec<Position>> {
let _p = tracing::info_span!("handle_matching_brace").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let line_index = snap.file_line_index(file_id)?;
params
.positions
@ -395,7 +397,7 @@ pub(crate) fn handle_join_lines(
) -> anyhow::Result<Vec<lsp_types::TextEdit>> {
let _p = tracing::info_span!("handle_join_lines").entered();

let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let config = snap.config.join_lines();
let line_index = snap.file_line_index(file_id)?;

@ -419,7 +421,7 @@ pub(crate) fn handle_on_enter(
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<Option<Vec<lsp_ext::SnippetTextEdit>>> {
let _p = tracing::info_span!("handle_on_enter").entered();
let position = from_proto::file_position(&snap, params)?;
let position = try_default!(from_proto::file_position(&snap, params)?);
let edit = match snap.analysis.on_enter(position)? {
None => return Ok(None),
Some(it) => it,
@ -439,7 +441,8 @@ pub(crate) fn handle_on_type_formatting(
return Ok(None);
}

let mut position = from_proto::file_position(&snap, params.text_document_position)?;
let mut position =
try_default!(from_proto::file_position(&snap, params.text_document_position)?);
let line_index = snap.file_line_index(position.file_id)?;

// in `ide`, the `on_type` invariant is that
@ -465,32 +468,33 @@ pub(crate) fn handle_on_type_formatting(
Ok(Some(change))
}

pub(crate) fn empty_diagnostic_report() -> lsp_types::DocumentDiagnosticReportResult {
lsp_types::DocumentDiagnosticReportResult::Report(lsp_types::DocumentDiagnosticReport::Full(
lsp_types::RelatedFullDocumentDiagnosticReport {
related_documents: None,
full_document_diagnostic_report: lsp_types::FullDocumentDiagnosticReport {
result_id: Some("rust-analyzer".to_owned()),
items: vec![],
},
},
))
}

pub(crate) fn handle_document_diagnostics(
snap: GlobalStateSnapshot,
params: lsp_types::DocumentDiagnosticParams,
) -> anyhow::Result<lsp_types::DocumentDiagnosticReportResult> {
let empty = || {
lsp_types::DocumentDiagnosticReportResult::Report(
lsp_types::DocumentDiagnosticReport::Full(
lsp_types::RelatedFullDocumentDiagnosticReport {
related_documents: None,
full_document_diagnostic_report: lsp_types::FullDocumentDiagnosticReport {
result_id: Some("rust-analyzer".to_owned()),
items: vec![],
},
},
),
)
let file_id = match from_proto::file_id(&snap, &params.text_document.uri)? {
Some(it) => it,
None => return Ok(empty_diagnostic_report()),
};

let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let source_root = snap.analysis.source_root_id(file_id)?;
if !snap.analysis.is_local_source_root(source_root)? {
return Ok(empty());
return Ok(empty_diagnostic_report());
}
let config = snap.config.diagnostics(Some(source_root));
if !config.enabled {
return Ok(empty());
return Ok(empty_diagnostic_report());
}
let line_index = snap.file_line_index(file_id)?;
let supports_related = snap.config.text_document_diagnostic_related_document_support();
@ -546,7 +550,7 @@ pub(crate) fn handle_document_symbol(
params: lsp_types::DocumentSymbolParams,
) -> anyhow::Result<Option<lsp_types::DocumentSymbolResponse>> {
let _p = tracing::info_span!("handle_document_symbol").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let line_index = snap.file_line_index(file_id)?;

let mut parents: Vec<(lsp_types::DocumentSymbol, Option<usize>)> = Vec::new();
@ -760,7 +764,7 @@ pub(crate) fn handle_will_rename_files(
}
})
.filter_map(|(file_id, new_name)| {
snap.analysis.will_rename_file(file_id, &new_name).ok()?
snap.analysis.will_rename_file(file_id?, &new_name).ok()?
})
.collect();

@ -782,7 +786,8 @@ pub(crate) fn handle_goto_definition(
params: lsp_types::GotoDefinitionParams,
) -> anyhow::Result<Option<lsp_types::GotoDefinitionResponse>> {
let _p = tracing::info_span!("handle_goto_definition").entered();
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let position =
try_default!(from_proto::file_position(&snap, params.text_document_position_params)?);
let nav_info = match snap.analysis.goto_definition(position)? {
None => return Ok(None),
Some(it) => it,
@ -797,7 +802,10 @@ pub(crate) fn handle_goto_declaration(
params: lsp_types::request::GotoDeclarationParams,
) -> anyhow::Result<Option<lsp_types::request::GotoDeclarationResponse>> {
let _p = tracing::info_span!("handle_goto_declaration").entered();
let position = from_proto::file_position(&snap, params.text_document_position_params.clone())?;
let position = try_default!(from_proto::file_position(
&snap,
params.text_document_position_params.clone()
)?);
let nav_info = match snap.analysis.goto_declaration(position)? {
None => return handle_goto_definition(snap, params),
Some(it) => it,
@ -812,7 +820,8 @@ pub(crate) fn handle_goto_implementation(
params: lsp_types::request::GotoImplementationParams,
) -> anyhow::Result<Option<lsp_types::request::GotoImplementationResponse>> {
let _p = tracing::info_span!("handle_goto_implementation").entered();
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let position =
try_default!(from_proto::file_position(&snap, params.text_document_position_params)?);
let nav_info = match snap.analysis.goto_implementation(position)? {
None => return Ok(None),
Some(it) => it,
@ -827,7 +836,8 @@ pub(crate) fn handle_goto_type_definition(
params: lsp_types::request::GotoTypeDefinitionParams,
) -> anyhow::Result<Option<lsp_types::request::GotoTypeDefinitionResponse>> {
let _p = tracing::info_span!("handle_goto_type_definition").entered();
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let position =
try_default!(from_proto::file_position(&snap, params.text_document_position_params)?);
let nav_info = match snap.analysis.goto_type_definition(position)? {
None => return Ok(None),
Some(it) => it,
@ -880,7 +890,7 @@ pub(crate) fn handle_parent_module(
}

// check if invoked at the crate root
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let crate_id = match snap.analysis.crates_for(file_id)?.first() {
Some(&crate_id) => crate_id,
None => return Ok(None),
@ -904,7 +914,7 @@ pub(crate) fn handle_parent_module(
}

// locate parent module by semantics
let position = from_proto::file_position(&snap, params)?;
let position = try_default!(from_proto::file_position(&snap, params)?);
let navs = snap.analysis.parent_module(position)?;
let res = to_proto::goto_definition_response(&snap, None, navs)?;
Ok(Some(res))
@ -915,7 +925,7 @@ pub(crate) fn handle_runnables(
params: lsp_ext::RunnablesParams,
) -> anyhow::Result<Vec<lsp_ext::Runnable>> {
let _p = tracing::info_span!("handle_runnables").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let source_root = snap.analysis.source_root_id(file_id).ok();
let line_index = snap.file_line_index(file_id)?;
let offset = params.position.and_then(|it| from_proto::offset(&line_index, it).ok());
@ -1035,7 +1045,7 @@ pub(crate) fn handle_related_tests(
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<Vec<lsp_ext::TestInfo>> {
let _p = tracing::info_span!("handle_related_tests").entered();
let position = from_proto::file_position(&snap, params)?;
let position = try_default!(from_proto::file_position(&snap, params)?);

let tests = snap.analysis.related_tests(position, None)?;
let mut res = Vec::new();
@ -1053,7 +1063,8 @@ pub(crate) fn handle_completion(
lsp_types::CompletionParams { text_document_position, context,.. }: lsp_types::CompletionParams,
) -> anyhow::Result<Option<lsp_types::CompletionResponse>> {
let _p = tracing::info_span!("handle_completion").entered();
let mut position = from_proto::file_position(&snap, text_document_position.clone())?;
let mut position =
try_default!(from_proto::file_position(&snap, text_document_position.clone())?);
let line_index = snap.file_line_index(position.file_id)?;
let completion_trigger_character =
context.and_then(|ctx| ctx.trigger_character).and_then(|s| s.chars().next());
@ -1102,7 +1113,8 @@ pub(crate) fn handle_completion_resolve(

let resolve_data: lsp_ext::CompletionResolveData = serde_json::from_value(data)?;

let file_id = from_proto::file_id(&snap, &resolve_data.position.text_document.uri)?;
let file_id = from_proto::file_id(&snap, &resolve_data.position.text_document.uri)?
.expect("we never provide completions for excluded files");
let line_index = snap.file_line_index(file_id)?;
// FIXME: We should fix up the position when retrying the cancelled request instead
let Ok(offset) = from_proto::offset(&line_index, resolve_data.position.position) else {
@ -1185,7 +1197,7 @@ pub(crate) fn handle_folding_range(
params: FoldingRangeParams,
) -> anyhow::Result<Option<Vec<FoldingRange>>> {
let _p = tracing::info_span!("handle_folding_range").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let folds = snap.analysis.folding_ranges(file_id)?;
let text = snap.analysis.file_text(file_id)?;
let line_index = snap.file_line_index(file_id)?;
@ -1202,7 +1214,8 @@ pub(crate) fn handle_signature_help(
params: lsp_types::SignatureHelpParams,
) -> anyhow::Result<Option<lsp_types::SignatureHelp>> {
let _p = tracing::info_span!("handle_signature_help").entered();
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let position =
try_default!(from_proto::file_position(&snap, params.text_document_position_params)?);
let help = match snap.analysis.signature_help(position)? {
Some(it) => it,
None => return Ok(None),
@ -1221,7 +1234,7 @@ pub(crate) fn handle_hover(
PositionOrRange::Position(position) => Range::new(position, position),
PositionOrRange::Range(range) => range,
};
let file_range = from_proto::file_range(&snap, &params.text_document, range)?;
let file_range = try_default!(from_proto::file_range(&snap, &params.text_document, range)?);

let hover = snap.config.hover();
let info = match snap.analysis.hover(&hover, file_range)? {
@ -1255,7 +1268,7 @@ pub(crate) fn handle_prepare_rename(
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<Option<PrepareRenameResponse>> {
let _p = tracing::info_span!("handle_prepare_rename").entered();
let position = from_proto::file_position(&snap, params)?;
let position = try_default!(from_proto::file_position(&snap, params)?);

let change = snap.analysis.prepare_rename(position)?.map_err(to_proto::rename_error)?;

@ -1269,7 +1282,7 @@ pub(crate) fn handle_rename(
params: RenameParams,
) -> anyhow::Result<Option<WorkspaceEdit>> {
let _p = tracing::info_span!("handle_rename").entered();
let position = from_proto::file_position(&snap, params.text_document_position)?;
let position = try_default!(from_proto::file_position(&snap, params.text_document_position)?);

let mut change =
snap.analysis.rename(position, &params.new_name)?.map_err(to_proto::rename_error)?;
@ -1304,7 +1317,7 @@ pub(crate) fn handle_references(
params: lsp_types::ReferenceParams,
) -> anyhow::Result<Option<Vec<Location>>> {
let _p = tracing::info_span!("handle_references").entered();
let position = from_proto::file_position(&snap, params.text_document_position)?;
let position = try_default!(from_proto::file_position(&snap, params.text_document_position)?);

let exclude_imports = snap.config.find_all_refs_exclude_imports();
let exclude_tests = snap.config.find_all_refs_exclude_tests();
@ -1375,9 +1388,9 @@ pub(crate) fn handle_code_action(
return Ok(None);
}

let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let line_index = snap.file_line_index(file_id)?;
let frange = from_proto::file_range(&snap, &params.text_document, params.range)?;
let frange = try_default!(from_proto::file_range(&snap, &params.text_document, params.range)?);
let source_root = snap.analysis.source_root_id(file_id)?;

let mut assists_config = snap.config.assist(Some(source_root));
@ -1455,7 +1468,8 @@ pub(crate) fn handle_code_action_resolve(
return Err(invalid_params_error("code action without data".to_owned()).into());
};

let file_id = from_proto::file_id(&snap, &params.code_action_params.text_document.uri)?;
let file_id = from_proto::file_id(&snap, &params.code_action_params.text_document.uri)?
.expect("we never provide code actions for excluded files");
if snap.file_version(file_id) != params.version {
return Err(invalid_params_error("stale code action".to_owned()).into());
}
@ -1551,7 +1565,7 @@ pub(crate) fn handle_code_lens(
return Ok(Some(Vec::default()));
}

let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let target_spec = TargetSpec::for_file(&snap, file_id)?;

let annotations = snap.analysis.annotations(
@ -1613,7 +1627,8 @@ pub(crate) fn handle_document_highlight(
params: lsp_types::DocumentHighlightParams,
) -> anyhow::Result<Option<Vec<lsp_types::DocumentHighlight>>> {
let _p = tracing::info_span!("handle_document_highlight").entered();
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let position =
try_default!(from_proto::file_position(&snap, params.text_document_position_params)?);
let line_index = snap.file_line_index(position.file_id)?;
let source_root = snap.analysis.source_root_id(position.file_id)?;

@ -1639,12 +1654,12 @@ pub(crate) fn handle_ssr(
params: lsp_ext::SsrParams,
) -> anyhow::Result<lsp_types::WorkspaceEdit> {
let _p = tracing::info_span!("handle_ssr").entered();
let selections = params
let selections = try_default!(params
.selections
.iter()
.map(|range| from_proto::file_range(&snap, &params.position.text_document, *range))
.collect::<Result<Vec<_>, _>>()?;
let position = from_proto::file_position(&snap, params.position)?;
.collect::<Result<Option<Vec<_>>, _>>()?);
let position = try_default!(from_proto::file_position(&snap, params.position)?);
let source_change = snap.analysis.structural_search_replace(
&params.query,
params.parse_only,
@ -1660,11 +1675,11 @@ pub(crate) fn handle_inlay_hints(
) -> anyhow::Result<Option<Vec<InlayHint>>> {
let _p = tracing::info_span!("handle_inlay_hints").entered();
let document_uri = &params.text_document.uri;
let FileRange { file_id, range } = from_proto::file_range(
let FileRange { file_id, range } = try_default!(from_proto::file_range(
&snap,
&TextDocumentIdentifier::new(document_uri.to_owned()),
params.range,
)?;
)?);
let line_index = snap.file_line_index(file_id)?;
let range = TextRange::new(
range.start().min(line_index.index.len()),
@ -1744,7 +1759,8 @@ pub(crate) fn handle_call_hierarchy_prepare(
params: CallHierarchyPrepareParams,
) -> anyhow::Result<Option<Vec<CallHierarchyItem>>> {
let _p = tracing::info_span!("handle_call_hierarchy_prepare").entered();
let position = from_proto::file_position(&snap, params.text_document_position_params)?;
let position =
try_default!(from_proto::file_position(&snap, params.text_document_position_params)?);

let nav_info = match snap.analysis.call_hierarchy(position)? {
None => return Ok(None),
@ -1769,7 +1785,7 @@ pub(crate) fn handle_call_hierarchy_incoming(
let item = params.item;

let doc = TextDocumentIdentifier::new(item.uri);
let frange = from_proto::file_range(&snap, &doc, item.selection_range)?;
let frange = try_default!(from_proto::file_range(&snap, &doc, item.selection_range)?);
let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() };

let config = snap.config.call_hierarchy();
@ -1807,7 +1823,7 @@ pub(crate) fn handle_call_hierarchy_outgoing(
let item = params.item;

let doc = TextDocumentIdentifier::new(item.uri);
let frange = from_proto::file_range(&snap, &doc, item.selection_range)?;
let frange = try_default!(from_proto::file_range(&snap, &doc, item.selection_range)?);
let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() };
let line_index = snap.file_line_index(fpos.file_id)?;

@ -1842,7 +1858,7 @@ pub(crate) fn handle_semantic_tokens_full(
) -> anyhow::Result<Option<SemanticTokensResult>> {
let _p = tracing::info_span!("handle_semantic_tokens_full").entered();

let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let text = snap.analysis.file_text(file_id)?;
let line_index = snap.file_line_index(file_id)?;

@ -1872,7 +1888,7 @@ pub(crate) fn handle_semantic_tokens_full_delta(
) -> anyhow::Result<Option<SemanticTokensFullDeltaResult>> {
let _p = tracing::info_span!("handle_semantic_tokens_full_delta").entered();

let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let text = snap.analysis.file_text(file_id)?;
let line_index = snap.file_line_index(file_id)?;

@ -1915,7 +1931,7 @@ pub(crate) fn handle_semantic_tokens_range(
) -> anyhow::Result<Option<SemanticTokensRangeResult>> {
let _p = tracing::info_span!("handle_semantic_tokens_range").entered();

let frange = from_proto::file_range(&snap, &params.text_document, params.range)?;
let frange = try_default!(from_proto::file_range(&snap, &params.text_document, params.range)?);
let text = snap.analysis.file_text(frange.file_id)?;
let line_index = snap.file_line_index(frange.file_id)?;

@ -1940,7 +1956,7 @@ pub(crate) fn handle_open_docs(
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<ExternalDocsResponse> {
let _p = tracing::info_span!("handle_open_docs").entered();
let position = from_proto::file_position(&snap, params)?;
let position = try_default!(from_proto::file_position(&snap, params)?);

let ws_and_sysroot = snap.workspaces.iter().find_map(|ws| match &ws.kind {
ProjectWorkspaceKind::Cargo { cargo, .. }
@ -1982,7 +1998,7 @@ pub(crate) fn handle_open_cargo_toml(
params: lsp_ext::OpenCargoTomlParams,
) -> anyhow::Result<Option<lsp_types::GotoDefinitionResponse>> {
let _p = tracing::info_span!("handle_open_cargo_toml").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);

let cargo_spec = match TargetSpec::for_file(&snap, file_id)? {
Some(TargetSpec::Cargo(it)) => it,
@ -2000,8 +2016,8 @@ pub(crate) fn handle_move_item(
params: lsp_ext::MoveItemParams,
) -> anyhow::Result<Vec<lsp_ext::SnippetTextEdit>> {
let _p = tracing::info_span!("handle_move_item").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let range = from_proto::file_range(&snap, &params.text_document, params.range)?;
let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let range = try_default!(from_proto::file_range(&snap, &params.text_document, params.range)?);

let direction = match params.direction {
lsp_ext::MoveItemDirection::Up => ide::Direction::Up,
@ -2022,7 +2038,7 @@ pub(crate) fn handle_view_recursive_memory_layout(
params: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<Option<lsp_ext::RecursiveMemoryLayout>> {
let _p = tracing::info_span!("handle_view_recursive_memory_layout").entered();
let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
let file_id = try_default!(from_proto::file_id(&snap, &params.text_document.uri)?);
let line_index = snap.file_line_index(file_id)?;
let offset = from_proto::offset(&line_index, params.position)?;

@ -2210,7 +2226,7 @@ fn run_rustfmt(
text_document: TextDocumentIdentifier,
range: Option<lsp_types::Range>,
) -> anyhow::Result<Option<Vec<lsp_types::TextEdit>>> {
let file_id = from_proto::file_id(snap, &text_document.uri)?;
let file_id = try_default!(from_proto::file_id(snap, &text_document.uri)?);
let file = snap.analysis.file_text(file_id)?;

// Determine the edition of the crate the file belongs to (if there's multiple, we pick the
@ -2275,7 +2291,7 @@ fn run_rustfmt(
.into());
}

let frange = from_proto::file_range(snap, &text_document, range)?;
let frange = try_default!(from_proto::file_range(snap, &text_document, range)?);
let start_line = line_index.index.line_col(frange.range.start()).line;
let end_line = line_index.index.line_col(frange.range.end()).line;

@ -2284,7 +2300,8 @@ fn run_rustfmt(
cmd.arg(
json!([{
"file": "stdin",
"range": [start_line, end_line]
// LineCol is 0-based, but rustfmt is 1-based.
"range": [start_line + 1, end_line + 1]
}])
.to_string(),
);
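// A sketch of the off-by-one fixed above (the helper name is made up):
// rust-analyzer's LineCol lines are 0-based, while rustfmt's --file-lines
// JSON expects 1-based line numbers.
fn to_rustfmt_range_sketch(start_line: u32, end_line: u32) -> [u32; 2] {
    // A selection covering the first five lines (0..=4) becomes [1, 5].
    [start_line + 1, end_line + 1]
}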
@ -2416,15 +2433,15 @@ pub(crate) fn internal_testing_fetch_config(
state: GlobalStateSnapshot,
params: InternalTestingFetchConfigParams,
) -> anyhow::Result<Option<InternalTestingFetchConfigResponse>> {
let source_root = params
.text_document
.map(|it| {
let source_root = match params.text_document {
Some(it) => Some(
state
.analysis
.source_root_id(from_proto::file_id(&state, &it.uri)?)
.map_err(anyhow::Error::from)
})
.transpose()?;
.source_root_id(try_default!(from_proto::file_id(&state, &it.uri)?))
.map_err(anyhow::Error::from)?,
),
None => None,
};
Ok(Some(match params.config {
InternalTestingFetchConfigOption::AssistEmitMustUse => {
InternalTestingFetchConfigResponse::AssistEmitMustUse(
|
@ -25,6 +25,14 @@ use vfs::{AbsPathBuf, VfsPath};

use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};

#[track_caller]
fn file_id(vfs: &vfs::Vfs, path: &VfsPath) -> vfs::FileId {
match vfs.file_id(path) {
Some((file_id, vfs::FileExcluded::No)) => file_id,
None | Some((_, vfs::FileExcluded::Yes)) => panic!("can't find virtual file for {path}"),
}
}

#[test]
fn integrated_highlighting_benchmark() {
if std::env::var("RUN_SLOW_BENCHES").is_err() {
@ -62,7 +70,7 @@ fn integrated_highlighting_benchmark() {
let file_id = {
let file = workspace_to_load.join(file);
let path = VfsPath::from(AbsPathBuf::assert(file));
vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}"))
file_id(&vfs, &path)
};

{
@ -130,7 +138,7 @@ fn integrated_completion_benchmark() {
let file_id = {
let file = workspace_to_load.join(file);
let path = VfsPath::from(AbsPathBuf::assert(file));
vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}"))
file_id(&vfs, &path)
};

// kick off parsing and index population
@ -324,7 +332,7 @@ fn integrated_diagnostics_benchmark() {
let file_id = {
let file = workspace_to_load.join(file);
let path = VfsPath::from(AbsPathBuf::assert(file));
vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}"))
file_id(&vfs, &path)
};

let diagnostics_config = DiagnosticsConfig {
|
@ -173,3 +173,14 @@ fn completion_item_hash(item: &CompletionItem, is_ref_completion: bool) -> [u8;

hasher.finalize()
}

#[doc(hidden)]
macro_rules! try_default_ {
($it:expr $(,)?) => {
match $it {
Some(it) => it,
None => return Ok(Default::default()),
}
};
}
pub(crate) use try_default_ as try_default;
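// A sketch of the call-site shape this macro enables (the types are made up):
// a handler returning `Result<Vec<_>, _>` short-circuits to
// `Ok(Default::default())`, i.e. an empty response, when the file was
// excluded, rather than reporting an error.
fn handler_sketch(file: Option<u32>) -> Result<Vec<u32>, String> {
    let id = try_default!(file); // excluded file => returns Ok(vec![])
    Ok(vec![id])
}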
|
||||
|
@ -9,7 +9,7 @@ use vfs::AbsPathBuf;
use crate::{
global_state::GlobalStateSnapshot,
line_index::{LineIndex, PositionEncoding},
lsp_ext,
lsp_ext, try_default,
};

pub(crate) fn abs_path(url: &lsp_types::Url) -> anyhow::Result<AbsPathBuf> {

@ -61,37 +61,44 @@ pub(crate) fn text_range(
}
}

pub(crate) fn file_id(snap: &GlobalStateSnapshot, url: &lsp_types::Url) -> anyhow::Result<FileId> {
/// Returns `None` if the file was excluded.
pub(crate) fn file_id(
snap: &GlobalStateSnapshot,
url: &lsp_types::Url,
) -> anyhow::Result<Option<FileId>> {
snap.url_to_file_id(url)
}

/// Returns `None` if the file was excluded.
pub(crate) fn file_position(
snap: &GlobalStateSnapshot,
tdpp: lsp_types::TextDocumentPositionParams,
) -> anyhow::Result<FilePosition> {
let file_id = file_id(snap, &tdpp.text_document.uri)?;
) -> anyhow::Result<Option<FilePosition>> {
let file_id = try_default!(file_id(snap, &tdpp.text_document.uri)?);
let line_index = snap.file_line_index(file_id)?;
let offset = offset(&line_index, tdpp.position)?;
Ok(FilePosition { file_id, offset })
Ok(Some(FilePosition { file_id, offset }))
}

/// Returns `None` if the file was excluded.
pub(crate) fn file_range(
snap: &GlobalStateSnapshot,
text_document_identifier: &lsp_types::TextDocumentIdentifier,
range: lsp_types::Range,
) -> anyhow::Result<FileRange> {
) -> anyhow::Result<Option<FileRange>> {
file_range_uri(snap, &text_document_identifier.uri, range)
}

/// Returns `None` if the file was excluded.
pub(crate) fn file_range_uri(
snap: &GlobalStateSnapshot,
document: &lsp_types::Url,
range: lsp_types::Range,
) -> anyhow::Result<FileRange> {
let file_id = file_id(snap, document)?;
) -> anyhow::Result<Option<FileRange>> {
let file_id = try_default!(file_id(snap, document)?);
let line_index = snap.file_line_index(file_id)?;
let range = text_range(&line_index, range)?;
Ok(FileRange { file_id, range })
Ok(Some(FileRange { file_id, range }))
}

pub(crate) fn assist_kind(kind: lsp_types::CodeActionKind) -> Option<AssistKind> {

@ -108,6 +115,7 @@ pub(crate) fn assist_kind(kind: lsp_types::CodeActionKind) -> Option<AssistKind>
Some(assist_kind)
}

/// Returns `None` if the file was excluded.
pub(crate) fn annotation(
snap: &GlobalStateSnapshot,
range: lsp_types::Range,

@ -121,7 +129,7 @@ pub(crate) fn annotation(
return Ok(None);
}
let pos @ FilePosition { file_id, .. } =
file_position(snap, params.text_document_position_params)?;
try_default!(file_position(snap, params.text_document_position_params)?);
let line_index = snap.file_line_index(file_id)?;

Ok(Annotation {

@ -133,7 +141,7 @@ pub(crate) fn annotation(
if snap.url_file_version(&params.text_document.uri) != Some(data.version) {
return Ok(None);
}
let pos @ FilePosition { file_id, .. } = file_position(snap, params)?;
let pos @ FilePosition { file_id, .. } = try_default!(file_position(snap, params)?);
let line_index = snap.file_line_index(file_id)?;

Ok(Annotation {
@ -27,7 +27,10 @@ use crate::{
FetchWorkspaceResponse, GlobalState,
},
hack_recover_crate_name,
handlers::dispatch::{NotificationDispatcher, RequestDispatcher},
handlers::{
dispatch::{NotificationDispatcher, RequestDispatcher},
request::empty_diagnostic_report,
},
lsp::{
from_proto, to_proto,
utils::{notification_is, Progress},

@ -548,6 +551,9 @@ impl GlobalState {
self.mem_docs
.iter()
.map(|path| vfs.file_id(path).unwrap())
.filter_map(|(file_id, excluded)| {
(excluded == vfs::FileExcluded::No).then_some(file_id)
})
.filter(|&file_id| {
let source_root = db.file_source_root(file_id);
// Only publish diagnostics for files in the workspace, not from crates.io deps

@ -632,6 +638,9 @@ impl GlobalState {
.mem_docs
.iter()
.map(|path| self.vfs.read().0.file_id(path).unwrap())
.filter_map(|(file_id, excluded)| {
(excluded == vfs::FileExcluded::No).then_some(file_id)
})
.filter(|&file_id| {
let source_root = db.file_source_root(file_id);
!db.source_root(source_root).is_library

@ -879,7 +888,10 @@ impl GlobalState {
self.task_pool.handle.spawn_with_sender(ThreadIntent::Worker, move |sender| {
let _p = tracing::info_span!("GlobalState::check_if_indexed").entered();
tracing::debug!(?uri, "handling uri");
let id = from_proto::file_id(&snap, &uri).expect("unable to get FileId");
let Some(id) = from_proto::file_id(&snap, &uri).expect("unable to get FileId")
else {
return;
};
if let Ok(crates) = &snap.analysis.crates_for(id) {
if crates.is_empty() {
if snap.config.discover_workspace_config().is_some() {

@ -987,13 +999,14 @@ impl GlobalState {
);
for diag in diagnostics {
match url_to_file_id(&self.vfs.read().0, &diag.url) {
Ok(file_id) => self.diagnostics.add_check_diagnostic(
Ok(Some(file_id)) => self.diagnostics.add_check_diagnostic(
id,
&package_id,
file_id,
diag.diagnostic,
diag.fix,
),
Ok(None) => {}
Err(err) => {
error!(
"flycheck {id}: File with cargo diagnostic not found in VFS: {}",

@ -1115,17 +1128,7 @@ impl GlobalState {
.on_latency_sensitive::<NO_RETRY, lsp_request::SemanticTokensRangeRequest>(handlers::handle_semantic_tokens_range)
// FIXME: Some of these NO_RETRY could be retries if the file they are interested didn't change.
// All other request handlers
.on_with_vfs_default::<lsp_request::DocumentDiagnosticRequest>(handlers::handle_document_diagnostics, || lsp_types::DocumentDiagnosticReportResult::Report(
lsp_types::DocumentDiagnosticReport::Full(
lsp_types::RelatedFullDocumentDiagnosticReport {
related_documents: None,
full_document_diagnostic_report: lsp_types::FullDocumentDiagnosticReport {
result_id: Some("rust-analyzer".to_owned()),
items: vec![],
},
},
),
), || lsp_server::ResponseError {
.on_with_vfs_default::<lsp_request::DocumentDiagnosticRequest>(handlers::handle_document_diagnostics, empty_diagnostic_report, || lsp_server::ResponseError {
code: lsp_server::ErrorCode::ServerCancelled as i32,
message: "server cancelled the request".to_owned(),
data: serde_json::to_value(lsp_types::DiagnosticServerCancellationData {
@ -316,6 +316,7 @@ impl GlobalState {
let workspace = project_model::ProjectWorkspace::load_inline(
it.clone(),
&cargo_config,
&progress,
);
Ok(workspace)
}

@ -705,7 +706,9 @@ impl GlobalState {
let load = |path: &AbsPath| {
let vfs_path = vfs::VfsPath::from(path.to_path_buf());
self.crate_graph_file_dependencies.insert(vfs_path.clone());
vfs.file_id(&vfs_path)
vfs.file_id(&vfs_path).and_then(|(file_id, excluded)| {
(excluded == vfs::FileExcluded::No).then_some(file_id)
})
};

ws_to_crate_graph(&self.workspaces, self.config.extra_env(None), load)
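The `and_then`/`then_some` combination above is the recurring pattern in this change for dropping excluded files while keeping the `Option` shape. A self-contained sketch with simplified types (the real code goes through `vfs::Vfs::file_id`):

#[derive(Clone, Copy, PartialEq, Eq)]
enum FileExcluded { Yes, No }

// Stand-in for vfs.file_id(): the id plus an exclusion tag, if the path is known.
fn file_id(entry: Option<FileExcluded>) -> Option<(u32, FileExcluded)> {
    entry.map(|excluded| (1, excluded))
}

fn main() {
    let load = |entry| {
        file_id(entry)
            .and_then(|(id, excluded)| (excluded == FileExcluded::No).then_some(id))
    };
    assert_eq!(load(Some(FileExcluded::No)), Some(1)); // normal file
    assert_eq!(load(Some(FileExcluded::Yes)), None);   // excluded: filtered out
    assert_eq!(load(None), None);                      // unknown path
}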
@ -21,12 +21,14 @@ use lsp_types::{
notification::DidOpenTextDocument,
request::{
CodeActionRequest, Completion, Formatting, GotoTypeDefinition, HoverRequest,
InlayHintRequest, InlayHintResolveRequest, WillRenameFiles, WorkspaceSymbolRequest,
InlayHintRequest, InlayHintResolveRequest, RangeFormatting, WillRenameFiles,
WorkspaceSymbolRequest,
},
CodeActionContext, CodeActionParams, CompletionParams, DidOpenTextDocumentParams,
DocumentFormattingParams, FileRename, FormattingOptions, GotoDefinitionParams, HoverParams,
InlayHint, InlayHintLabel, InlayHintParams, PartialResultParams, Position, Range,
RenameFilesParams, TextDocumentItem, TextDocumentPositionParams, WorkDoneProgressParams,
DocumentFormattingParams, DocumentRangeFormattingParams, FileRename, FormattingOptions,
GotoDefinitionParams, HoverParams, InlayHint, InlayHintLabel, InlayHintParams,
PartialResultParams, Position, Range, RenameFilesParams, TextDocumentItem,
TextDocumentPositionParams, WorkDoneProgressParams,
};
use rust_analyzer::lsp::ext::{OnEnter, Runnables, RunnablesParams};
use serde_json::json;

@ -660,6 +662,70 @@ fn main() {}
);
}

#[test]
fn test_format_document_range() {
if skip_slow_tests() {
return;
}

let server = Project::with_fixture(
r#"
//- /Cargo.toml
[package]
name = "foo"
version = "0.0.0"

//- /src/lib.rs
fn main() {
let unit_offsets_cache = collect(dwarf.units ()) ?;
}
"#,
)
.with_config(serde_json::json!({
"rustfmt": {
"overrideCommand": [ "rustfmt", "+nightly", ],
"rangeFormatting": { "enable": true }
},
}))
.server()
.wait_until_workspace_is_loaded();

server.request::<RangeFormatting>(
DocumentRangeFormattingParams {
range: Range {
end: Position { line: 1, character: 0 },
start: Position { line: 1, character: 0 },
},
text_document: server.doc_id("src/lib.rs"),
options: FormattingOptions {
tab_size: 4,
insert_spaces: false,
insert_final_newline: None,
trim_final_newlines: None,
trim_trailing_whitespace: None,
properties: HashMap::new(),
},
work_done_progress_params: WorkDoneProgressParams::default(),
},
json!([
{
"newText": "",
"range": {
"start": { "character": 48, "line": 1 },
"end": { "character": 50, "line": 1 },
},
},
{
"newText": "",
"range": {
"start": { "character": 53, "line": 1 },
"end": { "character": 55, "line": 1 },
},
}
]),
);
}

#[test]
fn test_missing_module_code_action() {
if skip_slow_tests() {

@ -1086,7 +1152,11 @@ fn resolve_proc_macro() {
&AbsPathBuf::assert_utf8(std::env::current_dir().unwrap()),
&Default::default(),
);
sysroot.load_workspace(&project_model::SysrootSourceWorkspaceConfig::default_cargo());
let loaded_sysroot =
sysroot.load_workspace(&project_model::RustSourceWorkspaceConfig::default_cargo());
if let Some(loaded_sysroot) = loaded_sysroot {
sysroot.set_workspace(loaded_sysroot);
}

let proc_macro_server_path = sysroot.discover_proc_macro_srv().unwrap();

@ -1372,6 +1442,40 @@ pub fn foo() {}
name = "bar"
version = "0.0.0"

[dependencies]
foo = { path = "../foo" }

//- /bar/src/lib.rs
"#,
)
.root("foo")
.root("bar")
.root("baz")
.with_config(json!({
"files": {
"exclude": ["foo"]
}
}))
.server()
.wait_until_workspace_is_loaded();

server.request::<WorkspaceSymbolRequest>(Default::default(), json!([]));

let server = Project::with_fixture(
r#"
//- /foo/Cargo.toml
[package]
name = "foo"
version = "0.0.0"

//- /foo/src/lib.rs
pub fn foo() {}

//- /bar/Cargo.toml
[package]
name = "bar"
version = "0.0.0"

//- /bar/src/lib.rs
pub fn bar() {}

@ -1388,7 +1492,7 @@ version = "0.0.0"
.root("baz")
.with_config(json!({
"files": {
"excludeDirs": ["foo", "bar"]
"exclude": ["foo", "bar"]
}
}))
.server()
@ -1,48 +1,44 @@
//! A micro-crate to enhance panic messages with context info.
//!
//! FIXME: upstream to <https://github.com/kriomant/panic-context> ?

use std::{cell::RefCell, panic, sync::Once};

pub fn enter(context: String) -> PanicContext {
static ONCE: Once = Once::new();
ONCE.call_once(PanicContext::init);

with_ctx(|ctx| ctx.push(context));
PanicContext { _priv: () }
}

/// Dummy for leveraging RAII cleanup to pop frames.
#[must_use]
pub struct PanicContext {
// prevent arbitrary construction
_priv: (),
}

impl PanicContext {
#[allow(clippy::print_stderr)]
fn init() {
let default_hook = panic::take_hook();
#[allow(deprecated)]
let hook = move |panic_info: &panic::PanicInfo<'_>| {
with_ctx(|ctx| {
if !ctx.is_empty() {
eprintln!("Panic context:");
for frame in ctx.iter() {
eprintln!("> {frame}\n");
}
}
default_hook(panic_info);
});
};
panic::set_hook(Box::new(hook));
}
}

impl Drop for PanicContext {
fn drop(&mut self) {
with_ctx(|ctx| assert!(ctx.pop().is_some()));
}
}

pub fn enter(frame: String) -> PanicContext {
#[allow(clippy::print_stderr)]
fn set_hook() {
let default_hook = panic::take_hook();
panic::set_hook(Box::new(move |panic_info| {
with_ctx(|ctx| {
if !ctx.is_empty() {
eprintln!("Panic context:");
for frame in ctx.iter() {
eprintln!("> {frame}\n");
}
}
});
default_hook(panic_info);
}));
}

static SET_HOOK: Once = Once::new();
SET_HOOK.call_once(set_hook);

with_ctx(|ctx| ctx.push(frame));
PanicContext { _priv: () }
}

fn with_ctx(f: impl FnOnce(&mut Vec<String>)) {
thread_local! {
static CTX: RefCell<Vec<String>> = const { RefCell::new(Vec::new()) };
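A sketch of the intended call pattern for this crate: `enter` installs the hook once, pushes a frame, and returns an RAII guard that pops the frame on drop, so a panic anywhere inside the scope prints the live frames before the default panic output. The surrounding function is invented for illustration:

// Hypothetical caller of this crate's public API.
fn handle_request() {
    let _guard = panic_context::enter("request: textDocument/hover".to_owned());
    // If anything below panics, "Panic context:" and the frame above are
    // printed before the default panic message.
    do_work();
} // _guard drops here and pops the frame

fn do_work() {}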
@ -710,52 +710,6 @@ impl ast::Fn {
}
}

impl Removable for ast::MatchArm {
fn remove(&self) {
if let Some(sibling) = self.syntax().prev_sibling_or_token() {
if sibling.kind() == SyntaxKind::WHITESPACE {
ted::remove(sibling);
}
}
if let Some(sibling) = self.syntax().next_sibling_or_token() {
if sibling.kind() == T![,] {
ted::remove(sibling);
}
}
ted::remove(self.syntax());
}
}

impl ast::MatchArmList {
pub fn add_arm(&self, arm: ast::MatchArm) {
normalize_ws_between_braces(self.syntax());
let mut elements = Vec::new();
let position = match self.arms().last() {
Some(last_arm) => {
if needs_comma(&last_arm) {
ted::append_child(last_arm.syntax(), make::token(SyntaxKind::COMMA));
}
Position::after(last_arm.syntax().clone())
}
None => match self.l_curly_token() {
Some(it) => Position::after(it),
None => Position::last_child_of(self.syntax()),
},
};
let indent = IndentLevel::from_node(self.syntax()) + 1;
elements.push(make::tokens::whitespace(&format!("\n{indent}")).into());
elements.push(arm.syntax().clone().into());
if needs_comma(&arm) {
ted::append_child(arm.syntax(), make::token(SyntaxKind::COMMA));
}
ted::insert_all(position, elements);

fn needs_comma(arm: &ast::MatchArm) -> bool {
arm.expr().is_some_and(|e| !e.is_block_like()) && arm.comma_token().is_none()
}
}
}

impl ast::LetStmt {
pub fn set_ty(&self, ty: Option<ast::Type>) {
match ty {
@ -837,7 +837,8 @@ pub fn match_guard(condition: ast::Expr) -> ast::MatchGuard {

pub fn match_arm_list(arms: impl IntoIterator<Item = ast::MatchArm>) -> ast::MatchArmList {
let arms_str = arms.into_iter().fold(String::new(), |mut acc, arm| {
let needs_comma = arm.expr().is_none_or(|it| !it.is_block_like());
let needs_comma =
arm.comma_token().is_none() && arm.expr().is_none_or(|it| !it.is_block_like());
let comma = if needs_comma { "," } else { "" };
let arm = arm.syntax();
format_to_acc!(acc, " {arm}{comma}\n")
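The extra `comma_token().is_none()` check above keeps the builder from emitting a doubled comma when an arm already carries one. A minimal sketch of the decision, with the arm reduced to two booleans:

// needs_comma: append a comma only for arms that lack one and whose body
// is not block-like (block-like bodies need no trailing comma).
fn needs_comma(has_comma: bool, body_is_block_like: bool) -> bool {
    !has_comma && !body_is_block_like
}

fn main() {
    assert!(needs_comma(false, false));  // `X => expr` gets a comma
    assert!(!needs_comma(true, false));  // `X => expr,` already has one
    assert!(!needs_comma(false, true));  // `X => { .. }` needs none
}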
@ -17,7 +17,7 @@ use hir_expand::{
tt::{Leaf, TokenTree, TopSubtree, TopSubtreeBuilder, TtElement, TtIter},
FileRange,
};
use intern::Symbol;
use intern::{sym, Symbol};
use rustc_hash::FxHashMap;
use span::{Edition, EditionedFileId, FileId, Span};
use stdx::itertools::Itertools;

@ -511,6 +511,21 @@ pub fn issue_18898(_attr: TokenStream, input: TokenStream) -> TokenStream {
disabled: false,
},
),
(
r#"
#[proc_macro_attribute]
pub fn disallow_cfg(_attr: TokenStream, input: TokenStream) -> TokenStream {
input
}
"#
.into(),
ProcMacro {
name: Symbol::intern("disallow_cfg"),
kind: ProcMacroKind::Attr,
expander: sync::Arc::new(DisallowCfgProcMacroExpander),
disabled: false,
},
),
])
}

@ -865,3 +880,30 @@ impl ProcMacroExpander for Issue18898ProcMacroExpander {
})
}
}

// Reads ident type within string quotes, for issue #17479.
#[derive(Debug)]
struct DisallowCfgProcMacroExpander;
impl ProcMacroExpander for DisallowCfgProcMacroExpander {
fn expand(
&self,
subtree: &TopSubtree,
_: Option<&TopSubtree>,
_: &Env,
_: Span,
_: Span,
_: Span,
_: Option<String>,
) -> Result<TopSubtree, ProcMacroExpansionError> {
for tt in subtree.token_trees().flat_tokens() {
if let tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) = tt {
if ident.sym == sym::cfg || ident.sym == sym::cfg_attr {
return Err(ProcMacroExpansionError::Panic(
"cfg or cfg_attr found in DisallowCfgProcMacroExpander".to_owned(),
));
}
}
}
Ok(subtree.clone())
}
}
@ -396,12 +396,19 @@ pub fn skip_slow_tests() -> bool {
if should_skip {
eprintln!("ignoring slow test");
} else {
let path = project_root().join("./target/.slow_tests_cookie");
let path = target_dir().join(".slow_tests_cookie");
fs::write(path, ".").unwrap();
}
should_skip
}

pub fn target_dir() -> Utf8PathBuf {
match std::env::var("CARGO_TARGET_DIR") {
Ok(target) => Utf8PathBuf::from(target),
Err(_) => project_root().join("target"),
}
}
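A sketch of the lookup order `target_dir` implements, with the environment access factored out so it can be exercised directly (assuming `Utf8PathBuf` here is camino's, as used elsewhere in this crate):

use camino::Utf8PathBuf;

// Same shape as target_dir(): prefer CARGO_TARGET_DIR, else <root>/target.
fn target_dir_for(env_value: Option<&str>, project_root: &Utf8PathBuf) -> Utf8PathBuf {
    match env_value {
        Some(target) => Utf8PathBuf::from(target),
        None => project_root.join("target"),
    }
}

fn main() {
    let root = Utf8PathBuf::from("/repo");
    assert_eq!(target_dir_for(Some("/tmp/t"), &root), Utf8PathBuf::from("/tmp/t"));
    assert_eq!(target_dir_for(None, &root), Utf8PathBuf::from("/repo/target"));
}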
/// Returns the path to the root directory of `rust-analyzer` project.
pub fn project_root() -> Utf8PathBuf {
let dir = env!("CARGO_MANIFEST_DIR");
@ -647,18 +647,21 @@ pub mod ops {

#[lang = "fn"]
#[fundamental]
#[rustc_paren_sugar]
pub trait Fn<Args: Tuple>: FnMut<Args> {
extern "rust-call" fn call(&self, args: Args) -> Self::Output;
}

#[lang = "fn_mut"]
#[fundamental]
#[rustc_paren_sugar]
pub trait FnMut<Args: Tuple>: FnOnce<Args> {
extern "rust-call" fn call_mut(&mut self, args: Args) -> Self::Output;
}

#[lang = "fn_once"]
#[fundamental]
#[rustc_paren_sugar]
pub trait FnOnce<Args: Tuple> {
#[lang = "fn_once_output"]
type Output;

@ -736,12 +739,14 @@ pub mod ops {

#[lang = "async_fn"]
#[fundamental]
#[rustc_paren_sugar]
pub trait AsyncFn<Args: Tuple>: AsyncFnMut<Args> {
extern "rust-call" fn async_call(&self, args: Args) -> Self::CallRefFuture<'_>;
}

#[lang = "async_fn_mut"]
#[fundamental]
#[rustc_paren_sugar]
pub trait AsyncFnMut<Args: Tuple>: AsyncFnOnce<Args> {
#[lang = "call_ref_future"]
type CallRefFuture<'a>: Future<Output = Self::Output>

@ -752,6 +757,7 @@ pub mod ops {

#[lang = "async_fn_once"]
#[fundamental]
#[rustc_paren_sugar]
pub trait AsyncFnOnce<Args: Tuple> {
#[lang = "async_fn_once_output"]
type Output;
@ -280,8 +280,9 @@ impl NotifyActor {
return false;
}

root == path
|| dirs.exclude.iter().chain(&dirs.include).all(|it| it != path)
// We want to filter out subdirectories that are roots themselves, because they will be visited separately.
dirs.exclude.iter().all(|it| it != path)
&& (root == path || dirs.include.iter().all(|it| it != path))
});

let files = walkdir.filter_map(|it| it.ok()).filter_map(|entry| {
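The rewrite above makes exclusion apply even to the root itself (previously `root == path` short-circuited the whole check), while still skipping nested include roots, which get their own walk. A sketch of the new predicate over plain strings (names illustrative):

fn should_descend(root: &str, path: &str, exclude: &[&str], include: &[&str]) -> bool {
    // Excluded directories are never entered; the root itself is entered,
    // but nested include roots are skipped (they are visited separately).
    exclude.iter().all(|it| *it != path)
        && (root == path || include.iter().all(|it| *it != path))
}

fn main() {
    assert!(should_descend("/ws", "/ws", &[], &["/ws"]));                 // the root itself
    assert!(!should_descend("/ws", "/ws/nested", &[], &["/ws/nested"]));  // nested root
    assert!(!should_descend("/ws", "/ws/target", &["/ws/target"], &[]));  // excluded
}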
@ -100,6 +100,9 @@ pub enum FileState {
Exists(u64),
/// The file is deleted.
Deleted,
/// The file was specifically excluded by the user. We still include excluded files
/// when they're opened (without their contents).
Excluded,
}

/// Changed file in the [`Vfs`].

@ -164,10 +167,22 @@ pub enum ChangeKind {
Delete,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum FileExcluded {
Yes,
No,
}

impl Vfs {
/// Id of the given path if it exists in the `Vfs` and is not deleted.
pub fn file_id(&self, path: &VfsPath) -> Option<FileId> {
self.interner.get(path).filter(|&it| matches!(self.get(it), FileState::Exists(_)))
pub fn file_id(&self, path: &VfsPath) -> Option<(FileId, FileExcluded)> {
let file_id = self.interner.get(path)?;
let file_state = self.get(file_id);
match file_state {
FileState::Exists(_) => Some((file_id, FileExcluded::No)),
FileState::Deleted => None,
FileState::Excluded => Some((file_id, FileExcluded::Yes)),
}
}

/// File path corresponding to the given `file_id`.

@ -216,6 +231,7 @@ impl Vfs {
}
Change::Modify(v, new_hash)
}
(FileState::Excluded, _) => return false,
};

let mut set_data = |change_kind| {

@ -297,6 +313,13 @@ impl Vfs {
fn get(&self, file_id: FileId) -> FileState {
self.data[file_id.0 as usize]
}

/// We cannot ignore excluded files, because this will lead to errors when the client
/// requests semantic information for them, so we instead mark them specially.
pub fn insert_excluded_file(&mut self, path: VfsPath) {
let file_id = self.alloc_file_id(path);
self.data[file_id.0 as usize] = FileState::Excluded;
}
}

impl fmt::Debug for Vfs {
@ -26,4 +26,4 @@ Start with the mdbook [User Guide](https://rust-lang.github.io/mdBook/guide/inst
Four sections are generated dynamically: assists, configuration, diagnostics and features. Their content is found in the `generated.md` files
of the respective book section, for example `src/configuration_generated.md`, and are included in the book via mdbook's
[include](https://rust-lang.github.io/mdBook/format/mdbook.html#including-files) functionality. Generated files can be rebuilt by running the various
test cases that generate them, or by simply running all of the `rust-analyzer` tests with `cargo test`.
test cases that generate them, or by simply running all of the `rust-analyzer` tests with `cargo test` and `cargo xtask codegen`.
@ -9,10 +9,10 @@ title = "rust-analyzer"
edition = "2021"

[output.html]
edit-url-template = "https://github.com/rust-lang/rust-analyzer/edit/master/manual/{path}"
git-repository-url = "https://github.com/rust-lang/rust-analyzer/tree/master/manual"
edit-url-template = "https://github.com/rust-lang/rust-analyzer/edit/master/docs/book/{path}"
git-repository-url = "https://github.com/rust-lang/rust-analyzer/tree/master/docs/book"
mathjax-support = true
site-url = "/manual/"
site-url = "/book/"

[output.html.playground]
editable = true

@ -34,8 +34,3 @@ use-boolean-and = true
[output.html.fold]
enable = true
level = 3

[preprocessor.toc]
command = "mdbook-toc"
renderer = ["html"]
max-level = 3
@ -2,6 +2,9 @@

- [Introduction](README.md)
- [Installation](installation.md)
- [VS Code](vs_code.md)
- [rust-analyzer Binary](rust_analyzer_binary.md)
- [Other Editors](other_editors.md)
- [Troubleshooting](troubleshooting.md)
- [Configuration](configuration.md)
- [Non-Cargo Based Projects](non_cargo_based_projects.md)
src/tools/rust-analyzer/docs/book/src/assists_generated.md (new file, 3820 lines; diff suppressed because it is too large)

Some files were not shown because too many files have changed in this diff.