Merge commit 'af40101841c45aa75b56f4e9ca745369da8fb4ba' into sync-from-ra

This commit is contained in:
Laurențiu Nicola 2024-01-08 11:29:25 +02:00
parent 79fa976864
commit 3afeb24198
115 changed files with 3106 additions and 3623 deletions

1
Cargo.lock generated
View File

@ -569,6 +569,7 @@ dependencies = [
"expect-test", "expect-test",
"hir-def", "hir-def",
"hir-expand", "hir-expand",
"indexmap",
"intern", "intern",
"itertools", "itertools",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",

View File

@ -1,6 +1,6 @@
[workspace] [workspace]
members = ["xtask/", "lib/*", "crates/*"] members = ["xtask/", "lib/*", "crates/*"]
exclude = ["crates/proc-macro-srv/proc-macro-test/"] exclude = ["crates/proc-macro-srv/proc-macro-test/imp"]
resolver = "2" resolver = "2"
[workspace.package] [workspace.package]
@ -138,4 +138,4 @@ dashmap = { version = "=5.5.3", features = ["raw-api"] }
collapsible_if = "allow" collapsible_if = "allow"
needless_pass_by_value = "allow" needless_pass_by_value = "allow"
nonminimal_bool = "allow" nonminimal_bool = "allow"
redundant_pattern_matching = "allow" redundant_pattern_matching = "allow"

View File

@ -17,7 +17,7 @@ use smallvec::SmallVec;
use syntax::{ use syntax::{
ast::{ ast::{
self, ArrayExprKind, AstChildren, BlockExpr, HasArgList, HasAttrs, HasLoopBody, HasName, self, ArrayExprKind, AstChildren, BlockExpr, HasArgList, HasAttrs, HasLoopBody, HasName,
SlicePatComponents, RangeItem, SlicePatComponents,
}, },
AstNode, AstPtr, SyntaxNodePtr, AstNode, AstPtr, SyntaxNodePtr,
}; };
@ -622,7 +622,8 @@ impl ExprCollector<'_> {
ast::Expr::IndexExpr(e) => { ast::Expr::IndexExpr(e) => {
let base = self.collect_expr_opt(e.base()); let base = self.collect_expr_opt(e.base());
let index = self.collect_expr_opt(e.index()); let index = self.collect_expr_opt(e.index());
self.alloc_expr(Expr::Index { base, index }, syntax_ptr) let is_assignee_expr = self.is_lowering_assignee_expr;
self.alloc_expr(Expr::Index { base, index, is_assignee_expr }, syntax_ptr)
} }
ast::Expr::RangeExpr(e) => { ast::Expr::RangeExpr(e) => {
let lhs = e.start().map(|lhs| self.collect_expr(lhs)); let lhs = e.start().map(|lhs| self.collect_expr(lhs));
@ -1609,7 +1610,7 @@ impl ExprCollector<'_> {
|name| self.alloc_expr_desugared(Expr::Path(Path::from(name))), |name| self.alloc_expr_desugared(Expr::Path(Path::from(name))),
|name, span| { |name, span| {
if let Some(span) = span { if let Some(span) = span {
mappings.push((span, name.clone())) mappings.push((span, name))
} }
}, },
), ),

View File

@ -376,7 +376,7 @@ impl Printer<'_> {
w!(self, ") "); w!(self, ") ");
} }
} }
Expr::Index { base, index } => { Expr::Index { base, index, is_assignee_expr: _ } => {
self.print_expr(*base); self.print_expr(*base);
w!(self, "["); w!(self, "[");
self.print_expr(*index); self.print_expr(*index);

View File

@ -128,7 +128,7 @@ fn parse_repr_tt(tt: &Subtree) -> Option<ReprOptions> {
} else { } else {
0 0
}; };
let pack = Align::from_bytes(pack).unwrap(); let pack = Align::from_bytes(pack).unwrap_or(Align::ONE);
min_pack = min_pack =
Some(if let Some(min_pack) = min_pack { min_pack.min(pack) } else { pack }); Some(if let Some(min_pack) = min_pack { min_pack.min(pack) } else { pack });
ReprFlags::empty() ReprFlags::empty()

View File

@ -265,6 +265,7 @@ pub enum Expr {
Index { Index {
base: ExprId, base: ExprId,
index: ExprId, index: ExprId,
is_assignee_expr: bool,
}, },
Closure { Closure {
args: Box<[PatId]>, args: Box<[PatId]>,
@ -432,7 +433,7 @@ impl Expr {
f(rhs); f(rhs);
} }
} }
Expr::Index { base, index } => { Expr::Index { base, index, .. } => {
f(*base); f(*base);
f(*index); f(*index);
} }

View File

@ -3,13 +3,13 @@
use std::{fmt, hash::BuildHasherDefault}; use std::{fmt, hash::BuildHasherDefault};
use base_db::CrateId; use base_db::CrateId;
use fst::{self, raw::IndexedValue, Streamer}; use fst::{self, raw::IndexedValue, Automaton, Streamer};
use hir_expand::name::Name; use hir_expand::name::Name;
use indexmap::IndexMap; use indexmap::IndexMap;
use itertools::Itertools; use itertools::Itertools;
use rustc_hash::{FxHashSet, FxHasher}; use rustc_hash::{FxHashSet, FxHasher};
use smallvec::SmallVec; use smallvec::SmallVec;
use stdx::format_to; use stdx::{format_to, TupleExt};
use triomphe::Arc; use triomphe::Arc;
use crate::{ use crate::{
@ -20,12 +20,10 @@ use crate::{
AssocItemId, ModuleDefId, ModuleId, TraitId, AssocItemId, ModuleDefId, ModuleId, TraitId,
}; };
type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<FxHasher>>;
/// Item import details stored in the `ImportMap`. /// Item import details stored in the `ImportMap`.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct ImportInfo { pub struct ImportInfo {
/// A name that can be used to import the item, relative to the crate's root. /// A name that can be used to import the item, relative to the container.
pub name: Name, pub name: Name,
/// The module containing this item. /// The module containing this item.
pub container: ModuleId, pub container: ModuleId,
@ -35,22 +33,22 @@ pub struct ImportInfo {
pub is_unstable: bool, pub is_unstable: bool,
} }
type ImportMapIndex = FxIndexMap<ItemInNs, (SmallVec<[ImportInfo; 1]>, IsTraitAssocItem)>;
/// A map from publicly exported items to its name. /// A map from publicly exported items to its name.
/// ///
/// Reexports of items are taken into account, ie. if something is exported under multiple /// Reexports of items are taken into account.
/// names, the one with the shortest import path will be used.
#[derive(Default)] #[derive(Default)]
pub struct ImportMap { pub struct ImportMap {
map: ImportMapIndex, /// Maps from `ItemInNs` to information of imports that bring the item into scope.
/// List of keys stored in `map`, sorted lexicographically by their `ModPath`. Indexed by the item_to_info_map: ImportMapIndex,
/// values returned by running `fst`. /// List of keys stored in [`Self::item_to_info_map`], sorted lexicographically by their
/// [`Name`]. Indexed by the values returned by running `fst`.
/// ///
/// Since a name can refer to multiple items due to namespacing, we store all items with the /// Since a name can refer to multiple items due to namespacing and import aliases, we store all
/// same name right after each other. This allows us to find all items after the FST gives us /// items with the same name right after each other. This allows us to find all items after the
/// the index of the first one. /// fst gives us the index of the first one.
importables: Vec<ItemInNs>, ///
/// The [`u32`] is the index into the smallvec in the value of [`Self::item_to_info_map`].
importables: Vec<(ItemInNs, u32)>,
fst: fst::Map<Vec<u8>>, fst: fst::Map<Vec<u8>>,
} }
@ -60,10 +58,13 @@ enum IsTraitAssocItem {
No, No,
} }
type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<FxHasher>>;
type ImportMapIndex = FxIndexMap<ItemInNs, (SmallVec<[ImportInfo; 1]>, IsTraitAssocItem)>;
impl ImportMap { impl ImportMap {
pub fn dump(&self, db: &dyn DefDatabase) -> String { pub fn dump(&self, db: &dyn DefDatabase) -> String {
let mut out = String::new(); let mut out = String::new();
for (k, v) in self.map.iter() { for (k, v) in self.item_to_info_map.iter() {
format_to!(out, "{:?} ({:?}) -> ", k, v.1); format_to!(out, "{:?} ({:?}) -> ", k, v.1);
for v in &v.0 { for v in &v.0 {
format_to!(out, "{}:{:?}, ", v.name.display(db.upcast()), v.container); format_to!(out, "{}:{:?}, ", v.name.display(db.upcast()), v.container);
@ -76,177 +77,191 @@ impl ImportMap {
pub(crate) fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> { pub(crate) fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> {
let _p = profile::span("import_map_query"); let _p = profile::span("import_map_query");
let map = collect_import_map(db, krate); let map = Self::collect_import_map(db, krate);
let mut importables: Vec<_> = map let mut importables: Vec<_> = map
.iter() .iter()
// We've only collected items, whose name cannot be tuple field. // We've only collected items, whose name cannot be tuple field so unwrapping is fine.
.flat_map(|(&item, (info, is_assoc))| { .flat_map(|(&item, (info, _))| {
info.iter().map(move |info| { info.iter()
(item, *is_assoc, info.name.as_str().unwrap().to_ascii_lowercase()) .enumerate()
}) .map(move |(idx, info)| (item, info.name.to_smol_str(), idx as u32))
}) })
.collect(); .collect();
importables.sort_by(|(_, l_is_assoc, lhs_name), (_, r_is_assoc, rhs_name)| { importables.sort_by(|(_, l_info, _), (_, r_info, _)| {
lhs_name.cmp(rhs_name).then_with(|| l_is_assoc.cmp(r_is_assoc)) let lhs_chars = l_info.chars().map(|c| c.to_ascii_lowercase());
let rhs_chars = r_info.chars().map(|c| c.to_ascii_lowercase());
lhs_chars.cmp(rhs_chars)
}); });
importables.dedup(); importables.dedup();
// Build the FST, taking care not to insert duplicate values. // Build the FST, taking care not to insert duplicate values.
let mut builder = fst::MapBuilder::memory(); let mut builder = fst::MapBuilder::memory();
let iter = importables let mut iter = importables
.iter() .iter()
.enumerate() .enumerate()
.dedup_by(|(_, (_, _, lhs)), (_, (_, _, rhs))| lhs == rhs); .dedup_by(|&(_, (_, lhs, _)), &(_, (_, rhs, _))| lhs.eq_ignore_ascii_case(rhs));
for (start_idx, (_, _, name)) in iter {
let _ = builder.insert(name, start_idx as u64); let mut insert = |name: &str, start, end| {
builder.insert(name.to_ascii_lowercase(), ((start as u64) << 32) | end as u64).unwrap()
};
if let Some((mut last, (_, name, _))) = iter.next() {
debug_assert_eq!(last, 0);
let mut last_name = name;
for (next, (_, next_name, _)) in iter {
insert(last_name, last, next);
last = next;
last_name = next_name;
}
insert(last_name, last, importables.len());
} }
Arc::new(ImportMap { let importables = importables.into_iter().map(|(item, _, idx)| (item, idx)).collect();
map, Arc::new(ImportMap { item_to_info_map: map, fst: builder.into_map(), importables })
fst: builder.into_map(),
importables: importables.into_iter().map(|(item, _, _)| item).collect(),
})
} }
pub fn import_info_for(&self, item: ItemInNs) -> Option<&[ImportInfo]> { pub fn import_info_for(&self, item: ItemInNs) -> Option<&[ImportInfo]> {
self.map.get(&item).map(|(info, _)| &**info) self.item_to_info_map.get(&item).map(|(info, _)| &**info)
} }
}
fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMapIndex { fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMapIndex {
let _p = profile::span("collect_import_map"); let _p = profile::span("collect_import_map");
let def_map = db.crate_def_map(krate); let def_map = db.crate_def_map(krate);
let mut map = FxIndexMap::default(); let mut map = FxIndexMap::default();
// We look only into modules that are public(ly reexported), starting with the crate root. // We look only into modules that are public(ly reexported), starting with the crate root.
let root = def_map.module_id(DefMap::ROOT); let root = def_map.module_id(DefMap::ROOT);
let mut worklist = vec![root]; let mut worklist = vec![root];
let mut visited = FxHashSet::default(); let mut visited = FxHashSet::default();
while let Some(module) = worklist.pop() { while let Some(module) = worklist.pop() {
if !visited.insert(module) { if !visited.insert(module) {
continue;
}
let ext_def_map;
let mod_data = if module.krate == krate {
&def_map[module.local_id]
} else {
// The crate might reexport a module defined in another crate.
ext_def_map = module.def_map(db);
&ext_def_map[module.local_id]
};
let visible_items = mod_data.scope.entries().filter_map(|(name, per_ns)| {
let per_ns = per_ns.filter_visibility(|vis| vis == Visibility::Public);
if per_ns.is_none() { None } else { Some((name, per_ns)) }
});
for (name, per_ns) in visible_items {
for (item, import) in per_ns.iter_items() {
let attr_id = if let Some(import) = import {
match import {
ImportOrExternCrate::ExternCrate(id) => Some(id.into()),
ImportOrExternCrate::Import(id) => Some(id.import.into()),
}
} else {
match item {
ItemInNs::Types(id) | ItemInNs::Values(id) => id.try_into().ok(),
ItemInNs::Macros(id) => Some(id.into()),
}
};
let (is_doc_hidden, is_unstable) = attr_id.map_or((false, false), |attr_id| {
let attrs = db.attrs(attr_id);
(attrs.has_doc_hidden(), attrs.is_unstable())
});
let import_info = ImportInfo {
name: name.clone(),
container: module,
is_doc_hidden,
is_unstable,
};
if let Some(ModuleDefId::TraitId(tr)) = item.as_module_def_id() {
collect_trait_assoc_items(
db,
&mut map,
tr,
matches!(item, ItemInNs::Types(_)),
&import_info,
);
}
let (infos, _) =
map.entry(item).or_insert_with(|| (SmallVec::new(), IsTraitAssocItem::No));
infos.reserve_exact(1);
infos.push(import_info);
// If we've just added a module, descend into it.
if let Some(ModuleDefId::ModuleId(mod_id)) = item.as_module_def_id() {
worklist.push(mod_id);
}
}
}
}
map.shrink_to_fit();
map
}
fn collect_trait_assoc_items(
db: &dyn DefDatabase,
map: &mut ImportMapIndex,
tr: TraitId,
is_type_in_ns: bool,
trait_import_info: &ImportInfo,
) {
let _p = profile::span("collect_trait_assoc_items");
for &(ref assoc_item_name, item) in &db.trait_data(tr).items {
let module_def_id = match item {
AssocItemId::FunctionId(f) => ModuleDefId::from(f),
AssocItemId::ConstId(c) => ModuleDefId::from(c),
// cannot use associated type aliases directly: need a `<Struct as Trait>::TypeAlias`
// qualifier, ergo no need to store it for imports in import_map
AssocItemId::TypeAliasId(_) => {
cov_mark::hit!(type_aliases_ignored);
continue; continue;
} }
}; let ext_def_map;
let assoc_item = if is_type_in_ns { let mod_data = if module.krate == krate {
ItemInNs::Types(module_def_id) &def_map[module.local_id]
} else { } else {
ItemInNs::Values(module_def_id) // The crate might reexport a module defined in another crate.
}; ext_def_map = module.def_map(db);
&ext_def_map[module.local_id]
};
let attrs = &db.attrs(item.into()); let visible_items = mod_data.scope.entries().filter_map(|(name, per_ns)| {
let assoc_item_info = ImportInfo { let per_ns = per_ns.filter_visibility(|vis| vis == Visibility::Public);
container: trait_import_info.container, if per_ns.is_none() {
name: assoc_item_name.clone(), None
is_doc_hidden: attrs.has_doc_hidden(), } else {
is_unstable: attrs.is_unstable(), Some((name, per_ns))
}; }
});
let (infos, _) = for (name, per_ns) in visible_items {
map.entry(assoc_item).or_insert_with(|| (SmallVec::new(), IsTraitAssocItem::Yes)); for (item, import) in per_ns.iter_items() {
infos.reserve_exact(1); let attr_id = if let Some(import) = import {
infos.push(assoc_item_info); match import {
ImportOrExternCrate::ExternCrate(id) => Some(id.into()),
ImportOrExternCrate::Import(id) => Some(id.import.into()),
}
} else {
match item {
ItemInNs::Types(id) | ItemInNs::Values(id) => id.try_into().ok(),
ItemInNs::Macros(id) => Some(id.into()),
}
};
let (is_doc_hidden, is_unstable) = attr_id.map_or((false, false), |attr_id| {
let attrs = db.attrs(attr_id);
(attrs.has_doc_hidden(), attrs.is_unstable())
});
let import_info = ImportInfo {
name: name.clone(),
container: module,
is_doc_hidden,
is_unstable,
};
if let Some(ModuleDefId::TraitId(tr)) = item.as_module_def_id() {
Self::collect_trait_assoc_items(
db,
&mut map,
tr,
matches!(item, ItemInNs::Types(_)),
&import_info,
);
}
let (infos, _) =
map.entry(item).or_insert_with(|| (SmallVec::new(), IsTraitAssocItem::No));
infos.reserve_exact(1);
infos.push(import_info);
// If we've just added a module, descend into it.
if let Some(ModuleDefId::ModuleId(mod_id)) = item.as_module_def_id() {
worklist.push(mod_id);
}
}
}
}
map.shrink_to_fit();
map
} }
}
impl PartialEq for ImportMap { fn collect_trait_assoc_items(
fn eq(&self, other: &Self) -> bool { db: &dyn DefDatabase,
// `fst` and `importables` are built from `map`, so we don't need to compare them. map: &mut ImportMapIndex,
self.map == other.map tr: TraitId,
is_type_in_ns: bool,
trait_import_info: &ImportInfo,
) {
let _p = profile::span("collect_trait_assoc_items");
for &(ref assoc_item_name, item) in &db.trait_data(tr).items {
let module_def_id = match item {
AssocItemId::FunctionId(f) => ModuleDefId::from(f),
AssocItemId::ConstId(c) => ModuleDefId::from(c),
// cannot use associated type aliases directly: need a `<Struct as Trait>::TypeAlias`
// qualifier, ergo no need to store it for imports in import_map
AssocItemId::TypeAliasId(_) => {
cov_mark::hit!(type_aliases_ignored);
continue;
}
};
let assoc_item = if is_type_in_ns {
ItemInNs::Types(module_def_id)
} else {
ItemInNs::Values(module_def_id)
};
let attrs = &db.attrs(item.into());
let assoc_item_info = ImportInfo {
container: trait_import_info.container,
name: assoc_item_name.clone(),
is_doc_hidden: attrs.has_doc_hidden(),
is_unstable: attrs.is_unstable(),
};
let (infos, _) =
map.entry(assoc_item).or_insert_with(|| (SmallVec::new(), IsTraitAssocItem::Yes));
infos.reserve_exact(1);
infos.push(assoc_item_info);
}
} }
} }
impl Eq for ImportMap {} impl Eq for ImportMap {}
impl PartialEq for ImportMap {
fn eq(&self, other: &Self) -> bool {
// `fst` and `importables` are built from `map`, so we don't need to compare them.
self.item_to_info_map == other.item_to_info_map
}
}
impl fmt::Debug for ImportMap { impl fmt::Debug for ImportMap {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut importable_names: Vec<_> = self let mut importable_names: Vec<_> = self
.map .item_to_info_map
.iter() .iter()
.map(|(item, (infos, _))| { .map(|(item, (infos, _))| {
let l = infos.len(); let l = infos.len();
@ -264,8 +279,8 @@ impl fmt::Debug for ImportMap {
} }
/// A way to match import map contents against the search query. /// A way to match import map contents against the search query.
#[derive(Copy, Clone, Debug)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
enum SearchMode { pub enum SearchMode {
/// Import map entry should strictly match the query string. /// Import map entry should strictly match the query string.
Exact, Exact,
/// Import map entry should contain all letters from the query string, /// Import map entry should contain all letters from the query string,
@ -275,6 +290,42 @@ enum SearchMode {
Prefix, Prefix,
} }
impl SearchMode {
pub fn check(self, query: &str, case_sensitive: bool, candidate: &str) -> bool {
match self {
SearchMode::Exact if case_sensitive => candidate == query,
SearchMode::Exact => candidate.eq_ignore_ascii_case(&query),
SearchMode::Prefix => {
query.len() <= candidate.len() && {
let prefix = &candidate[..query.len() as usize];
if case_sensitive {
prefix == query
} else {
prefix.eq_ignore_ascii_case(&query)
}
}
}
SearchMode::Fuzzy => {
let mut name = candidate;
query.chars().all(|query_char| {
let m = if case_sensitive {
name.match_indices(query_char).next()
} else {
name.match_indices([query_char, query_char.to_ascii_uppercase()]).next()
};
match m {
Some((index, _)) => {
name = &name[index + 1..];
true
}
None => false,
}
})
}
}
}
}
/// Three possible ways to search for the name in associated and/or other items. /// Three possible ways to search for the name in associated and/or other items.
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
pub enum AssocSearchMode { pub enum AssocSearchMode {
@ -293,7 +344,6 @@ pub struct Query {
search_mode: SearchMode, search_mode: SearchMode,
assoc_mode: AssocSearchMode, assoc_mode: AssocSearchMode,
case_sensitive: bool, case_sensitive: bool,
limit: usize,
} }
impl Query { impl Query {
@ -305,7 +355,6 @@ impl Query {
search_mode: SearchMode::Exact, search_mode: SearchMode::Exact,
assoc_mode: AssocSearchMode::Include, assoc_mode: AssocSearchMode::Include,
case_sensitive: false, case_sensitive: false,
limit: usize::MAX,
} }
} }
@ -327,11 +376,6 @@ impl Query {
Self { assoc_mode, ..self } Self { assoc_mode, ..self }
} }
/// Limits the returned number of items to `limit`.
pub fn limit(self, limit: usize) -> Self {
Self { limit, ..self }
}
/// Respect casing of the query string when matching. /// Respect casing of the query string when matching.
pub fn case_sensitive(self) -> Self { pub fn case_sensitive(self) -> Self {
Self { case_sensitive: true, ..self } Self { case_sensitive: true, ..self }
@ -344,39 +388,6 @@ impl Query {
_ => true, _ => true,
} }
} }
/// Checks whether the import map entry matches the query.
fn import_matches(&self, import: &ImportInfo, enforce_lowercase: bool) -> bool {
let _p = profile::span("import_map::Query::import_matches");
// FIXME: Can we get rid of the alloc here?
let input = import.name.to_smol_str();
let mut _s_slot;
let case_insensitive = enforce_lowercase || !self.case_sensitive;
let input = if case_insensitive {
_s_slot = String::from(input);
_s_slot.make_ascii_lowercase();
&*_s_slot
} else {
&*input
};
let query_string = if case_insensitive { &self.lowercased } else { &self.query };
match self.search_mode {
SearchMode::Exact => input == *query_string,
SearchMode::Prefix => input.starts_with(query_string),
SearchMode::Fuzzy => {
let mut input_chars = input.chars();
for query_char in query_string.chars() {
if !input_chars.any(|it| it == query_char) {
return false;
}
}
true
}
}
}
} }
/// Searches dependencies of `krate` for an importable name matching `query`. /// Searches dependencies of `krate` for an importable name matching `query`.
@ -394,73 +405,66 @@ pub fn search_dependencies(
let import_maps: Vec<_> = let import_maps: Vec<_> =
graph[krate].dependencies.iter().map(|dep| db.import_map(dep.crate_id)).collect(); graph[krate].dependencies.iter().map(|dep| db.import_map(dep.crate_id)).collect();
let automaton = fst::automaton::Subsequence::new(&query.lowercased);
let mut op = fst::map::OpBuilder::new(); let mut op = fst::map::OpBuilder::new();
for map in &import_maps {
op = op.add(map.fst.search(&automaton)); match query.search_mode {
SearchMode::Exact => {
let automaton = fst::automaton::Str::new(&query.lowercased);
for map in &import_maps {
op = op.add(map.fst.search(&automaton));
}
search_maps(&import_maps, op.union(), query)
}
SearchMode::Fuzzy => {
let automaton = fst::automaton::Subsequence::new(&query.lowercased);
for map in &import_maps {
op = op.add(map.fst.search(&automaton));
}
search_maps(&import_maps, op.union(), query)
}
SearchMode::Prefix => {
let automaton = fst::automaton::Str::new(&query.lowercased).starts_with();
for map in &import_maps {
op = op.add(map.fst.search(&automaton));
}
search_maps(&import_maps, op.union(), query)
}
} }
}
let mut stream = op.union(); fn search_maps(
import_maps: &[Arc<ImportMap>],
mut stream: fst::map::Union<'_>,
query: &Query,
) -> FxHashSet<ItemInNs> {
let mut res = FxHashSet::default(); let mut res = FxHashSet::default();
let mut common_importable_data_scratch = vec![];
// FIXME: Improve this, its rather unreadable and does duplicate amount of work
while let Some((_, indexed_values)) = stream.next() { while let Some((_, indexed_values)) = stream.next() {
for &IndexedValue { index, value } in indexed_values { for &IndexedValue { index: import_map_idx, value } in indexed_values {
let import_map = &import_maps[index]; let end = (value & 0xFFFF_FFFF) as usize;
let importables @ [importable, ..] = &import_map.importables[value as usize..] else { let start = (value >> 32) as usize;
continue; let ImportMap { item_to_info_map, importables, .. } = &*import_maps[import_map_idx];
}; let importables = &importables[start as usize..end];
let &(ref importable_data, is_trait_assoc_item) = &import_map.map[importable];
if !query.matches_assoc_mode(is_trait_assoc_item) {
continue;
}
// Fetch all the known names of this importable item (to handle import aliases/renames) let iter = importables
common_importable_data_scratch.extend( .iter()
importable_data .copied()
.iter() .filter_map(|(item, info_idx)| {
.filter(|&info| query.import_matches(info, true)) let (import_infos, assoc_mode) = &item_to_info_map[&item];
// Name shared by the importable items in this group. query
.map(|info| info.name.to_smol_str()), .matches_assoc_mode(*assoc_mode)
); .then(|| (item, &import_infos[info_idx as usize]))
if common_importable_data_scratch.is_empty() { })
continue; .filter(|&(_, info)| {
} query.search_mode.check(
common_importable_data_scratch.sort(); &query.query,
common_importable_data_scratch.dedup(); query.case_sensitive,
&info.name.to_smol_str(),
let iter = )
common_importable_data_scratch.drain(..).flat_map(|common_importable_name| {
// Add the items from this name group. Those are all subsequent items in
// `importables` whose name match `common_importable_name`.
importables
.iter()
.copied()
.take_while(move |item| {
let &(ref import_infos, assoc_mode) = &import_map.map[item];
query.matches_assoc_mode(assoc_mode)
&& import_infos.iter().any(|info| {
info.name
.to_smol_str()
.eq_ignore_ascii_case(&common_importable_name)
})
})
.filter(move |item| {
!query.case_sensitive || {
// we've already checked the common importables name case-insensitively
let &(ref import_infos, _) = &import_map.map[item];
import_infos.iter().any(|info| query.import_matches(info, false))
}
})
}); });
res.extend(iter); res.extend(iter.map(TupleExt::head));
if res.len() >= query.limit {
return res;
}
} }
} }
@ -480,7 +484,7 @@ mod tests {
impl ImportMap { impl ImportMap {
fn fmt_for_test(&self, db: &dyn DefDatabase) -> String { fn fmt_for_test(&self, db: &dyn DefDatabase) -> String {
let mut importable_paths: Vec<_> = self let mut importable_paths: Vec<_> = self
.map .item_to_info_map
.iter() .iter()
.flat_map(|(item, (info, _))| info.iter().map(move |info| (item, info))) .flat_map(|(item, (info, _))| info.iter().map(move |info| (item, info)))
.map(|(item, info)| { .map(|(item, info)| {
@ -907,28 +911,28 @@ mod tests {
#[test] #[test]
fn search_mode() { fn search_mode() {
let ra_fixture = r#" let ra_fixture = r#"
//- /main.rs crate:main deps:dep //- /main.rs crate:main deps:dep
//- /dep.rs crate:dep deps:tdep //- /dep.rs crate:dep deps:tdep
use tdep::fmt as fmt_dep; use tdep::fmt as fmt_dep;
pub mod fmt { pub mod fmt {
pub trait Display { pub trait Display {
fn fmt(); fn fmt();
} }
} }
#[macro_export] #[macro_export]
macro_rules! Fmt { macro_rules! Fmt {
() => {}; () => {};
} }
pub struct Fmt; pub struct Fmt;
pub fn format() {} pub fn format() {}
pub fn no() {} pub fn no() {}
//- /tdep.rs crate:tdep //- /tdep.rs crate:tdep
pub mod fmt { pub mod fmt {
pub struct NotImportableFromMain; pub struct NotImportableFromMain;
} }
"#; "#;
check_search( check_search(
ra_fixture, ra_fixture,
@ -996,19 +1000,6 @@ mod tests {
dep::fmt::Display::fmt (a) dep::fmt::Display::fmt (a)
"#]], "#]],
); );
check_search(
ra_fixture,
"main",
Query::new("fmt".to_string()),
expect![[r#"
dep::Fmt (m)
dep::Fmt (t)
dep::Fmt (v)
dep::fmt (t)
dep::fmt::Display::fmt (a)
"#]],
);
} }
#[test] #[test]
@ -1043,32 +1034,4 @@ mod tests {
"#]], "#]],
); );
} }
#[test]
fn search_limit() {
check_search(
r#"
//- /main.rs crate:main deps:dep
//- /dep.rs crate:dep
pub mod fmt {
pub trait Display {
fn fmt();
}
}
#[macro_export]
macro_rules! Fmt {
() => {};
}
pub struct Fmt;
pub fn format() {}
pub fn no() {}
"#,
"main",
Query::new("".to_string()).fuzzy().limit(1),
expect![[r#"
dep::fmt::Display (t)
"#]],
);
}
} }

View File

@ -307,6 +307,15 @@ pub struct FieldId {
pub type LocalFieldId = Idx<data::adt::FieldData>; pub type LocalFieldId = Idx<data::adt::FieldData>;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TupleId(pub u32);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TupleFieldId {
pub tuple: TupleId,
pub index: u32,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ConstId(salsa::InternId); pub struct ConstId(salsa::InternId);
type ConstLoc = AssocItemLoc<Const>; type ConstLoc = AssocItemLoc<Const>;

View File

@ -1397,7 +1397,7 @@ impl DefCollector<'_> {
always!(krate == loc.def.krate); always!(krate == loc.def.krate);
DefDiagnostic::unresolved_proc_macro(module_id, loc.kind.clone(), loc.def.krate) DefDiagnostic::unresolved_proc_macro(module_id, loc.kind.clone(), loc.def.krate)
} }
_ => DefDiagnostic::macro_error(module_id, loc.kind.clone(), err.to_string()), _ => DefDiagnostic::macro_error(module_id, loc.kind, err.to_string()),
}; };
self.def_map.diagnostics.push(diag); self.def_map.diagnostics.push(diag);

View File

@ -390,7 +390,13 @@ fn parse_macro_expansion(
let expand_to = loc.expand_to(); let expand_to = loc.expand_to();
let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id, loc); let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id, loc);
let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to); let (parse, rev_token_map) = token_tree_to_syntax_node(
match &tt {
CowArc::Arc(it) => it,
CowArc::Owned(it) => it,
},
expand_to,
);
ExpandResult { value: (parse, Arc::new(rev_token_map)), err } ExpandResult { value: (parse, Arc::new(rev_token_map)), err }
} }
@ -669,15 +675,20 @@ fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
} }
} }
enum CowArc<T> {
Arc(Arc<T>),
Owned(T),
}
fn macro_expand( fn macro_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
macro_call_id: MacroCallId, macro_call_id: MacroCallId,
loc: MacroCallLoc, loc: MacroCallLoc,
) -> ExpandResult<Arc<tt::Subtree>> { ) -> ExpandResult<CowArc<tt::Subtree>> {
let _p = profile::span("macro_expand"); let _p = profile::span("macro_expand");
let ExpandResult { value: tt, mut err } = match loc.def.kind { let ExpandResult { value: tt, mut err } = match loc.def.kind {
MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id), MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id).map(CowArc::Arc),
MacroDefKind::BuiltInDerive(expander, ..) => { MacroDefKind::BuiltInDerive(expander, ..) => {
let (root, map) = parse_with_map(db, loc.kind.file_id()); let (root, map) = parse_with_map(db, loc.kind.file_id());
let root = root.syntax_node(); let root = root.syntax_node();
@ -692,7 +703,7 @@ fn macro_expand(
let ValueResult { value, err } = db.macro_arg(macro_call_id); let ValueResult { value, err } = db.macro_arg(macro_call_id);
let Some((macro_arg, undo_info)) = value else { let Some((macro_arg, undo_info)) = value else {
return ExpandResult { return ExpandResult {
value: Arc::new(tt::Subtree { value: CowArc::Owned(tt::Subtree {
delimiter: tt::Delimiter::invisible_spanned(loc.call_site), delimiter: tt::Delimiter::invisible_spanned(loc.call_site),
token_trees: Vec::new(), token_trees: Vec::new(),
}), }),
@ -718,7 +729,7 @@ fn macro_expand(
// As such we just return the input subtree here. // As such we just return the input subtree here.
MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => { MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => {
return ExpandResult { return ExpandResult {
value: macro_arg.clone(), value: CowArc::Arc(macro_arg.clone()),
err: err.map(|err| { err: err.map(|err| {
let mut buf = String::new(); let mut buf = String::new();
for err in &**err { for err in &**err {
@ -752,12 +763,17 @@ fn macro_expand(
// Skip checking token tree limit for include! macro call // Skip checking token tree limit for include! macro call
if !loc.def.is_include() { if !loc.def.is_include() {
// Set a hard limit for the expanded tt // Set a hard limit for the expanded tt
if let Err(value) = check_tt_count(&tt, loc.call_site) { if let Err(value) = check_tt_count(&tt) {
return value; return value.map(|()| {
CowArc::Owned(tt::Subtree {
delimiter: tt::Delimiter::invisible_spanned(loc.call_site),
token_trees: vec![],
})
});
} }
} }
ExpandResult { value: Arc::new(tt), err } ExpandResult { value: CowArc::Owned(tt), err }
} }
fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> { fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
@ -796,8 +812,13 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
); );
// Set a hard limit for the expanded tt // Set a hard limit for the expanded tt
if let Err(value) = check_tt_count(&tt, loc.call_site) { if let Err(value) = check_tt_count(&tt) {
return value; return value.map(|()| {
Arc::new(tt::Subtree {
delimiter: tt::Delimiter::invisible_spanned(loc.call_site),
token_trees: vec![],
})
});
} }
fixup::reverse_fixups(&mut tt, &undo_info); fixup::reverse_fixups(&mut tt, &undo_info);
@ -819,14 +840,11 @@ fn token_tree_to_syntax_node(
mbe::token_tree_to_syntax_node(tt, entry_point) mbe::token_tree_to_syntax_node(tt, entry_point)
} }
fn check_tt_count(tt: &tt::Subtree, call_site: Span) -> Result<(), ExpandResult<Arc<tt::Subtree>>> { fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> {
let count = tt.count(); let count = tt.count();
if TOKEN_LIMIT.check(count).is_err() { if TOKEN_LIMIT.check(count).is_err() {
Err(ExpandResult { Err(ExpandResult {
value: Arc::new(tt::Subtree { value: (),
delimiter: tt::Delimiter::invisible_spanned(call_site),
token_trees: vec![],
}),
err: Some(ExpandError::other(format!( err: Some(ExpandError::other(format!(
"macro invocation exceeds token limit: produced {} tokens, limit is {}", "macro invocation exceeds token limit: produced {} tokens, limit is {}",
count, count,

View File

@ -220,6 +220,8 @@ pub enum MacroCallKind {
}, },
Attr { Attr {
ast_id: AstId<ast::Item>, ast_id: AstId<ast::Item>,
// FIXME: This is being interned, subtrees can very quickly differ just slightly causing
// leakage problems here
attr_args: Option<Arc<tt::Subtree>>, attr_args: Option<Arc<tt::Subtree>>,
/// Syntactical index of the invoking `#[attribute]`. /// Syntactical index of the invoking `#[attribute]`.
/// ///

View File

@ -32,6 +32,7 @@ once_cell = "1.17.0"
triomphe.workspace = true triomphe.workspace = true
nohash-hasher.workspace = true nohash-hasher.workspace = true
typed-arena = "2.0.1" typed-arena = "2.0.1"
indexmap.workspace = true
rustc-dependencies.workspace = true rustc-dependencies.workspace = true
@ -60,4 +61,4 @@ test-fixture.workspace = true
in-rust-tree = ["rustc-dependencies/in-rust-tree"] in-rust-tree = ["rustc-dependencies/in-rust-tree"]
[lints] [lints]
workspace = true workspace = true

View File

@ -142,15 +142,15 @@ pub fn intern_const_ref(
LiteralConstRef::Int(i) => { LiteralConstRef::Int(i) => {
// FIXME: We should handle failure of layout better. // FIXME: We should handle failure of layout better.
let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16); let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16);
ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default()) ConstScalar::Bytes(i.to_le_bytes()[0..size].into(), MemoryMap::default())
} }
LiteralConstRef::UInt(i) => { LiteralConstRef::UInt(i) => {
let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16); let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16);
ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default()) ConstScalar::Bytes(i.to_le_bytes()[0..size].into(), MemoryMap::default())
} }
LiteralConstRef::Bool(b) => ConstScalar::Bytes(vec![*b as u8], MemoryMap::default()), LiteralConstRef::Bool(b) => ConstScalar::Bytes(Box::new([*b as u8]), MemoryMap::default()),
LiteralConstRef::Char(c) => { LiteralConstRef::Char(c) => {
ConstScalar::Bytes((*c as u32).to_le_bytes().to_vec(), MemoryMap::default()) ConstScalar::Bytes((*c as u32).to_le_bytes().into(), MemoryMap::default())
} }
LiteralConstRef::Unknown => ConstScalar::Unknown, LiteralConstRef::Unknown => ConstScalar::Unknown,
}; };

View File

@ -515,7 +515,7 @@ fn render_const_scalar(
TyKind::Dyn(_) => { TyKind::Dyn(_) => {
let addr = usize::from_le_bytes(b[0..b.len() / 2].try_into().unwrap()); let addr = usize::from_le_bytes(b[0..b.len() / 2].try_into().unwrap());
let ty_id = usize::from_le_bytes(b[b.len() / 2..].try_into().unwrap()); let ty_id = usize::from_le_bytes(b[b.len() / 2..].try_into().unwrap());
let Ok(t) = memory_map.vtable.ty(ty_id) else { let Ok(t) = memory_map.vtable_ty(ty_id) else {
return f.write_str("<ty-missing-in-vtable-map>"); return f.write_str("<ty-missing-in-vtable-map>");
}; };
let Ok(layout) = f.db.layout_of_ty(t.clone(), trait_env) else { let Ok(layout) = f.db.layout_of_ty(t.clone(), trait_env) else {
@ -609,7 +609,7 @@ fn render_const_scalar(
} }
hir_def::AdtId::EnumId(e) => { hir_def::AdtId::EnumId(e) => {
let Some((var_id, var_layout)) = let Some((var_id, var_layout)) =
detect_variant_from_bytes(&layout, f.db, trait_env.clone(), b, e) detect_variant_from_bytes(&layout, f.db, trait_env, b, e)
else { else {
return f.write_str("<failed-to-detect-variant>"); return f.write_str("<failed-to-detect-variant>");
}; };

View File

@ -41,9 +41,10 @@ use hir_def::{
resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs}, resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs},
type_ref::TypeRef, type_ref::TypeRef,
AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, ItemContainerId, Lookup, AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, ItemContainerId, Lookup,
TraitId, TypeAliasId, VariantId, TraitId, TupleFieldId, TupleId, TypeAliasId, VariantId,
}; };
use hir_expand::name::{name, Name}; use hir_expand::name::{name, Name};
use indexmap::IndexSet;
use la_arena::{ArenaMap, Entry}; use la_arena::{ArenaMap, Entry};
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use stdx::{always, never}; use stdx::{always, never};
@ -403,11 +404,15 @@ pub struct InferenceResult {
/// For each method call expr, records the function it resolves to. /// For each method call expr, records the function it resolves to.
method_resolutions: FxHashMap<ExprId, (FunctionId, Substitution)>, method_resolutions: FxHashMap<ExprId, (FunctionId, Substitution)>,
/// For each field access expr, records the field it resolves to. /// For each field access expr, records the field it resolves to.
field_resolutions: FxHashMap<ExprId, FieldId>, field_resolutions: FxHashMap<ExprId, Either<FieldId, TupleFieldId>>,
/// For each struct literal or pattern, records the variant it resolves to. /// For each struct literal or pattern, records the variant it resolves to.
variant_resolutions: FxHashMap<ExprOrPatId, VariantId>, variant_resolutions: FxHashMap<ExprOrPatId, VariantId>,
/// For each associated item record what it resolves to /// For each associated item record what it resolves to
assoc_resolutions: FxHashMap<ExprOrPatId, (AssocItemId, Substitution)>, assoc_resolutions: FxHashMap<ExprOrPatId, (AssocItemId, Substitution)>,
/// Whenever a tuple field expression access a tuple field, we allocate a tuple id in
/// [`InferenceContext`] and store the tuples substitution there. This map is the reverse of
/// that which allows us to resolve a [`TupleFieldId`]s type.
pub tuple_field_access_types: FxHashMap<TupleId, Substitution>,
pub diagnostics: Vec<InferenceDiagnostic>, pub diagnostics: Vec<InferenceDiagnostic>,
pub type_of_expr: ArenaMap<ExprId, Ty>, pub type_of_expr: ArenaMap<ExprId, Ty>,
/// For each pattern record the type it resolves to. /// For each pattern record the type it resolves to.
@ -447,7 +452,7 @@ impl InferenceResult {
pub fn method_resolution(&self, expr: ExprId) -> Option<(FunctionId, Substitution)> { pub fn method_resolution(&self, expr: ExprId) -> Option<(FunctionId, Substitution)> {
self.method_resolutions.get(&expr).cloned() self.method_resolutions.get(&expr).cloned()
} }
pub fn field_resolution(&self, expr: ExprId) -> Option<FieldId> { pub fn field_resolution(&self, expr: ExprId) -> Option<Either<FieldId, TupleFieldId>> {
self.field_resolutions.get(&expr).copied() self.field_resolutions.get(&expr).copied()
} }
pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option<VariantId> { pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option<VariantId> {
@ -517,6 +522,8 @@ pub(crate) struct InferenceContext<'a> {
/// The traits in scope, disregarding block modules. This is used for caching purposes. /// The traits in scope, disregarding block modules. This is used for caching purposes.
traits_in_scope: FxHashSet<TraitId>, traits_in_scope: FxHashSet<TraitId>,
pub(crate) result: InferenceResult, pub(crate) result: InferenceResult,
tuple_field_accesses_rev:
IndexSet<Substitution, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>,
/// The return type of the function being inferred, the closure or async block if we're /// The return type of the function being inferred, the closure or async block if we're
/// currently within one. /// currently within one.
/// ///
@ -598,6 +605,7 @@ impl<'a> InferenceContext<'a> {
InferenceContext { InferenceContext {
result: InferenceResult::default(), result: InferenceResult::default(),
table: unify::InferenceTable::new(db, trait_env), table: unify::InferenceTable::new(db, trait_env),
tuple_field_accesses_rev: Default::default(),
return_ty: TyKind::Error.intern(Interner), // set in collect_* calls return_ty: TyKind::Error.intern(Interner), // set in collect_* calls
resume_yield_tys: None, resume_yield_tys: None,
return_coercion: None, return_coercion: None,
@ -621,7 +629,13 @@ impl<'a> InferenceContext<'a> {
// used this function for another workaround, mention it here. If you really need this function and believe that // used this function for another workaround, mention it here. If you really need this function and believe that
// there is no problem in it being `pub(crate)`, remove this comment. // there is no problem in it being `pub(crate)`, remove this comment.
pub(crate) fn resolve_all(self) -> InferenceResult { pub(crate) fn resolve_all(self) -> InferenceResult {
let InferenceContext { mut table, mut result, deferred_cast_checks, .. } = self; let InferenceContext {
mut table,
mut result,
deferred_cast_checks,
tuple_field_accesses_rev,
..
} = self;
// Destructure every single field so whenever new fields are added to `InferenceResult` we // Destructure every single field so whenever new fields are added to `InferenceResult` we
// don't forget to handle them here. // don't forget to handle them here.
let InferenceResult { let InferenceResult {
@ -645,6 +659,7 @@ impl<'a> InferenceContext<'a> {
// to resolve them here. // to resolve them here.
closure_info: _, closure_info: _,
mutated_bindings_in_closure: _, mutated_bindings_in_closure: _,
tuple_field_access_types: _,
} = &mut result; } = &mut result;
table.fallback_if_possible(); table.fallback_if_possible();
@ -720,6 +735,11 @@ impl<'a> InferenceContext<'a> {
for adjustment in pat_adjustments.values_mut().flatten() { for adjustment in pat_adjustments.values_mut().flatten() {
*adjustment = table.resolve_completely(adjustment.clone()); *adjustment = table.resolve_completely(adjustment.clone());
} }
result.tuple_field_access_types = tuple_field_accesses_rev
.into_iter()
.enumerate()
.map(|(idx, subst)| (TupleId(idx as u32), table.resolve_completely(subst)))
.collect();
result result
} }

View File

@ -1,18 +1,19 @@
//! Inference of closure parameter types based on the closure's expected type. //! Inference of closure parameter types based on the closure's expected type.
use std::{cmp, collections::HashMap, convert::Infallible, mem}; use std::{cmp, convert::Infallible, mem};
use chalk_ir::{ use chalk_ir::{
cast::Cast, cast::Cast,
fold::{FallibleTypeFolder, TypeFoldable}, fold::{FallibleTypeFolder, TypeFoldable},
AliasEq, AliasTy, BoundVar, DebruijnIndex, FnSubst, Mutability, TyKind, WhereClause, AliasEq, AliasTy, BoundVar, DebruijnIndex, FnSubst, Mutability, TyKind, WhereClause,
}; };
use either::Either;
use hir_def::{ use hir_def::{
data::adt::VariantData, data::adt::VariantData,
hir::{Array, BinaryOp, BindingId, CaptureBy, Expr, ExprId, Pat, PatId, Statement, UnaryOp}, hir::{Array, BinaryOp, BindingId, CaptureBy, Expr, ExprId, Pat, PatId, Statement, UnaryOp},
lang_item::LangItem, lang_item::LangItem,
resolver::{resolver_for_expr, ResolveValueResult, ValueNs}, resolver::{resolver_for_expr, ResolveValueResult, ValueNs},
DefWithBodyId, FieldId, HasModule, VariantId, DefWithBodyId, FieldId, HasModule, TupleFieldId, TupleId, VariantId,
}; };
use hir_expand::name; use hir_expand::name;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
@ -129,7 +130,7 @@ impl HirPlace {
ctx.owner.module(ctx.db.upcast()).krate(), ctx.owner.module(ctx.db.upcast()).krate(),
); );
} }
ty.clone() ty
} }
fn capture_kind_of_truncated_place( fn capture_kind_of_truncated_place(
@ -186,7 +187,7 @@ impl CapturedItem {
result = format!("*{result}"); result = format!("*{result}");
field_need_paren = true; field_need_paren = true;
} }
ProjectionElem::Field(f) => { ProjectionElem::Field(Either::Left(f)) => {
if field_need_paren { if field_need_paren {
result = format!("({result})"); result = format!("({result})");
} }
@ -207,7 +208,15 @@ impl CapturedItem {
result = format!("{result}.{field}"); result = format!("{result}.{field}");
field_need_paren = false; field_need_paren = false;
} }
&ProjectionElem::TupleOrClosureField(field) => { ProjectionElem::Field(Either::Right(f)) => {
let field = f.index;
if field_need_paren {
result = format!("({result})");
}
result = format!("{result}.{field}");
field_need_paren = false;
}
&ProjectionElem::ClosureField(field) => {
if field_need_paren { if field_need_paren {
result = format!("({result})"); result = format!("({result})");
} }
@ -236,7 +245,7 @@ pub(crate) struct CapturedItemWithoutTy {
impl CapturedItemWithoutTy { impl CapturedItemWithoutTy {
fn with_ty(self, ctx: &mut InferenceContext<'_>) -> CapturedItem { fn with_ty(self, ctx: &mut InferenceContext<'_>) -> CapturedItem {
let ty = self.place.ty(ctx).clone(); let ty = self.place.ty(ctx);
let ty = match &self.kind { let ty = match &self.kind {
CaptureKind::ByValue => ty, CaptureKind::ByValue => ty,
CaptureKind::ByRef(bk) => { CaptureKind::ByRef(bk) => {
@ -329,15 +338,10 @@ impl InferenceContext<'_> {
} }
} }
} }
Expr::Field { expr, name } => { Expr::Field { expr, name: _ } => {
let mut place = self.place_of_expr(*expr)?; let mut place = self.place_of_expr(*expr)?;
if let TyKind::Tuple(..) = self.expr_ty(*expr).kind(Interner) { let field = self.result.field_resolution(tgt_expr)?;
let index = name.as_tuple_index()?; place.projections.push(ProjectionElem::Field(field));
place.projections.push(ProjectionElem::TupleOrClosureField(index))
} else {
let field = self.result.field_resolution(tgt_expr)?;
place.projections.push(ProjectionElem::Field(field));
}
return Some(place); return Some(place);
} }
Expr::UnaryOp { expr, op: UnaryOp::Deref } => { Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
@ -392,7 +396,7 @@ impl InferenceContext<'_> {
fn consume_place(&mut self, place: HirPlace, span: MirSpan) { fn consume_place(&mut self, place: HirPlace, span: MirSpan) {
if self.is_upvar(&place) { if self.is_upvar(&place) {
let ty = place.ty(self).clone(); let ty = place.ty(self);
let kind = if self.is_ty_copy(ty) { let kind = if self.is_ty_copy(ty) {
CaptureKind::ByRef(BorrowKind::Shared) CaptureKind::ByRef(BorrowKind::Shared)
} else { } else {
@ -598,7 +602,7 @@ impl InferenceContext<'_> {
self.consume_expr(expr); self.consume_expr(expr);
} }
} }
Expr::Index { base, index } => { Expr::Index { base, index, is_assignee_expr: _ } => {
self.select_from_expr(*base); self.select_from_expr(*base);
self.consume_expr(*index); self.consume_expr(*index);
} }
@ -774,7 +778,7 @@ impl InferenceContext<'_> {
fn minimize_captures(&mut self) { fn minimize_captures(&mut self) {
self.current_captures.sort_by_key(|it| it.place.projections.len()); self.current_captures.sort_by_key(|it| it.place.projections.len());
let mut hash_map = HashMap::<HirPlace, usize>::new(); let mut hash_map = FxHashMap::<HirPlace, usize>::default();
let result = mem::take(&mut self.current_captures); let result = mem::take(&mut self.current_captures);
for item in result { for item in result {
let mut lookup_place = HirPlace { local: item.place.local, projections: vec![] }; let mut lookup_place = HirPlace { local: item.place.local, projections: vec![] };
@ -825,7 +829,10 @@ impl InferenceContext<'_> {
let it = al.iter().zip(fields.clone()).chain(ar.iter().rev().zip(fields.rev())); let it = al.iter().zip(fields.clone()).chain(ar.iter().rev().zip(fields.rev()));
for (arg, i) in it { for (arg, i) in it {
let mut p = place.clone(); let mut p = place.clone();
p.projections.push(ProjectionElem::TupleOrClosureField(i)); p.projections.push(ProjectionElem::Field(Either::Right(TupleFieldId {
tuple: TupleId(!0), // dummy this, as its unused anyways
index: i as u32,
})));
self.consume_with_pat(p, *arg); self.consume_with_pat(p, *arg);
} }
} }
@ -850,10 +857,10 @@ impl InferenceContext<'_> {
continue; continue;
}; };
let mut p = place.clone(); let mut p = place.clone();
p.projections.push(ProjectionElem::Field(FieldId { p.projections.push(ProjectionElem::Field(Either::Left(FieldId {
parent: variant.into(), parent: variant.into(),
local_id, local_id,
})); })));
self.consume_with_pat(p, arg); self.consume_with_pat(p, arg);
} }
} }
@ -894,10 +901,10 @@ impl InferenceContext<'_> {
al.iter().zip(fields.clone()).chain(ar.iter().rev().zip(fields.rev())); al.iter().zip(fields.clone()).chain(ar.iter().rev().zip(fields.rev()));
for (arg, (i, _)) in it { for (arg, (i, _)) in it {
let mut p = place.clone(); let mut p = place.clone();
p.projections.push(ProjectionElem::Field(FieldId { p.projections.push(ProjectionElem::Field(Either::Left(FieldId {
parent: variant.into(), parent: variant.into(),
local_id: i, local_id: i,
})); })));
self.consume_with_pat(p, *arg); self.consume_with_pat(p, *arg);
} }
} }

View File

@ -6,6 +6,7 @@ use std::{
}; };
use chalk_ir::{cast::Cast, fold::Shift, DebruijnIndex, Mutability, TyVariableKind}; use chalk_ir::{cast::Cast, fold::Shift, DebruijnIndex, Mutability, TyVariableKind};
use either::Either;
use hir_def::{ use hir_def::{
generics::TypeOrConstParamData, generics::TypeOrConstParamData,
hir::{ hir::{
@ -13,7 +14,7 @@ use hir_def::{
}, },
lang_item::{LangItem, LangItemTarget}, lang_item::{LangItem, LangItemTarget},
path::{GenericArg, GenericArgs}, path::{GenericArg, GenericArgs},
BlockId, ConstParamId, FieldId, ItemContainerId, Lookup, BlockId, ConstParamId, FieldId, ItemContainerId, Lookup, TupleFieldId, TupleId,
}; };
use hir_expand::name::{name, Name}; use hir_expand::name::{name, Name};
use stdx::always; use stdx::always;
@ -744,7 +745,7 @@ impl InferenceContext<'_> {
(RangeOp::Inclusive, _, None) => self.err_ty(), (RangeOp::Inclusive, _, None) => self.err_ty(),
} }
} }
Expr::Index { base, index } => { Expr::Index { base, index, is_assignee_expr } => {
let base_ty = self.infer_expr_inner(*base, &Expectation::none()); let base_ty = self.infer_expr_inner(*base, &Expectation::none());
let index_ty = self.infer_expr(*index, &Expectation::none()); let index_ty = self.infer_expr(*index, &Expectation::none());
@ -772,11 +773,24 @@ impl InferenceContext<'_> {
.build(); .build();
self.write_method_resolution(tgt_expr, func, substs); self.write_method_resolution(tgt_expr, func, substs);
} }
self.resolve_associated_type_with_params( let assoc = self.resolve_ops_index_output();
self_ty, let res = self.resolve_associated_type_with_params(
self.resolve_ops_index_output(), self_ty.clone(),
&[index_ty.cast(Interner)], assoc,
) &[index_ty.clone().cast(Interner)],
);
if *is_assignee_expr {
if let Some(index_trait) = self.resolve_lang_trait(LangItem::IndexMut) {
let trait_ref = TyBuilder::trait_ref(self.db, index_trait)
.push(self_ty)
.fill(|_| index_ty.clone().cast(Interner))
.build();
self.push_obligation(trait_ref.cast(Interner));
}
}
res
} else { } else {
self.err_ty() self.err_ty()
} }
@ -964,7 +978,7 @@ impl InferenceContext<'_> {
.push(callee_ty.clone()) .push(callee_ty.clone())
.push(TyBuilder::tuple_with(params.iter().cloned())) .push(TyBuilder::tuple_with(params.iter().cloned()))
.build(); .build();
self.write_method_resolution(tgt_expr, func, subst.clone()); self.write_method_resolution(tgt_expr, func, subst);
} }
} }
@ -1393,7 +1407,7 @@ impl InferenceContext<'_> {
&mut self, &mut self,
receiver_ty: &Ty, receiver_ty: &Ty,
name: &Name, name: &Name,
) -> Option<(Ty, Option<FieldId>, Vec<Adjustment>, bool)> { ) -> Option<(Ty, Either<FieldId, TupleFieldId>, Vec<Adjustment>, bool)> {
let mut autoderef = Autoderef::new(&mut self.table, receiver_ty.clone(), false); let mut autoderef = Autoderef::new(&mut self.table, receiver_ty.clone(), false);
let mut private_field = None; let mut private_field = None;
let res = autoderef.by_ref().find_map(|(derefed_ty, _)| { let res = autoderef.by_ref().find_map(|(derefed_ty, _)| {
@ -1405,7 +1419,20 @@ impl InferenceContext<'_> {
.get(idx) .get(idx)
.map(|a| a.assert_ty_ref(Interner)) .map(|a| a.assert_ty_ref(Interner))
.cloned() .cloned()
.map(|ty| (None, ty)) .map(|ty| {
(
Either::Right(TupleFieldId {
tuple: TupleId(
self.tuple_field_accesses_rev
.insert_full(substs.clone())
.0
as u32,
),
index: idx as u32,
}),
ty,
)
})
}); });
} }
TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), parameters) => { TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), parameters) => {
@ -1431,7 +1458,7 @@ impl InferenceContext<'_> {
let ty = self.db.field_types(field_id.parent)[field_id.local_id] let ty = self.db.field_types(field_id.parent)[field_id.local_id]
.clone() .clone()
.substitute(Interner, &parameters); .substitute(Interner, &parameters);
Some((Some(field_id), ty)) Some((Either::Left(field_id), ty))
}); });
Some(match res { Some(match res {
@ -1451,7 +1478,7 @@ impl InferenceContext<'_> {
let ty = self.insert_type_vars(ty); let ty = self.insert_type_vars(ty);
let ty = self.normalize_associated_types_in(ty); let ty = self.normalize_associated_types_in(ty);
(ty, Some(field_id), adjustments, false) (ty, Either::Left(field_id), adjustments, false)
} }
}) })
} }
@ -1474,11 +1501,9 @@ impl InferenceContext<'_> {
match self.lookup_field(&receiver_ty, name) { match self.lookup_field(&receiver_ty, name) {
Some((ty, field_id, adjustments, is_public)) => { Some((ty, field_id, adjustments, is_public)) => {
self.write_expr_adj(receiver, adjustments); self.write_expr_adj(receiver, adjustments);
if let Some(field_id) = field_id { self.result.field_resolutions.insert(tgt_expr, field_id);
self.result.field_resolutions.insert(tgt_expr, field_id);
}
if !is_public { if !is_public {
if let Some(field) = field_id { if let Either::Left(field) = field_id {
// FIXME: Merge this diagnostic into UnresolvedField? // FIXME: Merge this diagnostic into UnresolvedField?
self.result self.result
.diagnostics .diagnostics
@ -1568,9 +1593,7 @@ impl InferenceContext<'_> {
{ {
Some((ty, field_id, adjustments, _public)) => { Some((ty, field_id, adjustments, _public)) => {
self.write_expr_adj(receiver, adjustments); self.write_expr_adj(receiver, adjustments);
if let Some(field_id) = field_id { self.result.field_resolutions.insert(tgt_expr, field_id);
self.result.field_resolutions.insert(tgt_expr, field_id);
}
Some(ty) Some(ty)
} }
None => None, None => None,

View File

@ -96,7 +96,7 @@ impl InferenceContext<'_> {
Expr::RecordLit { path: _, fields, spread, ellipsis: _, is_assignee_expr: _ } => { Expr::RecordLit { path: _, fields, spread, ellipsis: _, is_assignee_expr: _ } => {
self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread)) self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread))
} }
&Expr::Index { base, index } => { &Expr::Index { base, index, is_assignee_expr: _ } => {
if mutability == Mutability::Mut { if mutability == Mutability::Mut {
if let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr) { if let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr) {
if let Some(index_trait) = self if let Some(index_trait) = self

View File

@ -233,7 +233,6 @@ impl InferenceContext<'_> {
}; };
let mut expectations_iter = expectations let mut expectations_iter = expectations
.iter() .iter()
.cloned()
.map(|a| a.assert_ty_ref(Interner).clone()) .map(|a| a.assert_ty_ref(Interner).clone())
.chain(repeat_with(|| self.table.new_type_var())); .chain(repeat_with(|| self.table.new_type_var()));
@ -336,7 +335,7 @@ impl InferenceContext<'_> {
&Pat::Lit(expr) => { &Pat::Lit(expr) => {
// Don't emit type mismatches again, the expression lowering already did that. // Don't emit type mismatches again, the expression lowering already did that.
let ty = self.infer_lit_pat(expr, &expected); let ty = self.infer_lit_pat(expr, &expected);
self.write_pat_ty(pat, ty.clone()); self.write_pat_ty(pat, ty);
return self.pat_ty_after_adjustment(pat); return self.pat_ty_after_adjustment(pat);
} }
Pat::Box { inner } => match self.resolve_boxed_box() { Pat::Box { inner } => match self.resolve_boxed_box() {

View File

@ -1,9 +1,15 @@
//! Implementation of the Chalk `Interner` trait, which allows customizing the //! Implementation of the Chalk `Interner` trait, which allows customizing the
//! representation of the various objects Chalk deals with (types, goals etc.). //! representation of the various objects Chalk deals with (types, goals etc.).
use crate::{chalk_db, tls, ConstScalar, GenericArg}; use crate::{
chalk_db, tls, AliasTy, CanonicalVarKind, CanonicalVarKinds, ClosureId, Const, ConstData,
ConstScalar, Constraint, Constraints, FnDefId, GenericArg, GenericArgData, Goal, GoalData,
Goals, InEnvironment, Lifetime, LifetimeData, OpaqueTy, OpaqueTyId, ProgramClause,
ProgramClauseData, ProgramClauses, ProjectionTy, QuantifiedWhereClause, QuantifiedWhereClauses,
Substitution, Ty, TyData, TyKind, VariableKind, VariableKinds,
};
use base_db::salsa::InternId; use base_db::salsa::InternId;
use chalk_ir::{Goal, GoalData}; use chalk_ir::{ProgramClauseImplication, SeparatorTraitRef, Variance};
use hir_def::TypeAliasId; use hir_def::TypeAliasId;
use intern::{impl_internable, Interned}; use intern::{impl_internable, Interned};
use smallvec::SmallVec; use smallvec::SmallVec;
@ -31,36 +37,37 @@ impl<T> std::ops::Deref for InternedWrapper<T> {
} }
impl_internable!( impl_internable!(
InternedWrapper<Vec<chalk_ir::VariableKind<Interner>>>, InternedWrapper<Vec<VariableKind>>,
InternedWrapper<SmallVec<[GenericArg; 2]>>, InternedWrapper<SmallVec<[GenericArg; 2]>>,
InternedWrapper<chalk_ir::TyData<Interner>>, InternedWrapper<TyData>,
InternedWrapper<chalk_ir::LifetimeData<Interner>>, InternedWrapper<LifetimeData>,
InternedWrapper<chalk_ir::ConstData<Interner>>, InternedWrapper<ConstData>,
InternedWrapper<ConstScalar>, InternedWrapper<ConstScalar>,
InternedWrapper<Vec<chalk_ir::CanonicalVarKind<Interner>>>, InternedWrapper<Vec<CanonicalVarKind>>,
InternedWrapper<Vec<chalk_ir::ProgramClause<Interner>>>, InternedWrapper<Vec<ProgramClause>>,
InternedWrapper<Vec<chalk_ir::QuantifiedWhereClause<Interner>>>, InternedWrapper<Vec<QuantifiedWhereClause>>,
InternedWrapper<Vec<chalk_ir::Variance>>, InternedWrapper<SmallVec<[Variance; 16]>>,
); );
impl chalk_ir::interner::Interner for Interner { impl chalk_ir::interner::Interner for Interner {
type InternedType = Interned<InternedWrapper<chalk_ir::TyData<Self>>>; type InternedType = Interned<InternedWrapper<TyData>>;
type InternedLifetime = Interned<InternedWrapper<chalk_ir::LifetimeData<Self>>>; type InternedLifetime = Interned<InternedWrapper<LifetimeData>>;
type InternedConst = Interned<InternedWrapper<chalk_ir::ConstData<Self>>>; type InternedConst = Interned<InternedWrapper<ConstData>>;
type InternedConcreteConst = ConstScalar; type InternedConcreteConst = ConstScalar;
type InternedGenericArg = chalk_ir::GenericArgData<Self>; type InternedGenericArg = GenericArgData;
type InternedGoal = Arc<GoalData<Self>>; // We could do the following, but that saves "only" 20mb on self while increasing inferecene
type InternedGoals = Vec<Goal<Self>>; // time by ~2.5%
// type InternedGoal = Interned<InternedWrapper<GoalData>>;
type InternedGoal = Arc<GoalData>;
type InternedGoals = Vec<Goal>;
type InternedSubstitution = Interned<InternedWrapper<SmallVec<[GenericArg; 2]>>>; type InternedSubstitution = Interned<InternedWrapper<SmallVec<[GenericArg; 2]>>>;
type InternedProgramClauses = Interned<InternedWrapper<Vec<chalk_ir::ProgramClause<Self>>>>; type InternedProgramClauses = Interned<InternedWrapper<Vec<ProgramClause>>>;
type InternedProgramClause = chalk_ir::ProgramClauseData<Self>; type InternedProgramClause = ProgramClauseData;
type InternedQuantifiedWhereClauses = type InternedQuantifiedWhereClauses = Interned<InternedWrapper<Vec<QuantifiedWhereClause>>>;
Interned<InternedWrapper<Vec<chalk_ir::QuantifiedWhereClause<Self>>>>; type InternedVariableKinds = Interned<InternedWrapper<Vec<VariableKind>>>;
type InternedVariableKinds = Interned<InternedWrapper<Vec<chalk_ir::VariableKind<Interner>>>>; type InternedCanonicalVarKinds = Interned<InternedWrapper<Vec<CanonicalVarKind>>>;
type InternedCanonicalVarKinds = type InternedConstraints = Vec<InEnvironment<Constraint>>;
Interned<InternedWrapper<Vec<chalk_ir::CanonicalVarKind<Self>>>>; type InternedVariances = SmallVec<[Variance; 16]>;
type InternedConstraints = Vec<chalk_ir::InEnvironment<chalk_ir::Constraint<Self>>>;
type InternedVariances = Interned<InternedWrapper<Vec<chalk_ir::Variance>>>;
type DefId = InternId; type DefId = InternId;
type InternedAdtId = hir_def::AdtId; type InternedAdtId = hir_def::AdtId;
type Identifier = TypeAliasId; type Identifier = TypeAliasId;
@ -88,68 +95,51 @@ impl chalk_ir::interner::Interner for Interner {
} }
fn debug_opaque_ty_id( fn debug_opaque_ty_id(
opaque_ty_id: chalk_ir::OpaqueTyId<Self>, opaque_ty_id: OpaqueTyId,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> { ) -> Option<fmt::Result> {
Some(write!(fmt, "OpaqueTy#{}", opaque_ty_id.0)) Some(write!(fmt, "OpaqueTy#{}", opaque_ty_id.0))
} }
fn debug_fn_def_id( fn debug_fn_def_id(fn_def_id: FnDefId, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
fn_def_id: chalk_ir::FnDefId<Self>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
tls::with_current_program(|prog| Some(prog?.debug_fn_def_id(fn_def_id, fmt))) tls::with_current_program(|prog| Some(prog?.debug_fn_def_id(fn_def_id, fmt)))
} }
fn debug_closure_id( fn debug_closure_id(
_fn_def_id: chalk_ir::ClosureId<Self>, _fn_def_id: ClosureId,
_fmt: &mut fmt::Formatter<'_>, _fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> { ) -> Option<fmt::Result> {
None None
} }
fn debug_alias( fn debug_alias(alias: &AliasTy, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
alias: &chalk_ir::AliasTy<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
use std::fmt::Debug; use std::fmt::Debug;
match alias { match alias {
chalk_ir::AliasTy::Projection(projection_ty) => { AliasTy::Projection(projection_ty) => Interner::debug_projection_ty(projection_ty, fmt),
Interner::debug_projection_ty(projection_ty, fmt) AliasTy::Opaque(opaque_ty) => Some(opaque_ty.fmt(fmt)),
}
chalk_ir::AliasTy::Opaque(opaque_ty) => Some(opaque_ty.fmt(fmt)),
} }
} }
fn debug_projection_ty( fn debug_projection_ty(
proj: &chalk_ir::ProjectionTy<Interner>, proj: &ProjectionTy,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> { ) -> Option<fmt::Result> {
tls::with_current_program(|prog| Some(prog?.debug_projection_ty(proj, fmt))) tls::with_current_program(|prog| Some(prog?.debug_projection_ty(proj, fmt)))
} }
fn debug_opaque_ty( fn debug_opaque_ty(opaque_ty: &OpaqueTy, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
opaque_ty: &chalk_ir::OpaqueTy<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", opaque_ty.opaque_ty_id)) Some(write!(fmt, "{:?}", opaque_ty.opaque_ty_id))
} }
fn debug_ty(ty: &chalk_ir::Ty<Interner>, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> { fn debug_ty(ty: &Ty, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", ty.data(Interner))) Some(write!(fmt, "{:?}", ty.data(Interner)))
} }
fn debug_lifetime( fn debug_lifetime(lifetime: &Lifetime, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
lifetime: &chalk_ir::Lifetime<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", lifetime.data(Interner))) Some(write!(fmt, "{:?}", lifetime.data(Interner)))
} }
fn debug_const( fn debug_const(constant: &Const, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
constant: &chalk_ir::Const<Self>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", constant.data(Interner))) Some(write!(fmt, "{:?}", constant.data(Interner)))
} }
@ -161,102 +151,99 @@ impl chalk_ir::interner::Interner for Interner {
} }
fn debug_variable_kinds( fn debug_variable_kinds(
variable_kinds: &chalk_ir::VariableKinds<Self>, variable_kinds: &VariableKinds,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> { ) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", variable_kinds.as_slice(Interner))) Some(write!(fmt, "{:?}", variable_kinds.as_slice(Interner)))
} }
fn debug_variable_kinds_with_angles( fn debug_variable_kinds_with_angles(
variable_kinds: &chalk_ir::VariableKinds<Self>, variable_kinds: &VariableKinds,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> { ) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", variable_kinds.inner_debug(Interner))) Some(write!(fmt, "{:?}", variable_kinds.inner_debug(Interner)))
} }
fn debug_canonical_var_kinds( fn debug_canonical_var_kinds(
canonical_var_kinds: &chalk_ir::CanonicalVarKinds<Self>, canonical_var_kinds: &CanonicalVarKinds,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> { ) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", canonical_var_kinds.as_slice(Interner))) Some(write!(fmt, "{:?}", canonical_var_kinds.as_slice(Interner)))
} }
fn debug_goal(goal: &Goal<Interner>, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> { fn debug_goal(goal: &Goal, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
let goal_data = goal.data(Interner); let goal_data = goal.data(Interner);
Some(write!(fmt, "{goal_data:?}")) Some(write!(fmt, "{goal_data:?}"))
} }
fn debug_goals( fn debug_goals(goals: &Goals, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
goals: &chalk_ir::Goals<Interner>,
fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", goals.debug(Interner))) Some(write!(fmt, "{:?}", goals.debug(Interner)))
} }
fn debug_program_clause_implication( fn debug_program_clause_implication(
pci: &chalk_ir::ProgramClauseImplication<Interner>, pci: &ProgramClauseImplication<Self>,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> { ) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", pci.debug(Interner))) Some(write!(fmt, "{:?}", pci.debug(Interner)))
} }
fn debug_program_clause( fn debug_program_clause(
clause: &chalk_ir::ProgramClause<Self>, clause: &ProgramClause,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> { ) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", clause.data(Interner))) Some(write!(fmt, "{:?}", clause.data(Interner)))
} }
fn debug_program_clauses( fn debug_program_clauses(
clauses: &chalk_ir::ProgramClauses<Self>, clauses: &ProgramClauses,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> { ) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", clauses.as_slice(Interner))) Some(write!(fmt, "{:?}", clauses.as_slice(Interner)))
} }
fn debug_substitution( fn debug_substitution(
substitution: &chalk_ir::Substitution<Interner>, substitution: &Substitution,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> { ) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", substitution.debug(Interner))) Some(write!(fmt, "{:?}", substitution.debug(Interner)))
} }
fn debug_separator_trait_ref( fn debug_separator_trait_ref(
separator_trait_ref: &chalk_ir::SeparatorTraitRef<'_, Interner>, separator_trait_ref: &SeparatorTraitRef<'_, Interner>,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> { ) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", separator_trait_ref.debug(Interner))) Some(write!(fmt, "{:?}", separator_trait_ref.debug(Interner)))
} }
fn debug_quantified_where_clauses( fn debug_quantified_where_clauses(
clauses: &chalk_ir::QuantifiedWhereClauses<Self>, clauses: &QuantifiedWhereClauses,
fmt: &mut fmt::Formatter<'_>, fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> { ) -> Option<fmt::Result> {
Some(write!(fmt, "{:?}", clauses.as_slice(Interner))) Some(write!(fmt, "{:?}", clauses.as_slice(Interner)))
} }
fn debug_constraints( fn debug_constraints(
_clauses: &chalk_ir::Constraints<Self>, _clauses: &Constraints,
_fmt: &mut fmt::Formatter<'_>, _fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> { ) -> Option<fmt::Result> {
None None
} }
fn intern_ty(self, kind: chalk_ir::TyKind<Self>) -> Self::InternedType { fn intern_ty(self, kind: TyKind) -> Self::InternedType {
let flags = kind.compute_flags(self); let flags = kind.compute_flags(self);
Interned::new(InternedWrapper(chalk_ir::TyData { kind, flags })) Interned::new(InternedWrapper(TyData { kind, flags }))
} }
fn ty_data(self, ty: &Self::InternedType) -> &chalk_ir::TyData<Self> { fn ty_data(self, ty: &Self::InternedType) -> &TyData {
&ty.0 &ty.0
} }
fn intern_lifetime(self, lifetime: chalk_ir::LifetimeData<Self>) -> Self::InternedLifetime { fn intern_lifetime(self, lifetime: LifetimeData) -> Self::InternedLifetime {
Interned::new(InternedWrapper(lifetime)) Interned::new(InternedWrapper(lifetime))
} }
fn lifetime_data(self, lifetime: &Self::InternedLifetime) -> &chalk_ir::LifetimeData<Self> { fn lifetime_data(self, lifetime: &Self::InternedLifetime) -> &LifetimeData {
&lifetime.0 &lifetime.0
} }
fn intern_const(self, constant: chalk_ir::ConstData<Self>) -> Self::InternedConst { fn intern_const(self, constant: ConstData) -> Self::InternedConst {
Interned::new(InternedWrapper(constant)) Interned::new(InternedWrapper(constant))
} }
fn const_data(self, constant: &Self::InternedConst) -> &chalk_ir::ConstData<Self> { fn const_data(self, constant: &Self::InternedConst) -> &ConstData {
&constant.0 &constant.0
} }
@ -269,36 +256,33 @@ impl chalk_ir::interner::Interner for Interner {
!matches!(c1, ConstScalar::Bytes(..)) || !matches!(c2, ConstScalar::Bytes(..)) || (c1 == c2) !matches!(c1, ConstScalar::Bytes(..)) || !matches!(c2, ConstScalar::Bytes(..)) || (c1 == c2)
} }
fn intern_generic_arg( fn intern_generic_arg(self, parameter: GenericArgData) -> Self::InternedGenericArg {
self,
parameter: chalk_ir::GenericArgData<Self>,
) -> Self::InternedGenericArg {
parameter parameter
} }
fn generic_arg_data( fn generic_arg_data(self, parameter: &Self::InternedGenericArg) -> &GenericArgData {
self,
parameter: &Self::InternedGenericArg,
) -> &chalk_ir::GenericArgData<Self> {
parameter parameter
} }
fn intern_goal(self, goal: GoalData<Self>) -> Self::InternedGoal { fn intern_goal(self, goal: GoalData) -> Self::InternedGoal {
Arc::new(goal) Arc::new(goal)
} }
fn goal_data(self, goal: &Self::InternedGoal) -> &GoalData<Self> { fn goal_data(self, goal: &Self::InternedGoal) -> &GoalData {
goal goal
} }
fn intern_goals<E>( fn intern_goals<E>(
self, self,
data: impl IntoIterator<Item = Result<Goal<Self>, E>>, data: impl IntoIterator<Item = Result<Goal, E>>,
) -> Result<Self::InternedGoals, E> { ) -> Result<Self::InternedGoals, E> {
// let hash =
// std::hash::BuildHasher::hash_one(&BuildHasherDefault::<FxHasher>::default(), &goal);
// Interned::new(InternedWrapper(PreHashedWrapper(goal, hash)))
data.into_iter().collect() data.into_iter().collect()
} }
fn goals_data(self, goals: &Self::InternedGoals) -> &[Goal<Interner>] { fn goals_data(self, goals: &Self::InternedGoals) -> &[Goal] {
goals goals
} }
@ -313,37 +297,28 @@ impl chalk_ir::interner::Interner for Interner {
&substitution.as_ref().0 &substitution.as_ref().0
} }
fn intern_program_clause( fn intern_program_clause(self, data: ProgramClauseData) -> Self::InternedProgramClause {
self,
data: chalk_ir::ProgramClauseData<Self>,
) -> Self::InternedProgramClause {
data data
} }
fn program_clause_data( fn program_clause_data(self, clause: &Self::InternedProgramClause) -> &ProgramClauseData {
self,
clause: &Self::InternedProgramClause,
) -> &chalk_ir::ProgramClauseData<Self> {
clause clause
} }
fn intern_program_clauses<E>( fn intern_program_clauses<E>(
self, self,
data: impl IntoIterator<Item = Result<chalk_ir::ProgramClause<Self>, E>>, data: impl IntoIterator<Item = Result<ProgramClause, E>>,
) -> Result<Self::InternedProgramClauses, E> { ) -> Result<Self::InternedProgramClauses, E> {
Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?))) Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
} }
fn program_clauses_data( fn program_clauses_data(self, clauses: &Self::InternedProgramClauses) -> &[ProgramClause] {
self,
clauses: &Self::InternedProgramClauses,
) -> &[chalk_ir::ProgramClause<Self>] {
clauses clauses
} }
fn intern_quantified_where_clauses<E>( fn intern_quantified_where_clauses<E>(
self, self,
data: impl IntoIterator<Item = Result<chalk_ir::QuantifiedWhereClause<Self>, E>>, data: impl IntoIterator<Item = Result<QuantifiedWhereClause, E>>,
) -> Result<Self::InternedQuantifiedWhereClauses, E> { ) -> Result<Self::InternedQuantifiedWhereClauses, E> {
Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?))) Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
} }
@ -351,27 +326,24 @@ impl chalk_ir::interner::Interner for Interner {
fn quantified_where_clauses_data( fn quantified_where_clauses_data(
self, self,
clauses: &Self::InternedQuantifiedWhereClauses, clauses: &Self::InternedQuantifiedWhereClauses,
) -> &[chalk_ir::QuantifiedWhereClause<Self>] { ) -> &[QuantifiedWhereClause] {
clauses clauses
} }
fn intern_generic_arg_kinds<E>( fn intern_generic_arg_kinds<E>(
self, self,
data: impl IntoIterator<Item = Result<chalk_ir::VariableKind<Self>, E>>, data: impl IntoIterator<Item = Result<VariableKind, E>>,
) -> Result<Self::InternedVariableKinds, E> { ) -> Result<Self::InternedVariableKinds, E> {
Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?))) Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
} }
fn variable_kinds_data( fn variable_kinds_data(self, parameter_kinds: &Self::InternedVariableKinds) -> &[VariableKind] {
self,
parameter_kinds: &Self::InternedVariableKinds,
) -> &[chalk_ir::VariableKind<Self>] {
&parameter_kinds.as_ref().0 &parameter_kinds.as_ref().0
} }
fn intern_canonical_var_kinds<E>( fn intern_canonical_var_kinds<E>(
self, self,
data: impl IntoIterator<Item = Result<chalk_ir::CanonicalVarKind<Self>, E>>, data: impl IntoIterator<Item = Result<CanonicalVarKind, E>>,
) -> Result<Self::InternedCanonicalVarKinds, E> { ) -> Result<Self::InternedCanonicalVarKinds, E> {
Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?))) Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?)))
} }
@ -379,30 +351,30 @@ impl chalk_ir::interner::Interner for Interner {
fn canonical_var_kinds_data( fn canonical_var_kinds_data(
self, self,
canonical_var_kinds: &Self::InternedCanonicalVarKinds, canonical_var_kinds: &Self::InternedCanonicalVarKinds,
) -> &[chalk_ir::CanonicalVarKind<Self>] { ) -> &[CanonicalVarKind] {
canonical_var_kinds canonical_var_kinds
} }
fn intern_constraints<E>( fn intern_constraints<E>(
self, self,
data: impl IntoIterator<Item = Result<chalk_ir::InEnvironment<chalk_ir::Constraint<Self>>, E>>, data: impl IntoIterator<Item = Result<InEnvironment<Constraint>, E>>,
) -> Result<Self::InternedConstraints, E> { ) -> Result<Self::InternedConstraints, E> {
data.into_iter().collect() data.into_iter().collect()
} }
fn constraints_data( fn constraints_data(
self, self,
constraints: &Self::InternedConstraints, constraints: &Self::InternedConstraints,
) -> &[chalk_ir::InEnvironment<chalk_ir::Constraint<Self>>] { ) -> &[InEnvironment<Constraint>] {
constraints constraints
} }
fn intern_variances<E>( fn intern_variances<E>(
self, self,
data: impl IntoIterator<Item = Result<chalk_ir::Variance, E>>, data: impl IntoIterator<Item = Result<Variance, E>>,
) -> Result<Self::InternedVariances, E> { ) -> Result<Self::InternedVariances, E> {
Ok(Interned::new(InternedWrapper(data.into_iter().collect::<Result<_, _>>()?))) data.into_iter().collect::<Result<_, _>>()
} }
fn variances_data(self, variances: &Self::InternedVariances) -> &[chalk_ir::Variance] { fn variances_data(self, variances: &Self::InternedVariances) -> &[Variance] {
variances variances
} }
} }

View File

@ -164,7 +164,7 @@ fn layout_of_simd_ty(
}; };
// Compute the ABI of the element type: // Compute the ABI of the element type:
let e_ly = db.layout_of_ty(e_ty, env.clone())?; let e_ly = db.layout_of_ty(e_ty, env)?;
let Abi::Scalar(e_abi) = e_ly.abi else { let Abi::Scalar(e_abi) = e_ly.abi else {
return Err(LayoutError::Unknown); return Err(LayoutError::Unknown);
}; };
@ -204,17 +204,17 @@ pub fn layout_of_ty_query(
}; };
let cx = LayoutCx { target: &target }; let cx = LayoutCx { target: &target };
let dl = &*cx.current_data_layout(); let dl = &*cx.current_data_layout();
let ty = normalize(db, trait_env.clone(), ty.clone()); let ty = normalize(db, trait_env.clone(), ty);
let result = match ty.kind(Interner) { let result = match ty.kind(Interner) {
TyKind::Adt(AdtId(def), subst) => { TyKind::Adt(AdtId(def), subst) => {
if let hir_def::AdtId::StructId(s) = def { if let hir_def::AdtId::StructId(s) = def {
let data = db.struct_data(*s); let data = db.struct_data(*s);
let repr = data.repr.unwrap_or_default(); let repr = data.repr.unwrap_or_default();
if repr.simd() { if repr.simd() {
return layout_of_simd_ty(db, *s, subst, trait_env.clone(), &target); return layout_of_simd_ty(db, *s, subst, trait_env, &target);
} }
}; };
return db.layout_of_adt(*def, subst.clone(), trait_env.clone()); return db.layout_of_adt(*def, subst.clone(), trait_env);
} }
TyKind::Scalar(s) => match s { TyKind::Scalar(s) => match s {
chalk_ir::Scalar::Bool => Layout::scalar( chalk_ir::Scalar::Bool => Layout::scalar(
@ -280,7 +280,7 @@ pub fn layout_of_ty_query(
} }
TyKind::Array(element, count) => { TyKind::Array(element, count) => {
let count = try_const_usize(db, &count).ok_or(LayoutError::HasErrorConst)? as u64; let count = try_const_usize(db, &count).ok_or(LayoutError::HasErrorConst)? as u64;
let element = db.layout_of_ty(element.clone(), trait_env.clone())?; let element = db.layout_of_ty(element.clone(), trait_env)?;
let size = element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow)?; let size = element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow)?;
let abi = if count != 0 && matches!(element.abi, Abi::Uninhabited) { let abi = if count != 0 && matches!(element.abi, Abi::Uninhabited) {
@ -303,7 +303,7 @@ pub fn layout_of_ty_query(
} }
} }
TyKind::Slice(element) => { TyKind::Slice(element) => {
let element = db.layout_of_ty(element.clone(), trait_env.clone())?; let element = db.layout_of_ty(element.clone(), trait_env)?;
Layout { Layout {
variants: Variants::Single { index: struct_variant_idx() }, variants: Variants::Single { index: struct_variant_idx() },
fields: FieldsShape::Array { stride: element.size, count: 0 }, fields: FieldsShape::Array { stride: element.size, count: 0 },
@ -345,7 +345,7 @@ pub fn layout_of_ty_query(
})) }))
.intern(Interner); .intern(Interner);
} }
unsized_part = normalize(db, trait_env.clone(), unsized_part); unsized_part = normalize(db, trait_env, unsized_part);
let metadata = match unsized_part.kind(Interner) { let metadata = match unsized_part.kind(Interner) {
TyKind::Slice(_) | TyKind::Str => { TyKind::Slice(_) | TyKind::Str => {
scalar_unit(dl, Primitive::Int(dl.ptr_sized_integer(), false)) scalar_unit(dl, Primitive::Int(dl.ptr_sized_integer(), false))
@ -384,7 +384,7 @@ pub fn layout_of_ty_query(
match impl_trait_id { match impl_trait_id {
crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => { crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => {
let infer = db.infer(func.into()); let infer = db.infer(func.into());
return db.layout_of_ty(infer.type_of_rpit[idx].clone(), trait_env.clone()); return db.layout_of_ty(infer.type_of_rpit[idx].clone(), trait_env);
} }
crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => { crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => {
return Err(LayoutError::NotImplemented) return Err(LayoutError::NotImplemented)

View File

@ -220,6 +220,36 @@ fn recursive() {
); );
} }
#[test]
fn repr_packed() {
size_and_align! {
#[repr(packed)]
struct Goal;
}
size_and_align! {
#[repr(packed(2))]
struct Goal;
}
size_and_align! {
#[repr(packed(4))]
struct Goal;
}
size_and_align! {
#[repr(packed)]
struct Goal(i32);
}
size_and_align! {
#[repr(packed(2))]
struct Goal(i32);
}
size_and_align! {
#[repr(packed(4))]
struct Goal(i32);
}
check_size_and_align("#[repr(packed(5))] struct Goal(i32);", "", 4, 1);
}
#[test] #[test]
fn generic() { fn generic() {
size_and_align! { size_and_align! {

View File

@ -37,22 +37,22 @@ mod tests;
mod test_db; mod test_db;
use std::{ use std::{
collections::{hash_map::Entry, HashMap}, collections::hash_map::Entry,
hash::Hash, hash::{BuildHasherDefault, Hash},
}; };
use chalk_ir::{ use chalk_ir::{
fold::{Shift, TypeFoldable}, fold::{Shift, TypeFoldable},
interner::HasInterner, interner::HasInterner,
visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor}, visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor},
NoSolution, TyData, NoSolution,
}; };
use either::Either; use either::Either;
use hir_def::{hir::ExprId, type_ref::Rawness, GeneralConstId, TypeOrConstParamId}; use hir_def::{hir::ExprId, type_ref::Rawness, GeneralConstId, TypeOrConstParamId};
use hir_expand::name; use hir_expand::name;
use la_arena::{Arena, Idx}; use la_arena::{Arena, Idx};
use mir::{MirEvalError, VTableMap}; use mir::{MirEvalError, VTableMap};
use rustc_hash::FxHashSet; use rustc_hash::{FxHashMap, FxHashSet};
use syntax::ast::{make, ConstArg}; use syntax::ast::{make, ConstArg};
use traits::FnTrait; use traits::FnTrait;
use triomphe::Arc; use triomphe::Arc;
@ -152,32 +152,64 @@ pub type DomainGoal = chalk_ir::DomainGoal<Interner>;
pub type Goal = chalk_ir::Goal<Interner>; pub type Goal = chalk_ir::Goal<Interner>;
pub type AliasEq = chalk_ir::AliasEq<Interner>; pub type AliasEq = chalk_ir::AliasEq<Interner>;
pub type Solution = chalk_solve::Solution<Interner>; pub type Solution = chalk_solve::Solution<Interner>;
pub type Constraint = chalk_ir::Constraint<Interner>;
pub type Constraints = chalk_ir::Constraints<Interner>;
pub type ConstrainedSubst = chalk_ir::ConstrainedSubst<Interner>; pub type ConstrainedSubst = chalk_ir::ConstrainedSubst<Interner>;
pub type Guidance = chalk_solve::Guidance<Interner>; pub type Guidance = chalk_solve::Guidance<Interner>;
pub type WhereClause = chalk_ir::WhereClause<Interner>; pub type WhereClause = chalk_ir::WhereClause<Interner>;
pub type CanonicalVarKind = chalk_ir::CanonicalVarKind<Interner>;
pub type GoalData = chalk_ir::GoalData<Interner>;
pub type Goals = chalk_ir::Goals<Interner>;
pub type ProgramClauseData = chalk_ir::ProgramClauseData<Interner>;
pub type ProgramClause = chalk_ir::ProgramClause<Interner>;
pub type ProgramClauses = chalk_ir::ProgramClauses<Interner>;
pub type TyData = chalk_ir::TyData<Interner>;
pub type Variances = chalk_ir::Variances<Interner>;
/// A constant can have reference to other things. Memory map job is holding /// A constant can have reference to other things. Memory map job is holding
/// the necessary bits of memory of the const eval session to keep the constant /// the necessary bits of memory of the const eval session to keep the constant
/// meaningful. /// meaningful.
#[derive(Debug, Default, Clone, PartialEq, Eq)] #[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct MemoryMap { pub enum MemoryMap {
pub memory: HashMap<usize, Vec<u8>>, #[default]
pub vtable: VTableMap, Empty,
Simple(Box<[u8]>),
Complex(Box<ComplexMemoryMap>),
} }
impl MemoryMap { #[derive(Debug, Default, Clone, PartialEq, Eq)]
fn insert(&mut self, addr: usize, x: Vec<u8>) { pub struct ComplexMemoryMap {
memory: FxHashMap<usize, Box<[u8]>>,
vtable: VTableMap,
}
impl ComplexMemoryMap {
fn insert(&mut self, addr: usize, val: Box<[u8]>) {
match self.memory.entry(addr) { match self.memory.entry(addr) {
Entry::Occupied(mut e) => { Entry::Occupied(mut e) => {
if e.get().len() < x.len() { if e.get().len() < val.len() {
e.insert(x); e.insert(val);
} }
} }
Entry::Vacant(e) => { Entry::Vacant(e) => {
e.insert(x); e.insert(val);
} }
} }
} }
}
impl MemoryMap {
pub fn vtable_ty(&self, id: usize) -> Result<&Ty, MirEvalError> {
match self {
MemoryMap::Empty | MemoryMap::Simple(_) => Err(MirEvalError::InvalidVTableId(id)),
MemoryMap::Complex(cm) => cm.vtable.ty(id),
}
}
fn simple(v: Box<[u8]>) -> Self {
MemoryMap::Simple(v)
}
/// This functions convert each address by a function `f` which gets the byte intervals and assign an address /// This functions convert each address by a function `f` which gets the byte intervals and assign an address
/// to them. It is useful when you want to load a constant with a memory map in a new memory. You can pass an /// to them. It is useful when you want to load a constant with a memory map in a new memory. You can pass an
@ -185,22 +217,33 @@ impl MemoryMap {
fn transform_addresses( fn transform_addresses(
&self, &self,
mut f: impl FnMut(&[u8], usize) -> Result<usize, MirEvalError>, mut f: impl FnMut(&[u8], usize) -> Result<usize, MirEvalError>,
) -> Result<HashMap<usize, usize>, MirEvalError> { ) -> Result<FxHashMap<usize, usize>, MirEvalError> {
self.memory let mut transform = |(addr, val): (&usize, &Box<[u8]>)| {
.iter() let addr = *addr;
.map(|x| { let align = if addr == 0 { 64 } else { (addr - (addr & (addr - 1))).min(64) };
let addr = *x.0; f(val, align).and_then(|it| Ok((addr, it)))
let align = if addr == 0 { 64 } else { (addr - (addr & (addr - 1))).min(64) }; };
Ok((addr, f(x.1, align)?)) match self {
}) MemoryMap::Empty => Ok(Default::default()),
.collect() MemoryMap::Simple(m) => transform((&0, m)).map(|(addr, val)| {
let mut map = FxHashMap::with_capacity_and_hasher(1, BuildHasherDefault::default());
map.insert(addr, val);
map
}),
MemoryMap::Complex(cm) => cm.memory.iter().map(transform).collect(),
}
} }
fn get<'a>(&'a self, addr: usize, size: usize) -> Option<&'a [u8]> { fn get(&self, addr: usize, size: usize) -> Option<&[u8]> {
if size == 0 { if size == 0 {
Some(&[]) Some(&[])
} else { } else {
self.memory.get(&addr)?.get(0..size) match self {
MemoryMap::Empty => Some(&[]),
MemoryMap::Simple(m) if addr == 0 => m.get(0..size),
MemoryMap::Simple(_) => None,
MemoryMap::Complex(cm) => cm.memory.get(&addr)?.get(0..size),
}
} }
} }
} }
@ -208,7 +251,7 @@ impl MemoryMap {
/// A concrete constant value /// A concrete constant value
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub enum ConstScalar { pub enum ConstScalar {
Bytes(Vec<u8>, MemoryMap), Bytes(Box<[u8]>, MemoryMap),
// FIXME: this is a hack to get around chalk not being able to represent unevaluatable // FIXME: this is a hack to get around chalk not being able to represent unevaluatable
// constants // constants
UnevaluatedConst(GeneralConstId, Substitution), UnevaluatedConst(GeneralConstId, Substitution),

View File

@ -1350,7 +1350,7 @@ pub(crate) fn resolve_indexing_op(
ty: Canonical<Ty>, ty: Canonical<Ty>,
index_trait: TraitId, index_trait: TraitId,
) -> Option<ReceiverAdjustments> { ) -> Option<ReceiverAdjustments> {
let mut table = InferenceTable::new(db, env.clone()); let mut table = InferenceTable::new(db, env);
let ty = table.instantiate_canonical(ty); let ty = table.instantiate_canonical(ty);
let deref_chain = autoderef_method_receiver(&mut table, ty); let deref_chain = autoderef_method_receiver(&mut table, ty);
for (ty, adj) in deref_chain { for (ty, adj) in deref_chain {

View File

@ -14,9 +14,10 @@ use crate::{
}; };
use base_db::CrateId; use base_db::CrateId;
use chalk_ir::Mutability; use chalk_ir::Mutability;
use either::Either;
use hir_def::{ use hir_def::{
hir::{BindingId, Expr, ExprId, Ordering, PatId}, hir::{BindingId, Expr, ExprId, Ordering, PatId},
DefWithBodyId, FieldId, StaticId, UnionId, VariantId, DefWithBodyId, FieldId, StaticId, TupleFieldId, UnionId, VariantId,
}; };
use la_arena::{Arena, ArenaMap, Idx, RawIdx}; use la_arena::{Arena, ArenaMap, Idx, RawIdx};
@ -97,16 +98,16 @@ pub enum Operand {
} }
impl Operand { impl Operand {
fn from_concrete_const(data: Vec<u8>, memory_map: MemoryMap, ty: Ty) -> Self { fn from_concrete_const(data: Box<[u8]>, memory_map: MemoryMap, ty: Ty) -> Self {
Operand::Constant(intern_const_scalar(ConstScalar::Bytes(data, memory_map), ty)) Operand::Constant(intern_const_scalar(ConstScalar::Bytes(data, memory_map), ty))
} }
fn from_bytes(data: Vec<u8>, ty: Ty) -> Self { fn from_bytes(data: Box<[u8]>, ty: Ty) -> Self {
Operand::from_concrete_const(data, MemoryMap::default(), ty) Operand::from_concrete_const(data, MemoryMap::default(), ty)
} }
fn const_zst(ty: Ty) -> Operand { fn const_zst(ty: Ty) -> Operand {
Self::from_bytes(vec![], ty) Self::from_bytes(Box::default(), ty)
} }
fn from_fn( fn from_fn(
@ -117,16 +118,16 @@ impl Operand {
let ty = let ty =
chalk_ir::TyKind::FnDef(CallableDefId::FunctionId(func_id).to_chalk(db), generic_args) chalk_ir::TyKind::FnDef(CallableDefId::FunctionId(func_id).to_chalk(db), generic_args)
.intern(Interner); .intern(Interner);
Operand::from_bytes(vec![], ty) Operand::from_bytes(Box::default(), ty)
} }
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum ProjectionElem<V, T> { pub enum ProjectionElem<V, T> {
Deref, Deref,
Field(FieldId), Field(Either<FieldId, TupleFieldId>),
// FIXME: get rid of this, and use FieldId for tuples and closures // FIXME: get rid of this, and use FieldId for tuples and closures
TupleOrClosureField(usize), ClosureField(usize),
Index(V), Index(V),
ConstantIndex { offset: u64, from_end: bool }, ConstantIndex { offset: u64, from_end: bool },
Subslice { from: u64, to: u64 }, Subslice { from: u64, to: u64 },
@ -161,7 +162,7 @@ impl<V, T> ProjectionElem<V, T> {
return TyKind::Error.intern(Interner); return TyKind::Error.intern(Interner);
} }
}, },
ProjectionElem::Field(f) => match &base.kind(Interner) { ProjectionElem::Field(Either::Left(f)) => match &base.kind(Interner) {
TyKind::Adt(_, subst) => { TyKind::Adt(_, subst) => {
db.field_types(f.parent)[f.local_id].clone().substitute(Interner, subst) db.field_types(f.parent)[f.local_id].clone().substitute(Interner, subst)
} }
@ -170,19 +171,25 @@ impl<V, T> ProjectionElem<V, T> {
return TyKind::Error.intern(Interner); return TyKind::Error.intern(Interner);
} }
}, },
ProjectionElem::TupleOrClosureField(f) => match &base.kind(Interner) { ProjectionElem::Field(Either::Right(f)) => match &base.kind(Interner) {
TyKind::Tuple(_, subst) => subst TyKind::Tuple(_, subst) => subst
.as_slice(Interner) .as_slice(Interner)
.get(*f) .get(f.index as usize)
.map(|x| x.assert_ty_ref(Interner)) .map(|x| x.assert_ty_ref(Interner))
.cloned() .cloned()
.unwrap_or_else(|| { .unwrap_or_else(|| {
never!("Out of bound tuple field"); never!("Out of bound tuple field");
TyKind::Error.intern(Interner) TyKind::Error.intern(Interner)
}), }),
_ => {
never!("Only tuple has tuple field");
return TyKind::Error.intern(Interner);
}
},
ProjectionElem::ClosureField(f) => match &base.kind(Interner) {
TyKind::Closure(id, subst) => closure_field(*id, subst, *f), TyKind::Closure(id, subst) => closure_field(*id, subst, *f),
_ => { _ => {
never!("Only tuple or closure has tuple or closure field"); never!("Only closure has closure field");
return TyKind::Error.intern(Interner); return TyKind::Error.intern(Interner);
} }
}, },

View File

@ -205,7 +205,7 @@ fn place_case(db: &dyn HirDatabase, body: &MirBody, lvalue: &Place) -> Projectio
| ProjectionElem::ConstantIndex { .. } | ProjectionElem::ConstantIndex { .. }
| ProjectionElem::Subslice { .. } | ProjectionElem::Subslice { .. }
| ProjectionElem::Field(_) | ProjectionElem::Field(_)
| ProjectionElem::TupleOrClosureField(_) | ProjectionElem::ClosureField(_)
| ProjectionElem::Index(_) => { | ProjectionElem::Index(_) => {
is_part_of = true; is_part_of = true;
} }

View File

@ -1,13 +1,6 @@
//! This module provides a MIR interpreter, which is used in const eval. //! This module provides a MIR interpreter, which is used in const eval.
use std::{ use std::{borrow::Cow, cell::RefCell, fmt::Write, iter, mem, ops::Range};
borrow::Cow,
cell::RefCell,
collections::{HashMap, HashSet},
fmt::Write,
iter, mem,
ops::Range,
};
use base_db::{CrateId, FileId}; use base_db::{CrateId, FileId};
use chalk_ir::{cast::Cast, Mutability}; use chalk_ir::{cast::Cast, Mutability};
@ -40,8 +33,8 @@ use crate::{
name, static_lifetime, name, static_lifetime,
traits::FnTrait, traits::FnTrait,
utils::{detect_variant_from_bytes, ClosureSubst}, utils::{detect_variant_from_bytes, ClosureSubst},
CallableDefId, ClosureId, Const, ConstScalar, FnDefId, Interner, MemoryMap, Substitution, CallableDefId, ClosureId, ComplexMemoryMap, Const, ConstScalar, FnDefId, Interner, MemoryMap,
TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind,
}; };
use super::{ use super::{
@ -98,6 +91,15 @@ impl VTableMap {
let id = from_bytes!(usize, bytes); let id = from_bytes!(usize, bytes);
self.ty(id) self.ty(id)
} }
pub fn shrink_to_fit(&mut self) {
self.id_to_ty.shrink_to_fit();
self.ty_to_id.shrink_to_fit();
}
fn is_empty(&self) -> bool {
self.id_to_ty.is_empty() && self.ty_to_id.is_empty()
}
} }
#[derive(Debug, Default, Clone, PartialEq, Eq)] #[derive(Debug, Default, Clone, PartialEq, Eq)]
@ -251,13 +253,6 @@ impl From<Interval> for IntervalOrOwned {
} }
impl IntervalOrOwned { impl IntervalOrOwned {
pub(crate) fn to_vec(self, memory: &Evaluator<'_>) -> Result<Vec<u8>> {
Ok(match self {
IntervalOrOwned::Owned(o) => o,
IntervalOrOwned::Borrowed(b) => b.get(memory)?.to_vec(),
})
}
fn get<'a>(&'a self, memory: &'a Evaluator<'a>) -> Result<&'a [u8]> { fn get<'a>(&'a self, memory: &'a Evaluator<'a>) -> Result<&'a [u8]> {
Ok(match self { Ok(match self {
IntervalOrOwned::Owned(o) => o, IntervalOrOwned::Owned(o) => o,
@ -291,8 +286,8 @@ impl Address {
} }
} }
fn to_bytes(&self) -> Vec<u8> { fn to_bytes(&self) -> [u8; mem::size_of::<usize>()] {
usize::to_le_bytes(self.to_usize()).to_vec() usize::to_le_bytes(self.to_usize())
} }
fn to_usize(&self) -> usize { fn to_usize(&self) -> usize {
@ -391,7 +386,7 @@ impl MirEvalError {
write!( write!(
f, f,
"Layout for type `{}` is not available due {err:?}", "Layout for type `{}` is not available due {err:?}",
ty.display(db).with_closure_style(ClosureStyle::ClosureWithId).to_string() ty.display(db).with_closure_style(ClosureStyle::ClosureWithId)
)?; )?;
} }
MirEvalError::MirLowerError(func, err) => { MirEvalError::MirLowerError(func, err) => {
@ -510,6 +505,20 @@ struct Locals {
drop_flags: DropFlags, drop_flags: DropFlags,
} }
pub struct MirOutput {
stdout: Vec<u8>,
stderr: Vec<u8>,
}
impl MirOutput {
pub fn stdout(&self) -> Cow<'_, str> {
String::from_utf8_lossy(&self.stdout)
}
pub fn stderr(&self) -> Cow<'_, str> {
String::from_utf8_lossy(&self.stderr)
}
}
pub fn interpret_mir( pub fn interpret_mir(
db: &dyn HirDatabase, db: &dyn HirDatabase,
body: Arc<MirBody>, body: Arc<MirBody>,
@ -520,27 +529,31 @@ pub fn interpret_mir(
// (and probably should) do better here, for example by excluding bindings outside of the target expression. // (and probably should) do better here, for example by excluding bindings outside of the target expression.
assert_placeholder_ty_is_unused: bool, assert_placeholder_ty_is_unused: bool,
trait_env: Option<Arc<TraitEnvironment>>, trait_env: Option<Arc<TraitEnvironment>>,
) -> (Result<Const>, String, String) { ) -> (Result<Const>, MirOutput) {
let ty = body.locals[return_slot()].ty.clone(); let ty = body.locals[return_slot()].ty.clone();
let mut evaluator = Evaluator::new(db, body.owner, assert_placeholder_ty_is_unused, trait_env); let mut evaluator = Evaluator::new(db, body.owner, assert_placeholder_ty_is_unused, trait_env);
let it: Result<Const> = (|| { let it: Result<Const> = (|| {
if evaluator.ptr_size() != std::mem::size_of::<usize>() { if evaluator.ptr_size() != std::mem::size_of::<usize>() {
not_supported!("targets with different pointer size from host"); not_supported!("targets with different pointer size from host");
} }
let bytes = evaluator.interpret_mir(body.clone(), None.into_iter())?; let interval = evaluator.interpret_mir(body.clone(), None.into_iter())?;
let bytes = interval.get(&evaluator)?;
let mut memory_map = evaluator.create_memory_map( let mut memory_map = evaluator.create_memory_map(
&bytes, bytes,
&ty, &ty,
&Locals { ptr: ArenaMap::new(), body, drop_flags: DropFlags::default() }, &Locals { ptr: ArenaMap::new(), body, drop_flags: DropFlags::default() },
)?; )?;
memory_map.vtable = evaluator.vtable_map.clone(); let bytes = bytes.into();
let memory_map = if memory_map.memory.is_empty() && evaluator.vtable_map.is_empty() {
MemoryMap::Empty
} else {
memory_map.vtable = mem::take(&mut evaluator.vtable_map);
memory_map.vtable.shrink_to_fit();
MemoryMap::Complex(Box::new(memory_map))
};
return Ok(intern_const_scalar(ConstScalar::Bytes(bytes, memory_map), ty)); return Ok(intern_const_scalar(ConstScalar::Bytes(bytes, memory_map), ty));
})(); })();
( (it, MirOutput { stdout: evaluator.stdout, stderr: evaluator.stderr })
it,
String::from_utf8_lossy(&evaluator.stdout).into_owned(),
String::from_utf8_lossy(&evaluator.stderr).into_owned(),
)
} }
#[cfg(test)] #[cfg(test)]
@ -562,7 +575,7 @@ impl Evaluator<'_> {
code_stack: vec![], code_stack: vec![],
vtable_map: VTableMap::default(), vtable_map: VTableMap::default(),
thread_local_storage: TlsData::default(), thread_local_storage: TlsData::default(),
static_locations: HashMap::default(), static_locations: Default::default(),
db, db,
random_state: oorandom::Rand64::new(0), random_state: oorandom::Rand64::new(0),
trait_env: trait_env.unwrap_or_else(|| db.trait_environment_for_body(owner)), trait_env: trait_env.unwrap_or_else(|| db.trait_environment_for_body(owner)),
@ -573,11 +586,11 @@ impl Evaluator<'_> {
stack_depth_limit: 100, stack_depth_limit: 100,
execution_limit: EXECUTION_LIMIT, execution_limit: EXECUTION_LIMIT,
memory_limit: 1000_000_000, // 2GB, 1GB for stack and 1GB for heap memory_limit: 1000_000_000, // 2GB, 1GB for stack and 1GB for heap
layout_cache: RefCell::new(HashMap::default()), layout_cache: RefCell::new(Default::default()),
projected_ty_cache: RefCell::new(HashMap::default()), projected_ty_cache: RefCell::new(Default::default()),
not_special_fn_cache: RefCell::new(HashSet::default()), not_special_fn_cache: RefCell::new(Default::default()),
mir_or_dyn_index_cache: RefCell::new(HashMap::default()), mir_or_dyn_index_cache: RefCell::new(Default::default()),
unused_locals_store: RefCell::new(HashMap::default()), unused_locals_store: RefCell::new(Default::default()),
cached_ptr_size: match db.target_data_layout(crate_id) { cached_ptr_size: match db.target_data_layout(crate_id) {
Some(it) => it.pointer_size.bytes_usize(), Some(it) => it.pointer_size.bytes_usize(),
None => 8, None => 8,
@ -720,13 +733,19 @@ impl Evaluator<'_> {
self.size_of_sized(&inner_ty, locals, "array inner type should be sized")?; self.size_of_sized(&inner_ty, locals, "array inner type should be sized")?;
addr = addr.offset(ty_size * (from as usize)); addr = addr.offset(ty_size * (from as usize));
} }
&ProjectionElem::TupleOrClosureField(f) => { &ProjectionElem::ClosureField(f) => {
let layout = self.layout(&prev_ty)?; let layout = self.layout(&prev_ty)?;
let offset = layout.fields.offset(f).bytes_usize(); let offset = layout.fields.offset(f).bytes_usize();
addr = addr.offset(offset); addr = addr.offset(offset);
metadata = None; // tuple field is always sized metadata = None;
} }
ProjectionElem::Field(f) => { ProjectionElem::Field(Either::Right(f)) => {
let layout = self.layout(&prev_ty)?;
let offset = layout.fields.offset(f.index as usize).bytes_usize();
addr = addr.offset(offset);
metadata = None; // tuple field is always sized FIXME: This is wrong, the tail can be unsized
}
ProjectionElem::Field(Either::Left(f)) => {
let layout = self.layout(&prev_ty)?; let layout = self.layout(&prev_ty)?;
let variant_layout = match &layout.variants { let variant_layout = match &layout.variants {
Variants::Single { .. } => &layout, Variants::Single { .. } => &layout,
@ -797,11 +816,11 @@ impl Evaluator<'_> {
}) })
} }
fn interpret_mir( fn interpret_mir<'slf>(
&mut self, &'slf mut self,
body: Arc<MirBody>, body: Arc<MirBody>,
args: impl Iterator<Item = IntervalOrOwned>, args: impl Iterator<Item = IntervalOrOwned>,
) -> Result<Vec<u8>> { ) -> Result<Interval> {
if let Some(it) = self.stack_depth_limit.checked_sub(1) { if let Some(it) = self.stack_depth_limit.checked_sub(1) {
self.stack_depth_limit = it; self.stack_depth_limit = it;
} else { } else {
@ -831,8 +850,8 @@ impl Evaluator<'_> {
match &statement.kind { match &statement.kind {
StatementKind::Assign(l, r) => { StatementKind::Assign(l, r) => {
let addr = self.place_addr(l, &locals)?; let addr = self.place_addr(l, &locals)?;
let result = self.eval_rvalue(r, &mut locals)?.to_vec(&self)?; let result = self.eval_rvalue(r, &mut locals)?;
self.write_memory(addr, &result)?; self.copy_from_interval_or_owned(addr, result)?;
locals locals
.drop_flags .drop_flags
.add_place(l.clone(), &locals.body.projection_store); .add_place(l.clone(), &locals.body.projection_store);
@ -951,7 +970,7 @@ impl Evaluator<'_> {
None => { None => {
self.code_stack = prev_code_stack; self.code_stack = prev_code_stack;
self.stack_depth_limit += 1; self.stack_depth_limit += 1;
return Ok(return_interval.get(self)?.to_vec()); return Ok(return_interval);
} }
Some(bb) => { Some(bb) => {
// We don't support const promotion, so we can't truncate the stack yet. // We don't support const promotion, so we can't truncate the stack yet.
@ -1044,7 +1063,7 @@ impl Evaluator<'_> {
Rvalue::Use(it) => Borrowed(self.eval_operand(it, locals)?), Rvalue::Use(it) => Borrowed(self.eval_operand(it, locals)?),
Rvalue::Ref(_, p) => { Rvalue::Ref(_, p) => {
let (addr, _, metadata) = self.place_addr_and_ty_and_metadata(p, locals)?; let (addr, _, metadata) = self.place_addr_and_ty_and_metadata(p, locals)?;
let mut r = addr.to_bytes(); let mut r = addr.to_bytes().to_vec();
if let Some(metadata) = metadata { if let Some(metadata) = metadata {
r.extend(metadata.get(self)?); r.extend(metadata.get(self)?);
} }
@ -1277,7 +1296,7 @@ impl Evaluator<'_> {
not_supported!("unsized box initialization"); not_supported!("unsized box initialization");
}; };
let addr = self.heap_allocate(size, align)?; let addr = self.heap_allocate(size, align)?;
Owned(addr.to_bytes()) Owned(addr.to_bytes().to_vec())
} }
Rvalue::CopyForDeref(_) => not_supported!("copy for deref"), Rvalue::CopyForDeref(_) => not_supported!("copy for deref"),
Rvalue::Aggregate(kind, values) => { Rvalue::Aggregate(kind, values) => {
@ -1514,7 +1533,7 @@ impl Evaluator<'_> {
} }
}, },
TyKind::Dyn(_) => { TyKind::Dyn(_) => {
let vtable = self.vtable_map.id(current_ty.clone()); let vtable = self.vtable_map.id(current_ty);
let mut r = Vec::with_capacity(16); let mut r = Vec::with_capacity(16);
let addr = addr.get(self)?; let addr = addr.get(self)?;
r.extend(addr.iter().copied()); r.extend(addr.iter().copied());
@ -1709,7 +1728,18 @@ impl Evaluator<'_> {
} }
let addr = self.heap_allocate(size, align)?; let addr = self.heap_allocate(size, align)?;
self.write_memory(addr, &v)?; self.write_memory(addr, &v)?;
self.patch_addresses(&patch_map, &memory_map.vtable, addr, ty, locals)?; self.patch_addresses(
&patch_map,
|bytes| match &memory_map {
MemoryMap::Empty | MemoryMap::Simple(_) => {
Err(MirEvalError::InvalidVTableId(from_bytes!(usize, bytes)))
}
MemoryMap::Complex(cm) => cm.vtable.ty_of_bytes(bytes),
},
addr,
ty,
locals,
)?;
Ok(Interval::new(addr, size)) Ok(Interval::new(addr, size))
} }
@ -1761,6 +1791,13 @@ impl Evaluator<'_> {
Ok(()) Ok(())
} }
fn copy_from_interval_or_owned(&mut self, addr: Address, r: IntervalOrOwned) -> Result<()> {
match r {
IntervalOrOwned::Borrowed(r) => self.copy_from_interval(addr, r),
IntervalOrOwned::Owned(r) => self.write_memory(addr, &r),
}
}
fn copy_from_interval(&mut self, addr: Address, r: Interval) -> Result<()> { fn copy_from_interval(&mut self, addr: Address, r: Interval) -> Result<()> {
if r.size == 0 { if r.size == 0 {
return Ok(()); return Ok(());
@ -1881,13 +1918,18 @@ impl Evaluator<'_> {
} }
} }
fn create_memory_map(&self, bytes: &[u8], ty: &Ty, locals: &Locals) -> Result<MemoryMap> { fn create_memory_map(
&self,
bytes: &[u8],
ty: &Ty,
locals: &Locals,
) -> Result<ComplexMemoryMap> {
fn rec( fn rec(
this: &Evaluator<'_>, this: &Evaluator<'_>,
bytes: &[u8], bytes: &[u8],
ty: &Ty, ty: &Ty,
locals: &Locals, locals: &Locals,
mm: &mut MemoryMap, mm: &mut ComplexMemoryMap,
) -> Result<()> { ) -> Result<()> {
match ty.kind(Interner) { match ty.kind(Interner) {
TyKind::Ref(_, _, t) => { TyKind::Ref(_, _, t) => {
@ -1897,7 +1939,7 @@ impl Evaluator<'_> {
let addr_usize = from_bytes!(usize, bytes); let addr_usize = from_bytes!(usize, bytes);
mm.insert( mm.insert(
addr_usize, addr_usize,
this.read_memory(Address::from_usize(addr_usize), size)?.to_vec(), this.read_memory(Address::from_usize(addr_usize), size)?.into(),
) )
} }
None => { None => {
@ -1923,7 +1965,7 @@ impl Evaluator<'_> {
let size = element_size * count; let size = element_size * count;
let addr = Address::from_bytes(addr)?; let addr = Address::from_bytes(addr)?;
let b = this.read_memory(addr, size)?; let b = this.read_memory(addr, size)?;
mm.insert(addr.to_usize(), b.to_vec()); mm.insert(addr.to_usize(), b.into());
if let Some(ty) = check_inner { if let Some(ty) = check_inner {
for i in 0..count { for i in 0..count {
let offset = element_size * i; let offset = element_size * i;
@ -1996,15 +2038,15 @@ impl Evaluator<'_> {
} }
Ok(()) Ok(())
} }
let mut mm = MemoryMap::default(); let mut mm = ComplexMemoryMap::default();
rec(self, bytes, ty, locals, &mut mm)?; rec(&self, bytes, ty, locals, &mut mm)?;
Ok(mm) Ok(mm)
} }
fn patch_addresses( fn patch_addresses<'vtable>(
&mut self, &mut self,
patch_map: &HashMap<usize, usize>, patch_map: &FxHashMap<usize, usize>,
old_vtable: &VTableMap, ty_of_bytes: impl Fn(&[u8]) -> Result<&'vtable Ty> + Copy,
addr: Address, addr: Address,
ty: &Ty, ty: &Ty,
locals: &Locals, locals: &Locals,
@ -2031,7 +2073,7 @@ impl Evaluator<'_> {
} }
} }
TyKind::Function(_) => { TyKind::Function(_) => {
let ty = old_vtable.ty_of_bytes(self.read_memory(addr, my_size)?)?.clone(); let ty = ty_of_bytes(self.read_memory(addr, my_size)?)?.clone();
let new_id = self.vtable_map.id(ty); let new_id = self.vtable_map.id(ty);
self.write_memory(addr, &new_id.to_le_bytes())?; self.write_memory(addr, &new_id.to_le_bytes())?;
} }
@ -2042,7 +2084,7 @@ impl Evaluator<'_> {
let ty = ty.clone().substitute(Interner, subst); let ty = ty.clone().substitute(Interner, subst);
self.patch_addresses( self.patch_addresses(
patch_map, patch_map,
old_vtable, ty_of_bytes,
addr.offset(offset), addr.offset(offset),
&ty, &ty,
locals, locals,
@ -2064,7 +2106,7 @@ impl Evaluator<'_> {
let ty = ty.clone().substitute(Interner, subst); let ty = ty.clone().substitute(Interner, subst);
self.patch_addresses( self.patch_addresses(
patch_map, patch_map,
old_vtable, ty_of_bytes,
addr.offset(offset), addr.offset(offset),
&ty, &ty,
locals, locals,
@ -2077,7 +2119,7 @@ impl Evaluator<'_> {
for (id, ty) in subst.iter(Interner).enumerate() { for (id, ty) in subst.iter(Interner).enumerate() {
let ty = ty.assert_ty_ref(Interner); // Tuple only has type argument let ty = ty.assert_ty_ref(Interner); // Tuple only has type argument
let offset = layout.fields.offset(id).bytes_usize(); let offset = layout.fields.offset(id).bytes_usize();
self.patch_addresses(patch_map, old_vtable, addr.offset(offset), ty, locals)?; self.patch_addresses(patch_map, ty_of_bytes, addr.offset(offset), ty, locals)?;
} }
} }
TyKind::Array(inner, len) => { TyKind::Array(inner, len) => {
@ -2089,7 +2131,7 @@ impl Evaluator<'_> {
for i in 0..len { for i in 0..len {
self.patch_addresses( self.patch_addresses(
patch_map, patch_map,
old_vtable, ty_of_bytes,
addr.offset(i * size), addr.offset(i * size),
inner, inner,
locals, locals,
@ -2160,14 +2202,14 @@ impl Evaluator<'_> {
.map_err(|it| MirEvalError::MirLowerErrorForClosure(closure, it))?; .map_err(|it| MirEvalError::MirLowerErrorForClosure(closure, it))?;
let closure_data = if mir_body.locals[mir_body.param_locals[0]].ty.as_reference().is_some() let closure_data = if mir_body.locals[mir_body.param_locals[0]].ty.as_reference().is_some()
{ {
closure_data.addr.to_bytes() closure_data.addr.to_bytes().to_vec()
} else { } else {
closure_data.get(self)?.to_owned() closure_data.get(self)?.to_owned()
}; };
let arg_bytes = iter::once(Ok(closure_data)) let arg_bytes = iter::once(Ok(closure_data))
.chain(args.iter().map(|it| Ok(it.get(&self)?.to_owned()))) .chain(args.iter().map(|it| Ok(it.get(&self)?.to_owned())))
.collect::<Result<Vec<_>>>()?; .collect::<Result<Vec<_>>>()?;
let bytes = self let interval = self
.interpret_mir(mir_body, arg_bytes.into_iter().map(IntervalOrOwned::Owned)) .interpret_mir(mir_body, arg_bytes.into_iter().map(IntervalOrOwned::Owned))
.map_err(|e| { .map_err(|e| {
MirEvalError::InFunction( MirEvalError::InFunction(
@ -2175,7 +2217,7 @@ impl Evaluator<'_> {
vec![(Either::Right(closure), span, locals.body.owner)], vec![(Either::Right(closure), span, locals.body.owner)],
) )
})?; })?;
destination.write_from_bytes(self, &bytes)?; destination.write_from_interval(self, interval)?;
Ok(None) Ok(None)
} }
@ -2368,7 +2410,7 @@ impl Evaluator<'_> {
vec![(Either::Left(def), span, locals.body.owner)], vec![(Either::Left(def), span, locals.body.owner)],
) )
})?; })?;
destination.write_from_bytes(self, &result)?; destination.write_from_interval(self, result)?;
None None
}) })
} }
@ -2546,7 +2588,7 @@ impl Evaluator<'_> {
body, body,
locals, locals,
drop_fn, drop_fn,
[IntervalOrOwned::Owned(addr.to_bytes())].into_iter(), iter::once(IntervalOrOwned::Owned(addr.to_bytes().to_vec())),
span, span,
Interval { addr: Address::Invalid(0), size: 0 }, Interval { addr: Address::Invalid(0), size: 0 },
None, None,
@ -2674,11 +2716,12 @@ pub fn render_const_using_debug_impl(
) else { ) else {
not_supported!("std::fmt::format not found"); not_supported!("std::fmt::format not found");
}; };
let message_string = evaluator.interpret_mir( let interval = evaluator.interpret_mir(
db.mir_body(format_fn.into()).map_err(|e| MirEvalError::MirLowerError(format_fn, e))?, db.mir_body(format_fn.into()).map_err(|e| MirEvalError::MirLowerError(format_fn, e))?,
[IntervalOrOwned::Borrowed(Interval { addr: a3, size: evaluator.ptr_size() * 6 })] [IntervalOrOwned::Borrowed(Interval { addr: a3, size: evaluator.ptr_size() * 6 })]
.into_iter(), .into_iter(),
)?; )?;
let message_string = interval.get(&evaluator)?;
let addr = let addr =
Address::from_bytes(&message_string[evaluator.ptr_size()..2 * evaluator.ptr_size()])?; Address::from_bytes(&message_string[evaluator.ptr_size()..2 * evaluator.ptr_size()])?;
let size = from_bytes!(usize, message_string[2 * evaluator.ptr_size()..]); let size = from_bytes!(usize, message_string[2 * evaluator.ptr_size()..]);

View File

@ -322,12 +322,13 @@ impl Evaluator<'_> {
let hir_def::resolver::ValueNs::FunctionId(format_fn) = format_fn else { let hir_def::resolver::ValueNs::FunctionId(format_fn) = format_fn else {
not_supported!("std::fmt::format is not a function") not_supported!("std::fmt::format is not a function")
}; };
let message_string = self.interpret_mir( let interval = self.interpret_mir(
self.db self.db
.mir_body(format_fn.into()) .mir_body(format_fn.into())
.map_err(|e| MirEvalError::MirLowerError(format_fn, e))?, .map_err(|e| MirEvalError::MirLowerError(format_fn, e))?,
args.map(|x| IntervalOrOwned::Owned(x.clone())), args.map(|x| IntervalOrOwned::Owned(x.clone())),
)?; )?;
let message_string = interval.get(self)?;
let addr = let addr =
Address::from_bytes(&message_string[self.ptr_size()..2 * self.ptr_size()])?; Address::from_bytes(&message_string[self.ptr_size()..2 * self.ptr_size()])?;
let size = from_bytes!(usize, message_string[2 * self.ptr_size()..]); let size = from_bytes!(usize, message_string[2 * self.ptr_size()..]);

View File

@ -31,9 +31,9 @@ fn eval_main(db: &TestDB, file_id: FileId) -> Result<(String, String), MirEvalEr
db.trait_environment(func_id.into()), db.trait_environment(func_id.into()),
) )
.map_err(|e| MirEvalError::MirLowerError(func_id.into(), e))?; .map_err(|e| MirEvalError::MirLowerError(func_id.into(), e))?;
let (result, stdout, stderr) = interpret_mir(db, body, false, None); let (result, output) = interpret_mir(db, body, false, None);
result?; result?;
Ok((stdout, stderr)) Ok((output.stdout().into_owned(), output.stderr().into_owned()))
} }
fn check_pass(ra_fixture: &str) { fn check_pass(ra_fixture: &str) {

View File

@ -15,7 +15,7 @@ use hir_def::{
path::Path, path::Path,
resolver::{resolver_for_expr, HasResolver, ResolveValueResult, ValueNs}, resolver::{resolver_for_expr, HasResolver, ResolveValueResult, ValueNs},
AdtId, DefWithBodyId, EnumVariantId, GeneralConstId, HasModule, ItemContainerId, LocalFieldId, AdtId, DefWithBodyId, EnumVariantId, GeneralConstId, HasModule, ItemContainerId, LocalFieldId,
Lookup, TraitId, TypeOrConstParamId, Lookup, TraitId, TupleId, TypeOrConstParamId,
}; };
use hir_expand::name::Name; use hir_expand::name::Name;
use la_arena::ArenaMap; use la_arena::ArenaMap;
@ -177,7 +177,7 @@ impl MirLowerError {
)?; )?;
writeln!(f, "Provided args: [")?; writeln!(f, "Provided args: [")?;
for g in subst.iter(Interner) { for g in subst.iter(Interner) {
write!(f, " {},", g.display(db).to_string())?; write!(f, " {},", g.display(db))?;
} }
writeln!(f, "]")?; writeln!(f, "]")?;
} }
@ -540,7 +540,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.write_bytes_to_place( self.write_bytes_to_place(
then_target, then_target,
place.clone(), place.clone(),
vec![1], Box::new([1]),
TyBuilder::bool(), TyBuilder::bool(),
MirSpan::Unknown, MirSpan::Unknown,
)?; )?;
@ -548,7 +548,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.write_bytes_to_place( self.write_bytes_to_place(
else_target, else_target,
place, place,
vec![0], Box::new([0]),
TyBuilder::bool(), TyBuilder::bool(),
MirSpan::Unknown, MirSpan::Unknown,
)?; )?;
@ -602,7 +602,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
generic_args, generic_args,
) )
.intern(Interner); .intern(Interner);
let func = Operand::from_bytes(vec![], ty); let func = Operand::from_bytes(Box::default(), ty);
return self.lower_call_and_args( return self.lower_call_and_args(
func, func,
iter::once(*callee).chain(args.iter().copied()), iter::once(*callee).chain(args.iter().copied()),
@ -615,7 +615,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
let callee_ty = self.expr_ty_after_adjustments(*callee); let callee_ty = self.expr_ty_after_adjustments(*callee);
match &callee_ty.kind(Interner) { match &callee_ty.kind(Interner) {
chalk_ir::TyKind::FnDef(..) => { chalk_ir::TyKind::FnDef(..) => {
let func = Operand::from_bytes(vec![], callee_ty.clone()); let func = Operand::from_bytes(Box::default(), callee_ty.clone());
self.lower_call_and_args( self.lower_call_and_args(
func, func,
args.iter().copied(), args.iter().copied(),
@ -828,12 +828,12 @@ impl<'ctx> MirLowerCtx<'ctx> {
Some(it) => it, Some(it) => it,
None => { None => {
let p = sp.project( let p = sp.project(
ProjectionElem::Field(FieldId { ProjectionElem::Field(Either::Left(FieldId {
parent: variant_id, parent: variant_id,
local_id: LocalFieldId::from_raw(RawIdx::from( local_id: LocalFieldId::from_raw(RawIdx::from(
i as u32, i as u32,
)), )),
}), })),
&mut self.result.projection_store, &mut self.result.projection_store,
); );
Operand::Copy(p) Operand::Copy(p)
@ -855,7 +855,10 @@ impl<'ctx> MirLowerCtx<'ctx> {
let local_id = let local_id =
variant_data.field(name).ok_or(MirLowerError::UnresolvedField)?; variant_data.field(name).ok_or(MirLowerError::UnresolvedField)?;
let place = place.project( let place = place.project(
PlaceElem::Field(FieldId { parent: union_id.into(), local_id }), PlaceElem::Field(Either::Left(FieldId {
parent: union_id.into(),
local_id,
})),
&mut self.result.projection_store, &mut self.result.projection_store,
); );
self.lower_expr_to_place(*expr, place, current) self.lower_expr_to_place(*expr, place, current)
@ -1110,7 +1113,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
Some("start") => lp.take(), Some("start") => lp.take(),
Some("end") => rp.take(), Some("end") => rp.take(),
Some("exhausted") => { Some("exhausted") => {
Some(Operand::from_bytes(vec![0], TyBuilder::bool())) Some(Operand::from_bytes(Box::new([0]), TyBuilder::bool()))
} }
_ => None, _ => None,
}; };
@ -1142,8 +1145,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
.map(|it| match it { .map(|it| match it {
ProjectionElem::Deref => ProjectionElem::Deref, ProjectionElem::Deref => ProjectionElem::Deref,
ProjectionElem::Field(it) => ProjectionElem::Field(it), ProjectionElem::Field(it) => ProjectionElem::Field(it),
ProjectionElem::TupleOrClosureField(it) => { ProjectionElem::ClosureField(it) => {
ProjectionElem::TupleOrClosureField(it) ProjectionElem::ClosureField(it)
} }
ProjectionElem::ConstantIndex { offset, from_end } => { ProjectionElem::ConstantIndex { offset, from_end } => {
ProjectionElem::ConstantIndex { offset, from_end } ProjectionElem::ConstantIndex { offset, from_end }
@ -1273,7 +1276,10 @@ impl<'ctx> MirLowerCtx<'ctx> {
Expr::Tuple { exprs, is_assignee_expr: _ } => { Expr::Tuple { exprs, is_assignee_expr: _ } => {
for (i, expr) in exprs.iter().enumerate() { for (i, expr) in exprs.iter().enumerate() {
let rhs = rhs.project( let rhs = rhs.project(
ProjectionElem::TupleOrClosureField(i), ProjectionElem::Field(Either::Right(TupleFieldId {
tuple: TupleId(!0), // Dummy this as its unused
index: i as u32,
})),
&mut self.result.projection_store, &mut self.result.projection_store,
); );
let Some(c) = self.lower_destructing_assignment(current, *expr, rhs, span)? let Some(c) = self.lower_destructing_assignment(current, *expr, rhs, span)?
@ -1337,11 +1343,14 @@ impl<'ctx> MirLowerCtx<'ctx> {
fn push_field_projection(&mut self, place: &mut Place, expr_id: ExprId) -> Result<()> { fn push_field_projection(&mut self, place: &mut Place, expr_id: ExprId) -> Result<()> {
if let Expr::Field { expr, name } = &self.body[expr_id] { if let Expr::Field { expr, name } = &self.body[expr_id] {
if let TyKind::Tuple(..) = self.expr_ty_after_adjustments(*expr).kind(Interner) { if let TyKind::Tuple(..) = self.expr_ty_after_adjustments(*expr).kind(Interner) {
let index = name let index =
.as_tuple_index() name.as_tuple_index().ok_or(MirLowerError::TypeError("named field on tuple"))?
.ok_or(MirLowerError::TypeError("named field on tuple"))?; as u32;
*place = place.project( *place = place.project(
ProjectionElem::TupleOrClosureField(index), ProjectionElem::Field(Either::Right(TupleFieldId {
tuple: TupleId(!0), // dummy as its unused
index,
})),
&mut self.result.projection_store, &mut self.result.projection_store,
) )
} else { } else {
@ -1386,46 +1395,43 @@ impl<'ctx> MirLowerCtx<'ctx> {
} }
fn lower_literal_to_operand(&mut self, ty: Ty, l: &Literal) -> Result<Operand> { fn lower_literal_to_operand(&mut self, ty: Ty, l: &Literal) -> Result<Operand> {
let size = self let size = || {
.db self.db
.layout_of_ty(ty.clone(), self.db.trait_environment_for_body(self.owner))? .layout_of_ty(ty.clone(), self.db.trait_environment_for_body(self.owner))
.size .map(|it| it.size.bytes_usize())
.bytes_usize(); };
let bytes = match l { const USIZE_SIZE: usize = mem::size_of::<usize>();
let bytes: Box<[_]> = match l {
hir_def::hir::Literal::String(b) => { hir_def::hir::Literal::String(b) => {
let b = b.as_bytes(); let mut data = [0; { 2 * USIZE_SIZE }];
let mut data = Vec::with_capacity(mem::size_of::<usize>() * 2); data[..USIZE_SIZE].copy_from_slice(&0usize.to_le_bytes());
data.extend(0usize.to_le_bytes()); data[USIZE_SIZE..].copy_from_slice(&b.len().to_le_bytes());
data.extend(b.len().to_le_bytes()); let mm = MemoryMap::simple(b.as_bytes().into());
let mut mm = MemoryMap::default(); return Ok(Operand::from_concrete_const(Box::new(data), mm, ty));
mm.insert(0, b.to_vec());
return Ok(Operand::from_concrete_const(data, mm, ty));
} }
hir_def::hir::Literal::CString(b) => { hir_def::hir::Literal::CString(b) => {
let bytes = b.iter().copied().chain(iter::once(0)).collect::<Vec<_>>(); let bytes = b.iter().copied().chain(iter::once(0)).collect::<Box<_>>();
let mut data = Vec::with_capacity(mem::size_of::<usize>() * 2); let mut data = [0; { 2 * USIZE_SIZE }];
data.extend(0usize.to_le_bytes()); data[..USIZE_SIZE].copy_from_slice(&0usize.to_le_bytes());
data.extend(bytes.len().to_le_bytes()); data[USIZE_SIZE..].copy_from_slice(&bytes.len().to_le_bytes());
let mut mm = MemoryMap::default(); let mm = MemoryMap::simple(bytes);
mm.insert(0, bytes); return Ok(Operand::from_concrete_const(Box::new(data), mm, ty));
return Ok(Operand::from_concrete_const(data, mm, ty));
} }
hir_def::hir::Literal::ByteString(b) => { hir_def::hir::Literal::ByteString(b) => {
let mut data = Vec::with_capacity(mem::size_of::<usize>() * 2); let mut data = [0; { 2 * USIZE_SIZE }];
data.extend(0usize.to_le_bytes()); data[..USIZE_SIZE].copy_from_slice(&0usize.to_le_bytes());
data.extend(b.len().to_le_bytes()); data[USIZE_SIZE..].copy_from_slice(&b.len().to_le_bytes());
let mut mm = MemoryMap::default(); let mm = MemoryMap::simple(b.clone());
mm.insert(0, b.to_vec()); return Ok(Operand::from_concrete_const(Box::new(data), mm, ty));
return Ok(Operand::from_concrete_const(data, mm, ty));
} }
hir_def::hir::Literal::Char(c) => u32::from(*c).to_le_bytes().into(), hir_def::hir::Literal::Char(c) => Box::new(u32::from(*c).to_le_bytes()),
hir_def::hir::Literal::Bool(b) => vec![*b as u8], hir_def::hir::Literal::Bool(b) => Box::new([*b as u8]),
hir_def::hir::Literal::Int(it, _) => it.to_le_bytes()[0..size].into(), hir_def::hir::Literal::Int(it, _) => Box::from(&it.to_le_bytes()[0..size()?]),
hir_def::hir::Literal::Uint(it, _) => it.to_le_bytes()[0..size].into(), hir_def::hir::Literal::Uint(it, _) => Box::from(&it.to_le_bytes()[0..size()?]),
hir_def::hir::Literal::Float(f, _) => match size { hir_def::hir::Literal::Float(f, _) => match size()? {
8 => f.into_f64().to_le_bytes().into(), 8 => Box::new(f.into_f64().to_le_bytes()),
4 => f.into_f32().to_le_bytes().into(), 4 => Box::new(f.into_f32().to_le_bytes()),
_ => { _ => {
return Err(MirLowerError::TypeError("float with size other than 4 or 8 bytes")) return Err(MirLowerError::TypeError("float with size other than 4 or 8 bytes"))
} }
@ -1474,7 +1480,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
&mut self, &mut self,
prev_block: BasicBlockId, prev_block: BasicBlockId,
place: Place, place: Place,
cv: Vec<u8>, cv: Box<[u8]>,
ty: Ty, ty: Ty,
span: MirSpan, span: MirSpan,
) -> Result<()> { ) -> Result<()> {
@ -2041,10 +2047,11 @@ pub fn mir_body_for_closure_query(
match (it, y) { match (it, y) {
(ProjectionElem::Deref, ProjectionElem::Deref) => (), (ProjectionElem::Deref, ProjectionElem::Deref) => (),
(ProjectionElem::Field(it), ProjectionElem::Field(y)) if it == y => (), (ProjectionElem::Field(it), ProjectionElem::Field(y)) if it == y => (),
( (ProjectionElem::ClosureField(it), ProjectionElem::ClosureField(y))
ProjectionElem::TupleOrClosureField(it), if it == y =>
ProjectionElem::TupleOrClosureField(y), {
) if it == y => (), ()
}
_ => return false, _ => return false,
} }
} }
@ -2054,7 +2061,7 @@ pub fn mir_body_for_closure_query(
Some(it) => { Some(it) => {
p.local = closure_local; p.local = closure_local;
let mut next_projs = closure_projection.clone(); let mut next_projs = closure_projection.clone();
next_projs.push(PlaceElem::TupleOrClosureField(it.1)); next_projs.push(PlaceElem::ClosureField(it.1));
let prev_projs = p.projection; let prev_projs = p.projection;
if it.0.kind != CaptureKind::ByValue { if it.0.kind != CaptureKind::ByValue {
next_projs.push(ProjectionElem::Deref); next_projs.push(ProjectionElem::Deref);
@ -2063,8 +2070,8 @@ pub fn mir_body_for_closure_query(
prev_projs prev_projs
.lookup(&store) .lookup(&store)
.iter() .iter()
.cloned() .skip(it.0.place.projections.len())
.skip(it.0.place.projections.len()), .cloned(),
); );
p.projection = store.intern(next_projs.into()); p.projection = store.intern(next_projs.into());
} }

View File

@ -218,7 +218,7 @@ impl MirLowerCtx<'_> {
self.push_field_projection(&mut r, expr_id)?; self.push_field_projection(&mut r, expr_id)?;
Ok(Some((r, current))) Ok(Some((r, current)))
} }
Expr::Index { base, index } => { Expr::Index { base, index, is_assignee_expr: _ } => {
let base_ty = self.expr_ty_after_adjustments(*base); let base_ty = self.expr_ty_after_adjustments(*base);
let index_ty = self.expr_ty_after_adjustments(*index); let index_ty = self.expr_ty_after_adjustments(*index);
if index_ty != TyBuilder::usize() if index_ty != TyBuilder::usize()

View File

@ -108,7 +108,12 @@ impl MirLowerCtx<'_> {
current_else, current_else,
args, args,
*ellipsis, *ellipsis,
(0..subst.len(Interner)).map(|i| PlaceElem::TupleOrClosureField(i)), (0..subst.len(Interner)).map(|i| {
PlaceElem::Field(Either::Right(TupleFieldId {
tuple: TupleId(!0), // Dummy as it is unused
index: i as u32,
}))
}),
&(&mut cond_place), &(&mut cond_place),
mode, mode,
)? )?
@ -239,7 +244,7 @@ impl MirLowerCtx<'_> {
); );
} else { } else {
let c = Operand::from_concrete_const( let c = Operand::from_concrete_const(
pattern_len.to_le_bytes().to_vec(), pattern_len.to_le_bytes().into(),
MemoryMap::default(), MemoryMap::default(),
TyBuilder::usize(), TyBuilder::usize(),
); );
@ -566,7 +571,10 @@ impl MirLowerCtx<'_> {
let field_id = let field_id =
variant_data.field(&x.name).ok_or(MirLowerError::UnresolvedField)?; variant_data.field(&x.name).ok_or(MirLowerError::UnresolvedField)?;
Ok(( Ok((
PlaceElem::Field(FieldId { parent: v.into(), local_id: field_id }), PlaceElem::Field(Either::Left(FieldId {
parent: v.into(),
local_id: field_id,
})),
x.pat, x.pat,
)) ))
}) })
@ -574,10 +582,9 @@ impl MirLowerCtx<'_> {
self.pattern_match_adt(current, current_else, it.into_iter(), cond_place, mode)? self.pattern_match_adt(current, current_else, it.into_iter(), cond_place, mode)?
} }
AdtPatternShape::Tuple { args, ellipsis } => { AdtPatternShape::Tuple { args, ellipsis } => {
let fields = variant_data let fields = variant_data.fields().iter().map(|(x, _)| {
.fields() PlaceElem::Field(Either::Left(FieldId { parent: v.into(), local_id: x }))
.iter() });
.map(|(x, _)| PlaceElem::Field(FieldId { parent: v.into(), local_id: x }));
self.pattern_match_tuple_like( self.pattern_match_tuple_like(
current, current,
current_else, current_else,

View File

@ -5,6 +5,7 @@ use std::{
mem, mem,
}; };
use either::Either;
use hir_def::{body::Body, hir::BindingId}; use hir_def::{body::Body, hir::BindingId};
use hir_expand::name::Name; use hir_expand::name::Name;
use la_arena::ArenaMap; use la_arena::ArenaMap;
@ -298,7 +299,7 @@ impl<'a> MirPrettyCtx<'a> {
f(this, local, head); f(this, local, head);
w!(this, ")"); w!(this, ")");
} }
ProjectionElem::Field(field) => { ProjectionElem::Field(Either::Left(field)) => {
let variant_data = field.parent.variant_data(this.db.upcast()); let variant_data = field.parent.variant_data(this.db.upcast());
let name = &variant_data.fields()[field.local_id].name; let name = &variant_data.fields()[field.local_id].name;
match field.parent { match field.parent {
@ -320,7 +321,11 @@ impl<'a> MirPrettyCtx<'a> {
} }
} }
} }
ProjectionElem::TupleOrClosureField(it) => { ProjectionElem::Field(Either::Right(field)) => {
f(this, local, head);
w!(this, ".{}", field.index);
}
ProjectionElem::ClosureField(it) => {
f(this, local, head); f(this, local, head);
w!(this, ".{}", it); w!(this, ".{}", it);
} }

View File

@ -4506,3 +4506,50 @@ fn ttt() {
"#, "#,
); );
} }
#[test]
fn infer_borrow() {
check_types(
r#"
//- minicore: index
pub struct SomeMap<K>;
pub trait Borrow<Borrowed: ?Sized> {
fn borrow(&self) -> &Borrowed;
}
impl<T: ?Sized> Borrow<T> for T {
fn borrow(&self) -> &T {
self
}
}
impl<T: ?Sized> Borrow<T> for &T {
fn borrow(&self) -> &T {
&**self
}
}
impl<K, KB: Borrow<K>> core::ops::Index<KB> for SomeMap<K> {
type Output = ();
fn index(&self, _: KB) -> &() {
&()
}
}
impl<K> core::ops::IndexMut<K> for SomeMap<K> {
fn index_mut(&mut self, _: K) -> &mut () {
&mut ()
}
}
fn foo() {
let mut map = SomeMap;
map["a"] = ();
map;
//^^^ SomeMap<&str>
}
"#,
);
}

View File

@ -1,5 +1,7 @@
//! Attributes & documentation for hir types. //! Attributes & documentation for hir types.
use std::ops::ControlFlow;
use base_db::FileId; use base_db::FileId;
use hir_def::{ use hir_def::{
attr::AttrsWithOwner, attr::AttrsWithOwner,
@ -13,13 +15,13 @@ use hir_expand::{
name::Name, name::Name,
span_map::{RealSpanMap, SpanMapRef}, span_map::{RealSpanMap, SpanMapRef},
}; };
use hir_ty::db::HirDatabase; use hir_ty::{db::HirDatabase, method_resolution};
use syntax::{ast, AstNode}; use syntax::{ast, AstNode};
use crate::{ use crate::{
Adt, AsAssocItem, AssocItem, BuiltinType, Const, ConstParam, DocLinkDef, Enum, ExternCrateDecl, Adt, AsAssocItem, AssocItem, BuiltinType, Const, ConstParam, DocLinkDef, Enum, ExternCrateDecl,
Field, Function, GenericParam, Impl, LifetimeParam, Macro, Module, ModuleDef, Static, Struct, Field, Function, GenericParam, HasCrate, Impl, LifetimeParam, Macro, Module, ModuleDef, Static,
Trait, TraitAlias, TypeAlias, TypeParam, Union, Variant, VariantDef, Struct, Trait, TraitAlias, Type, TypeAlias, TypeParam, Union, Variant, VariantDef,
}; };
pub trait HasAttrs { pub trait HasAttrs {
@ -99,9 +101,6 @@ pub fn resolve_doc_path_on(
link: &str, link: &str,
ns: Option<Namespace>, ns: Option<Namespace>,
) -> Option<DocLinkDef> { ) -> Option<DocLinkDef> {
// AttrDefId::FieldId(it) => it.parent.resolver(db.upcast()),
// AttrDefId::EnumVariantId(it) => it.parent.resolver(db.upcast()),
resolve_doc_path_on_(db, link, def.attr_id(), ns) resolve_doc_path_on_(db, link, def.attr_id(), ns)
} }
@ -205,8 +204,14 @@ fn resolve_assoc_or_field(
} }
}; };
// FIXME: Resolve associated items here, e.g. `Option::map`. Note that associated items take // Resolve inherent items first, then trait items, then fields.
// precedence over fields. if let Some(assoc_item_def) = resolve_assoc_item(db, &ty, &name, ns) {
return Some(assoc_item_def);
}
if let Some(impl_trait_item_def) = resolve_impl_trait_item(db, resolver, &ty, &name, ns) {
return Some(impl_trait_item_def);
}
let variant_def = match ty.as_adt()? { let variant_def = match ty.as_adt()? {
Adt::Struct(it) => it.into(), Adt::Struct(it) => it.into(),
@ -216,6 +221,65 @@ fn resolve_assoc_or_field(
resolve_field(db, variant_def, name, ns) resolve_field(db, variant_def, name, ns)
} }
fn resolve_assoc_item(
db: &dyn HirDatabase,
ty: &Type,
name: &Name,
ns: Option<Namespace>,
) -> Option<DocLinkDef> {
ty.iterate_assoc_items(db, ty.krate(db), move |assoc_item| {
if assoc_item.name(db)? != *name {
return None;
}
as_module_def_if_namespace_matches(assoc_item, ns)
})
}
fn resolve_impl_trait_item(
db: &dyn HirDatabase,
resolver: Resolver,
ty: &Type,
name: &Name,
ns: Option<Namespace>,
) -> Option<DocLinkDef> {
let canonical = ty.canonical();
let krate = ty.krate(db);
let environment = resolver.generic_def().map_or_else(
|| crate::TraitEnvironment::empty(krate.id).into(),
|d| db.trait_environment(d),
);
let traits_in_scope = resolver.traits_in_scope(db.upcast());
let mut result = None;
// `ty.iterate_path_candidates()` require a scope, which is not available when resolving
// attributes here. Use path resolution directly instead.
//
// FIXME: resolve type aliases (which are not yielded by iterate_path_candidates)
method_resolution::iterate_path_candidates(
&canonical,
db,
environment,
&traits_in_scope,
method_resolution::VisibleFromModule::None,
Some(name),
&mut |assoc_item_id| {
// If two traits in scope define the same item, Rustdoc links to no specific trait (for
// instance, given two methods `a`, Rustdoc simply links to `method.a` with no
// disambiguation) so we just pick the first one we find as well.
result = as_module_def_if_namespace_matches(assoc_item_id.into(), ns);
if result.is_some() {
ControlFlow::Break(())
} else {
ControlFlow::Continue(())
}
},
);
result
}
fn resolve_field( fn resolve_field(
db: &dyn HirDatabase, db: &dyn HirDatabase,
def: VariantDef, def: VariantDef,
@ -228,6 +292,19 @@ fn resolve_field(
def.fields(db).into_iter().find(|f| f.name(db) == name).map(DocLinkDef::Field) def.fields(db).into_iter().find(|f| f.name(db) == name).map(DocLinkDef::Field)
} }
fn as_module_def_if_namespace_matches(
assoc_item: AssocItem,
ns: Option<Namespace>,
) -> Option<DocLinkDef> {
let (def, expected_ns) = match assoc_item {
AssocItem::Function(it) => (ModuleDef::Function(it), Namespace::Values),
AssocItem::Const(it) => (ModuleDef::Const(it), Namespace::Values),
AssocItem::TypeAlias(it) => (ModuleDef::TypeAlias(it), Namespace::Types),
};
(ns.unwrap_or(expected_ns) == expected_ns).then(|| DocLinkDef::ModuleDef(def))
}
fn modpath_from_str(db: &dyn HirDatabase, link: &str) -> Option<ModPath> { fn modpath_from_str(db: &dyn HirDatabase, link: &str) -> Option<ModPath> {
// FIXME: this is not how we should get a mod path here. // FIXME: this is not how we should get a mod path here.
let try_get_modpath = |link: &str| { let try_get_modpath = |link: &str| {

View File

@ -19,8 +19,8 @@ use hir_ty::{
use crate::{ use crate::{
Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Enum, ExternCrateDecl, Field, Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Enum, ExternCrateDecl, Field,
Function, GenericParam, HasCrate, HasVisibility, LifetimeParam, Macro, Module, SelfParam, Function, GenericParam, HasCrate, HasVisibility, LifetimeParam, Macro, Module, SelfParam,
Static, Struct, Trait, TraitAlias, TyBuilder, Type, TypeAlias, TypeOrConstParam, TypeParam, Static, Struct, Trait, TraitAlias, TupleField, TyBuilder, Type, TypeAlias, TypeOrConstParam,
Union, Variant, TypeParam, Union, Variant,
}; };
impl HirDisplay for Function { impl HirDisplay for Function {
@ -257,6 +257,13 @@ impl HirDisplay for Field {
} }
} }
impl HirDisplay for TupleField {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
write!(f, "pub {}: ", self.name().display(f.db.upcast()))?;
self.ty(f.db).hir_fmt(f)
}
}
impl HirDisplay for Variant { impl HirDisplay for Variant {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
write!(f, "{}", self.name(f.db).display(f.db.upcast()))?; write!(f, "{}", self.name(f.db).display(f.db.upcast()))?;

View File

@ -55,7 +55,7 @@ use hir_def::{
AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId, AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId,
EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, GenericParamId, HasModule, EnumId, EnumVariantId, ExternCrateId, FunctionId, GenericDefId, GenericParamId, HasModule,
ImplId, InTypeConstId, ItemContainerId, LifetimeParamId, LocalEnumVariantId, LocalFieldId, ImplId, InTypeConstId, ItemContainerId, LifetimeParamId, LocalEnumVariantId, LocalFieldId,
Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TupleId,
TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
}; };
use hir_expand::{attrs::collect_attrs, name::name, proc_macro::ProcMacroKind, MacroCallKind}; use hir_expand::{attrs::collect_attrs, name::name, proc_macro::ProcMacroKind, MacroCallKind};
@ -1038,6 +1038,29 @@ pub struct Field {
pub(crate) id: LocalFieldId, pub(crate) id: LocalFieldId,
} }
#[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)]
pub struct TupleField {
pub owner: DefWithBodyId,
pub tuple: TupleId,
pub index: u32,
}
impl TupleField {
pub fn name(&self) -> Name {
Name::new_tuple_field(self.index as usize)
}
pub fn ty(&self, db: &dyn HirDatabase) -> Type {
let ty = db.infer(self.owner).tuple_field_access_types[&self.tuple]
.as_slice(Interner)
.get(self.index as usize)
.and_then(|arg| arg.ty(Interner))
.cloned()
.unwrap_or_else(|| TyKind::Error.intern(Interner));
Type { env: db.trait_environment_for_body(self.owner), ty }
}
}
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
pub enum FieldSource { pub enum FieldSource {
Named(ast::RecordField), Named(ast::RecordField),
@ -1070,7 +1093,7 @@ impl Field {
pub fn layout(&self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> { pub fn layout(&self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
db.layout_of_ty( db.layout_of_ty(
self.ty(db).ty.clone(), self.ty(db).ty,
db.trait_environment(match hir_def::VariantId::from(self.parent) { db.trait_environment(match hir_def::VariantId::from(self.parent) {
hir_def::VariantId::EnumVariantId(id) => GenericDefId::EnumVariantId(id), hir_def::VariantId::EnumVariantId(id) => GenericDefId::EnumVariantId(id),
hir_def::VariantId::StructId(id) => GenericDefId::AdtId(id.into()), hir_def::VariantId::StructId(id) => GenericDefId::AdtId(id.into()),
@ -1831,7 +1854,7 @@ impl DefWithBody {
let local = Local { parent: self.into(), binding_id }; let local = Local { parent: self.into(), binding_id };
match (need_mut, local.is_mut(db)) { match (need_mut, local.is_mut(db)) {
(mir::MutabilityReason::Unused, _) => { (mir::MutabilityReason::Unused, _) => {
let should_ignore = matches!(body[binding_id].name.as_str(), Some(it) if it.starts_with("_")); let should_ignore = matches!(body[binding_id].name.as_str(), Some(it) if it.starts_with('_'));
if !should_ignore { if !should_ignore {
acc.push(UnusedVariable { local }.into()) acc.push(UnusedVariable { local }.into())
} }
@ -1856,7 +1879,7 @@ impl DefWithBody {
} }
(mir::MutabilityReason::Not, true) => { (mir::MutabilityReason::Not, true) => {
if !infer.mutated_bindings_in_closure.contains(&binding_id) { if !infer.mutated_bindings_in_closure.contains(&binding_id) {
let should_ignore = matches!(body[binding_id].name.as_str(), Some(it) if it.starts_with("_")); let should_ignore = matches!(body[binding_id].name.as_str(), Some(it) if it.starts_with('_'));
if !should_ignore { if !should_ignore {
acc.push(UnusedMut { local }.into()) acc.push(UnusedMut { local }.into())
} }
@ -2160,7 +2183,7 @@ impl Function {
return r; return r;
} }
}; };
let (result, stdout, stderr) = interpret_mir(db, body, false, None); let (result, output) = interpret_mir(db, body, false, None);
let mut text = match result { let mut text = match result {
Ok(_) => "pass".to_string(), Ok(_) => "pass".to_string(),
Err(e) => { Err(e) => {
@ -2169,10 +2192,12 @@ impl Function {
r r
} }
}; };
let stdout = output.stdout().into_owned();
if !stdout.is_empty() { if !stdout.is_empty() {
text += "\n--------- stdout ---------\n"; text += "\n--------- stdout ---------\n";
text += &stdout; text += &stdout;
} }
let stderr = output.stdout().into_owned();
if !stderr.is_empty() { if !stderr.is_empty() {
text += "\n--------- stderr ---------\n"; text += "\n--------- stderr ---------\n";
text += &stderr; text += &stderr;
@ -3648,7 +3673,6 @@ impl Closure {
let (captures, _) = infer.closure_info(&self.id); let (captures, _) = infer.closure_info(&self.id);
captures captures
.iter() .iter()
.cloned()
.map(|capture| Type { .map(|capture| Type {
env: db.trait_environment_for_body(owner), env: db.trait_environment_for_body(owner),
ty: capture.ty(&self.subst), ty: capture.ty(&self.subst),
@ -4121,6 +4145,10 @@ impl Type {
} }
} }
pub(crate) fn canonical(&self) -> Canonical<Ty> {
hir_ty::replace_errors_with_variables(&self.ty)
}
/// Returns types that this type dereferences to (including this type itself). The returned /// Returns types that this type dereferences to (including this type itself). The returned
/// iterator won't yield the same type more than once even if the deref chain contains a cycle. /// iterator won't yield the same type more than once even if the deref chain contains a cycle.
pub fn autoderef(&self, db: &dyn HirDatabase) -> impl Iterator<Item = Type> + '_ { pub fn autoderef(&self, db: &dyn HirDatabase) -> impl Iterator<Item = Type> + '_ {

View File

@ -40,7 +40,7 @@ use crate::{
Access, Adjust, Adjustment, AutoBorrow, BindingMode, BuiltinAttr, Callable, ConstParam, Crate, Access, Adjust, Adjustment, AutoBorrow, BindingMode, BuiltinAttr, Callable, ConstParam, Crate,
DeriveHelper, Field, Function, HasSource, HirFileId, Impl, InFile, Label, LifetimeParam, Local, DeriveHelper, Field, Function, HasSource, HirFileId, Impl, InFile, Label, LifetimeParam, Local,
Macro, Module, ModuleDef, Name, OverloadedDeref, Path, ScopeDef, Struct, ToolModule, Trait, Macro, Module, ModuleDef, Name, OverloadedDeref, Path, ScopeDef, Struct, ToolModule, Trait,
Type, TypeAlias, TypeParam, VariantDef, TupleField, Type, TypeAlias, TypeParam, VariantDef,
}; };
pub enum DescendPreference { pub enum DescendPreference {
@ -428,7 +428,7 @@ impl<'db> SemanticsImpl<'db> {
if let Some(original_string) = ast::String::cast(original_token.clone()) { if let Some(original_string) = ast::String::cast(original_token.clone()) {
if let Some(quote) = original_string.open_quote_text_range() { if let Some(quote) = original_string.open_quote_text_range() {
return self return self
.descend_into_macros(DescendPreference::SameText, original_token.clone()) .descend_into_macros(DescendPreference::SameText, original_token)
.into_iter() .into_iter()
.find_map(|token| { .find_map(|token| {
self.resolve_offset_in_format_args( self.resolve_offset_in_format_args(
@ -1085,14 +1085,14 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(call.syntax())?.resolve_method_call_as_callable(self.db, call) self.analyze(call.syntax())?.resolve_method_call_as_callable(self.db, call)
} }
pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> { pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Either<Field, TupleField>> {
self.analyze(field.syntax())?.resolve_field(self.db, field) self.analyze(field.syntax())?.resolve_field(self.db, field)
} }
pub fn resolve_field_fallback( pub fn resolve_field_fallback(
&self, &self,
field: &ast::FieldExpr, field: &ast::FieldExpr,
) -> Option<Either<Field, Function>> { ) -> Option<Either<Either<Field, TupleField>, Function>> {
self.analyze(field.syntax())?.resolve_field_fallback(self.db, field) self.analyze(field.syntax())?.resolve_field_fallback(self.db, field)
} }

View File

@ -50,7 +50,7 @@ use triomphe::Arc;
use crate::{ use crate::{
db::HirDatabase, semantics::PathResolution, Adt, AssocItem, BindingMode, BuiltinAttr, db::HirDatabase, semantics::PathResolution, Adt, AssocItem, BindingMode, BuiltinAttr,
BuiltinType, Callable, Const, DeriveHelper, Field, Function, Local, Macro, ModuleDef, Static, BuiltinType, Callable, Const, DeriveHelper, Field, Function, Local, Macro, ModuleDef, Static,
Struct, ToolModule, Trait, TraitAlias, Type, TypeAlias, Variant, Struct, ToolModule, Trait, TraitAlias, TupleField, Type, TypeAlias, Variant,
}; };
/// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of /// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of
@ -297,7 +297,11 @@ impl SourceAnalyzer {
Some((f_in_trait, substs)) => Some(Either::Left( Some((f_in_trait, substs)) => Some(Either::Left(
self.resolve_impl_method_or_trait_def(db, f_in_trait, substs).into(), self.resolve_impl_method_or_trait_def(db, f_in_trait, substs).into(),
)), )),
None => inference_result.field_resolution(expr_id).map(Into::into).map(Either::Right), None => inference_result
.field_resolution(expr_id)
.and_then(Either::left)
.map(Into::into)
.map(Either::Right),
} }
} }
@ -305,20 +309,28 @@ impl SourceAnalyzer {
&self, &self,
db: &dyn HirDatabase, db: &dyn HirDatabase,
field: &ast::FieldExpr, field: &ast::FieldExpr,
) -> Option<Field> { ) -> Option<Either<Field, TupleField>> {
let &(def, ..) = self.def.as_ref()?;
let expr_id = self.expr_id(db, &field.clone().into())?; let expr_id = self.expr_id(db, &field.clone().into())?;
self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into()) self.infer.as_ref()?.field_resolution(expr_id).map(|it| {
it.map_either(Into::into, |f| TupleField { owner: def, tuple: f.tuple, index: f.index })
})
} }
pub(crate) fn resolve_field_fallback( pub(crate) fn resolve_field_fallback(
&self, &self,
db: &dyn HirDatabase, db: &dyn HirDatabase,
field: &ast::FieldExpr, field: &ast::FieldExpr,
) -> Option<Either<Field, Function>> { ) -> Option<Either<Either<Field, TupleField>, Function>> {
let &(def, ..) = self.def.as_ref()?;
let expr_id = self.expr_id(db, &field.clone().into())?; let expr_id = self.expr_id(db, &field.clone().into())?;
let inference_result = self.infer.as_ref()?; let inference_result = self.infer.as_ref()?;
match inference_result.field_resolution(expr_id) { match inference_result.field_resolution(expr_id) {
Some(field) => Some(Either::Left(field.into())), Some(field) => Some(Either::Left(field.map_either(Into::into, |f| TupleField {
owner: def,
tuple: f.tuple,
index: f.index,
}))),
None => inference_result.method_resolution(expr_id).map(|(f, substs)| { None => inference_result.method_resolution(expr_id).map(|(f, substs)| {
Either::Right(self.resolve_impl_method_or_trait_def(db, f, substs).into()) Either::Right(self.resolve_impl_method_or_trait_def(db, f, substs).into())
}), }),

View File

@ -18,11 +18,11 @@ use crate::{Module, ModuleDef, Semantics};
/// possible. /// possible.
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct FileSymbol { pub struct FileSymbol {
// even though name can be derived from the def, we store it for efficiency
pub name: SmolStr, pub name: SmolStr,
pub def: ModuleDef, pub def: ModuleDef,
pub loc: DeclarationLocation, pub loc: DeclarationLocation,
pub container_name: Option<SmolStr>, pub container_name: Option<SmolStr>,
/// Whether this symbol is a doc alias for the original symbol.
pub is_alias: bool, pub is_alias: bool,
pub is_assoc: bool, pub is_assoc: bool,
} }
@ -163,11 +163,9 @@ impl<'a> SymbolCollector<'a> {
} }
// Record renamed imports. // Record renamed imports.
// In case it imports multiple items under different namespaces we just pick one arbitrarily // FIXME: In case it imports multiple items under different namespaces we just pick one arbitrarily
// for now. // for now.
for id in scope.imports() { for id in scope.imports() {
let loc = id.import.lookup(self.db.upcast());
loc.id.item_tree(self.db.upcast());
let source = id.import.child_source(self.db.upcast()); let source = id.import.child_source(self.db.upcast());
let Some(use_tree_src) = source.value.get(id.idx) else { continue }; let Some(use_tree_src) = source.value.get(id.idx) else { continue };
let Some(rename) = use_tree_src.rename() else { continue }; let Some(rename) = use_tree_src.rename() else { continue };

View File

@ -89,12 +89,14 @@ use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel};
// ``` // ```
pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let (import_assets, syntax_under_caret) = find_importable_node(ctx)?; let (import_assets, syntax_under_caret) = find_importable_node(ctx)?;
let mut proposed_imports = import_assets.search_for_imports( let mut proposed_imports: Vec<_> = import_assets
&ctx.sema, .search_for_imports(
ctx.config.insert_use.prefix_kind, &ctx.sema,
ctx.config.prefer_no_std, ctx.config.insert_use.prefix_kind,
ctx.config.prefer_no_std, ctx.config.prefer_no_std,
); ctx.config.prefer_no_std,
)
.collect();
if proposed_imports.is_empty() { if proposed_imports.is_empty() {
return None; return None;
} }
@ -113,6 +115,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
)?; )?;
// we aren't interested in different namespaces // we aren't interested in different namespaces
proposed_imports.sort_by(|a, b| a.import_path.cmp(&b.import_path));
proposed_imports.dedup_by(|a, b| a.import_path == b.import_path); proposed_imports.dedup_by(|a, b| a.import_path == b.import_path);
let current_node = match ctx.covering_element() { let current_node = match ctx.covering_element() {

View File

@ -301,7 +301,7 @@ fn replace_usages(
// add imports across modules where needed // add imports across modules where needed
if let Some((import_scope, path)) = import_data { if let Some((import_scope, path)) = import_data {
let scope = match import_scope.clone() { let scope = match import_scope {
ImportScope::File(it) => ImportScope::File(edit.make_mut(it)), ImportScope::File(it) => ImportScope::File(edit.make_mut(it)),
ImportScope::Module(it) => ImportScope::Module(edit.make_mut(it)), ImportScope::Module(it) => ImportScope::Module(edit.make_mut(it)),
ImportScope::Block(it) => ImportScope::Block(edit.make_mut(it)), ImportScope::Block(it) => ImportScope::Block(edit.make_mut(it)),
@ -329,7 +329,7 @@ fn augment_references_with_imports(
references references
.into_iter() .into_iter()
.filter_map(|FileReference { range, name, .. }| { .filter_map(|FileReference { range, name, .. }| {
let name = name.clone().into_name_like()?; let name = name.into_name_like()?;
ctx.sema.scope(name.syntax()).map(|scope| (range, name, scope.module())) ctx.sema.scope(name.syntax()).map(|scope| (range, name, scope.module()))
}) })
.map(|(range, name, ref_module)| { .map(|(range, name, ref_module)| {

View File

@ -1,5 +1,6 @@
use hir::Semantics; use hir::Semantics;
use ide_db::RootDatabase; use ide_db::RootDatabase;
use syntax::ast::RangeItem;
use syntax::ast::{edit::AstNodeEdit, AstNode, HasName, LetStmt, Name, Pat}; use syntax::ast::{edit::AstNodeEdit, AstNode, HasName, LetStmt, Name, Pat};
use syntax::T; use syntax::T;

View File

@ -49,8 +49,8 @@ pub(crate) fn convert_nested_function_to_closure(
target, target,
|edit| { |edit| {
let params = &param_list.syntax().text().to_string(); let params = &param_list.syntax().text().to_string();
let params = params.strip_prefix("(").unwrap_or(params); let params = params.strip_prefix('(').unwrap_or(params);
let params = params.strip_suffix(")").unwrap_or(params); let params = params.strip_suffix(')').unwrap_or(params);
let mut body = body.to_string(); let mut body = body.to_string();
if !has_semicolon(&function) { if !has_semicolon(&function) {

View File

@ -190,7 +190,7 @@ fn augment_references_with_imports(
ctx.sema.scope(name.syntax()).map(|scope| (name, scope.module())) ctx.sema.scope(name.syntax()).map(|scope| (name, scope.module()))
}) })
.map(|(name, ref_module)| { .map(|(name, ref_module)| {
let new_name = edit.make_mut(name.clone()); let new_name = edit.make_mut(name);
// if the referenced module is not the same as the target one and has not been seen before, add an import // if the referenced module is not the same as the target one and has not been seen before, add an import
let import_data = if ref_module.nearest_non_block_module(ctx.db()) != *target_module let import_data = if ref_module.nearest_non_block_module(ctx.db()) != *target_module

View File

@ -6,6 +6,7 @@ use ide_db::{
defs::Definition, defs::Definition,
helpers::mod_path_to_ast, helpers::mod_path_to_ast,
imports::insert_use::{insert_use, ImportScope, InsertUseConfig}, imports::insert_use::{insert_use, ImportScope, InsertUseConfig},
path_transform::PathTransform,
search::FileReference, search::FileReference,
FxHashSet, RootDatabase, FxHashSet, RootDatabase,
}; };
@ -105,6 +106,16 @@ pub(crate) fn extract_struct_from_enum_variant(
.generic_param_list() .generic_param_list()
.and_then(|known_generics| extract_generic_params(&known_generics, &field_list)); .and_then(|known_generics| extract_generic_params(&known_generics, &field_list));
let generics = generic_params.as_ref().map(|generics| generics.clone_for_update()); let generics = generic_params.as_ref().map(|generics| generics.clone_for_update());
// resolve GenericArg in field_list to actual type
let field_list = field_list.clone_for_update();
if let Some((target_scope, source_scope)) =
ctx.sema.scope(enum_ast.syntax()).zip(ctx.sema.scope(field_list.syntax()))
{
PathTransform::generic_transformation(&target_scope, &source_scope)
.apply(field_list.syntax());
}
let def = let def =
create_struct_def(variant_name.clone(), &variant, &field_list, generics, &enum_ast); create_struct_def(variant_name.clone(), &variant, &field_list, generics, &enum_ast);
@ -244,8 +255,6 @@ fn create_struct_def(
// for fields without any existing visibility, use visibility of enum // for fields without any existing visibility, use visibility of enum
let field_list: ast::FieldList = match field_list { let field_list: ast::FieldList = match field_list {
Either::Left(field_list) => { Either::Left(field_list) => {
let field_list = field_list.clone_for_update();
if let Some(vis) = &enum_vis { if let Some(vis) = &enum_vis {
field_list field_list
.fields() .fields()
@ -254,11 +263,9 @@ fn create_struct_def(
.for_each(|it| insert_vis(it.syntax(), vis.syntax())); .for_each(|it| insert_vis(it.syntax(), vis.syntax()));
} }
field_list.into() field_list.clone().into()
} }
Either::Right(field_list) => { Either::Right(field_list) => {
let field_list = field_list.clone_for_update();
if let Some(vis) = &enum_vis { if let Some(vis) = &enum_vis {
field_list field_list
.fields() .fields()
@ -267,7 +274,7 @@ fn create_struct_def(
.for_each(|it| insert_vis(it.syntax(), vis.syntax())); .for_each(|it| insert_vis(it.syntax(), vis.syntax()));
} }
field_list.into() field_list.clone().into()
} }
}; };
field_list.reindent_to(IndentLevel::single()); field_list.reindent_to(IndentLevel::single());
@ -425,6 +432,59 @@ mod tests {
use super::*; use super::*;
#[test]
fn issue_16197() {
check_assist(
extract_struct_from_enum_variant,
r#"
enum Foo {
Bar $0{ node: Box<Self> },
Nil,
}
"#,
r#"
struct Bar{ node: Box<Foo> }
enum Foo {
Bar(Bar),
Nil,
}
"#,
);
check_assist(
extract_struct_from_enum_variant,
r#"
enum Foo {
Bar $0{ node: Box<Self>, a: Arc<Box<Self>> },
Nil,
}
"#,
r#"
struct Bar{ node: Box<Foo>, a: Arc<Box<Foo>> }
enum Foo {
Bar(Bar),
Nil,
}
"#,
);
check_assist(
extract_struct_from_enum_variant,
r#"
enum Foo {
Nil(Box$0<Self>, Arc<Box<Self>>),
}
"#,
r#"
struct Nil(Box<Foo>, Arc<Box<Foo>>);
enum Foo {
Nil(Nil),
}
"#,
);
}
#[test] #[test]
fn test_extract_struct_several_fields_tuple() { fn test_extract_struct_several_fields_tuple() {
check_assist( check_assist(

View File

@ -112,7 +112,7 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
let insert_place = edit.make_syntax_mut(place); let insert_place = edit.make_syntax_mut(place);
// Adjust ws to insert depending on if this is all inline or on separate lines // Adjust ws to insert depending on if this is all inline or on separate lines
let trailing_ws = if prev_ws.is_some_and(|it| it.text().starts_with("\n")) { let trailing_ws = if prev_ws.is_some_and(|it| it.text().starts_with('\n')) {
format!("\n{indent_to}") format!("\n{indent_to}")
} else { } else {
format!(" ") format!(" ")

View File

@ -147,7 +147,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
None => { None => {
let name = &strukt_name.to_string(); let name = &strukt_name.to_string();
let params = strukt.generic_param_list(); let params = strukt.generic_param_list();
let ty_params = params.clone(); let ty_params = params;
let where_clause = strukt.where_clause(); let where_clause = strukt.where_clause();
let impl_def = make::impl_( let impl_def = make::impl_(

View File

@ -17,7 +17,7 @@ use syntax::{
self, self,
edit::{self, AstNodeEdit}, edit::{self, AstNodeEdit},
make, AssocItem, GenericArgList, GenericParamList, HasGenericParams, HasName, make, AssocItem, GenericArgList, GenericParamList, HasGenericParams, HasName,
HasTypeBounds, HasVisibility as astHasVisibility, Path, HasTypeBounds, HasVisibility as astHasVisibility, Path, WherePred,
}, },
ted::{self, Position}, ted::{self, Position},
AstNode, NodeOrToken, SmolStr, SyntaxKind, AstNode, NodeOrToken, SmolStr, SyntaxKind,
@ -217,9 +217,9 @@ impl Struct {
}; };
acc.add_group( acc.add_group(
&GroupLabel(format!("Generate delegate impls for field `{}`", field.name)), &GroupLabel(format!("Generate delegate trait impls for field `{}`", field.name)),
AssistId("generate_delegate_trait", ide_db::assists::AssistKind::Generate), AssistId("generate_delegate_trait", ide_db::assists::AssistKind::Generate),
format!("Generate delegate impl `{}` for `{}`", signature, field.name), format!("Generate delegate trait impl `{}` for `{}`", signature, field.name),
field.range, field.range,
|builder| { |builder| {
builder.insert( builder.insert(
@ -243,12 +243,12 @@ fn generate_impl(
let db = ctx.db(); let db = ctx.db();
let ast_strukt = &strukt.strukt; let ast_strukt = &strukt.strukt;
let strukt_ty = make::ty_path(make::ext::ident_path(&strukt.name.to_string())); let strukt_ty = make::ty_path(make::ext::ident_path(&strukt.name.to_string()));
let strukt_params = ast_strukt.generic_param_list();
match delegee { match delegee {
Delegee::Bound(delegee) => { Delegee::Bound(delegee) => {
let bound_def = ctx.sema.source(delegee.to_owned())?.value; let bound_def = ctx.sema.source(delegee.to_owned())?.value;
let bound_params = bound_def.generic_param_list(); let bound_params = bound_def.generic_param_list();
let strukt_params = ast_strukt.generic_param_list();
delegate = make::impl_trait( delegate = make::impl_trait(
delegee.is_unsafe(db), delegee.is_unsafe(db),
@ -266,11 +266,8 @@ fn generate_impl(
.clone_for_update(); .clone_for_update();
// Goto link : https://doc.rust-lang.org/reference/paths.html#qualified-paths // Goto link : https://doc.rust-lang.org/reference/paths.html#qualified-paths
let qualified_path_type = make::path_from_text(&format!( let qualified_path_type =
"<{} as {}>", make::path_from_text(&format!("<{} as {}>", field_ty, delegate.trait_()?));
field_ty.to_string(),
delegate.trait_()?.to_string()
));
let delegate_assoc_items = delegate.get_or_create_assoc_item_list(); let delegate_assoc_items = delegate.get_or_create_assoc_item_list();
match bound_def.assoc_item_list() { match bound_def.assoc_item_list() {
@ -295,63 +292,73 @@ fn generate_impl(
} }
Delegee::Impls(trait_, old_impl) => { Delegee::Impls(trait_, old_impl) => {
let old_impl = ctx.sema.source(old_impl.to_owned())?.value; let old_impl = ctx.sema.source(old_impl.to_owned())?.value;
let old_impl_params = old_impl.generic_param_list();
// 1) Resolve conflicts between generic parameters in old_impl and
// those in strukt.
//
// These generics parameters will also be used in `field_ty` and
// `where_clauses`, so we should substitude arguments in them as well.
let strukt_params = resolve_name_conflicts(strukt_params, &old_impl_params);
let (field_ty, ty_where_clause) = match &strukt_params {
Some(strukt_params) => {
let args = strukt_params.to_generic_args();
let field_ty = rename_strukt_args(ctx, ast_strukt, field_ty, &args)?;
let where_clause = ast_strukt
.where_clause()
.and_then(|wc| Some(rename_strukt_args(ctx, ast_strukt, &wc, &args)?));
(field_ty, where_clause)
}
None => (field_ty.clone_for_update(), None),
};
// 2) Handle instantiated generics in `field_ty`.
// 2.1) Some generics used in `self_ty` may be instantiated, so they
// are no longer generics, we should remove and instantiate those
// generics in advance.
// `old_trait_args` contains names of generic args for trait in `old_impl` // `old_trait_args` contains names of generic args for trait in `old_impl`
let old_trait_args = old_impl let old_impl_trait_args = old_impl
.trait_()? .trait_()?
.generic_arg_list() .generic_arg_list()
.map(|l| l.generic_args().map(|arg| arg.to_string())) .map(|l| l.generic_args().map(|arg| arg.to_string()))
.map_or_else(|| FxHashSet::default(), |it| it.collect()); .map_or_else(|| FxHashSet::default(), |it| it.collect());
let old_impl_params = old_impl.generic_param_list(); let trait_gen_params = remove_instantiated_params(
&old_impl.self_ty()?,
old_impl_params.clone(),
&old_impl_trait_args,
);
// Resolve conflicts with generic parameters in strukt. // 2.2) Generate generic args applied on impl.
// These generics parameters will also be used in `field_ty` and `where_clauses`, let transform_args = generate_args_for_impl(
// so we should substitude arguments in them as well. old_impl_params,
let (renamed_strukt_params, field_ty, ty_where_clause) = if let Some(strukt_params) = &old_impl.self_ty()?,
resolve_conflicts_for_strukt(ast_strukt, old_impl_params.as_ref()) &field_ty,
{ &trait_gen_params,
let strukt_args = strukt_params.to_generic_args(); &old_impl_trait_args,
let field_ty = );
subst_name_in_strukt(ctx, ast_strukt, field_ty, strukt_args.clone())?;
let wc = ast_strukt
.where_clause()
.and_then(|wc| Some(subst_name_in_strukt(ctx, ast_strukt, &wc, strukt_args)?));
(Some(strukt_params), field_ty, wc)
} else {
(None, field_ty.clone_for_update(), None)
};
// Some generics used in `field_ty` may be instantiated, so they are no longer
// `generics`. We should remove them from generics params, and use the rest params.
let trait_gen_params =
remove_instantiated_params(&old_impl.self_ty()?, old_impl_params, &old_trait_args);
// Generate generic args that applied to current impl, this step will also remove unused params
let args_for_impl =
get_args_for_impl(&old_impl, &field_ty, &trait_gen_params, &old_trait_args);
// 2.3) Instantiate generics with `transform_impl`, this step also
// remove unused params.
let mut trait_gen_args = old_impl.trait_()?.generic_arg_list(); let mut trait_gen_args = old_impl.trait_()?.generic_arg_list();
if let Some(arg_list) = &mut trait_gen_args { if let Some(trait_args) = &mut trait_gen_args {
*arg_list = arg_list.clone_for_update(); *trait_args = trait_args.clone_for_update();
transform_impl(ctx, ast_strukt, &old_impl, &args_for_impl, &arg_list.syntax())?; transform_impl(ctx, ast_strukt, &old_impl, &transform_args, &trait_args.syntax())?;
} }
let mut type_gen_args = let type_gen_args = strukt_params.clone().map(|params| params.to_generic_args());
renamed_strukt_params.clone().map(|params| params.to_generic_args());
if let Some(type_args) = &mut type_gen_args {
*type_args = type_args.clone_for_update();
transform_impl(ctx, ast_strukt, &old_impl, &args_for_impl, &type_args.syntax())?;
}
let path_type = make::ty(&trait_.name(db).to_smol_str()).clone_for_update(); let path_type = make::ty(&trait_.name(db).to_smol_str()).clone_for_update();
transform_impl(ctx, ast_strukt, &old_impl, &args_for_impl, &path_type.syntax())?; transform_impl(ctx, ast_strukt, &old_impl, &transform_args, &path_type.syntax())?;
// 3) Generate delegate trait impl
delegate = make::impl_trait( delegate = make::impl_trait(
trait_.is_unsafe(db), trait_.is_unsafe(db),
trait_gen_params, trait_gen_params,
trait_gen_args, trait_gen_args,
renamed_strukt_params, strukt_params,
type_gen_args, type_gen_args,
trait_.is_auto(db), trait_.is_auto(db),
path_type, path_type,
@ -363,30 +370,26 @@ fn generate_impl(
.clone_for_update(); .clone_for_update();
// Goto link : https://doc.rust-lang.org/reference/paths.html#qualified-paths // Goto link : https://doc.rust-lang.org/reference/paths.html#qualified-paths
let qualified_path_type = make::path_from_text(&format!( let qualified_path_type =
"<{} as {}>", make::path_from_text(&format!("<{} as {}>", field_ty, delegate.trait_()?));
field_ty.to_string(),
delegate.trait_()?.to_string()
));
// 4) Transform associated items in delegte trait impl
let delegate_assoc_items = delegate.get_or_create_assoc_item_list(); let delegate_assoc_items = delegate.get_or_create_assoc_item_list();
for item in old_impl for item in old_impl
.get_or_create_assoc_item_list() .get_or_create_assoc_item_list()
.assoc_items() .assoc_items()
.filter(|item| matches!(item, AssocItem::MacroCall(_)).not()) .filter(|item| matches!(item, AssocItem::MacroCall(_)).not())
{ {
let assoc = process_assoc_item( let item = item.clone_for_update();
transform_assoc_item(ctx, ast_strukt, &old_impl, &args_for_impl, item)?, transform_impl(ctx, ast_strukt, &old_impl, &transform_args, item.syntax())?;
qualified_path_type.clone(),
&field_name,
)?;
let assoc = process_assoc_item(item, qualified_path_type.clone(), &field_name)?;
delegate_assoc_items.add_item(assoc); delegate_assoc_items.add_item(assoc);
} }
// Remove unused where clauses // 5) Remove useless where clauses
if let Some(wc) = delegate.where_clause() { if let Some(wc) = delegate.where_clause() {
remove_useless_where_clauses(&delegate, wc)?; remove_useless_where_clauses(&delegate.trait_()?, &delegate.self_ty()?, wc);
} }
} }
} }
@ -394,32 +397,6 @@ fn generate_impl(
Some(delegate) Some(delegate)
} }
fn transform_assoc_item(
ctx: &AssistContext<'_>,
strukt: &ast::Struct,
old_impl: &ast::Impl,
args: &Option<GenericArgList>,
item: AssocItem,
) -> Option<AssocItem> {
let source_scope = ctx.sema.scope(&item.syntax()).unwrap();
let target_scope = ctx.sema.scope(&strukt.syntax())?;
let hir_old_impl = ctx.sema.to_impl_def(old_impl)?;
let item = item.clone_for_update();
let transform = args.as_ref().map_or_else(
|| PathTransform::generic_transformation(&target_scope, &source_scope),
|args| {
PathTransform::impl_transformation(
&target_scope,
&source_scope,
hir_old_impl,
args.clone(),
)
},
);
transform.apply(&item.syntax());
Some(item)
}
fn transform_impl( fn transform_impl(
ctx: &AssistContext<'_>, ctx: &AssistContext<'_>,
strukt: &ast::Struct, strukt: &ast::Struct,
@ -463,11 +440,11 @@ fn remove_instantiated_params(
.segments() .segments()
.filter_map(|seg| seg.generic_arg_list()) .filter_map(|seg| seg.generic_arg_list())
.flat_map(|it| it.generic_args()) .flat_map(|it| it.generic_args())
// However, if the param is also used in the trait arguments, it shouldn't be removed. // However, if the param is also used in the trait arguments,
// it shouldn't be removed now, which will be instantiated in
// later `path_transform`
.filter(|arg| !old_trait_args.contains(&arg.to_string())) .filter(|arg| !old_trait_args.contains(&arg.to_string()))
.for_each(|arg| { .for_each(|arg| new_gpl.remove_generic_arg(&arg));
new_gpl.remove_generic_arg(&arg);
});
(new_gpl.generic_params().count() > 0).then_some(new_gpl) (new_gpl.generic_params().count() > 0).then_some(new_gpl)
}) })
} }
@ -475,49 +452,37 @@ fn remove_instantiated_params(
} }
} }
fn remove_useless_where_clauses(delegate: &ast::Impl, wc: ast::WhereClause) -> Option<()> { fn remove_useless_where_clauses(trait_ty: &ast::Type, self_ty: &ast::Type, wc: ast::WhereClause) {
let trait_args = let live_generics = [trait_ty, self_ty]
delegate.trait_()?.generic_arg_list().map(|trait_args| trait_args.generic_args()); .into_iter()
let strukt_args = .flat_map(|ty| ty.generic_arg_list())
delegate.self_ty()?.generic_arg_list().map(|strukt_args| strukt_args.generic_args()); .flat_map(|gal| gal.generic_args())
let used_generic_names = match (trait_args, strukt_args) { .map(|x| x.to_string())
(None, None) => None, .collect::<FxHashSet<_>>();
(None, Some(y)) => Some(y.map(|arg| arg.to_string()).collect::<FxHashSet<_>>()),
(Some(x), None) => Some(x.map(|arg| arg.to_string()).collect::<FxHashSet<_>>()),
(Some(x), Some(y)) => Some(x.chain(y).map(|arg| arg.to_string()).collect::<FxHashSet<_>>()),
};
// Keep clauses that have generic clauses after substitution, and remove the rest // Keep where-clauses that have generics after substitution, and remove the
if let Some(used_generic_names) = used_generic_names { // rest.
wc.predicates() let has_live_generics = |pred: &WherePred| {
.filter(|pred| { pred.syntax()
pred.syntax() .descendants_with_tokens()
.descendants_with_tokens() .filter_map(|e| e.into_token())
.filter_map(|e| e.into_token()) .any(|e| e.kind() == SyntaxKind::IDENT && live_generics.contains(&e.to_string()))
.find(|e| { .not()
e.kind() == SyntaxKind::IDENT && used_generic_names.contains(&e.to_string()) };
}) wc.predicates().filter(has_live_generics).for_each(|pred| wc.remove_predicate(pred));
.is_none()
})
.for_each(|pred| {
wc.remove_predicate(pred);
});
} else {
wc.predicates().for_each(|pred| wc.remove_predicate(pred));
}
if wc.predicates().count() == 0 { if wc.predicates().count() == 0 {
// Remove useless whitespaces // Remove useless whitespaces
wc.syntax() [syntax::Direction::Prev, syntax::Direction::Next]
.siblings_with_tokens(syntax::Direction::Prev) .into_iter()
.skip(1) .flat_map(|dir| {
.take_while(|node_or_tok| node_or_tok.kind() == SyntaxKind::WHITESPACE) wc.syntax()
.for_each(|ws| ted::remove(ws)); .siblings_with_tokens(dir)
wc.syntax() .skip(1)
.siblings_with_tokens(syntax::Direction::Next) .take_while(|node_or_tok| node_or_tok.kind() == SyntaxKind::WHITESPACE)
.skip(1) })
.take_while(|node_or_tok| node_or_tok.kind() == SyntaxKind::WHITESPACE)
.for_each(|ws| ted::remove(ws)); .for_each(|ws| ted::remove(ws));
ted::insert( ted::insert(
ted::Position::after(wc.syntax()), ted::Position::after(wc.syntax()),
NodeOrToken::Token(make::token(SyntaxKind::WHITESPACE)), NodeOrToken::Token(make::token(SyntaxKind::WHITESPACE)),
@ -525,84 +490,63 @@ fn remove_useless_where_clauses(delegate: &ast::Impl, wc: ast::WhereClause) -> O
// Remove where clause // Remove where clause
ted::remove(wc.syntax()); ted::remove(wc.syntax());
} }
Some(())
} }
fn get_args_for_impl( // Generate generic args that should be apply to current impl.
old_impl: &ast::Impl, //
// For exmaple, say we have implementation `impl<A, B, C> Trait for B<A>`,
// and `b: B<T>` in struct `S<T>`. Then the `A` should be instantiated to `T`.
// While the last two generic args `B` and `C` doesn't change, it remains
// `<B, C>`. So we apply `<T, B, C>` as generic arguments to impl.
fn generate_args_for_impl(
old_impl_gpl: Option<GenericParamList>,
self_ty: &ast::Type,
field_ty: &ast::Type, field_ty: &ast::Type,
trait_params: &Option<GenericParamList>, trait_params: &Option<GenericParamList>,
old_trait_args: &FxHashSet<String>, old_trait_args: &FxHashSet<String>,
) -> Option<ast::GenericArgList> { ) -> Option<ast::GenericArgList> {
// Generate generic args that should be apply to current impl let Some(old_impl_args) = old_impl_gpl.map(|gpl| gpl.to_generic_args().generic_args()) else {
// return None;
// For exmaple, if we have `impl<A, B, C> Trait for B<A>`, and `b: B<T>` in `S<T>`, };
// then the generic `A` should be renamed to `T`. While the last two generic args // Create pairs of the args of `self_ty` and corresponding `field_ty` to
// doesn't change, it renames <B, C>. So we apply `<T, B C>` as generic arguments // form the substitution list
// to impl. let mut arg_substs = FxHashMap::default();
let old_impl_params = old_impl.generic_param_list();
let self_ty = old_impl.self_ty();
if let (Some(old_impl_gpl), Some(self_ty)) = (old_impl_params, self_ty) { match field_ty {
// Make pair of the arguments of `field_ty` and `old_strukt_args` to field_ty @ ast::Type::PathType(_) => {
// get the list for substitution let field_args = field_ty.generic_arg_list().map(|gal| gal.generic_args());
let mut arg_substs = FxHashMap::default(); let self_ty_args = self_ty.generic_arg_list().map(|gal| gal.generic_args());
if let (Some(field_args), Some(self_ty_args)) = (field_args, self_ty_args) {
match field_ty { self_ty_args.zip(field_args).for_each(|(self_ty_arg, field_arg)| {
field_ty @ ast::Type::PathType(_) => { arg_substs.entry(self_ty_arg.to_string()).or_insert(field_arg);
let field_args = field_ty.generic_arg_list(); })
if let (Some(field_args), Some(old_impl_args)) =
(field_args, self_ty.generic_arg_list())
{
field_args.generic_args().zip(old_impl_args.generic_args()).for_each(
|(field_arg, impl_arg)| {
arg_substs.entry(impl_arg.to_string()).or_insert(field_arg);
},
)
}
} }
_ => {}
} }
_ => {}
let args = old_impl_gpl
.to_generic_args()
.generic_args()
.map(|old_arg| {
arg_substs.get(&old_arg.to_string()).map_or_else(
|| old_arg.clone(),
|replace_with| {
// The old_arg will be replaced, so it becomes redundant
let old_arg_name = old_arg.to_string();
if old_trait_args.contains(&old_arg_name) {
// However, we should check type bounds and where clauses on old_arg,
// if it has type bound, we should keep the type bound.
// match trait_params.and_then(|params| params.remove_generic_arg(&old_arg)) {
// Some(ast::GenericParam::TypeParam(ty)) => {
// ty.type_bound_list().and_then(|bounds| )
// }
// _ => {}
// }
if let Some(params) = trait_params {
params.remove_generic_arg(&old_arg);
}
}
replace_with.clone()
},
)
})
.collect_vec();
args.is_empty().not().then(|| make::generic_arg_list(args.into_iter()))
} else {
None
} }
let args = old_impl_args
.map(|old_arg| {
arg_substs.get(&old_arg.to_string()).map_or_else(
|| old_arg.clone(),
|replace_with| {
// The old_arg will be replaced, so it becomes redundant
if trait_params.is_some() && old_trait_args.contains(&old_arg.to_string()) {
trait_params.as_ref().unwrap().remove_generic_arg(&old_arg)
}
replace_with.clone()
},
)
})
.collect_vec();
args.is_empty().not().then(|| make::generic_arg_list(args.into_iter()))
} }
fn subst_name_in_strukt<N>( fn rename_strukt_args<N>(
ctx: &AssistContext<'_>, ctx: &AssistContext<'_>,
strukt: &ast::Struct, strukt: &ast::Struct,
item: &N, item: &N,
args: GenericArgList, args: &GenericArgList,
) -> Option<N> ) -> Option<N>
where where
N: ast::AstNode, N: ast::AstNode,
@ -611,9 +555,11 @@ where
let hir_adt = hir::Adt::from(hir_strukt); let hir_adt = hir::Adt::from(hir_strukt);
let item = item.clone_for_update(); let item = item.clone_for_update();
let item_scope = ctx.sema.scope(item.syntax())?; let scope = ctx.sema.scope(item.syntax())?;
let transform = PathTransform::adt_transformation(&item_scope, &item_scope, hir_adt, args);
let transform = PathTransform::adt_transformation(&scope, &scope, hir_adt, args.clone());
transform.apply(&item.syntax()); transform.apply(&item.syntax());
Some(item) Some(item)
} }
@ -627,16 +573,16 @@ fn has_self_type(trait_: hir::Trait, ctx: &AssistContext<'_>) -> Option<()> {
.map(|_| ()) .map(|_| ())
} }
fn resolve_conflicts_for_strukt( fn resolve_name_conflicts(
strukt: &ast::Struct, strukt_params: Option<ast::GenericParamList>,
old_impl_params: Option<&ast::GenericParamList>, old_impl_params: &Option<ast::GenericParamList>,
) -> Option<ast::GenericParamList> { ) -> Option<ast::GenericParamList> {
match (strukt.generic_param_list(), old_impl_params) { match (strukt_params, old_impl_params) {
(Some(old_strukt_params), Some(old_impl_params)) => { (Some(old_strukt_params), Some(old_impl_params)) => {
let params = make::generic_param_list(std::iter::empty()).clone_for_update(); let params = make::generic_param_list(std::iter::empty()).clone_for_update();
for old_strukt_param in old_strukt_params.generic_params() { for old_strukt_param in old_strukt_params.generic_params() {
// Get old name from `strukt`` // Get old name from `strukt`
let mut name = SmolStr::from(match &old_strukt_param { let mut name = SmolStr::from(match &old_strukt_param {
ast::GenericParam::ConstParam(c) => c.name()?.to_string(), ast::GenericParam::ConstParam(c) => c.name()?.to_string(),
ast::GenericParam::LifetimeParam(l) => { ast::GenericParam::LifetimeParam(l) => {
@ -807,7 +753,7 @@ fn ty_assoc_item(item: syntax::ast::TypeAlias, qual_path_ty: Path) -> Option<Ass
} }
fn qualified_path(qual_path_ty: ast::Path, path_expr_seg: ast::Path) -> ast::Path { fn qualified_path(qual_path_ty: ast::Path, path_expr_seg: ast::Path) -> ast::Path {
make::path_from_text(&format!("{}::{}", qual_path_ty.to_string(), path_expr_seg.to_string())) make::path_from_text(&format!("{}::{}", qual_path_ty, path_expr_seg))
} }
#[cfg(test)] #[cfg(test)]

View File

@ -432,7 +432,7 @@ fn get_fn_target(
} }
None => next_space_for_fn_after_call_site(ast::CallableExpr::Call(call))?, None => next_space_for_fn_after_call_site(ast::CallableExpr::Call(call))?,
}; };
Some((target.clone(), file)) Some((target, file))
} }
fn get_method_target( fn get_method_target(

View File

@ -47,7 +47,7 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>
let impl_def = ctx.find_node_at_offset::<ast::Impl>()?.clone_for_update(); let impl_def = ctx.find_node_at_offset::<ast::Impl>()?.clone_for_update();
let trait_ = impl_def.trait_()?; let trait_ = impl_def.trait_()?;
if let ast::Type::PathType(trait_path) = trait_.clone() { if let ast::Type::PathType(trait_path) = trait_ {
let trait_type = ctx.sema.resolve_trait(&trait_path.path()?)?; let trait_type = ctx.sema.resolve_trait(&trait_path.path()?)?;
let scope = ctx.sema.scope(trait_path.syntax())?; let scope = ctx.sema.scope(trait_path.syntax())?;
if trait_type != FamousDefs(&ctx.sema, scope.krate()).core_convert_Index()? { if trait_type != FamousDefs(&ctx.sema, scope.krate()).core_convert_Index()? {
@ -105,7 +105,7 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>
"Generate `IndexMut` impl from this `Index` trait", "Generate `IndexMut` impl from this `Index` trait",
target, target,
|edit| { |edit| {
edit.insert(target.start(), format!("$0{}\n\n", impl_def.to_string())); edit.insert(target.start(), format!("$0{}\n\n", impl_def));
}, },
) )
} }

View File

@ -128,7 +128,7 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_
builder.replace_snippet( builder.replace_snippet(
snippet_cap, snippet_cap,
impl_name.syntax().text_range(), impl_name.syntax().text_range(),
format!("${{0:TraitName}}{} for {}", arg_list, impl_name.to_string()), format!("${{0:TraitName}}{} for {}", arg_list, impl_name),
); );
// Insert trait before TraitImpl // Insert trait before TraitImpl
@ -144,17 +144,13 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_
} else { } else {
builder.replace( builder.replace(
impl_name.syntax().text_range(), impl_name.syntax().text_range(),
format!("NewTrait{} for {}", arg_list, impl_name.to_string()), format!("NewTrait{} for {}", arg_list, impl_name),
); );
// Insert trait before TraitImpl // Insert trait before TraitImpl
builder.insert( builder.insert(
impl_ast.syntax().text_range().start(), impl_ast.syntax().text_range().start(),
format!( format!("{}\n\n{}", trait_ast, IndentLevel::from_node(impl_ast.syntax())),
"{}\n\n{}",
trait_ast.to_string(),
IndentLevel::from_node(impl_ast.syntax())
),
); );
} }

View File

@ -37,11 +37,9 @@ use crate::{
// ``` // ```
pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let (import_assets, syntax_under_caret) = find_importable_node(ctx)?; let (import_assets, syntax_under_caret) = find_importable_node(ctx)?;
let mut proposed_imports = import_assets.search_for_relative_paths( let mut proposed_imports: Vec<_> = import_assets
&ctx.sema, .search_for_relative_paths(&ctx.sema, ctx.config.prefer_no_std, ctx.config.prefer_prelude)
ctx.config.prefer_no_std, .collect();
ctx.config.prefer_prelude,
);
if proposed_imports.is_empty() { if proposed_imports.is_empty() {
return None; return None;
} }
@ -82,6 +80,7 @@ pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
}; };
// we aren't interested in different namespaces // we aren't interested in different namespaces
proposed_imports.sort_by(|a, b| a.import_path.cmp(&b.import_path));
proposed_imports.dedup_by(|a, b| a.import_path == b.import_path); proposed_imports.dedup_by(|a, b| a.import_path == b.import_path);
let group_label = group_label(candidate); let group_label = group_label(candidate);

View File

@ -43,7 +43,7 @@ pub(crate) fn remove_parentheses(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let prev_token = parens.syntax().first_token().and_then(|it| it.prev_token()); let prev_token = parens.syntax().first_token().and_then(|it| it.prev_token());
let need_to_add_ws = match prev_token { let need_to_add_ws = match prev_token {
Some(it) => { Some(it) => {
let tokens = vec![T![&], T![!], T!['('], T!['['], T!['{']]; let tokens = [T![&], T![!], T!['('], T!['['], T!['{']];
it.kind() != SyntaxKind::WHITESPACE && !tokens.contains(&it.kind()) it.kind() != SyntaxKind::WHITESPACE && !tokens.contains(&it.kind())
} }
None => false, None => false,

View File

@ -74,7 +74,6 @@ pub(crate) fn replace_derive_with_manual_impl(
current_crate, current_crate,
NameToImport::exact_case_sensitive(path.segments().last()?.to_string()), NameToImport::exact_case_sensitive(path.segments().last()?.to_string()),
items_locator::AssocSearchMode::Exclude, items_locator::AssocSearchMode::Exclude,
Some(items_locator::DEFAULT_QUERY_SEARCH_LIMIT.inner()),
) )
.filter_map(|item| match item.as_module_def()? { .filter_map(|item| match item.as_module_def()? {
ModuleDef::Trait(trait_) => Some(trait_), ModuleDef::Trait(trait_) => Some(trait_),

View File

@ -263,7 +263,6 @@ fn import_on_the_fly(
ctx.config.prefer_no_std, ctx.config.prefer_no_std,
ctx.config.prefer_prelude, ctx.config.prefer_prelude,
) )
.into_iter()
.filter(ns_filter) .filter(ns_filter)
.filter(|import| { .filter(|import| {
let original_item = &import.original_item; let original_item = &import.original_item;
@ -271,8 +270,14 @@ fn import_on_the_fly(
&& !ctx.is_item_hidden(original_item) && !ctx.is_item_hidden(original_item)
&& ctx.check_stability(original_item.attrs(ctx.db).as_deref()) && ctx.check_stability(original_item.attrs(ctx.db).as_deref())
}) })
.sorted_by_key(|located_import| { .sorted_by(|a, b| {
compute_fuzzy_completion_order_key(&located_import.import_path, &user_input_lowercased) let key = |import_path| {
(
compute_fuzzy_completion_order_key(import_path, &user_input_lowercased),
import_path,
)
};
key(&a.import_path).cmp(&key(&b.import_path))
}) })
.filter_map(|import| { .filter_map(|import| {
render_resolution_with_import(RenderContext::new(ctx), path_ctx, import) render_resolution_with_import(RenderContext::new(ctx), path_ctx, import)
@ -310,7 +315,6 @@ fn import_on_the_fly_pat_(
ctx.config.prefer_no_std, ctx.config.prefer_no_std,
ctx.config.prefer_prelude, ctx.config.prefer_prelude,
) )
.into_iter()
.filter(ns_filter) .filter(ns_filter)
.filter(|import| { .filter(|import| {
let original_item = &import.original_item; let original_item = &import.original_item;
@ -318,8 +322,14 @@ fn import_on_the_fly_pat_(
&& !ctx.is_item_hidden(original_item) && !ctx.is_item_hidden(original_item)
&& ctx.check_stability(original_item.attrs(ctx.db).as_deref()) && ctx.check_stability(original_item.attrs(ctx.db).as_deref())
}) })
.sorted_by_key(|located_import| { .sorted_by(|a, b| {
compute_fuzzy_completion_order_key(&located_import.import_path, &user_input_lowercased) let key = |import_path| {
(
compute_fuzzy_completion_order_key(import_path, &user_input_lowercased),
import_path,
)
};
key(&a.import_path).cmp(&key(&b.import_path))
}) })
.filter_map(|import| { .filter_map(|import| {
render_resolution_with_import_pat(RenderContext::new(ctx), pattern_ctx, import) render_resolution_with_import_pat(RenderContext::new(ctx), pattern_ctx, import)
@ -352,13 +362,18 @@ fn import_on_the_fly_method(
ctx.config.prefer_no_std, ctx.config.prefer_no_std,
ctx.config.prefer_prelude, ctx.config.prefer_prelude,
) )
.into_iter()
.filter(|import| { .filter(|import| {
!ctx.is_item_hidden(&import.item_to_import) !ctx.is_item_hidden(&import.item_to_import)
&& !ctx.is_item_hidden(&import.original_item) && !ctx.is_item_hidden(&import.original_item)
}) })
.sorted_by_key(|located_import| { .sorted_by(|a, b| {
compute_fuzzy_completion_order_key(&located_import.import_path, &user_input_lowercased) let key = |import_path| {
(
compute_fuzzy_completion_order_key(import_path, &user_input_lowercased),
import_path,
)
};
key(&a.import_path).cmp(&key(&b.import_path))
}) })
.for_each(|import| match import.original_item { .for_each(|import| match import.original_item {
ItemInNs::Values(hir::ModuleDef::Function(f)) => { ItemInNs::Values(hir::ModuleDef::Function(f)) => {
@ -407,7 +422,8 @@ fn compute_fuzzy_completion_order_key(
) -> usize { ) -> usize {
cov_mark::hit!(certain_fuzzy_order_test); cov_mark::hit!(certain_fuzzy_order_test);
let import_name = match proposed_mod_path.segments().last() { let import_name = match proposed_mod_path.segments().last() {
Some(name) => name.to_smol_str().to_lowercase(), // FIXME: nasty alloc, this is a hot path!
Some(name) => name.to_smol_str().to_ascii_lowercase(),
None => return usize::MAX, None => return usize::MAX,
}; };
match import_name.match_indices(user_input_lowercased).next() { match import_name.match_indices(user_input_lowercased).next() {

View File

@ -256,7 +256,6 @@ pub fn resolve_completion_edits(
current_crate, current_crate,
NameToImport::exact_case_sensitive(imported_name), NameToImport::exact_case_sensitive(imported_name),
items_locator::AssocSearchMode::Include, items_locator::AssocSearchMode::Include,
Some(items_locator::DEFAULT_QUERY_SEARCH_LIMIT.inner()),
); );
let import = items_with_name let import = items_with_name
.filter_map(|candidate| { .filter_map(|candidate| {

View File

@ -599,6 +599,7 @@ fn main() {
expect![[r#" expect![[r#"
fn weird_function() (use dep::test_mod::TestTrait) fn() DEPRECATED fn weird_function() (use dep::test_mod::TestTrait) fn() DEPRECATED
ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8 DEPRECATED ct SPECIAL_CONST (use dep::test_mod::TestTrait) u8 DEPRECATED
me random_method() (use dep::test_mod::TestTrait) fn(&self) DEPRECATED
"#]], "#]],
); );
} }

View File

@ -6,18 +6,22 @@
// FIXME: this badly needs rename/rewrite (matklad, 2020-02-06). // FIXME: this badly needs rename/rewrite (matklad, 2020-02-06).
use arrayvec::ArrayVec; use arrayvec::ArrayVec;
use either::Either;
use hir::{ use hir::{
Adt, AsAssocItem, AssocItem, BuiltinAttr, BuiltinType, Const, Crate, DeriveHelper, DocLinkDef, Adt, AsAssocItem, AssocItem, AttributeTemplate, BuiltinAttr, BuiltinType, Const, Crate,
ExternCrateDecl, Field, Function, GenericParam, HasVisibility, Impl, Label, Local, Macro, DefWithBody, DeriveHelper, DocLinkDef, ExternCrateDecl, Field, Function, GenericParam,
Module, ModuleDef, Name, PathResolution, Semantics, Static, ToolModule, Trait, TraitAlias, HasVisibility, HirDisplay, Impl, Label, Local, Macro, Module, ModuleDef, Name, PathResolution,
TypeAlias, Variant, Visibility, Semantics, Static, ToolModule, Trait, TraitAlias, TupleField, TypeAlias, Variant, VariantDef,
Visibility,
}; };
use stdx::impl_from; use stdx::{format_to, impl_from};
use syntax::{ use syntax::{
ast::{self, AstNode}, ast::{self, AstNode},
match_ast, SyntaxKind, SyntaxNode, SyntaxToken, match_ast, SyntaxKind, SyntaxNode, SyntaxToken,
}; };
use crate::documentation::{Documentation, HasDocs};
use crate::famous_defs::FamousDefs;
use crate::RootDatabase; use crate::RootDatabase;
// FIXME: a more precise name would probably be `Symbol`? // FIXME: a more precise name would probably be `Symbol`?
@ -25,6 +29,7 @@ use crate::RootDatabase;
pub enum Definition { pub enum Definition {
Macro(Macro), Macro(Macro),
Field(Field), Field(Field),
TupleField(TupleField),
Module(Module), Module(Module),
Function(Function), Function(Function),
Adt(Adt), Adt(Adt),
@ -76,13 +81,21 @@ impl Definition {
Definition::Label(it) => it.module(db), Definition::Label(it) => it.module(db),
Definition::ExternCrateDecl(it) => it.module(db), Definition::ExternCrateDecl(it) => it.module(db),
Definition::DeriveHelper(it) => it.derive().module(db), Definition::DeriveHelper(it) => it.derive().module(db),
Definition::BuiltinAttr(_) | Definition::BuiltinType(_) | Definition::ToolModule(_) => { Definition::BuiltinAttr(_)
return None | Definition::BuiltinType(_)
} | Definition::TupleField(_)
| Definition::ToolModule(_) => return None,
}; };
Some(module) Some(module)
} }
pub fn enclosing_definition(&self, db: &RootDatabase) -> Option<Definition> {
match self {
Definition::Local(it) => it.parent(db).try_into().ok(),
_ => None,
}
}
pub fn visibility(&self, db: &RootDatabase) -> Option<Visibility> { pub fn visibility(&self, db: &RootDatabase) -> Option<Visibility> {
let vis = match self { let vis = match self {
Definition::Field(sf) => sf.visibility(db), Definition::Field(sf) => sf.visibility(db),
@ -96,7 +109,7 @@ impl Definition {
Definition::TypeAlias(it) => it.visibility(db), Definition::TypeAlias(it) => it.visibility(db),
Definition::Variant(it) => it.visibility(db), Definition::Variant(it) => it.visibility(db),
Definition::ExternCrateDecl(it) => it.visibility(db), Definition::ExternCrateDecl(it) => it.visibility(db),
Definition::BuiltinType(_) => Visibility::Public, Definition::BuiltinType(_) | Definition::TupleField(_) => Visibility::Public,
Definition::Macro(_) => return None, Definition::Macro(_) => return None,
Definition::BuiltinAttr(_) Definition::BuiltinAttr(_)
| Definition::ToolModule(_) | Definition::ToolModule(_)
@ -123,6 +136,7 @@ impl Definition {
Definition::TraitAlias(it) => it.name(db), Definition::TraitAlias(it) => it.name(db),
Definition::TypeAlias(it) => it.name(db), Definition::TypeAlias(it) => it.name(db),
Definition::BuiltinType(it) => it.name(), Definition::BuiltinType(it) => it.name(),
Definition::TupleField(it) => it.name(),
Definition::SelfType(_) => return None, Definition::SelfType(_) => return None,
Definition::Local(it) => it.name(db), Definition::Local(it) => it.name(db),
Definition::GenericParam(it) => it.name(db), Definition::GenericParam(it) => it.name(db),
@ -134,6 +148,127 @@ impl Definition {
}; };
Some(name) Some(name)
} }
pub fn docs(
&self,
db: &RootDatabase,
famous_defs: Option<&FamousDefs<'_, '_>>,
) -> Option<Documentation> {
let docs = match self {
Definition::Macro(it) => it.docs(db),
Definition::Field(it) => it.docs(db),
Definition::Module(it) => it.docs(db),
Definition::Function(it) => it.docs(db),
Definition::Adt(it) => it.docs(db),
Definition::Variant(it) => it.docs(db),
Definition::Const(it) => it.docs(db),
Definition::Static(it) => it.docs(db),
Definition::Trait(it) => it.docs(db),
Definition::TraitAlias(it) => it.docs(db),
Definition::TypeAlias(it) => it.docs(db),
Definition::BuiltinType(it) => {
famous_defs.and_then(|fd| {
// std exposes prim_{} modules with docstrings on the root to document the builtins
let primitive_mod = format!("prim_{}", it.name().display(fd.0.db));
let doc_owner = find_std_module(fd, &primitive_mod)?;
doc_owner.docs(fd.0.db)
})
}
Definition::Local(_) => None,
Definition::SelfType(impl_def) => {
impl_def.self_ty(db).as_adt().map(|adt| adt.docs(db))?
}
Definition::GenericParam(_) => None,
Definition::Label(_) => None,
Definition::ExternCrateDecl(it) => it.docs(db),
Definition::BuiltinAttr(it) => {
let name = it.name(db);
let AttributeTemplate { word, list, name_value_str } = it.template(db)?;
let mut docs = "Valid forms are:".to_owned();
if word {
format_to!(docs, "\n - #\\[{}]", name);
}
if let Some(list) = list {
format_to!(docs, "\n - #\\[{}({})]", name, list);
}
if let Some(name_value_str) = name_value_str {
format_to!(docs, "\n - #\\[{} = {}]", name, name_value_str);
}
Some(Documentation::new(docs.replace('*', "\\*")))
}
Definition::ToolModule(_) => None,
Definition::DeriveHelper(_) => None,
Definition::TupleField(_) => None,
};
docs.or_else(|| {
// docs are missing, for assoc items of trait impls try to fall back to the docs of the
// original item of the trait
let assoc = self.as_assoc_item(db)?;
let trait_ = assoc.containing_trait_impl(db)?;
let name = Some(assoc.name(db)?);
let item = trait_.items(db).into_iter().find(|it| it.name(db) == name)?;
item.docs(db)
})
}
    /// Renders a short, human-readable label for this definition (the kind of
    /// one-line description shown in UI popups).
    ///
    /// Returns `None` only via the `SelfType` arm, when the impl's self type
    /// cannot be resolved to an ADT (`as_adt()` fails) or the recursive
    /// `label` call on that ADT fails; every other arm produces a `String`.
    pub fn label(&self, db: &RootDatabase) -> Option<String> {
        let label = match *self {
            // Most variants delegate straight to their display renderer.
            Definition::Macro(it) => it.display(db).to_string(),
            Definition::Field(it) => it.display(db).to_string(),
            Definition::TupleField(it) => it.display(db).to_string(),
            Definition::Module(it) => it.display(db).to_string(),
            Definition::Function(it) => it.display(db).to_string(),
            Definition::Adt(it) => it.display(db).to_string(),
            Definition::Variant(it) => it.display(db).to_string(),
            Definition::Const(it) => it.display(db).to_string(),
            Definition::Static(it) => it.display(db).to_string(),
            Definition::Trait(it) => it.display(db).to_string(),
            Definition::TraitAlias(it) => it.display(db).to_string(),
            Definition::TypeAlias(it) => it.display(db).to_string(),
            Definition::BuiltinType(it) => it.name().display(db).to_string(),
            // Locals are rendered as a pseudo-declaration: an optional `let `
            // prefix, optional `mut `, the name, and the (truncated) type.
            Definition::Local(it) => {
                let ty = it.ty(db);
                // `None` limit: truncate with the display layer's default width.
                let ty = ty.display_truncated(db, None);
                let is_mut = if it.is_mut(db) { "mut " } else { "" };
                let desc = match it.primary_source(db).into_ident_pat() {
                    Some(ident) => {
                        let name = it.name(db);
                        // Only show `let ` when the binding pattern sits
                        // directly under a let statement/expression; bindings
                        // from e.g. match arms or fn params get no prefix.
                        let let_kw = if ident.syntax().parent().map_or(false, |p| {
                            p.kind() == SyntaxKind::LET_STMT || p.kind() == SyntaxKind::LET_EXPR
                        }) {
                            "let "
                        } else {
                            ""
                        };
                        format!("{let_kw}{is_mut}{}: {ty}", name.display(db))
                    }
                    // No ident pattern source means this local is a `self`
                    // parameter, so render it as `self: <ty>`.
                    None => format!("{is_mut}self: {ty}"),
                };
                desc
            }
            // For `Self` in an impl, reuse the label of the underlying ADT;
            // `?` propagates failure when the self type is not an ADT.
            Definition::SelfType(impl_def) => {
                impl_def.self_ty(db).as_adt().and_then(|adt| Definition::Adt(adt).label(db))?
            }
            Definition::GenericParam(it) => it.display(db).to_string(),
            Definition::Label(it) => it.name(db).display(db).to_string(),
            Definition::ExternCrateDecl(it) => it.display(db).to_string(),
            // Builtin attributes are shown in attribute syntax, e.g. `#[inline]`.
            Definition::BuiltinAttr(it) => format!("#[{}]", it.name(db)),
            Definition::ToolModule(it) => it.name(db).to_string(),
            Definition::DeriveHelper(it) => format!("derive_helper {}", it.name(db).display(db)),
        };
        Some(label)
    }
}
fn find_std_module(famous_defs: &FamousDefs<'_, '_>, name: &str) -> Option<hir::Module> {
let db = famous_defs.0.db;
let std_crate = famous_defs.std()?;
let std_root_module = std_crate.root_module();
std_root_module.children(db).find(|module| {
module.name(db).map_or(false, |module| module.display(db).to_string() == name)
})
} }
// FIXME: IdentClass as a name no longer fits // FIXME: IdentClass as a name no longer fits
@ -502,9 +637,11 @@ impl NameRefClass {
ast::FieldExpr(field_expr) => { ast::FieldExpr(field_expr) => {
sema.resolve_field_fallback(&field_expr) sema.resolve_field_fallback(&field_expr)
.map(|it| { .map(|it| {
it.map_left(Definition::Field) NameRefClass::Definition(match it {
.map_right(Definition::Function) Either::Left(Either::Left(field)) => Definition::Field(field),
.either(NameRefClass::Definition, NameRefClass::Definition) Either::Left(Either::Right(field)) => Definition::TupleField(field),
Either::Right(fun) => Definition::Function(fun),
})
}) })
}, },
ast::RecordPatField(record_pat_field) => { ast::RecordPatField(record_pat_field) => {
@ -662,3 +799,22 @@ impl From<DocLinkDef> for Definition {
} }
} }
} }
impl From<VariantDef> for Definition {
fn from(def: VariantDef) -> Self {
ModuleDef::from(def).into()
}
}
impl TryFrom<DefWithBody> for Definition {
type Error = ();
fn try_from(def: DefWithBody) -> Result<Self, Self::Error> {
match def {
DefWithBody::Function(it) => Ok(it.into()),
DefWithBody::Static(it) => Ok(it.into()),
DefWithBody::Const(it) => Ok(it.into()),
DefWithBody::Variant(it) => Ok(it.into()),
DefWithBody::InTypeConst(_) => Err(()),
}
}
}

View File

@ -207,7 +207,7 @@ impl ImportAssets {
prefix_kind: PrefixKind, prefix_kind: PrefixKind,
prefer_no_std: bool, prefer_no_std: bool,
prefer_prelude: bool, prefer_prelude: bool,
) -> Vec<LocatedImport> { ) -> impl Iterator<Item = LocatedImport> {
let _p = profile::span("import_assets::search_for_imports"); let _p = profile::span("import_assets::search_for_imports");
self.search_for(sema, Some(prefix_kind), prefer_no_std, prefer_prelude) self.search_for(sema, Some(prefix_kind), prefer_no_std, prefer_prelude)
} }
@ -218,7 +218,7 @@ impl ImportAssets {
sema: &Semantics<'_, RootDatabase>, sema: &Semantics<'_, RootDatabase>,
prefer_no_std: bool, prefer_no_std: bool,
prefer_prelude: bool, prefer_prelude: bool,
) -> Vec<LocatedImport> { ) -> impl Iterator<Item = LocatedImport> {
let _p = profile::span("import_assets::search_for_relative_paths"); let _p = profile::span("import_assets::search_for_relative_paths");
self.search_for(sema, None, prefer_no_std, prefer_prelude) self.search_for(sema, None, prefer_no_std, prefer_prelude)
} }
@ -259,9 +259,15 @@ impl ImportAssets {
prefixed: Option<PrefixKind>, prefixed: Option<PrefixKind>,
prefer_no_std: bool, prefer_no_std: bool,
prefer_prelude: bool, prefer_prelude: bool,
) -> Vec<LocatedImport> { ) -> impl Iterator<Item = LocatedImport> {
let _p = profile::span("import_assets::search_for"); let _p = profile::span("import_assets::search_for");
let scope = match sema.scope(&self.candidate_node) {
Some(it) => it,
None => return <FxHashSet<_>>::default().into_iter(),
};
let krate = self.module_with_candidate.krate();
let scope_definitions = self.scope_definitions(sema); let scope_definitions = self.scope_definitions(sema);
let mod_path = |item| { let mod_path = |item| {
get_mod_path( get_mod_path(
@ -272,30 +278,30 @@ impl ImportAssets {
prefer_no_std, prefer_no_std,
prefer_prelude, prefer_prelude,
) )
}; .filter(|path| path.len() > 1)
let krate = self.module_with_candidate.krate();
let scope = match sema.scope(&self.candidate_node) {
Some(it) => it,
None => return Vec::new(),
}; };
match &self.import_candidate { match &self.import_candidate {
ImportCandidate::Path(path_candidate) => { ImportCandidate::Path(path_candidate) => {
path_applicable_imports(sema, krate, path_candidate, mod_path) path_applicable_imports(sema, krate, path_candidate, mod_path, |item_to_import| {
} !scope_definitions.contains(&ScopeDef::from(item_to_import))
ImportCandidate::TraitAssocItem(trait_candidate) => { })
trait_applicable_items(sema, krate, &scope, trait_candidate, true, mod_path)
}
ImportCandidate::TraitMethod(trait_candidate) => {
trait_applicable_items(sema, krate, &scope, trait_candidate, false, mod_path)
} }
ImportCandidate::TraitAssocItem(trait_candidate)
| ImportCandidate::TraitMethod(trait_candidate) => trait_applicable_items(
sema,
krate,
&scope,
trait_candidate,
matches!(self.import_candidate, ImportCandidate::TraitAssocItem(_)),
mod_path,
|trait_to_import| {
!scope_definitions
.contains(&ScopeDef::ModuleDef(ModuleDef::Trait(trait_to_import)))
},
),
} }
.into_iter() .into_iter()
.filter(|import| import.import_path.len() > 1)
.filter(|import| !scope_definitions.contains(&ScopeDef::from(import.item_to_import)))
.sorted_by(|a, b| a.import_path.cmp(&b.import_path))
.collect()
} }
fn scope_definitions(&self, sema: &Semantics<'_, RootDatabase>) -> FxHashSet<ScopeDef> { fn scope_definitions(&self, sema: &Semantics<'_, RootDatabase>) -> FxHashSet<ScopeDef> {
@ -315,6 +321,7 @@ fn path_applicable_imports(
current_crate: Crate, current_crate: Crate,
path_candidate: &PathImportCandidate, path_candidate: &PathImportCandidate,
mod_path: impl Fn(ItemInNs) -> Option<ModPath> + Copy, mod_path: impl Fn(ItemInNs) -> Option<ModPath> + Copy,
scope_filter: impl Fn(ItemInNs) -> bool + Copy,
) -> FxHashSet<LocatedImport> { ) -> FxHashSet<LocatedImport> {
let _p = profile::span("import_assets::path_applicable_imports"); let _p = profile::span("import_assets::path_applicable_imports");
@ -333,12 +340,15 @@ fn path_applicable_imports(
// //
// see also an ignored test under FIXME comment in the qualify_path.rs module // see also an ignored test under FIXME comment in the qualify_path.rs module
AssocSearchMode::Exclude, AssocSearchMode::Exclude,
Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()),
) )
.filter_map(|item| { .filter_map(|item| {
if !scope_filter(item) {
return None;
}
let mod_path = mod_path(item)?; let mod_path = mod_path(item)?;
Some(LocatedImport::new(mod_path, item, item)) Some(LocatedImport::new(mod_path, item, item))
}) })
.take(DEFAULT_QUERY_SEARCH_LIMIT.inner())
.collect() .collect()
} }
Some(qualifier) => items_locator::items_with_name( Some(qualifier) => items_locator::items_with_name(
@ -346,9 +356,9 @@ fn path_applicable_imports(
current_crate, current_crate,
path_candidate.name.clone(), path_candidate.name.clone(),
AssocSearchMode::Include, AssocSearchMode::Include,
Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()),
) )
.filter_map(|item| import_for_item(sema.db, mod_path, &qualifier, item)) .filter_map(|item| import_for_item(sema.db, mod_path, &qualifier, item, scope_filter))
.take(DEFAULT_QUERY_SEARCH_LIMIT.inner())
.collect(), .collect(),
} }
} }
@ -358,6 +368,7 @@ fn import_for_item(
mod_path: impl Fn(ItemInNs) -> Option<ModPath>, mod_path: impl Fn(ItemInNs) -> Option<ModPath>,
unresolved_qualifier: &[SmolStr], unresolved_qualifier: &[SmolStr],
original_item: ItemInNs, original_item: ItemInNs,
scope_filter: impl Fn(ItemInNs) -> bool,
) -> Option<LocatedImport> { ) -> Option<LocatedImport> {
let _p = profile::span("import_assets::import_for_item"); let _p = profile::span("import_assets::import_for_item");
let [first_segment, ..] = unresolved_qualifier else { return None }; let [first_segment, ..] = unresolved_qualifier else { return None };
@ -413,15 +424,16 @@ fn import_for_item(
// especially in case of lazy completion edit resolutions. // especially in case of lazy completion edit resolutions.
return None; return None;
} }
(false, Some(trait_to_import)) => { (false, Some(trait_to_import)) if scope_filter(trait_to_import) => {
LocatedImport::new(mod_path(trait_to_import)?, trait_to_import, original_item) LocatedImport::new(mod_path(trait_to_import)?, trait_to_import, original_item)
} }
(true, None) => { (true, None) if scope_filter(original_item_candidate) => {
LocatedImport::new(import_path_candidate, original_item_candidate, original_item) LocatedImport::new(import_path_candidate, original_item_candidate, original_item)
} }
(false, None) => { (false, None) if scope_filter(segment_import) => {
LocatedImport::new(mod_path(segment_import)?, segment_import, original_item) LocatedImport::new(mod_path(segment_import)?, segment_import, original_item)
} }
_ => return None,
}) })
} }
@ -490,6 +502,7 @@ fn trait_applicable_items(
trait_candidate: &TraitImportCandidate, trait_candidate: &TraitImportCandidate,
trait_assoc_item: bool, trait_assoc_item: bool,
mod_path: impl Fn(ItemInNs) -> Option<ModPath>, mod_path: impl Fn(ItemInNs) -> Option<ModPath>,
scope_filter: impl Fn(hir::Trait) -> bool,
) -> FxHashSet<LocatedImport> { ) -> FxHashSet<LocatedImport> {
let _p = profile::span("import_assets::trait_applicable_items"); let _p = profile::span("import_assets::trait_applicable_items");
@ -500,22 +513,24 @@ fn trait_applicable_items(
let related_traits = inherent_traits.chain(env_traits).collect::<FxHashSet<_>>(); let related_traits = inherent_traits.chain(env_traits).collect::<FxHashSet<_>>();
let mut required_assoc_items = FxHashSet::default(); let mut required_assoc_items = FxHashSet::default();
let trait_candidates = items_locator::items_with_name( let trait_candidates: FxHashSet<_> = items_locator::items_with_name(
sema, sema,
current_crate, current_crate,
trait_candidate.assoc_item_name.clone(), trait_candidate.assoc_item_name.clone(),
AssocSearchMode::AssocItemsOnly, AssocSearchMode::AssocItemsOnly,
Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()),
) )
.filter_map(|input| item_as_assoc(db, input)) .filter_map(|input| item_as_assoc(db, input))
.filter_map(|assoc| { .filter_map(|assoc| {
if !trait_assoc_item && matches!(assoc, AssocItem::Const(_) | AssocItem::TypeAlias(_)) {
return None;
}
let assoc_item_trait = assoc.containing_trait(db)?; let assoc_item_trait = assoc.containing_trait(db)?;
if related_traits.contains(&assoc_item_trait) { if related_traits.contains(&assoc_item_trait) {
None return None;
} else {
required_assoc_items.insert(assoc);
Some(assoc_item_trait.into())
} }
required_assoc_items.insert(assoc);
Some(assoc_item_trait.into())
}) })
.collect(); .collect();
@ -531,12 +546,8 @@ fn trait_applicable_items(
None, None,
|assoc| { |assoc| {
if required_assoc_items.contains(&assoc) { if required_assoc_items.contains(&assoc) {
if let AssocItem::Function(f) = assoc { let located_trait =
if f.self_param(db).is_some() { assoc.containing_trait(db).filter(|&it| scope_filter(it))?;
return None;
}
}
let located_trait = assoc.containing_trait(db)?;
let trait_item = ItemInNs::from(ModuleDef::from(located_trait)); let trait_item = ItemInNs::from(ModuleDef::from(located_trait));
let import_path = trait_import_paths let import_path = trait_import_paths
.entry(trait_item) .entry(trait_item)
@ -561,7 +572,8 @@ fn trait_applicable_items(
|function| { |function| {
let assoc = function.as_assoc_item(db)?; let assoc = function.as_assoc_item(db)?;
if required_assoc_items.contains(&assoc) { if required_assoc_items.contains(&assoc) {
let located_trait = assoc.containing_trait(db)?; let located_trait =
assoc.containing_trait(db).filter(|&it| scope_filter(it))?;
let trait_item = ItemInNs::from(ModuleDef::from(located_trait)); let trait_item = ItemInNs::from(ModuleDef::from(located_trait));
let import_path = trait_import_paths let import_path = trait_import_paths
.entry(trait_item) .entry(trait_item)

View File

@ -19,26 +19,24 @@ pub fn items_with_name<'a>(
krate: Crate, krate: Crate,
name: NameToImport, name: NameToImport,
assoc_item_search: AssocSearchMode, assoc_item_search: AssocSearchMode,
limit: Option<usize>,
) -> impl Iterator<Item = ItemInNs> + 'a { ) -> impl Iterator<Item = ItemInNs> + 'a {
let _p = profile::span("items_with_name").detail(|| { let _p = profile::span("items_with_name").detail(|| {
format!( format!(
"Name: {}, crate: {:?}, assoc items: {:?}, limit: {:?}", "Name: {}, crate: {:?}, assoc items: {:?}",
name.text(), name.text(),
assoc_item_search, assoc_item_search,
krate.display_name(sema.db).map(|name| name.to_string()), krate.display_name(sema.db).map(|name| name.to_string()),
limit,
) )
}); });
let prefix = matches!(name, NameToImport::Prefix(..)); let prefix = matches!(name, NameToImport::Prefix(..));
let (mut local_query, mut external_query) = match name { let (local_query, external_query) = match name {
NameToImport::Prefix(exact_name, case_sensitive) NameToImport::Prefix(exact_name, case_sensitive)
| NameToImport::Exact(exact_name, case_sensitive) => { | NameToImport::Exact(exact_name, case_sensitive) => {
let mut local_query = symbol_index::Query::new(exact_name.clone()); let mut local_query = symbol_index::Query::new(exact_name.clone());
local_query.assoc_search_mode(assoc_item_search);
let mut external_query = let mut external_query =
// import_map::Query::new(exact_name).assoc_search_mode(assoc_item_search); import_map::Query::new(exact_name).assoc_search_mode(assoc_item_search);
import_map::Query::new(exact_name);
if prefix { if prefix {
local_query.prefix(); local_query.prefix();
external_query = external_query.prefix(); external_query = external_query.prefix();
@ -55,8 +53,9 @@ pub fn items_with_name<'a>(
NameToImport::Fuzzy(fuzzy_search_string, case_sensitive) => { NameToImport::Fuzzy(fuzzy_search_string, case_sensitive) => {
let mut local_query = symbol_index::Query::new(fuzzy_search_string.clone()); let mut local_query = symbol_index::Query::new(fuzzy_search_string.clone());
local_query.fuzzy(); local_query.fuzzy();
local_query.assoc_search_mode(assoc_item_search);
let mut external_query = import_map::Query::new(fuzzy_search_string.clone()) let mut external_query = import_map::Query::new(fuzzy_search_string)
.fuzzy() .fuzzy()
.assoc_search_mode(assoc_item_search); .assoc_search_mode(assoc_item_search);
@ -69,18 +68,12 @@ pub fn items_with_name<'a>(
} }
}; };
if let Some(limit) = limit { find_items(sema, krate, local_query, external_query)
external_query = external_query.limit(limit);
local_query.limit(limit);
}
find_items(sema, krate, assoc_item_search, local_query, external_query)
} }
fn find_items<'a>( fn find_items<'a>(
sema: &'a Semantics<'_, RootDatabase>, sema: &'a Semantics<'_, RootDatabase>,
krate: Crate, krate: Crate,
assoc_item_search: AssocSearchMode,
local_query: symbol_index::Query, local_query: symbol_index::Query,
external_query: import_map::Query, external_query: import_map::Query,
) -> impl Iterator<Item = ItemInNs> + 'a { ) -> impl Iterator<Item = ItemInNs> + 'a {
@ -98,18 +91,12 @@ fn find_items<'a>(
}); });
// Query the local crate using the symbol index. // Query the local crate using the symbol index.
let local_results = local_query let mut local_results = Vec::new();
.search(&symbol_index::crate_symbols(db, krate)) local_query.search(&symbol_index::crate_symbols(db, krate), |local_candidate| {
.into_iter() local_results.push(match local_candidate.def {
.filter(move |candidate| match assoc_item_search {
AssocSearchMode::Include => true,
AssocSearchMode::Exclude => !candidate.is_assoc,
AssocSearchMode::AssocItemsOnly => candidate.is_assoc,
})
.map(|local_candidate| match local_candidate.def {
hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def), hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def),
def => ItemInNs::from(def), def => ItemInNs::from(def),
}); })
});
external_importables.chain(local_results) local_results.into_iter().chain(external_importables)
} }

View File

@ -159,7 +159,7 @@ impl<'a> PathTransform<'a> {
.for_each(|(k, v)| match (k.split(db), v) { .for_each(|(k, v)| match (k.split(db), v) {
(Either::Right(k), Some(TypeOrConst::Either(v))) => { (Either::Right(k), Some(TypeOrConst::Either(v))) => {
if let Some(ty) = v.ty() { if let Some(ty) = v.ty() {
type_substs.insert(k, ty.clone()); type_substs.insert(k, ty);
} }
} }
(Either::Right(k), None) => { (Either::Right(k), None) => {

View File

@ -198,6 +198,7 @@ impl Definition {
Definition::SelfType(_) => return None, Definition::SelfType(_) => return None,
Definition::BuiltinAttr(_) => return None, Definition::BuiltinAttr(_) => return None,
Definition::ToolModule(_) => return None, Definition::ToolModule(_) => return None,
Definition::TupleField(_) => return None,
// FIXME: This should be doable in theory // FIXME: This should be doable in theory
Definition::DeriveHelper(_) => return None, Definition::DeriveHelper(_) => return None,
}; };

View File

@ -539,7 +539,7 @@ impl<'a> FindUsages<'a> {
tree.token_at_offset(offset).into_iter().for_each(|token| { tree.token_at_offset(offset).into_iter().for_each(|token| {
let Some(str_token) = ast::String::cast(token.clone()) else { return }; let Some(str_token) = ast::String::cast(token.clone()) else { return };
if let Some((range, nameres)) = if let Some((range, nameres)) =
sema.check_for_format_args_template(token.clone(), offset) sema.check_for_format_args_template(token, offset)
{ {
if self.found_format_args_ref(file_id, range, str_token, nameres, sink) { if self.found_format_args_ref(file_id, range, str_token, nameres, sink) {
return; return;

View File

@ -341,13 +341,13 @@ impl SourceChangeBuilder {
/// Adds a tabstop snippet to place the cursor before `token` /// Adds a tabstop snippet to place the cursor before `token`
pub fn add_tabstop_before_token(&mut self, _cap: SnippetCap, token: SyntaxToken) { pub fn add_tabstop_before_token(&mut self, _cap: SnippetCap, token: SyntaxToken) {
assert!(token.parent().is_some()); assert!(token.parent().is_some());
self.add_snippet(PlaceSnippet::Before(token.clone().into())); self.add_snippet(PlaceSnippet::Before(token.into()));
} }
/// Adds a tabstop snippet to place the cursor after `token` /// Adds a tabstop snippet to place the cursor after `token`
pub fn add_tabstop_after_token(&mut self, _cap: SnippetCap, token: SyntaxToken) { pub fn add_tabstop_after_token(&mut self, _cap: SnippetCap, token: SyntaxToken) {
assert!(token.parent().is_some()); assert!(token.parent().is_some());
self.add_snippet(PlaceSnippet::After(token.clone().into())); self.add_snippet(PlaceSnippet::After(token.into()));
} }
/// Adds a snippet to move the cursor selected over `node` /// Adds a snippet to move the cursor selected over `node`

View File

@ -31,9 +31,10 @@ use base_db::{
salsa::{self, ParallelDatabase}, salsa::{self, ParallelDatabase},
SourceDatabaseExt, SourceRootId, Upcast, SourceDatabaseExt, SourceRootId, Upcast,
}; };
use fst::{self, Streamer}; use fst::{self, raw::IndexedValue, Automaton, Streamer};
use hir::{ use hir::{
db::HirDatabase, db::HirDatabase,
import_map::{AssocSearchMode, SearchMode},
symbols::{FileSymbol, SymbolCollector}, symbols::{FileSymbol, SymbolCollector},
Crate, Module, Crate, Module,
}; };
@ -43,22 +44,15 @@ use triomphe::Arc;
use crate::RootDatabase; use crate::RootDatabase;
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
enum SearchMode {
Fuzzy,
Exact,
Prefix,
}
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct Query { pub struct Query {
query: String, query: String,
lowercased: String, lowercased: String,
mode: SearchMode,
assoc_mode: AssocSearchMode,
case_sensitive: bool,
only_types: bool, only_types: bool,
libs: bool, libs: bool,
mode: SearchMode,
case_sensitive: bool,
limit: usize,
} }
impl Query { impl Query {
@ -70,8 +64,8 @@ impl Query {
only_types: false, only_types: false,
libs: false, libs: false,
mode: SearchMode::Fuzzy, mode: SearchMode::Fuzzy,
assoc_mode: AssocSearchMode::Include,
case_sensitive: false, case_sensitive: false,
limit: usize::max_value(),
} }
} }
@ -95,12 +89,13 @@ impl Query {
self.mode = SearchMode::Prefix; self.mode = SearchMode::Prefix;
} }
pub fn case_sensitive(&mut self) { /// Specifies whether we want to include associated items in the result.
self.case_sensitive = true; pub fn assoc_search_mode(&mut self, assoc_mode: AssocSearchMode) {
self.assoc_mode = assoc_mode;
} }
pub fn limit(&mut self, limit: usize) { pub fn case_sensitive(&mut self) {
self.limit = limit self.case_sensitive = true;
} }
} }
@ -225,7 +220,9 @@ pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol> {
indices.iter().flat_map(|indices| indices.iter().cloned()).collect() indices.iter().flat_map(|indices| indices.iter().cloned()).collect()
}; };
query.search(&indices) let mut res = vec![];
query.search(&indices, |f| res.push(f.clone()));
res
} }
#[derive(Default)] #[derive(Default)]
@ -285,6 +282,7 @@ impl SymbolIndex {
builder.insert(key, value).unwrap(); builder.insert(key, value).unwrap();
} }
// FIXME: fst::Map should ideally have a way to shrink the backing buffer without the unwrap dance
let map = fst::Map::new({ let map = fst::Map::new({
let mut buf = builder.into_inner().unwrap(); let mut buf = builder.into_inner().unwrap();
buf.shrink_to_fit(); buf.shrink_to_fit();
@ -317,22 +315,54 @@ impl SymbolIndex {
} }
impl Query { impl Query {
pub(crate) fn search(self, indices: &[Arc<SymbolIndex>]) -> Vec<FileSymbol> { pub(crate) fn search<'sym>(
self,
indices: &'sym [Arc<SymbolIndex>],
cb: impl FnMut(&'sym FileSymbol),
) {
let _p = profile::span("symbol_index::Query::search"); let _p = profile::span("symbol_index::Query::search");
let mut op = fst::map::OpBuilder::new(); let mut op = fst::map::OpBuilder::new();
for file_symbols in indices.iter() { match self.mode {
let automaton = fst::automaton::Subsequence::new(&self.lowercased); SearchMode::Exact => {
op = op.add(file_symbols.map.search(automaton)) let automaton = fst::automaton::Str::new(&self.lowercased);
for index in indices.iter() {
op = op.add(index.map.search(&automaton));
}
self.search_maps(&indices, op.union(), cb)
}
SearchMode::Fuzzy => {
let automaton = fst::automaton::Subsequence::new(&self.lowercased);
for index in indices.iter() {
op = op.add(index.map.search(&automaton));
}
self.search_maps(&indices, op.union(), cb)
}
SearchMode::Prefix => {
let automaton = fst::automaton::Str::new(&self.lowercased).starts_with();
for index in indices.iter() {
op = op.add(index.map.search(&automaton));
}
self.search_maps(&indices, op.union(), cb)
}
} }
let mut stream = op.union(); }
let mut res = Vec::new();
fn search_maps<'sym>(
&self,
indices: &'sym [Arc<SymbolIndex>],
mut stream: fst::map::Union<'_>,
mut cb: impl FnMut(&'sym FileSymbol),
) {
while let Some((_, indexed_values)) = stream.next() { while let Some((_, indexed_values)) = stream.next() {
for indexed_value in indexed_values { for &IndexedValue { index, value } in indexed_values {
let symbol_index = &indices[indexed_value.index]; let symbol_index = &indices[index];
let (start, end) = SymbolIndex::map_value_to_range(indexed_value.value); let (start, end) = SymbolIndex::map_value_to_range(value);
for symbol in &symbol_index.symbols[start..end] { for symbol in &symbol_index.symbols[start..end] {
if self.only_types let non_type_for_type_only_query = self.only_types
&& !matches!( && !matches!(
symbol.def, symbol.def,
hir::ModuleDef::Adt(..) hir::ModuleDef::Adt(..)
@ -340,38 +370,23 @@ impl Query {
| hir::ModuleDef::BuiltinType(..) | hir::ModuleDef::BuiltinType(..)
| hir::ModuleDef::TraitAlias(..) | hir::ModuleDef::TraitAlias(..)
| hir::ModuleDef::Trait(..) | hir::ModuleDef::Trait(..)
) );
{ if non_type_for_type_only_query || !self.matches_assoc_mode(symbol.is_assoc) {
continue; continue;
} }
let skip = match self.mode { if self.mode.check(&self.query, self.case_sensitive, &symbol.name) {
SearchMode::Fuzzy => { cb(symbol);
self.case_sensitive
&& self.query.chars().any(|c| !symbol.name.contains(c))
}
SearchMode::Exact => symbol.name != self.query,
SearchMode::Prefix if self.case_sensitive => {
!symbol.name.starts_with(&self.query)
}
SearchMode::Prefix => symbol
.name
.chars()
.zip(self.lowercased.chars())
.all(|(n, q)| n.to_lowercase().next() == Some(q)),
};
if skip {
continue;
}
res.push(symbol.clone());
if res.len() >= self.limit {
return res;
} }
} }
} }
} }
res }
fn matches_assoc_mode(&self, is_trait_assoc_item: bool) -> bool {
match (is_trait_assoc_item, self.assoc_mode) {
(true, AssocSearchMode::Exclude) | (false, AssocSearchMode::AssocItemsOnly) => false,
_ => true,
}
} }
} }

View File

@ -1,5 +1,10 @@
use hir::{Const, Function, HasSource, TypeAlias}; use hir::{db::ExpandDatabase, Const, Function, HasSource, HirDisplay, TypeAlias};
use ide_db::base_db::FileRange; use ide_db::{
assists::{Assist, AssistId, AssistKind},
label::Label,
source_change::SourceChangeBuilder,
};
use text_edit::TextRange;
use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext};
@ -10,47 +15,195 @@ pub(crate) fn trait_impl_redundant_assoc_item(
ctx: &DiagnosticsContext<'_>, ctx: &DiagnosticsContext<'_>,
d: &hir::TraitImplRedundantAssocItems, d: &hir::TraitImplRedundantAssocItems,
) -> Diagnostic { ) -> Diagnostic {
let name = d.assoc_item.0.clone();
let assoc_item = d.assoc_item.1;
let db = ctx.sema.db; let db = ctx.sema.db;
let name = d.assoc_item.0.clone();
let redundant_assoc_item_name = name.display(db);
let assoc_item = d.assoc_item.1;
let default_range = d.impl_.syntax_node_ptr().text_range(); let default_range = d.impl_.syntax_node_ptr().text_range();
let trait_name = d.trait_.name(db).to_smol_str(); let trait_name = d.trait_.name(db).to_smol_str();
let (redundant_item_name, diagnostic_range) = match assoc_item { let (redundant_item_name, diagnostic_range, redundant_item_def) = match assoc_item {
hir::AssocItem::Function(id) => ( hir::AssocItem::Function(id) => {
format!("`fn {}`", name.display(db)), let function = Function::from(id);
Function::from(id) (
.source(db) format!("`fn {}`", redundant_assoc_item_name),
.map(|it| it.syntax().value.text_range()) function
.unwrap_or(default_range), .source(db)
), .map(|it| it.syntax().value.text_range())
hir::AssocItem::Const(id) => ( .unwrap_or(default_range),
format!("`const {}`", name.display(db)), format!("\n {};", function.display(db)),
Const::from(id) )
.source(db) }
.map(|it| it.syntax().value.text_range()) hir::AssocItem::Const(id) => {
.unwrap_or(default_range), let constant = Const::from(id);
), (
hir::AssocItem::TypeAlias(id) => ( format!("`const {}`", redundant_assoc_item_name),
format!("`type {}`", name.display(db)), constant
TypeAlias::from(id) .source(db)
.source(db) .map(|it| it.syntax().value.text_range())
.map(|it| it.syntax().value.text_range()) .unwrap_or(default_range),
.unwrap_or(default_range), format!("\n {};", constant.display(db)),
), )
}
hir::AssocItem::TypeAlias(id) => {
let type_alias = TypeAlias::from(id);
(
format!("`type {}`", redundant_assoc_item_name),
type_alias
.source(db)
.map(|it| it.syntax().value.text_range())
.unwrap_or(default_range),
format!("\n type {};", type_alias.name(ctx.sema.db).to_smol_str()),
)
}
}; };
Diagnostic::new( Diagnostic::new(
DiagnosticCode::RustcHardError("E0407"), DiagnosticCode::RustcHardError("E0407"),
format!("{redundant_item_name} is not a member of trait `{trait_name}`"), format!("{redundant_item_name} is not a member of trait `{trait_name}`"),
FileRange { file_id: d.file_id.file_id().unwrap(), range: diagnostic_range }, hir::InFile::new(d.file_id, diagnostic_range).original_node_file_range_rooted(db),
) )
.with_fixes(quickfix_for_redundant_assoc_item(
ctx,
d,
redundant_item_def,
diagnostic_range,
))
}
/// add assoc item into the trait def body
fn quickfix_for_redundant_assoc_item(
ctx: &DiagnosticsContext<'_>,
d: &hir::TraitImplRedundantAssocItems,
redundant_item_def: String,
range: TextRange,
) -> Option<Vec<Assist>> {
let add_assoc_item_def = |builder: &mut SourceChangeBuilder| -> Option<()> {
let db = ctx.sema.db;
let root = db.parse_or_expand(d.file_id);
// don't modify trait def in outer crate
let current_crate = ctx.sema.scope(&d.impl_.syntax_node_ptr().to_node(&root))?.krate();
let trait_def_crate = d.trait_.module(db).krate();
if trait_def_crate != current_crate {
return None;
}
let trait_def = d.trait_.source(db)?.value;
let l_curly = trait_def.assoc_item_list()?.l_curly_token()?.text_range();
let where_to_insert =
hir::InFile::new(d.file_id, l_curly).original_node_file_range_rooted(db).range;
Some(builder.insert(where_to_insert.end(), redundant_item_def))
};
let file_id = d.file_id.file_id()?;
let mut source_change_builder = SourceChangeBuilder::new(file_id);
add_assoc_item_def(&mut source_change_builder)?;
Some(vec![Assist {
id: AssistId("add assoc item def into trait def", AssistKind::QuickFix),
label: Label::new("Add assoc item def into trait def".to_string()),
group: None,
target: range,
source_change: Some(source_change_builder.finish()),
trigger_signature_help: false,
}])
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::tests::check_diagnostics; use crate::tests::{check_diagnostics, check_fix, check_no_fix};
#[test]
fn quickfix_for_assoc_func() {
check_fix(
r#"
trait Marker {
fn boo();
}
struct Foo;
impl Marker for Foo {
fn$0 bar(_a: i32, _b: String) -> String {}
fn boo() {}
}
"#,
r#"
trait Marker {
fn bar(_a: i32, _b: String) -> String;
fn boo();
}
struct Foo;
impl Marker for Foo {
fn bar(_a: i32, _b: String) -> String {}
fn boo() {}
}
"#,
)
}
#[test]
fn quickfix_for_assoc_const() {
check_fix(
r#"
trait Marker {
fn foo () {}
}
struct Foo;
impl Marker for Foo {
const FLAG: bool$0 = false;
}
"#,
r#"
trait Marker {
const FLAG: bool;
fn foo () {}
}
struct Foo;
impl Marker for Foo {
const FLAG: bool = false;
}
"#,
)
}
#[test]
fn quickfix_for_assoc_type() {
check_fix(
r#"
trait Marker {
}
struct Foo;
impl Marker for Foo {
type T = i32;$0
}
"#,
r#"
trait Marker {
type T;
}
struct Foo;
impl Marker for Foo {
type T = i32;
}
"#,
)
}
#[test]
fn quickfix_dont_work() {
check_no_fix(
r#"
//- /dep.rs crate:dep
trait Marker {
}
//- /main.rs crate:main deps:dep
struct Foo;
impl dep::Marker for Foo {
type T = i32;$0
}
"#,
)
}
#[test] #[test]
fn trait_with_default_value() { fn trait_with_default_value() {
@ -64,12 +217,12 @@ trait Marker {
struct Foo; struct Foo;
impl Marker for Foo { impl Marker for Foo {
type T = i32; type T = i32;
//^^^^^^^^^^^^^ error: `type T` is not a member of trait `Marker` //^^^^^^^^^^^^^ 💡 error: `type T` is not a member of trait `Marker`
const FLAG: bool = true; const FLAG: bool = true;
fn bar() {} fn bar() {}
//^^^^^^^^^^^ error: `fn bar` is not a member of trait `Marker` //^^^^^^^^^^^ 💡 error: `fn bar` is not a member of trait `Marker`
fn boo() {} fn boo() {}
} }

View File

@ -160,7 +160,7 @@ fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -
// if receiver should be pass as first arg in the assoc func, // if receiver should be pass as first arg in the assoc func,
// we could omit generic parameters cause compiler can deduce it automatically // we could omit generic parameters cause compiler can deduce it automatically
if !need_to_take_receiver_as_first_arg && !generic_parameters.is_empty() { if !need_to_take_receiver_as_first_arg && !generic_parameters.is_empty() {
let generic_parameters = generic_parameters.join(", ").to_string(); let generic_parameters = generic_parameters.join(", ");
receiver_type_adt_name = receiver_type_adt_name =
format!("{}::<{}>", receiver_type_adt_name, generic_parameters); format!("{}::<{}>", receiver_type_adt_name, generic_parameters);
} }

View File

@ -58,9 +58,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedModule) -> Option<Vec<
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use expect_test::expect; use crate::tests::check_diagnostics;
use crate::tests::{check_diagnostics, check_expect};
#[test] #[test]
fn unresolved_module() { fn unresolved_module() {
@ -78,99 +76,11 @@ mod baz {}
#[test] #[test]
fn test_unresolved_module_diagnostic() { fn test_unresolved_module_diagnostic() {
check_expect( check_diagnostics(
r#"mod foo;"#, r#"
expect![[r#" mod foo;
[ //^^^^^^^^ 💡 error: unresolved module, can't find module file: foo.rs, or foo/mod.rs
Diagnostic { "#,
code: RustcHardError(
"E0583",
),
message: "unresolved module, can't find module file: foo.rs, or foo/mod.rs",
range: FileRange {
file_id: FileId(
0,
),
range: 0..8,
},
severity: Error,
unused: false,
experimental: false,
fixes: Some(
[
Assist {
id: AssistId(
"create_module",
QuickFix,
),
label: "Create module at `foo.rs`",
group: None,
target: 0..8,
source_change: Some(
SourceChange {
source_file_edits: {},
file_system_edits: [
CreateFile {
dst: AnchoredPathBuf {
anchor: FileId(
0,
),
path: "foo.rs",
},
initial_contents: "",
},
],
is_snippet: false,
},
),
trigger_signature_help: false,
},
Assist {
id: AssistId(
"create_module",
QuickFix,
),
label: "Create module at `foo/mod.rs`",
group: None,
target: 0..8,
source_change: Some(
SourceChange {
source_file_edits: {},
file_system_edits: [
CreateFile {
dst: AnchoredPathBuf {
anchor: FileId(
0,
),
path: "foo/mod.rs",
},
initial_contents: "",
},
],
is_snippet: false,
},
),
trigger_signature_help: false,
},
],
),
main_node: Some(
InFileWrapper {
file_id: FileId(
0,
),
value: MODULE@0..8
MOD_KW@0..3 "mod"
WHITESPACE@3..4 " "
NAME@4..7
IDENT@4..7 "foo"
SEMICOLON@7..8 ";"
,
},
),
},
]
"#]],
); );
} }
} }

View File

@ -1,7 +1,6 @@
#[cfg(not(feature = "in-rust-tree"))] #[cfg(not(feature = "in-rust-tree"))]
mod sourcegen; mod sourcegen;
use expect_test::Expect;
use ide_db::{ use ide_db::{
assists::AssistResolveStrategy, base_db::SourceDatabaseExt, LineIndexDatabase, RootDatabase, assists::AssistResolveStrategy, base_db::SourceDatabaseExt, LineIndexDatabase, RootDatabase,
}; };
@ -43,8 +42,9 @@ fn check_nth_fix(nth: usize, ra_fixture_before: &str, ra_fixture_after: &str) {
super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id) super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id)
.pop() .pop()
.expect("no diagnostics"); .expect("no diagnostics");
let fix = let fix = &diagnostic
&diagnostic.fixes.expect(&format!("{:?} diagnostic misses fixes", diagnostic.code))[nth]; .fixes
.unwrap_or_else(|| panic!("{:?} diagnostic misses fixes", diagnostic.code))[nth];
let actual = { let actual = {
let source_change = fix.source_change.as_ref().unwrap(); let source_change = fix.source_change.as_ref().unwrap();
let file_id = *source_change.source_file_edits.keys().next().unwrap(); let file_id = *source_change.source_file_edits.keys().next().unwrap();
@ -82,17 +82,6 @@ pub(crate) fn check_no_fix(ra_fixture: &str) {
assert!(diagnostic.fixes.is_none(), "got a fix when none was expected: {diagnostic:?}"); assert!(diagnostic.fixes.is_none(), "got a fix when none was expected: {diagnostic:?}");
} }
pub(crate) fn check_expect(ra_fixture: &str, expect: Expect) {
let (db, file_id) = RootDatabase::with_single_file(ra_fixture);
let diagnostics = super::diagnostics(
&db,
&DiagnosticsConfig::test_sample(),
&AssistResolveStrategy::All,
file_id,
);
expect.assert_debug_eq(&diagnostics)
}
#[track_caller] #[track_caller]
pub(crate) fn check_diagnostics(ra_fixture: &str) { pub(crate) fn check_diagnostics(ra_fixture: &str) {
let mut config = DiagnosticsConfig::test_sample(); let mut config = DiagnosticsConfig::test_sample();

View File

@ -219,6 +219,7 @@ pub(crate) fn resolve_doc_path_for_def(
Definition::BuiltinAttr(_) Definition::BuiltinAttr(_)
| Definition::ToolModule(_) | Definition::ToolModule(_)
| Definition::BuiltinType(_) | Definition::BuiltinType(_)
| Definition::TupleField(_)
| Definition::Local(_) | Definition::Local(_)
| Definition::GenericParam(_) | Definition::GenericParam(_)
| Definition::Label(_) | Definition::Label(_)
@ -639,6 +640,7 @@ fn filename_and_frag_for_def(
} }
Definition::Local(_) Definition::Local(_)
| Definition::GenericParam(_) | Definition::GenericParam(_)
| Definition::TupleField(_)
| Definition::Label(_) | Definition::Label(_)
| Definition::BuiltinAttr(_) | Definition::BuiltinAttr(_)
| Definition::ToolModule(_) | Definition::ToolModule(_)

View File

@ -462,14 +462,15 @@ mod module {}
fn doc_links_inherent_impl_items() { fn doc_links_inherent_impl_items() {
check_doc_links( check_doc_links(
r#" r#"
// /// [`Struct::CONST`] /// [`Struct::CONST`]
// /// [`Struct::function`] /// [`Struct::function`]
/// FIXME #9694
struct Struct$0; struct Struct$0;
impl Struct { impl Struct {
const CONST: () = (); const CONST: () = ();
// ^^^^^ Struct::CONST
fn function() {} fn function() {}
// ^^^^^^^^ Struct::function
} }
"#, "#,
) )
@ -482,12 +483,13 @@ fn doc_links_trait_impl_items() {
trait Trait { trait Trait {
type Type; type Type;
const CONST: usize; const CONST: usize;
// ^^^^^ Struct::CONST
fn function(); fn function();
// ^^^^^^^^ Struct::function
} }
// /// [`Struct::Type`] // FIXME #9694: [`Struct::Type`]
// /// [`Struct::CONST`] /// [`Struct::CONST`]
// /// [`Struct::function`] /// [`Struct::function`]
/// FIXME #9694
struct Struct$0; struct Struct$0;
impl Trait for Struct { impl Trait for Struct {

View File

@ -79,7 +79,7 @@ pub(crate) fn goto_definition(
return Some(vec![x]); return Some(vec![x]);
} }
if let Some(x) = try_lookup_macro_def_in_macro_use(sema, token.clone()) { if let Some(x) = try_lookup_macro_def_in_macro_use(sema, token) {
return Some(vec![x]); return Some(vec![x]);
} }
} }

View File

@ -1,15 +1,12 @@
//! Logic for rendering the different hover messages //! Logic for rendering the different hover messages
use std::fmt::Display;
use either::Either; use either::Either;
use hir::{ use hir::{
Adt, AsAssocItem, AttributeTemplate, CaptureKind, HasSource, HirDisplay, Layout, LayoutError, Adt, AsAssocItem, CaptureKind, HasSource, HirDisplay, Layout, LayoutError, Semantics, TypeInfo,
Semantics, TypeInfo,
}; };
use ide_db::{ use ide_db::{
base_db::SourceDatabase, base_db::SourceDatabase,
defs::Definition, defs::Definition,
documentation::{Documentation, HasDocs}, documentation::HasDocs,
famous_defs::FamousDefs, famous_defs::FamousDefs,
generated::lints::{CLIPPY_LINTS, DEFAULT_LINTS, FEATURES}, generated::lints::{CLIPPY_LINTS, DEFAULT_LINTS, FEATURES},
syntax_helpers::insert_whitespace_into_node, syntax_helpers::insert_whitespace_into_node,
@ -20,9 +17,7 @@ use stdx::format_to;
use syntax::{ use syntax::{
algo, algo,
ast::{self, RecordPat}, ast::{self, RecordPat},
match_ast, AstNode, Direction, match_ast, AstNode, Direction, SyntaxToken, T,
SyntaxKind::{LET_EXPR, LET_STMT},
SyntaxToken, T,
}; };
use crate::{ use crate::{
@ -393,48 +388,23 @@ pub(super) fn definition(
config: &HoverConfig, config: &HoverConfig,
) -> Option<Markup> { ) -> Option<Markup> {
let mod_path = definition_mod_path(db, &def); let mod_path = definition_mod_path(db, &def);
let (label, docs) = match def { let label = def.label(db)?;
Definition::Macro(it) => label_and_docs(db, it), let docs = def.docs(db, famous_defs);
Definition::Field(it) => label_and_layout_info_and_docs(
db, let value = match def {
it, Definition::Variant(it) => {
config, if !it.parent_enum(db).is_data_carrying(db) {
|&it| it.layout(db), match it.eval(db) {
|_| { Ok(it) => {
let var_def = it.parent_def(db); Some(if it >= 10 { format!("{it} ({it:#X})") } else { format!("{it}") })
match var_def {
hir::VariantDef::Struct(s) => {
Adt::from(s).layout(db).ok().and_then(|layout| layout.field_offset(it))
} }
_ => None, Err(_) => it.value(db).map(|it| format!("{it:?}")),
} }
}, } else {
), None
Definition::Module(it) => label_and_docs(db, it), }
Definition::Function(it) => label_and_docs(db, it),
Definition::Adt(it) => {
label_and_layout_info_and_docs(db, it, config, |&it| it.layout(db), |_| None)
} }
Definition::Variant(it) => label_value_and_layout_info_and_docs( Definition::Const(it) => {
db,
it,
config,
|&it| {
if !it.parent_enum(db).is_data_carrying(db) {
match it.eval(db) {
Ok(it) => {
Some(if it >= 10 { format!("{it} ({it:#X})") } else { format!("{it}") })
}
Err(_) => it.value(db).map(|it| format!("{it:?}")),
}
} else {
None
}
},
|it| it.layout(db),
|layout| layout.enum_tag_size(),
),
Definition::Const(it) => label_value_and_docs(db, it, |it| {
let body = it.render_eval(db); let body = it.render_eval(db);
match body { match body {
Ok(it) => Some(it), Ok(it) => Some(it),
@ -447,53 +417,59 @@ pub(super) fn definition(
Some(body.to_string()) Some(body.to_string())
} }
} }
}), }
Definition::Static(it) => label_value_and_docs(db, it, |it| { Definition::Static(it) => {
let source = it.source(db)?; let source = it.source(db)?;
let mut body = source.value.body()?.syntax().clone(); let mut body = source.value.body()?.syntax().clone();
if source.file_id.is_macro() { if source.file_id.is_macro() {
body = insert_whitespace_into_node::insert_ws_into(body); body = insert_whitespace_into_node::insert_ws_into(body);
} }
Some(body.to_string()) Some(body.to_string())
}),
Definition::Trait(it) => label_and_docs(db, it),
Definition::TraitAlias(it) => label_and_docs(db, it),
Definition::TypeAlias(it) => {
label_and_layout_info_and_docs(db, it, config, |&it| it.ty(db).layout(db), |_| None)
}
Definition::BuiltinType(it) => {
return famous_defs
.and_then(|fd| builtin(fd, it))
.or_else(|| Some(Markup::fenced_block(&it.name().display(db))))
}
Definition::Local(it) => return local(db, it, config),
Definition::SelfType(impl_def) => {
impl_def.self_ty(db).as_adt().map(|adt| label_and_docs(db, adt))?
}
Definition::GenericParam(it) => (it.display(db).to_string(), None),
Definition::Label(it) => return Some(Markup::fenced_block(&it.name(db).display(db))),
Definition::ExternCrateDecl(it) => label_and_docs(db, it),
// FIXME: We should be able to show more info about these
Definition::BuiltinAttr(it) => return render_builtin_attr(db, it),
Definition::ToolModule(it) => return Some(Markup::fenced_block(&it.name(db))),
Definition::DeriveHelper(it) => {
(format!("derive_helper {}", it.name(db).display(db)), None)
} }
_ => None,
}; };
let docs = docs let layout_info = match def {
.filter(|_| config.documentation) Definition::Field(it) => render_memory_layout(
.or_else(|| { config.memory_layout,
// docs are missing, for assoc items of trait impls try to fall back to the docs of the || it.layout(db),
// original item of the trait |_| {
let assoc = def.as_assoc_item(db)?; let var_def = it.parent_def(db);
let trait_ = assoc.containing_trait_impl(db)?; match var_def {
let name = Some(assoc.name(db)?); hir::VariantDef::Struct(s) => {
let item = trait_.items(db).into_iter().find(|it| it.name(db) == name)?; Adt::from(s).layout(db).ok().and_then(|layout| layout.field_offset(it))
item.docs(db) }
}) _ => None,
.map(Into::into); }
markup(docs, label, mod_path) },
|_| None,
),
Definition::Adt(it) => {
render_memory_layout(config.memory_layout, || it.layout(db), |_| None, |_| None)
}
Definition::Variant(it) => render_memory_layout(
config.memory_layout,
|| it.layout(db),
|_| None,
|layout| layout.enum_tag_size(),
),
Definition::TypeAlias(it) => {
render_memory_layout(config.memory_layout, || it.ty(db).layout(db), |_| None, |_| None)
}
Definition::Local(it) => {
render_memory_layout(config.memory_layout, || it.ty(db).layout(db), |_| None, |_| None)
}
_ => None,
};
let label = match (value, layout_info) {
(Some(value), Some(layout_info)) => format!("{label} = {value}{layout_info}"),
(Some(value), None) => format!("{label} = {value}"),
(None, Some(layout_info)) => format!("{label}{layout_info}"),
(None, None) => label,
};
markup(docs.map(Into::into), label, mod_path)
} }
fn type_info( fn type_info(
@ -595,114 +571,16 @@ fn closure_ty(
Some(res) Some(res)
} }
fn render_builtin_attr(db: &RootDatabase, attr: hir::BuiltinAttr) -> Option<Markup> {
let name = attr.name(db);
let desc = format!("#[{name}]");
let AttributeTemplate { word, list, name_value_str } = match attr.template(db) {
Some(template) => template,
None => return Some(Markup::fenced_block(&attr.name(db))),
};
let mut docs = "Valid forms are:".to_owned();
if word {
format_to!(docs, "\n - #\\[{}]", name);
}
if let Some(list) = list {
format_to!(docs, "\n - #\\[{}({})]", name, list);
}
if let Some(name_value_str) = name_value_str {
format_to!(docs, "\n - #\\[{} = {}]", name, name_value_str);
}
markup(Some(docs.replace('*', "\\*")), desc, None)
}
fn label_and_docs<D>(db: &RootDatabase, def: D) -> (String, Option<Documentation>)
where
D: HasDocs + HirDisplay,
{
let label = def.display(db).to_string();
let docs = def.docs(db);
(label, docs)
}
fn label_and_layout_info_and_docs<D, E, E2>(
db: &RootDatabase,
def: D,
config: &HoverConfig,
layout_extractor: E,
layout_offset_extractor: E2,
) -> (String, Option<Documentation>)
where
D: HasDocs + HirDisplay,
E: Fn(&D) -> Result<Layout, LayoutError>,
E2: Fn(&Layout) -> Option<u64>,
{
let mut label = def.display(db).to_string();
if let Some(layout) = render_memory_layout(
config.memory_layout,
|| layout_extractor(&def),
layout_offset_extractor,
|_| None,
) {
format_to!(label, "{layout}");
}
let docs = def.docs(db);
(label, docs)
}
fn label_value_and_layout_info_and_docs<D, E, E2, E3, V>(
db: &RootDatabase,
def: D,
config: &HoverConfig,
value_extractor: E,
layout_extractor: E2,
layout_tag_extractor: E3,
) -> (String, Option<Documentation>)
where
D: HasDocs + HirDisplay,
E: Fn(&D) -> Option<V>,
E2: Fn(&D) -> Result<Layout, LayoutError>,
E3: Fn(&Layout) -> Option<usize>,
V: Display,
{
let value = value_extractor(&def);
let mut label = match value {
Some(value) => format!("{} = {value}", def.display(db)),
None => def.display(db).to_string(),
};
if let Some(layout) = render_memory_layout(
config.memory_layout,
|| layout_extractor(&def),
|_| None,
layout_tag_extractor,
) {
format_to!(label, "{layout}");
}
let docs = def.docs(db);
(label, docs)
}
fn label_value_and_docs<D, E, V>(
db: &RootDatabase,
def: D,
value_extractor: E,
) -> (String, Option<Documentation>)
where
D: HasDocs + HirDisplay,
E: Fn(&D) -> Option<V>,
V: Display,
{
let label = if let Some(value) = value_extractor(&def) {
format!("{} = {value}", def.display(db))
} else {
def.display(db).to_string()
};
let docs = def.docs(db);
(label, docs)
}
fn definition_mod_path(db: &RootDatabase, def: &Definition) -> Option<String> { fn definition_mod_path(db: &RootDatabase, def: &Definition) -> Option<String> {
if let Definition::GenericParam(_) = def { if matches!(
def,
Definition::GenericParam(_)
| Definition::BuiltinType(_)
| Definition::Local(_)
| Definition::Label(_)
| Definition::BuiltinAttr(_)
| Definition::ToolModule(_)
) {
return None; return None;
} }
def.module(db).map(|module| path(db, module, definition_owner_name(db, def))) def.module(db).map(|module| path(db, module, definition_owner_name(db, def)))
@ -724,14 +602,6 @@ fn markup(docs: Option<String>, desc: String, mod_path: Option<String>) -> Optio
Some(buf.into()) Some(buf.into())
} }
fn builtin(famous_defs: &FamousDefs<'_, '_>, builtin: hir::BuiltinType) -> Option<Markup> {
// std exposes prim_{} modules with docstrings on the root to document the builtins
let primitive_mod = format!("prim_{}", builtin.name().display(famous_defs.0.db));
let doc_owner = find_std_module(famous_defs, &primitive_mod)?;
let docs = doc_owner.docs(famous_defs.0.db)?;
markup(Some(docs.into()), builtin.name().display(famous_defs.0.db).to_string(), None)
}
fn find_std_module(famous_defs: &FamousDefs<'_, '_>, name: &str) -> Option<hir::Module> { fn find_std_module(famous_defs: &FamousDefs<'_, '_>, name: &str) -> Option<hir::Module> {
let db = famous_defs.0.db; let db = famous_defs.0.db;
let std_crate = famous_defs.std()?; let std_crate = famous_defs.std()?;
@ -741,34 +611,6 @@ fn find_std_module(famous_defs: &FamousDefs<'_, '_>, name: &str) -> Option<hir::
}) })
} }
fn local(db: &RootDatabase, it: hir::Local, config: &HoverConfig) -> Option<Markup> {
let ty = it.ty(db);
let ty = ty.display_truncated(db, None);
let is_mut = if it.is_mut(db) { "mut " } else { "" };
let mut desc = match it.primary_source(db).into_ident_pat() {
Some(ident) => {
let name = it.name(db);
let let_kw = if ident
.syntax()
.parent()
.map_or(false, |p| p.kind() == LET_STMT || p.kind() == LET_EXPR)
{
"let "
} else {
""
};
format!("{let_kw}{is_mut}{}: {ty}", name.display(db))
}
None => format!("{is_mut}self: {ty}"),
};
if let Some(layout) =
render_memory_layout(config.memory_layout, || it.ty(db).layout(db), |_| None, |_| None)
{
format_to!(desc, "{layout}");
}
markup(None, desc, None)
}
fn render_memory_layout( fn render_memory_layout(
config: Option<MemoryLayoutHoverConfig>, config: Option<MemoryLayoutHoverConfig>,
layout: impl FnOnce() -> Result<Layout, LayoutError>, layout: impl FnOnce() -> Result<Layout, LayoutError>,

View File

@ -32,6 +32,7 @@ mod fn_lifetime_fn;
mod implicit_static; mod implicit_static;
mod param_name; mod param_name;
mod implicit_drop; mod implicit_drop;
mod range_exclusive;
#[derive(Clone, Debug, PartialEq, Eq)] #[derive(Clone, Debug, PartialEq, Eq)]
pub struct InlayHintsConfig { pub struct InlayHintsConfig {
@ -51,6 +52,7 @@ pub struct InlayHintsConfig {
pub param_names_for_lifetime_elision_hints: bool, pub param_names_for_lifetime_elision_hints: bool,
pub hide_named_constructor_hints: bool, pub hide_named_constructor_hints: bool,
pub hide_closure_initialization_hints: bool, pub hide_closure_initialization_hints: bool,
pub range_exclusive_hints: bool,
pub closure_style: ClosureStyle, pub closure_style: ClosureStyle,
pub max_length: Option<usize>, pub max_length: Option<usize>,
pub closing_brace_hints_min_lines: Option<usize>, pub closing_brace_hints_min_lines: Option<usize>,
@ -127,6 +129,7 @@ pub enum InlayKind {
Parameter, Parameter,
Type, Type,
Drop, Drop,
RangeExclusive,
} }
#[derive(Debug)] #[derive(Debug)]
@ -517,13 +520,20 @@ fn hints(
closure_captures::hints(hints, famous_defs, config, file_id, it.clone()); closure_captures::hints(hints, famous_defs, config, file_id, it.clone());
closure_ret::hints(hints, famous_defs, config, file_id, it) closure_ret::hints(hints, famous_defs, config, file_id, it)
}, },
ast::Expr::RangeExpr(it) => range_exclusive::hints(hints, config, it),
_ => None, _ => None,
} }
}, },
ast::Pat(it) => { ast::Pat(it) => {
binding_mode::hints(hints, sema, config, &it); binding_mode::hints(hints, sema, config, &it);
if let ast::Pat::IdentPat(it) = it { match it {
bind_pat::hints(hints, famous_defs, config, file_id, &it); ast::Pat::IdentPat(it) => {
bind_pat::hints(hints, famous_defs, config, file_id, &it);
}
ast::Pat::RangePat(it) => {
range_exclusive::hints(hints, config, it);
}
_ => {}
} }
Some(()) Some(())
}, },
@ -593,7 +603,6 @@ mod tests {
use hir::ClosureStyle; use hir::ClosureStyle;
use itertools::Itertools; use itertools::Itertools;
use test_utils::extract_annotations; use test_utils::extract_annotations;
use text_edit::{TextRange, TextSize};
use crate::inlay_hints::{AdjustmentHints, AdjustmentHintsMode}; use crate::inlay_hints::{AdjustmentHints, AdjustmentHintsMode};
use crate::DiscriminantHints; use crate::DiscriminantHints;
@ -622,6 +631,7 @@ mod tests {
closing_brace_hints_min_lines: None, closing_brace_hints_min_lines: None,
fields_to_resolve: InlayFieldsToResolve::empty(), fields_to_resolve: InlayFieldsToResolve::empty(),
implicit_drop_hints: false, implicit_drop_hints: false,
range_exclusive_hints: false,
}; };
pub(super) const TEST_CONFIG: InlayHintsConfig = InlayHintsConfig { pub(super) const TEST_CONFIG: InlayHintsConfig = InlayHintsConfig {
type_hints: true, type_hints: true,
@ -654,29 +664,6 @@ mod tests {
assert_eq!(expected, actual, "\nExpected:\n{expected:#?}\n\nActual:\n{actual:#?}"); assert_eq!(expected, actual, "\nExpected:\n{expected:#?}\n\nActual:\n{actual:#?}");
} }
#[track_caller]
pub(super) fn check_expect(config: InlayHintsConfig, ra_fixture: &str, expect: Expect) {
let (analysis, file_id) = fixture::file(ra_fixture);
let inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap();
expect.assert_debug_eq(&inlay_hints)
}
#[track_caller]
pub(super) fn check_expect_clear_loc(
config: InlayHintsConfig,
ra_fixture: &str,
expect: Expect,
) {
let (analysis, file_id) = fixture::file(ra_fixture);
let mut inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap();
inlay_hints.iter_mut().flat_map(|hint| &mut hint.label.parts).for_each(|hint| {
if let Some(loc) = &mut hint.linked_location {
loc.range = TextRange::empty(TextSize::from(0));
}
});
expect.assert_debug_eq(&inlay_hints)
}
/// Computes inlay hints for the fixture, applies all the provided text edits and then runs /// Computes inlay hints for the fixture, applies all the provided text edits and then runs
/// expect test. /// expect test.
#[track_caller] #[track_caller]

View File

@ -75,12 +75,12 @@ pub(super) fn hints(
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use expect_test::expect; use expect_test::{expect, Expect};
use text_edit::{TextRange, TextSize};
use crate::{ use crate::{
inlay_hints::tests::{ fixture,
check_expect, check_expect_clear_loc, check_with_config, DISABLED_CONFIG, TEST_CONFIG, inlay_hints::tests::{check_with_config, DISABLED_CONFIG, TEST_CONFIG},
},
InlayHintsConfig, InlayHintsConfig,
}; };
@ -89,6 +89,33 @@ mod tests {
check_with_config(InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG }, ra_fixture); check_with_config(InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG }, ra_fixture);
} }
#[track_caller]
pub(super) fn check_expect(config: InlayHintsConfig, ra_fixture: &str, expect: Expect) {
let (analysis, file_id) = fixture::file(ra_fixture);
let inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap();
let filtered =
inlay_hints.into_iter().map(|hint| (hint.range, hint.label)).collect::<Vec<_>>();
expect.assert_debug_eq(&filtered)
}
#[track_caller]
pub(super) fn check_expect_clear_loc(
config: InlayHintsConfig,
ra_fixture: &str,
expect: Expect,
) {
let (analysis, file_id) = fixture::file(ra_fixture);
let mut inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap();
inlay_hints.iter_mut().flat_map(|hint| &mut hint.label.parts).for_each(|hint| {
if let Some(loc) = &mut hint.linked_location {
loc.range = TextRange::empty(TextSize::from(0));
}
});
let filtered =
inlay_hints.into_iter().map(|hint| (hint.range, hint.label)).collect::<Vec<_>>();
expect.assert_debug_eq(&filtered)
}
#[test] #[test]
fn chaining_hints_ignore_comments() { fn chaining_hints_ignore_comments() {
check_expect( check_expect(
@ -109,13 +136,9 @@ fn main() {
"#, "#,
expect![[r#" expect![[r#"
[ [
InlayHint { (
range: 147..172, 147..172,
position: After, [
pad_left: true,
pad_right: false,
kind: Chaining,
label: [
"", "",
InlayHintLabelPart { InlayHintLabelPart {
text: "B", text: "B",
@ -131,16 +154,10 @@ fn main() {
}, },
"", "",
], ],
text_edit: None, ),
needs_resolve: true, (
}, 147..154,
InlayHint { [
range: 147..154,
position: After,
pad_left: true,
pad_right: false,
kind: Chaining,
label: [
"", "",
InlayHintLabelPart { InlayHintLabelPart {
text: "A", text: "A",
@ -156,9 +173,7 @@ fn main() {
}, },
"", "",
], ],
text_edit: None, ),
needs_resolve: true,
},
] ]
"#]], "#]],
); );
@ -204,13 +219,9 @@ fn main() {
}"#, }"#,
expect![[r#" expect![[r#"
[ [
InlayHint { (
range: 143..190, 143..190,
position: After, [
pad_left: true,
pad_right: false,
kind: Chaining,
label: [
"", "",
InlayHintLabelPart { InlayHintLabelPart {
text: "C", text: "C",
@ -226,16 +237,10 @@ fn main() {
}, },
"", "",
], ],
text_edit: None, ),
needs_resolve: true, (
}, 143..179,
InlayHint { [
range: 143..179,
position: After,
pad_left: true,
pad_right: false,
kind: Chaining,
label: [
"", "",
InlayHintLabelPart { InlayHintLabelPart {
text: "B", text: "B",
@ -251,9 +256,7 @@ fn main() {
}, },
"", "",
], ],
text_edit: None, ),
needs_resolve: true,
},
] ]
"#]], "#]],
); );
@ -283,13 +286,9 @@ fn main() {
}"#, }"#,
expect![[r#" expect![[r#"
[ [
InlayHint { (
range: 143..190, 143..190,
position: After, [
pad_left: true,
pad_right: false,
kind: Chaining,
label: [
"", "",
InlayHintLabelPart { InlayHintLabelPart {
text: "C", text: "C",
@ -305,16 +304,10 @@ fn main() {
}, },
"", "",
], ],
text_edit: None, ),
needs_resolve: true, (
}, 143..179,
InlayHint { [
range: 143..179,
position: After,
pad_left: true,
pad_right: false,
kind: Chaining,
label: [
"", "",
InlayHintLabelPart { InlayHintLabelPart {
text: "B", text: "B",
@ -330,9 +323,7 @@ fn main() {
}, },
"", "",
], ],
text_edit: None, ),
needs_resolve: true,
},
] ]
"#]], "#]],
); );
@ -363,13 +354,9 @@ fn main() {
"#, "#,
expect![[r#" expect![[r#"
[ [
InlayHint { (
range: 246..283, 246..283,
position: After, [
pad_left: true,
pad_right: false,
kind: Chaining,
label: [
"", "",
InlayHintLabelPart { InlayHintLabelPart {
text: "B", text: "B",
@ -398,16 +385,10 @@ fn main() {
}, },
"<i32, bool>>", "<i32, bool>>",
], ],
text_edit: None, ),
needs_resolve: true, (
}, 246..265,
InlayHint { [
range: 246..265,
position: After,
pad_left: true,
pad_right: false,
kind: Chaining,
label: [
"", "",
InlayHintLabelPart { InlayHintLabelPart {
text: "A", text: "A",
@ -436,9 +417,7 @@ fn main() {
}, },
"<i32, bool>>", "<i32, bool>>",
], ],
text_edit: None, ),
needs_resolve: true,
},
] ]
"#]], "#]],
); );
@ -471,13 +450,9 @@ fn main() {
"#, "#,
expect![[r#" expect![[r#"
[ [
InlayHint { (
range: 174..241, 174..241,
position: After, [
pad_left: true,
pad_right: false,
kind: Chaining,
label: [
"impl ", "impl ",
InlayHintLabelPart { InlayHintLabelPart {
text: "Iterator", text: "Iterator",
@ -506,16 +481,10 @@ fn main() {
}, },
" = ()>", " = ()>",
], ],
text_edit: None, ),
needs_resolve: true, (
}, 174..224,
InlayHint { [
range: 174..224,
position: After,
pad_left: true,
pad_right: false,
kind: Chaining,
label: [
"impl ", "impl ",
InlayHintLabelPart { InlayHintLabelPart {
text: "Iterator", text: "Iterator",
@ -544,16 +513,10 @@ fn main() {
}, },
" = ()>", " = ()>",
], ],
text_edit: None, ),
needs_resolve: true, (
}, 174..206,
InlayHint { [
range: 174..206,
position: After,
pad_left: true,
pad_right: false,
kind: Chaining,
label: [
"impl ", "impl ",
InlayHintLabelPart { InlayHintLabelPart {
text: "Iterator", text: "Iterator",
@ -582,16 +545,10 @@ fn main() {
}, },
" = ()>", " = ()>",
], ],
text_edit: None, ),
needs_resolve: true, (
}, 174..189,
InlayHint { [
range: 174..189,
position: After,
pad_left: true,
pad_right: false,
kind: Chaining,
label: [
"&mut ", "&mut ",
InlayHintLabelPart { InlayHintLabelPart {
text: "MyIter", text: "MyIter",
@ -607,9 +564,7 @@ fn main() {
}, },
"", "",
], ],
text_edit: None, ),
needs_resolve: true,
},
] ]
"#]], "#]],
); );
@ -639,13 +594,9 @@ fn main() {
"#, "#,
expect![[r#" expect![[r#"
[ [
InlayHint { (
range: 124..130, 124..130,
position: After, [
pad_left: true,
pad_right: false,
kind: Type,
label: [
"", "",
InlayHintLabelPart { InlayHintLabelPart {
text: "Struct", text: "Struct",
@ -661,25 +612,10 @@ fn main() {
}, },
"", "",
], ],
text_edit: Some( ),
TextEdit { (
indels: [ 145..185,
Indel { [
insert: ": Struct",
delete: 130..130,
},
],
},
),
needs_resolve: true,
},
InlayHint {
range: 145..185,
position: After,
pad_left: true,
pad_right: false,
kind: Chaining,
label: [
"", "",
InlayHintLabelPart { InlayHintLabelPart {
text: "Struct", text: "Struct",
@ -695,16 +631,10 @@ fn main() {
}, },
"", "",
], ],
text_edit: None, ),
needs_resolve: true, (
}, 145..168,
InlayHint { [
range: 145..168,
position: After,
pad_left: true,
pad_right: false,
kind: Chaining,
label: [
"", "",
InlayHintLabelPart { InlayHintLabelPart {
text: "Struct", text: "Struct",
@ -720,16 +650,10 @@ fn main() {
}, },
"", "",
], ],
text_edit: None, ),
needs_resolve: true, (
}, 222..228,
InlayHint { [
range: 222..228,
position: Before,
pad_left: false,
pad_right: true,
kind: Parameter,
label: [
InlayHintLabelPart { InlayHintLabelPart {
text: "self", text: "self",
linked_location: Some( linked_location: Some(
@ -743,9 +667,7 @@ fn main() {
tooltip: "", tooltip: "",
}, },
], ],
text_edit: None, ),
needs_resolve: true,
},
] ]
"#]], "#]],
); );

View File

@ -0,0 +1,121 @@
//! Implementation of "range exclusive" inlay hints:
//! ```no_run
//! for i in 0../* < */10 {}
//! if let ../* < */100 = 50 {}
//! ```
use syntax::{ast, SyntaxToken, T};
use crate::{InlayHint, InlayHintsConfig};
pub(super) fn hints(
acc: &mut Vec<InlayHint>,
config: &InlayHintsConfig,
range: impl ast::RangeItem,
) -> Option<()> {
(config.range_exclusive_hints && range.end().is_some())
.then(|| {
range.op_token().filter(|token| token.kind() == T![..]).map(|token| {
acc.push(inlay_hint(token));
})
})
.flatten()
}
fn inlay_hint(token: SyntaxToken) -> InlayHint {
InlayHint {
range: token.text_range(),
position: crate::InlayHintPosition::After,
pad_left: false,
pad_right: false,
kind: crate::InlayKind::RangeExclusive,
label: crate::InlayHintLabel::from("<"),
text_edit: None,
needs_resolve: false,
}
}
#[cfg(test)]
mod tests {
use crate::{
inlay_hints::tests::{check_with_config, DISABLED_CONFIG},
InlayHintsConfig,
};
#[test]
fn range_exclusive_expression_bounded_above_hints() {
check_with_config(
InlayHintsConfig { range_exclusive_hints: true, ..DISABLED_CONFIG },
r#"
fn main() {
let a = 0..10;
//^^<
let b = ..100;
//^^<
let c = (2 - 1)..(7 * 8)
//^^<
}"#,
);
}
#[test]
fn range_exclusive_expression_unbounded_above_no_hints() {
check_with_config(
InlayHintsConfig { range_exclusive_hints: true, ..DISABLED_CONFIG },
r#"
fn main() {
let a = 0..;
let b = ..;
}"#,
);
}
#[test]
fn range_inclusive_expression_no_hints() {
check_with_config(
InlayHintsConfig { range_exclusive_hints: true, ..DISABLED_CONFIG },
r#"
fn main() {
let a = 0..=10;
let b = ..=100;
}"#,
);
}
#[test]
fn range_exclusive_pattern_bounded_above_hints() {
check_with_config(
InlayHintsConfig { range_exclusive_hints: true, ..DISABLED_CONFIG },
r#"
fn main() {
if let 0..10 = 0 {}
//^^<
if let ..100 = 0 {}
//^^<
}"#,
);
}
#[test]
fn range_exclusive_pattern_unbounded_above_no_hints() {
check_with_config(
InlayHintsConfig { range_exclusive_hints: true, ..DISABLED_CONFIG },
r#"
fn main() {
if let 0.. = 0 {}
if let .. = 0 {}
}"#,
);
}
#[test]
fn range_inclusive_pattern_no_hints() {
check_with_config(
InlayHintsConfig { range_exclusive_hints: true, ..DISABLED_CONFIG },
r#"
fn main() {
if let 0..=10 = 0 {}
if let ..=100 = 0 {}
}"#,
);
}
}

View File

@ -99,7 +99,10 @@ pub use crate::{
}, },
join_lines::JoinLinesConfig, join_lines::JoinLinesConfig,
markup::Markup, markup::Markup,
moniker::{MonikerDescriptorKind, MonikerKind, MonikerResult, PackageInformation}, moniker::{
MonikerDescriptorKind, MonikerKind, MonikerResult, PackageInformation,
SymbolInformationKind,
},
move_item::Direction, move_item::Direction,
navigation_target::{NavigationTarget, UpmappingResult}, navigation_target::{NavigationTarget, UpmappingResult},
prime_caches::ParallelPrimeCachesProgress, prime_caches::ParallelPrimeCachesProgress,
@ -411,11 +414,12 @@ impl Analysis {
} }
/// Fuzzy searches for a symbol. /// Fuzzy searches for a symbol.
pub fn symbol_search(&self, query: Query) -> Cancellable<Vec<NavigationTarget>> { pub fn symbol_search(&self, query: Query, limit: usize) -> Cancellable<Vec<NavigationTarget>> {
self.with_db(|db| { self.with_db(|db| {
symbol_index::world_symbols(db, query) symbol_index::world_symbols(db, query)
.into_iter() // xx: should we make this a par iter? .into_iter() // xx: should we make this a par iter?
.filter_map(|s| s.try_to_nav(db)) .filter_map(|s| s.try_to_nav(db))
.take(limit)
.map(UpmappingResult::call_site) .map(UpmappingResult::call_site)
.collect::<Vec<_>>() .collect::<Vec<_>>()
}) })

View File

@ -1,7 +1,7 @@
//! This module generates [moniker](https://microsoft.github.io/language-server-protocol/specifications/lsif/0.6.0/specification/#exportsImports) //! This module generates [moniker](https://microsoft.github.io/language-server-protocol/specifications/lsif/0.6.0/specification/#exportsImports)
//! for LSIF and LSP. //! for LSIF and LSP.
use hir::{AsAssocItem, AssocItemContainer, Crate, DescendPreference, Semantics}; use hir::{Adt, AsAssocItem, AssocItemContainer, Crate, DescendPreference, MacroKind, Semantics};
use ide_db::{ use ide_db::{
base_db::{CrateOrigin, FilePosition, LangCrateOrigin}, base_db::{CrateOrigin, FilePosition, LangCrateOrigin},
defs::{Definition, IdentClass}, defs::{Definition, IdentClass},
@ -25,6 +25,62 @@ pub enum MonikerDescriptorKind {
Meta, Meta,
} }
// Subset of scip_types::SymbolInformation::Kind
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum SymbolInformationKind {
AssociatedType,
Attribute,
Constant,
Enum,
EnumMember,
Field,
Function,
Macro,
Method,
Module,
Parameter,
SelfParameter,
StaticMethod,
StaticVariable,
Struct,
Trait,
TraitMethod,
Type,
TypeAlias,
TypeParameter,
Union,
Variable,
}
impl From<SymbolInformationKind> for MonikerDescriptorKind {
fn from(value: SymbolInformationKind) -> Self {
match value {
SymbolInformationKind::AssociatedType => Self::TypeParameter,
SymbolInformationKind::Attribute => Self::Macro,
SymbolInformationKind::Constant => Self::Term,
SymbolInformationKind::Enum => Self::Type,
SymbolInformationKind::EnumMember => Self::Type,
SymbolInformationKind::Field => Self::Term,
SymbolInformationKind::Function => Self::Method,
SymbolInformationKind::Macro => Self::Macro,
SymbolInformationKind::Method => Self::Method,
SymbolInformationKind::Module => Self::Namespace,
SymbolInformationKind::Parameter => Self::Parameter,
SymbolInformationKind::SelfParameter => Self::Parameter,
SymbolInformationKind::StaticMethod => Self::Method,
SymbolInformationKind::StaticVariable => Self::Meta,
SymbolInformationKind::Struct => Self::Type,
SymbolInformationKind::Trait => Self::Type,
SymbolInformationKind::TraitMethod => Self::Method,
SymbolInformationKind::Type => Self::Type,
SymbolInformationKind::TypeAlias => Self::Type,
SymbolInformationKind::TypeParameter => Self::TypeParameter,
SymbolInformationKind::Union => Self::Type,
SymbolInformationKind::Variable => Self::Term,
}
}
}
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct MonikerDescriptor { pub struct MonikerDescriptor {
pub name: String, pub name: String,
@ -112,6 +168,69 @@ pub(crate) fn moniker(
Some(RangeInfo::new(original_token.text_range(), navs)) Some(RangeInfo::new(original_token.text_range(), navs))
} }
pub(crate) fn def_to_kind(db: &RootDatabase, def: Definition) -> SymbolInformationKind {
use SymbolInformationKind::*;
match def {
Definition::Macro(it) => match it.kind(db) {
MacroKind::Declarative => Macro,
MacroKind::Derive => Attribute,
MacroKind::BuiltIn => Macro,
MacroKind::Attr => Attribute,
MacroKind::ProcMacro => Macro,
},
Definition::Field(..) | Definition::TupleField(..) => Field,
Definition::Module(..) => Module,
Definition::Function(it) => {
if it.as_assoc_item(db).is_some() {
if it.has_self_param(db) {
if it.has_body(db) {
Method
} else {
TraitMethod
}
} else {
StaticMethod
}
} else {
Function
}
}
Definition::Adt(Adt::Struct(..)) => Struct,
Definition::Adt(Adt::Union(..)) => Union,
Definition::Adt(Adt::Enum(..)) => Enum,
Definition::Variant(..) => EnumMember,
Definition::Const(..) => Constant,
Definition::Static(..) => StaticVariable,
Definition::Trait(..) => Trait,
Definition::TraitAlias(..) => Trait,
Definition::TypeAlias(it) => {
if it.as_assoc_item(db).is_some() {
AssociatedType
} else {
TypeAlias
}
}
Definition::BuiltinType(..) => Type,
Definition::SelfType(..) => TypeAlias,
Definition::GenericParam(..) => TypeParameter,
Definition::Local(it) => {
if it.is_self(db) {
SelfParameter
} else if it.is_param(db) {
Parameter
} else {
Variable
}
}
Definition::Label(..) => Variable, // For lack of a better variant
Definition::DeriveHelper(..) => Attribute,
Definition::BuiltinAttr(..) => Attribute,
Definition::ToolModule(..) => Module,
Definition::ExternCrateDecl(..) => Module,
}
}
pub(crate) fn def_to_moniker( pub(crate) fn def_to_moniker(
db: &RootDatabase, db: &RootDatabase,
def: Definition, def: Definition,
@ -134,7 +253,7 @@ pub(crate) fn def_to_moniker(
description.extend(module.path_to_root(db).into_iter().filter_map(|x| { description.extend(module.path_to_root(db).into_iter().filter_map(|x| {
Some(MonikerDescriptor { Some(MonikerDescriptor {
name: x.name(db)?.display(db).to_string(), name: x.name(db)?.display(db).to_string(),
desc: MonikerDescriptorKind::Namespace, desc: def_to_kind(db, x.into()).into(),
}) })
})); }));
@ -147,7 +266,7 @@ pub(crate) fn def_to_moniker(
// we have to include the trait name as part of the moniker for uniqueness. // we have to include the trait name as part of the moniker for uniqueness.
description.push(MonikerDescriptor { description.push(MonikerDescriptor {
name: trait_.name(db).display(db).to_string(), name: trait_.name(db).display(db).to_string(),
desc: MonikerDescriptorKind::Type, desc: def_to_kind(db, trait_.into()).into(),
}); });
} }
AssocItemContainer::Impl(impl_) => { AssocItemContainer::Impl(impl_) => {
@ -156,14 +275,14 @@ pub(crate) fn def_to_moniker(
if let Some(adt) = impl_.self_ty(db).as_adt() { if let Some(adt) = impl_.self_ty(db).as_adt() {
description.push(MonikerDescriptor { description.push(MonikerDescriptor {
name: adt.name(db).display(db).to_string(), name: adt.name(db).display(db).to_string(),
desc: MonikerDescriptorKind::Type, desc: def_to_kind(db, adt.into()).into(),
}); });
} }
if let Some(trait_) = impl_.trait_(db) { if let Some(trait_) = impl_.trait_(db) {
description.push(MonikerDescriptor { description.push(MonikerDescriptor {
name: trait_.name(db).display(db).to_string(), name: trait_.name(db).display(db).to_string(),
desc: MonikerDescriptorKind::Type, desc: def_to_kind(db, trait_.into()).into(),
}); });
} }
} }
@ -173,21 +292,26 @@ pub(crate) fn def_to_moniker(
if let Definition::Field(it) = def { if let Definition::Field(it) = def {
description.push(MonikerDescriptor { description.push(MonikerDescriptor {
name: it.parent_def(db).name(db).display(db).to_string(), name: it.parent_def(db).name(db).display(db).to_string(),
desc: MonikerDescriptorKind::Type, desc: def_to_kind(db, it.parent_def(db).into()).into(),
}); });
} }
// Qualify locals/parameters by their parent definition name. // Qualify locals/parameters by their parent definition name.
if let Definition::Local(it) = def { if let Definition::Local(it) = def {
let parent_name = it.parent(db).name(db); let parent = Definition::try_from(it.parent(db)).ok();
if let Some(name) = parent_name { if let Some(parent) = parent {
description.push(MonikerDescriptor { let parent_name = parent.name(db);
name: name.display(db).to_string(), if let Some(name) = parent_name {
desc: MonikerDescriptorKind::Method, description.push(MonikerDescriptor {
}); name: name.display(db).to_string(),
desc: def_to_kind(db, parent).into(),
});
}
} }
} }
let desc = def_to_kind(db, def).into();
let name_desc = match def { let name_desc = match def {
// These are handled by top-level guard (for performance). // These are handled by top-level guard (for performance).
Definition::GenericParam(_) Definition::GenericParam(_)
@ -201,67 +325,54 @@ pub(crate) fn def_to_moniker(
return None; return None;
} }
MonikerDescriptor { MonikerDescriptor { name: local.name(db).display(db).to_string(), desc }
name: local.name(db).display(db).to_string(), }
desc: MonikerDescriptorKind::Parameter, Definition::Macro(m) => {
} MonikerDescriptor { name: m.name(db).display(db).to_string(), desc }
}
Definition::Function(f) => {
MonikerDescriptor { name: f.name(db).display(db).to_string(), desc }
}
Definition::Variant(v) => {
MonikerDescriptor { name: v.name(db).display(db).to_string(), desc }
}
Definition::Const(c) => {
MonikerDescriptor { name: c.name(db)?.display(db).to_string(), desc }
}
Definition::Trait(trait_) => {
MonikerDescriptor { name: trait_.name(db).display(db).to_string(), desc }
}
Definition::TraitAlias(ta) => {
MonikerDescriptor { name: ta.name(db).display(db).to_string(), desc }
}
Definition::TypeAlias(ta) => {
MonikerDescriptor { name: ta.name(db).display(db).to_string(), desc }
}
Definition::Module(m) => {
MonikerDescriptor { name: m.name(db)?.display(db).to_string(), desc }
}
Definition::BuiltinType(b) => {
MonikerDescriptor { name: b.name().display(db).to_string(), desc }
} }
Definition::Macro(m) => MonikerDescriptor {
name: m.name(db).display(db).to_string(),
desc: MonikerDescriptorKind::Macro,
},
Definition::Function(f) => MonikerDescriptor {
name: f.name(db).display(db).to_string(),
desc: MonikerDescriptorKind::Method,
},
Definition::Variant(v) => MonikerDescriptor {
name: v.name(db).display(db).to_string(),
desc: MonikerDescriptorKind::Type,
},
Definition::Const(c) => MonikerDescriptor {
name: c.name(db)?.display(db).to_string(),
desc: MonikerDescriptorKind::Term,
},
Definition::Trait(trait_) => MonikerDescriptor {
name: trait_.name(db).display(db).to_string(),
desc: MonikerDescriptorKind::Type,
},
Definition::TraitAlias(ta) => MonikerDescriptor {
name: ta.name(db).display(db).to_string(),
desc: MonikerDescriptorKind::Type,
},
Definition::TypeAlias(ta) => MonikerDescriptor {
name: ta.name(db).display(db).to_string(),
desc: MonikerDescriptorKind::TypeParameter,
},
Definition::Module(m) => MonikerDescriptor {
name: m.name(db)?.display(db).to_string(),
desc: MonikerDescriptorKind::Namespace,
},
Definition::BuiltinType(b) => MonikerDescriptor {
name: b.name().display(db).to_string(),
desc: MonikerDescriptorKind::Type,
},
Definition::SelfType(imp) => MonikerDescriptor { Definition::SelfType(imp) => MonikerDescriptor {
name: imp.self_ty(db).as_adt()?.name(db).display(db).to_string(), name: imp.self_ty(db).as_adt()?.name(db).display(db).to_string(),
desc: MonikerDescriptorKind::Type, desc,
},
Definition::Field(it) => MonikerDescriptor {
name: it.name(db).display(db).to_string(),
desc: MonikerDescriptorKind::Term,
},
Definition::Adt(adt) => MonikerDescriptor {
name: adt.name(db).display(db).to_string(),
desc: MonikerDescriptorKind::Type,
},
Definition::Static(s) => MonikerDescriptor {
name: s.name(db).display(db).to_string(),
desc: MonikerDescriptorKind::Meta,
},
Definition::ExternCrateDecl(m) => MonikerDescriptor {
name: m.name(db).display(db).to_string(),
desc: MonikerDescriptorKind::Namespace,
}, },
Definition::Field(it) => {
MonikerDescriptor { name: it.name(db).display(db).to_string(), desc }
}
Definition::TupleField(it) => {
MonikerDescriptor { name: it.name().display(db).to_string(), desc }
}
Definition::Adt(adt) => {
MonikerDescriptor { name: adt.name(db).display(db).to_string(), desc }
}
Definition::Static(s) => {
MonikerDescriptor { name: s.name(db).display(db).to_string(), desc }
}
Definition::ExternCrateDecl(m) => {
MonikerDescriptor { name: m.name(db).display(db).to_string(), desc }
}
}; };
description.push(name_desc); description.push(name_desc);

View File

@ -237,7 +237,7 @@ impl TryToNav for Definition {
Definition::TraitAlias(it) => it.try_to_nav(db), Definition::TraitAlias(it) => it.try_to_nav(db),
Definition::TypeAlias(it) => it.try_to_nav(db), Definition::TypeAlias(it) => it.try_to_nav(db),
Definition::ExternCrateDecl(it) => Some(it.try_to_nav(db)?), Definition::ExternCrateDecl(it) => Some(it.try_to_nav(db)?),
Definition::BuiltinType(_) => None, Definition::BuiltinType(_) | Definition::TupleField(_) => None,
Definition::ToolModule(_) => None, Definition::ToolModule(_) => None,
Definition::BuiltinAttr(_) => None, Definition::BuiltinAttr(_) => None,
// FIXME: The focus range should be set to the helper declaration // FIXME: The focus range should be set to the helper declaration
@ -724,11 +724,8 @@ fn orig_range_with_focus(
) -> UpmappingResult<(FileRange, Option<TextRange>)> { ) -> UpmappingResult<(FileRange, Option<TextRange>)> {
let Some(name) = name else { return orig_range(db, hir_file, value) }; let Some(name) = name else { return orig_range(db, hir_file, value) };
let call_range = || { let call_kind =
db.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id) || db.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id).kind;
.kind
.original_call_range(db)
};
let def_range = || { let def_range = || {
db.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id) db.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id)
@ -755,7 +752,22 @@ fn orig_range_with_focus(
} }
// name lies outside the node, so instead point to the macro call which // name lies outside the node, so instead point to the macro call which
// *should* contain the name // *should* contain the name
_ => call_range(), _ => {
let kind = call_kind();
let range = kind.clone().original_call_range_with_body(db);
//If the focus range is in the attribute/derive body, we
// need to point the call site to the entire body, if not, fall back
// to the name range of the attribute/derive call
// FIXME: Do this differently, this is very inflexible the caller
// should choose this behavior
if range.file_id == focus_range.file_id
&& range.range.contains_range(focus_range.range)
{
range
} else {
kind.original_call_range(db)
}
}
}, },
Some(focus_range), Some(focus_range),
), ),
@ -784,7 +796,7 @@ fn orig_range_with_focus(
// node is in macro def, just show the focus // node is in macro def, just show the focus
_ => ( _ => (
// show the macro call // show the macro call
(call_range(), None), (call_kind().original_call_range(db), None),
Some((focus_range, Some(focus_range))), Some((focus_range, Some(focus_range))),
), ),
} }
@ -848,7 +860,7 @@ fn foo() { enum FooInner { } }
"#, "#,
); );
let navs = analysis.symbol_search(Query::new("FooInner".to_string())).unwrap(); let navs = analysis.symbol_search(Query::new("FooInner".to_string()), !0).unwrap();
expect![[r#" expect![[r#"
[ [
NavigationTarget { NavigationTarget {
@ -886,7 +898,7 @@ struct Foo;
"#, "#,
); );
let navs = analysis.symbol_search(Query::new("foo".to_string())).unwrap(); let navs = analysis.symbol_search(Query::new("foo".to_string()), !0).unwrap();
assert_eq!(navs.len(), 2) assert_eq!(navs.len(), 2)
} }
} }

View File

@ -366,6 +366,7 @@ fn text_edit_from_self_param(self_param: &ast::SelfParam, new_name: &str) -> Opt
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use expect_test::{expect, Expect}; use expect_test::{expect, Expect};
use ide_db::source_change::SourceChange;
use stdx::trim_indent; use stdx::trim_indent;
use test_utils::assert_eq_text; use test_utils::assert_eq_text;
use text_edit::TextEdit; use text_edit::TextEdit;
@ -418,7 +419,7 @@ mod tests {
let (analysis, position) = fixture::position(ra_fixture); let (analysis, position) = fixture::position(ra_fixture);
let source_change = let source_change =
analysis.rename(position, new_name).unwrap().expect("Expect returned a RenameError"); analysis.rename(position, new_name).unwrap().expect("Expect returned a RenameError");
expect.assert_debug_eq(&source_change) expect.assert_eq(&filter_expect(source_change))
} }
fn check_expect_will_rename_file(new_name: &str, ra_fixture: &str, expect: Expect) { fn check_expect_will_rename_file(new_name: &str, ra_fixture: &str, expect: Expect) {
@ -427,7 +428,7 @@ mod tests {
.will_rename_file(position.file_id, new_name) .will_rename_file(position.file_id, new_name)
.unwrap() .unwrap()
.expect("Expect returned a RenameError"); .expect("Expect returned a RenameError");
expect.assert_debug_eq(&source_change) expect.assert_eq(&filter_expect(source_change))
} }
fn check_prepare(ra_fixture: &str, expect: Expect) { fn check_prepare(ra_fixture: &str, expect: Expect) {
@ -444,6 +445,19 @@ mod tests {
}; };
} }
fn filter_expect(source_change: SourceChange) -> String {
let source_file_edits = source_change
.source_file_edits
.into_iter()
.map(|(id, (text_edit, _))| (id, text_edit.into_iter().collect::<Vec<_>>()))
.collect::<Vec<_>>();
format!(
"source_file_edits: {:#?}\nfile_system_edits: {:#?}\n",
source_file_edits, source_change.file_system_edits
)
}
#[test] #[test]
fn test_prepare_rename_namelikes() { fn test_prepare_rename_namelikes() {
check_prepare(r"fn name$0<'lifetime>() {}", expect![[r#"3..7: name"#]]); check_prepare(r"fn name$0<'lifetime>() {}", expect![[r#"3..7: name"#]]);
@ -916,37 +930,32 @@ mod foo$0;
// empty // empty
"#, "#,
expect![[r#" expect![[r#"
SourceChange { source_file_edits: [
source_file_edits: { (
FileId( FileId(
1, 1,
): (
TextEdit {
indels: [
Indel {
insert: "foo2",
delete: 4..7,
},
],
},
None,
), ),
}, [
file_system_edits: [ Indel {
MoveFile { insert: "foo2",
src: FileId( delete: 4..7,
},
],
),
]
file_system_edits: [
MoveFile {
src: FileId(
2,
),
dst: AnchoredPathBuf {
anchor: FileId(
2, 2,
), ),
dst: AnchoredPathBuf { path: "foo2.rs",
anchor: FileId(
2,
),
path: "foo2.rs",
},
}, },
], },
is_snippet: false, ]
}
"#]], "#]],
); );
} }
@ -968,50 +977,43 @@ pub struct FooContent;
use crate::foo$0::FooContent; use crate::foo$0::FooContent;
"#, "#,
expect![[r#" expect![[r#"
SourceChange { source_file_edits: [
source_file_edits: { (
FileId( FileId(
0, 0,
): (
TextEdit {
indels: [
Indel {
insert: "quux",
delete: 8..11,
},
],
},
None,
), ),
[
Indel {
insert: "quux",
delete: 8..11,
},
],
),
(
FileId( FileId(
2, 2,
): (
TextEdit {
indels: [
Indel {
insert: "quux",
delete: 11..14,
},
],
},
None,
), ),
}, [
file_system_edits: [ Indel {
MoveFile { insert: "quux",
src: FileId( delete: 11..14,
},
],
),
]
file_system_edits: [
MoveFile {
src: FileId(
1,
),
dst: AnchoredPathBuf {
anchor: FileId(
1, 1,
), ),
dst: AnchoredPathBuf { path: "quux.rs",
anchor: FileId(
1,
),
path: "quux.rs",
},
}, },
], },
is_snippet: false, ]
}
"#]], "#]],
); );
} }
@ -1027,43 +1029,38 @@ mod fo$0o;
// empty // empty
"#, "#,
expect![[r#" expect![[r#"
SourceChange { source_file_edits: [
source_file_edits: { (
FileId( FileId(
0, 0,
): (
TextEdit {
indels: [
Indel {
insert: "foo2",
delete: 4..7,
},
],
},
None,
), ),
}, [
file_system_edits: [ Indel {
MoveDir { insert: "foo2",
src: AnchoredPathBuf { delete: 4..7,
anchor: FileId(
1,
),
path: "../foo",
}, },
src_id: FileId( ],
),
]
file_system_edits: [
MoveDir {
src: AnchoredPathBuf {
anchor: FileId(
1, 1,
), ),
dst: AnchoredPathBuf { path: "../foo",
anchor: FileId(
1,
),
path: "../foo2",
},
}, },
], src_id: FileId(
is_snippet: false, 1,
} ),
dst: AnchoredPathBuf {
anchor: FileId(
1,
),
path: "../foo2",
},
},
]
"#]], "#]],
); );
} }
@ -1080,37 +1077,32 @@ mod outer { mod fo$0o; }
// empty // empty
"#, "#,
expect![[r#" expect![[r#"
SourceChange { source_file_edits: [
source_file_edits: { (
FileId( FileId(
0, 0,
): (
TextEdit {
indels: [
Indel {
insert: "bar",
delete: 16..19,
},
],
},
None,
), ),
}, [
file_system_edits: [ Indel {
MoveFile { insert: "bar",
src: FileId( delete: 16..19,
},
],
),
]
file_system_edits: [
MoveFile {
src: FileId(
1,
),
dst: AnchoredPathBuf {
anchor: FileId(
1, 1,
), ),
dst: AnchoredPathBuf { path: "bar.rs",
anchor: FileId(
1,
),
path: "bar.rs",
},
}, },
], },
is_snippet: false, ]
}
"#]], "#]],
); );
} }
@ -1156,50 +1148,43 @@ pub mod foo$0;
// pub fn fun() {} // pub fn fun() {}
"#, "#,
expect![[r#" expect![[r#"
SourceChange { source_file_edits: [
source_file_edits: { (
FileId( FileId(
0, 0,
): (
TextEdit {
indels: [
Indel {
insert: "foo2",
delete: 27..30,
},
],
},
None,
), ),
[
Indel {
insert: "foo2",
delete: 27..30,
},
],
),
(
FileId( FileId(
1, 1,
): (
TextEdit {
indels: [
Indel {
insert: "foo2",
delete: 8..11,
},
],
},
None,
), ),
}, [
file_system_edits: [ Indel {
MoveFile { insert: "foo2",
src: FileId( delete: 8..11,
},
],
),
]
file_system_edits: [
MoveFile {
src: FileId(
2,
),
dst: AnchoredPathBuf {
anchor: FileId(
2, 2,
), ),
dst: AnchoredPathBuf { path: "foo2.rs",
anchor: FileId(
2,
),
path: "foo2.rs",
},
}, },
], },
is_snippet: false, ]
}
"#]], "#]],
); );
} }
@ -1229,54 +1214,49 @@ mod quux;
// empty // empty
"#, "#,
expect![[r#" expect![[r#"
SourceChange { source_file_edits: [
source_file_edits: { (
FileId( FileId(
0, 0,
): (
TextEdit {
indels: [
Indel {
insert: "foo2",
delete: 4..7,
},
],
},
None,
), ),
[
Indel {
insert: "foo2",
delete: 4..7,
},
],
),
]
file_system_edits: [
MoveFile {
src: FileId(
1,
),
dst: AnchoredPathBuf {
anchor: FileId(
1,
),
path: "foo2.rs",
},
}, },
file_system_edits: [ MoveDir {
MoveFile { src: AnchoredPathBuf {
src: FileId( anchor: FileId(
1, 1,
), ),
dst: AnchoredPathBuf { path: "foo",
anchor: FileId(
1,
),
path: "foo2.rs",
},
}, },
MoveDir { src_id: FileId(
src: AnchoredPathBuf { 1,
anchor: FileId( ),
1, dst: AnchoredPathBuf {
), anchor: FileId(
path: "foo",
},
src_id: FileId(
1, 1,
), ),
dst: AnchoredPathBuf { path: "foo2",
anchor: FileId(
1,
),
path: "foo2",
},
}, },
], },
is_snippet: false, ]
}
"#]], "#]],
) )
} }
@ -1344,12 +1324,9 @@ fn foo() {}
mod bar$0; mod bar$0;
"#, "#,
expect![[r#" expect![[r#"
SourceChange { source_file_edits: []
source_file_edits: {}, file_system_edits: []
file_system_edits: [], "#]],
is_snippet: false,
}
"#]],
) )
} }
@ -1370,58 +1347,53 @@ pub mod bar;
pub fn baz() {} pub fn baz() {}
"#, "#,
expect![[r#" expect![[r#"
SourceChange { source_file_edits: [
source_file_edits: { (
FileId( FileId(
0, 0,
): (
TextEdit {
indels: [
Indel {
insert: "r#fn",
delete: 4..7,
},
Indel {
insert: "r#fn",
delete: 22..25,
},
],
},
None,
), ),
[
Indel {
insert: "r#fn",
delete: 4..7,
},
Indel {
insert: "r#fn",
delete: 22..25,
},
],
),
]
file_system_edits: [
MoveFile {
src: FileId(
1,
),
dst: AnchoredPathBuf {
anchor: FileId(
1,
),
path: "fn.rs",
},
}, },
file_system_edits: [ MoveDir {
MoveFile { src: AnchoredPathBuf {
src: FileId( anchor: FileId(
1, 1,
), ),
dst: AnchoredPathBuf { path: "foo",
anchor: FileId(
1,
),
path: "fn.rs",
},
}, },
MoveDir { src_id: FileId(
src: AnchoredPathBuf { 1,
anchor: FileId( ),
1, dst: AnchoredPathBuf {
), anchor: FileId(
path: "foo",
},
src_id: FileId(
1, 1,
), ),
dst: AnchoredPathBuf { path: "fn",
anchor: FileId(
1,
),
path: "fn",
},
}, },
], },
is_snippet: false, ]
}
"#]], "#]],
); );
} }
@ -1443,58 +1415,53 @@ pub mod bar;
pub fn baz() {} pub fn baz() {}
"#, "#,
expect![[r#" expect![[r#"
SourceChange { source_file_edits: [
source_file_edits: { (
FileId( FileId(
0, 0,
): (
TextEdit {
indels: [
Indel {
insert: "foo",
delete: 4..8,
},
Indel {
insert: "foo",
delete: 23..27,
},
],
},
None,
), ),
[
Indel {
insert: "foo",
delete: 4..8,
},
Indel {
insert: "foo",
delete: 23..27,
},
],
),
]
file_system_edits: [
MoveFile {
src: FileId(
1,
),
dst: AnchoredPathBuf {
anchor: FileId(
1,
),
path: "foo.rs",
},
}, },
file_system_edits: [ MoveDir {
MoveFile { src: AnchoredPathBuf {
src: FileId( anchor: FileId(
1, 1,
), ),
dst: AnchoredPathBuf { path: "fn",
anchor: FileId(
1,
),
path: "foo.rs",
},
}, },
MoveDir { src_id: FileId(
src: AnchoredPathBuf { 1,
anchor: FileId( ),
1, dst: AnchoredPathBuf {
), anchor: FileId(
path: "fn",
},
src_id: FileId(
1, 1,
), ),
dst: AnchoredPathBuf { path: "foo",
anchor: FileId(
1,
),
path: "foo",
},
}, },
], },
is_snippet: false, ]
}
"#]], "#]],
); );
} }

File diff suppressed because it is too large Load Diff

View File

@ -17,7 +17,7 @@ use crate::navigation_target::UpmappingResult;
use crate::{ use crate::{
hover::hover_for_definition, hover::hover_for_definition,
inlay_hints::AdjustmentHintsMode, inlay_hints::AdjustmentHintsMode,
moniker::{def_to_moniker, MonikerResult}, moniker::{def_to_kind, def_to_moniker, MonikerResult, SymbolInformationKind},
parent_module::crates_for, parent_module::crates_for,
Analysis, Fold, HoverConfig, HoverResult, InlayHint, InlayHintsConfig, TryToNav, Analysis, Fold, HoverConfig, HoverResult, InlayHint, InlayHintsConfig, TryToNav,
}; };
@ -46,6 +46,10 @@ pub struct TokenStaticData {
pub definition: Option<FileRange>, pub definition: Option<FileRange>,
pub references: Vec<ReferenceData>, pub references: Vec<ReferenceData>,
pub moniker: Option<MonikerResult>, pub moniker: Option<MonikerResult>,
pub display_name: Option<String>,
pub enclosing_moniker: Option<MonikerResult>,
pub signature: Option<String>,
pub kind: SymbolInformationKind,
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -129,6 +133,7 @@ impl StaticIndex<'_> {
closure_capture_hints: false, closure_capture_hints: false,
closing_brace_hints_min_lines: Some(25), closing_brace_hints_min_lines: Some(25),
fields_to_resolve: InlayFieldsToResolve::empty(), fields_to_resolve: InlayFieldsToResolve::empty(),
range_exclusive_hints: false,
}, },
file_id, file_id,
None, None,
@ -172,6 +177,12 @@ impl StaticIndex<'_> {
}), }),
references: vec![], references: vec![],
moniker: current_crate.and_then(|cc| def_to_moniker(self.db, def, cc)), moniker: current_crate.and_then(|cc| def_to_moniker(self.db, def, cc)),
display_name: def.name(self.db).map(|name| name.display(self.db).to_string()),
enclosing_moniker: current_crate
.zip(def.enclosing_definition(self.db))
.and_then(|(cc, enclosing_def)| def_to_moniker(self.db, enclosing_def, cc)),
signature: def.label(self.db),
kind: def_to_kind(self.db, def),
}); });
self.def_map.insert(def, it); self.def_map.insert(def, it);
it it

View File

@ -1,5 +1,6 @@
//! Computes color for a single element. //! Computes color for a single element.
use either::Either;
use hir::{AsAssocItem, HasVisibility, MacroFileIdExt, Semantics}; use hir::{AsAssocItem, HasVisibility, MacroFileIdExt, Semantics};
use ide_db::{ use ide_db::{
defs::{Definition, IdentClass, NameClass, NameRefClass}, defs::{Definition, IdentClass, NameClass, NameRefClass},
@ -359,7 +360,9 @@ pub(super) fn highlight_def(
let db = sema.db; let db = sema.db;
let mut h = match def { let mut h = match def {
Definition::Macro(m) => Highlight::new(HlTag::Symbol(m.kind(sema.db).into())), Definition::Macro(m) => Highlight::new(HlTag::Symbol(m.kind(sema.db).into())),
Definition::Field(_) => Highlight::new(HlTag::Symbol(SymbolKind::Field)), Definition::Field(_) | Definition::TupleField(_) => {
Highlight::new(HlTag::Symbol(SymbolKind::Field))
}
Definition::Module(module) => { Definition::Module(module) => {
let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Module)); let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Module));
if module.is_crate_root() { if module.is_crate_root() {
@ -647,8 +650,11 @@ fn highlight_name_ref_by_syntax(
let h = HlTag::Symbol(SymbolKind::Field); let h = HlTag::Symbol(SymbolKind::Field);
let is_union = ast::FieldExpr::cast(parent) let is_union = ast::FieldExpr::cast(parent)
.and_then(|field_expr| sema.resolve_field(&field_expr)) .and_then(|field_expr| sema.resolve_field(&field_expr))
.map_or(false, |field| { .map_or(false, |field| match field {
matches!(field.parent_def(sema.db), hir::VariantDef::Union(_)) Either::Left(field) => {
matches!(field.parent_def(sema.db), hir::VariantDef::Union(_))
}
Either::Right(_) => false,
}); });
if is_union { if is_union {
h | HlMod::Unsafe h | HlMod::Unsafe

View File

@ -301,7 +301,7 @@ fn module_def_to_hl_tag(def: Definition) -> HlTag {
Definition::TypeAlias(_) => SymbolKind::TypeAlias, Definition::TypeAlias(_) => SymbolKind::TypeAlias,
Definition::BuiltinType(_) => return HlTag::BuiltinType, Definition::BuiltinType(_) => return HlTag::BuiltinType,
Definition::Macro(_) => SymbolKind::Macro, Definition::Macro(_) => SymbolKind::Macro,
Definition::Field(_) => SymbolKind::Field, Definition::Field(_) | Definition::TupleField(_) => SymbolKind::Field,
Definition::SelfType(_) => SymbolKind::Impl, Definition::SelfType(_) => SymbolKind::Impl,
Definition::Local(_) => SymbolKind::Local, Definition::Local(_) => SymbolKind::Local,
Definition::GenericParam(gp) => match gp { Definition::GenericParam(gp) => match gp {

View File

@ -96,7 +96,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="macro default_library library">include</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="none macro">concat</span><span class="punctuation macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"foo/"</span><span class="comma macro">,</span> <span class="string_literal macro">"foo.rs"</span><span class="parenthesis macro">)</span><span class="parenthesis macro">)</span><span class="semicolon">;</span> <span class="macro default_library library">include</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="none macro">concat</span><span class="punctuation macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"foo/"</span><span class="comma macro">,</span> <span class="string_literal macro">"foo.rs"</span><span class="parenthesis macro">)</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span> <span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
<span class="macro default_library library">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello, </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="numeric_literal macro">92</span><span class="parenthesis macro">)</span><span class="semicolon">;</span> <span class="macro default_library library">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello, </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="parenthesis macro">(</span><span class="numeric_literal macro">92</span><span class="comma macro">,</span><span class="parenthesis macro">)</span><span class="operator macro">.</span><span class="field library macro">0</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">dont_color_me_braces</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="parenthesis macro">)</span><span class="semicolon">;</span> <span class="macro">dont_color_me_braces</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">noop</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="macro macro">noop</span><span class="macro_bang macro">!</span><span class="parenthesis macro">(</span><span class="numeric_literal macro">1</span><span class="parenthesis macro">)</span><span class="parenthesis macro">)</span><span class="semicolon">;</span> <span class="macro">noop</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="macro macro">noop</span><span class="macro_bang macro">!</span><span class="parenthesis macro">(</span><span class="numeric_literal macro">1</span><span class="parenthesis macro">)</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="brace">}</span> <span class="brace">}</span>

View File

@ -103,7 +103,7 @@ macro without_args {
include!(concat!("foo/", "foo.rs")); include!(concat!("foo/", "foo.rs"));
fn main() { fn main() {
format_args!("Hello, {}!", 92); format_args!("Hello, {}!", (92,).0);
dont_color_me_braces!(); dont_color_me_braces!();
noop!(noop!(1)); noop!(noop!(1));
} }

View File

@ -69,7 +69,7 @@ impl FieldOrTupleIdx {
.as_str() .as_str()
.map(|s| s.to_owned()) .map(|s| s.to_owned())
.unwrap_or_else(|| format!(".{}", f.name(db).as_tuple_index().unwrap())), .unwrap_or_else(|| format!(".{}", f.name(db).as_tuple_index().unwrap())),
FieldOrTupleIdx::TupleIdx(i) => format!(".{i}").to_owned(), FieldOrTupleIdx::TupleIdx(i) => format!(".{i}"),
} }
} }
} }
@ -203,7 +203,7 @@ pub(crate) fn view_memory_layout(
let mut nodes = vec![MemoryLayoutNode { let mut nodes = vec![MemoryLayoutNode {
item_name, item_name,
typename: typename.clone(), typename,
size: layout.size(), size: layout.size(),
offset: 0, offset: 0,
alignment: layout.align(), alignment: layout.align(),

View File

@ -13,7 +13,7 @@ doctest = false
cargo_metadata = "0.18.1" cargo_metadata = "0.18.1"
# local deps # local deps
toolchain = { path = "../../toolchain", version = "0.0.0" } toolchain.workspace = true
[features] [features]
sysroot-abi = [] sysroot-abi = []

View File

@ -274,7 +274,7 @@ impl CargoWorkspace {
other_options.append( other_options.append(
&mut targets &mut targets
.into_iter() .into_iter()
.flat_map(|target| ["--filter-platform".to_owned().to_string(), target]) .flat_map(|target| ["--filter-platform".to_owned(), target])
.collect(), .collect(),
); );
} }

View File

@ -6,7 +6,7 @@
use std::{env, fs, iter, ops, path::PathBuf, process::Command}; use std::{env, fs, iter, ops, path::PathBuf, process::Command};
use anyhow::{format_err, Result}; use anyhow::{format_err, Context, Result};
use base_db::CrateName; use base_db::CrateName;
use la_arena::{Arena, Idx}; use la_arena::{Arena, Idx};
use paths::{AbsPath, AbsPathBuf}; use paths::{AbsPath, AbsPathBuf};
@ -119,12 +119,15 @@ impl Sysroot {
get_rustc_src(&self.root) get_rustc_src(&self.root)
} }
pub fn discover_rustc(&self) -> Result<AbsPathBuf, std::io::Error> { pub fn discover_rustc(&self) -> anyhow::Result<AbsPathBuf> {
let rustc = self.root.join("bin/rustc"); let rustc = self.root.join("bin/rustc");
tracing::debug!(?rustc, "checking for rustc binary at location"); tracing::debug!(?rustc, "checking for rustc binary at location");
match fs::metadata(&rustc) { match fs::metadata(&rustc) {
Ok(_) => Ok(rustc), Ok(_) => Ok(rustc),
Err(e) => Err(e), Err(e) => Err(e).context(format!(
"failed to discover rustc in sysroot: {:?}",
AsRef::<std::path::Path>::as_ref(&self.root)
)),
} }
} }

View File

@ -1277,7 +1277,7 @@ fn add_target_crate_root(
inject_cargo_env(pkg, &mut env); inject_cargo_env(pkg, &mut env);
if let Ok(cname) = String::from_str(cargo_name) { if let Ok(cname) = String::from_str(cargo_name) {
// CARGO_CRATE_NAME is the name of the Cargo target with - converted to _, such as the name of the library, binary, example, integration test, or benchmark. // CARGO_CRATE_NAME is the name of the Cargo target with - converted to _, such as the name of the library, binary, example, integration test, or benchmark.
env.set("CARGO_CRATE_NAME", cname.replace("-", "_")); env.set("CARGO_CRATE_NAME", cname.replace('-', "_"));
} }
if let Some(envs) = build_data.map(|it| &it.envs) { if let Some(envs) = build_data.map(|it| &it.envs) {

View File

@ -792,6 +792,7 @@ impl flags::AnalysisStats {
max_length: Some(25), max_length: Some(25),
closing_brace_hints_min_lines: Some(20), closing_brace_hints_min_lines: Some(20),
fields_to_resolve: InlayFieldsToResolve::empty(), fields_to_resolve: InlayFieldsToResolve::empty(),
range_exclusive_hints: true,
}, },
file_id, file_id,
None, None,

View File

@ -7,8 +7,8 @@ use std::{
}; };
use ide::{ use ide::{
LineCol, MonikerDescriptorKind, StaticIndex, StaticIndexedFile, TextRange, TokenId, LineCol, MonikerDescriptorKind, MonikerResult, StaticIndex, StaticIndexedFile,
TokenStaticData, SymbolInformationKind, TextRange, TokenId,
}; };
use ide_db::LineIndexDatabase; use ide_db::LineIndexDatabase;
use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice}; use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
@ -78,6 +78,7 @@ impl flags::Scip {
let mut symbols_emitted: HashSet<TokenId> = HashSet::default(); let mut symbols_emitted: HashSet<TokenId> = HashSet::default();
let mut tokens_to_symbol: HashMap<TokenId, String> = HashMap::new(); let mut tokens_to_symbol: HashMap<TokenId, String> = HashMap::new();
let mut tokens_to_enclosing_symbol: HashMap<TokenId, Option<String>> = HashMap::new();
for StaticIndexedFile { file_id, tokens, .. } in si.files { for StaticIndexedFile { file_id, tokens, .. } in si.files {
let mut local_count = 0; let mut local_count = 0;
@ -109,10 +110,24 @@ impl flags::Scip {
let symbol = tokens_to_symbol let symbol = tokens_to_symbol
.entry(id) .entry(id)
.or_insert_with(|| { .or_insert_with(|| {
let symbol = token_to_symbol(token).unwrap_or_else(&mut new_local_symbol); let symbol = token
.moniker
.as_ref()
.map(moniker_to_symbol)
.unwrap_or_else(&mut new_local_symbol);
scip::symbol::format_symbol(symbol) scip::symbol::format_symbol(symbol)
}) })
.clone(); .clone();
let enclosing_symbol = tokens_to_enclosing_symbol
.entry(id)
.or_insert_with(|| {
token
.enclosing_moniker
.as_ref()
.map(moniker_to_symbol)
.map(scip::symbol::format_symbol)
})
.clone();
let mut symbol_roles = Default::default(); let mut symbol_roles = Default::default();
@ -128,15 +143,22 @@ impl flags::Scip {
.map(|hover| hover.markup.as_str()) .map(|hover| hover.markup.as_str())
.filter(|it| !it.is_empty()) .filter(|it| !it.is_empty())
.map(|it| vec![it.to_owned()]); .map(|it| vec![it.to_owned()]);
let signature_documentation =
token.signature.clone().map(|text| scip_types::Document {
relative_path: relative_path.clone(),
language: "rust".to_string(),
text,
..Default::default()
});
let symbol_info = scip_types::SymbolInformation { let symbol_info = scip_types::SymbolInformation {
symbol: symbol.clone(), symbol: symbol.clone(),
documentation: documentation.unwrap_or_default(), documentation: documentation.unwrap_or_default(),
relationships: Vec::new(), relationships: Vec::new(),
special_fields: Default::default(), special_fields: Default::default(),
kind: Default::default(), kind: symbol_kind(token.kind).into(),
display_name: String::new(), display_name: token.display_name.clone().unwrap_or_default(),
signature_documentation: Default::default(), signature_documentation: signature_documentation.into(),
enclosing_symbol: String::new(), enclosing_symbol: enclosing_symbol.unwrap_or_default(),
}; };
symbols.push(symbol_info) symbols.push(symbol_info)
@ -228,14 +250,36 @@ fn new_descriptor(name: &str, suffix: scip_types::descriptor::Suffix) -> scip_ty
} }
} }
/// Loosely based on `def_to_moniker` fn symbol_kind(kind: SymbolInformationKind) -> scip_types::symbol_information::Kind {
/// use scip_types::symbol_information::Kind as ScipKind;
/// Only returns a Symbol when it's a non-local symbol. match kind {
/// So if the visibility isn't outside of a document, then it will return None SymbolInformationKind::AssociatedType => ScipKind::AssociatedType,
fn token_to_symbol(token: &TokenStaticData) -> Option<scip_types::Symbol> { SymbolInformationKind::Attribute => ScipKind::Attribute,
use scip_types::descriptor::Suffix::*; SymbolInformationKind::Constant => ScipKind::Constant,
SymbolInformationKind::Enum => ScipKind::Enum,
SymbolInformationKind::EnumMember => ScipKind::EnumMember,
SymbolInformationKind::Field => ScipKind::Field,
SymbolInformationKind::Function => ScipKind::Function,
SymbolInformationKind::Macro => ScipKind::Macro,
SymbolInformationKind::Method => ScipKind::Method,
SymbolInformationKind::Module => ScipKind::Module,
SymbolInformationKind::Parameter => ScipKind::Parameter,
SymbolInformationKind::SelfParameter => ScipKind::SelfParameter,
SymbolInformationKind::StaticMethod => ScipKind::StaticMethod,
SymbolInformationKind::StaticVariable => ScipKind::StaticVariable,
SymbolInformationKind::Struct => ScipKind::Struct,
SymbolInformationKind::Trait => ScipKind::Trait,
SymbolInformationKind::TraitMethod => ScipKind::TraitMethod,
SymbolInformationKind::Type => ScipKind::Type,
SymbolInformationKind::TypeAlias => ScipKind::TypeAlias,
SymbolInformationKind::TypeParameter => ScipKind::TypeParameter,
SymbolInformationKind::Union => ScipKind::Union,
SymbolInformationKind::Variable => ScipKind::Variable,
}
}
let moniker = token.moniker.as_ref()?; fn moniker_to_symbol(moniker: &MonikerResult) -> scip_types::Symbol {
use scip_types::descriptor::Suffix::*;
let package_name = moniker.package_information.name.clone(); let package_name = moniker.package_information.name.clone();
let version = moniker.package_information.version.clone(); let version = moniker.package_information.version.clone();
@ -260,7 +304,7 @@ fn token_to_symbol(token: &TokenStaticData) -> Option<scip_types::Symbol> {
}) })
.collect(); .collect();
Some(scip_types::Symbol { scip_types::Symbol {
scheme: "rust-analyzer".into(), scheme: "rust-analyzer".into(),
package: Some(scip_types::Package { package: Some(scip_types::Package {
manager: "cargo".to_string(), manager: "cargo".to_string(),
@ -271,7 +315,7 @@ fn token_to_symbol(token: &TokenStaticData) -> Option<scip_types::Symbol> {
.into(), .into(),
descriptors, descriptors,
special_fields: Default::default(), special_fields: Default::default(),
}) }
} }
#[cfg(test)] #[cfg(test)]
@ -309,7 +353,7 @@ mod test {
for &(range, id) in &file.tokens { for &(range, id) in &file.tokens {
if range.contains(offset - TextSize::from(1)) { if range.contains(offset - TextSize::from(1)) {
let token = si.tokens.get(id).unwrap(); let token = si.tokens.get(id).unwrap();
found_symbol = token_to_symbol(token); found_symbol = token.moniker.as_ref().map(moniker_to_symbol);
break; break;
} }
} }
@ -359,6 +403,21 @@ pub mod module {
); );
} }
#[test]
fn symbol_for_trait_alias() {
check_symbol(
r#"
//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library
#![feature(trait_alias)]
pub mod module {
pub trait MyTrait {}
pub trait MyTraitAlias$0 = MyTrait;
}
"#,
"rust-analyzer cargo foo 0.1.0 module/MyTraitAlias#",
);
}
#[test] #[test]
fn symbol_for_trait_constant() { fn symbol_for_trait_constant() {
check_symbol( check_symbol(
@ -525,4 +584,15 @@ pub mod example_mod {
"rust-analyzer cargo main . foo/Bar#", "rust-analyzer cargo main . foo/Bar#",
); );
} }
#[test]
fn symbol_for_for_type_alias() {
check_symbol(
r#"
//- /lib.rs crate:main
pub type MyTypeAlias$0 = u8;
"#,
"rust-analyzer cargo main . MyTypeAlias#",
);
}
} }

View File

@ -399,6 +399,8 @@ config_data! {
/// Whether to show function parameter name inlay hints at the call /// Whether to show function parameter name inlay hints at the call
/// site. /// site.
inlayHints_parameterHints_enable: bool = "true", inlayHints_parameterHints_enable: bool = "true",
/// Whether to show exclusive range inlay hints.
inlayHints_rangeExclusiveHints_enable: bool = "false",
/// Whether to show inlay hints for compiler inserted reborrows. /// Whether to show inlay hints for compiler inserted reborrows.
/// This setting is deprecated in favor of #rust-analyzer.inlayHints.expressionAdjustmentHints.enable#. /// This setting is deprecated in favor of #rust-analyzer.inlayHints.expressionAdjustmentHints.enable#.
inlayHints_reborrowHints_enable: ReborrowHintsDef = "\"never\"", inlayHints_reborrowHints_enable: ReborrowHintsDef = "\"never\"",
@ -1464,6 +1466,7 @@ impl Config {
} else { } else {
None None
}, },
range_exclusive_hints: self.data.inlayHints_rangeExclusiveHints_enable,
fields_to_resolve: InlayFieldsToResolve { fields_to_resolve: InlayFieldsToResolve {
resolve_text_edits: client_capability_fields.contains("textEdits"), resolve_text_edits: client_capability_fields.contains("textEdits"),
resolve_hint_tooltip: client_capability_fields.contains("tooltip"), resolve_hint_tooltip: client_capability_fields.contains("tooltip"),

View File

@ -458,7 +458,6 @@ pub(crate) fn handle_workspace_symbol(
let config = snap.config.workspace_symbol(); let config = snap.config.workspace_symbol();
let (all_symbols, libs) = decide_search_scope_and_kind(&params, &config); let (all_symbols, libs) = decide_search_scope_and_kind(&params, &config);
let limit = config.search_limit;
let query = { let query = {
let query: String = params.query.chars().filter(|&c| c != '#' && c != '*').collect(); let query: String = params.query.chars().filter(|&c| c != '#' && c != '*').collect();
@ -469,14 +468,11 @@ pub(crate) fn handle_workspace_symbol(
if libs { if libs {
q.libs(); q.libs();
} }
q.limit(limit);
q q
}; };
let mut res = exec_query(&snap, query)?; let mut res = exec_query(&snap, query, config.search_limit)?;
if res.is_empty() && !all_symbols { if res.is_empty() && !all_symbols {
let mut query = Query::new(params.query); res = exec_query(&snap, Query::new(params.query), config.search_limit)?;
query.limit(limit);
res = exec_query(&snap, query)?;
} }
return Ok(Some(lsp_types::WorkspaceSymbolResponse::Nested(res))); return Ok(Some(lsp_types::WorkspaceSymbolResponse::Nested(res)));
@ -519,9 +515,10 @@ pub(crate) fn handle_workspace_symbol(
fn exec_query( fn exec_query(
snap: &GlobalStateSnapshot, snap: &GlobalStateSnapshot,
query: Query, query: Query,
limit: usize,
) -> anyhow::Result<Vec<lsp_types::WorkspaceSymbol>> { ) -> anyhow::Result<Vec<lsp_types::WorkspaceSymbol>> {
let mut res = Vec::new(); let mut res = Vec::new();
for nav in snap.analysis.symbol_search(query)? { for nav in snap.analysis.symbol_search(query, limit)? {
let container_name = nav.container_name.as_ref().map(|v| v.to_string()); let container_name = nav.container_name.as_ref().map(|v| v.to_string());
let info = lsp_types::WorkspaceSymbol { let info = lsp_types::WorkspaceSymbol {

View File

@ -1,6 +1,8 @@
[package] [package]
name = "span" name = "span"
version = "0.0.0" version = "0.0.0"
description = "TBD"
rust-version.workspace = true rust-version.workspace = true
edition.workspace = true edition.workspace = true
license.workspace = true license.workspace = true
@ -18,4 +20,4 @@ syntax.workspace = true
stdx.workspace = true stdx.workspace = true
[lints] [lints]
workspace = true workspace = true

View File

@ -59,6 +59,17 @@ impl<T, U> TupleExt for (T, U) {
} }
} }
impl<T, U, V> TupleExt for (T, U, V) {
type Head = T;
type Tail = V;
fn head(self) -> Self::Head {
self.0
}
fn tail(self) -> Self::Tail {
self.2
}
}
pub fn to_lower_snake_case(s: &str) -> String { pub fn to_lower_snake_case(s: &str) -> String {
to_snake_case(s, char::to_lowercase) to_snake_case(s, char::to_lowercase)
} }

View File

@ -136,6 +136,16 @@ where
{ {
} }
/// Trait to describe operations common to both `RangeExpr` and `RangePat`.
pub trait RangeItem {
type Bound;
fn start(&self) -> Option<Self::Bound>;
fn end(&self) -> Option<Self::Bound>;
fn op_kind(&self) -> Option<RangeOp>;
fn op_token(&self) -> Option<SyntaxToken>;
}
mod support { mod support {
use super::{AstChildren, AstNode, SyntaxKind, SyntaxNode, SyntaxToken}; use super::{AstChildren, AstNode, SyntaxKind, SyntaxNode, SyntaxToken};

View File

@ -293,13 +293,12 @@ impl ast::GenericParamList {
} }
/// Removes the corresponding generic arg /// Removes the corresponding generic arg
pub fn remove_generic_arg(&self, generic_arg: &ast::GenericArg) -> Option<GenericParam> { pub fn remove_generic_arg(&self, generic_arg: &ast::GenericArg) {
let param_to_remove = self.find_generic_arg(generic_arg); let param_to_remove = self.find_generic_arg(generic_arg);
if let Some(param) = &param_to_remove { if let Some(param) = &param_to_remove {
self.remove_generic_param(param.clone()); self.remove_generic_param(param.clone());
} }
param_to_remove
} }
/// Constructs a matching [`ast::GenericArgList`] /// Constructs a matching [`ast::GenericArgList`]

Some files were not shown because too many files have changed in this diff Show More