introduce SourceAnalyzer
parent e6e2571bdf
commit 10d66d63d7
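Across the hunks below, the manual chain of `source_binder::function_from_child_node`, `Function::infer`, `Function::body_source_map` and a source-map lookup is collapsed into a single `hir::SourceAnalyser` value. A minimal sketch of the new calling pattern, not part of the commit; the helper name and its parameters are placeholders, only the two `SourceAnalyser` calls come from the diff:

```rust
use hir::db::HirDatabase;
use ra_db::FileId;
use ra_syntax::ast::{self, AstNode};

// Hypothetical helper: `db`, `file_id` and `expr` stand in for state the real
// call sites (assists, completion, hover) already have on hand.
fn type_at(db: &impl HirDatabase, file_id: FileId, expr: &ast::Expr) -> Option<hir::Ty> {
    // The analyser captures the resolver, body source map and inference result
    // for the enclosing function, if any, in one place.
    let analyzer = hir::SourceAnalyser::new(db, file_id, expr.syntax());
    // One query replaces infer + body_source_map + node_expr + indexing.
    analyzer.type_of(db, expr)
}
```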
@@ -1,7 +1,6 @@
 use hir::{
     HirDisplay, Ty,
     db::HirDatabase,
-    source_binder::function_from_child_node,
 };
 use ra_syntax::{
     SyntaxKind,
@@ -30,11 +29,8 @@ pub(crate) fn add_explicit_type(mut ctx: AssistCtx<impl HirDatabase>) -> Option<
     }
     // Infer type
     let db = ctx.db;
-    let func = function_from_child_node(db, ctx.frange.file_id, pat.syntax())?;
-    let inference_res = func.infer(db);
-    let source_map = func.body_source_map(db);
-    let expr_id = source_map.node_expr(expr.into())?;
-    let ty = inference_res[expr_id].clone();
+    let analyzer = hir::SourceAnalyser::new(db, ctx.frange.file_id, stmt.syntax());
+    let ty = analyzer.type_of(db, expr)?;
     // Assist not applicable if the type is unknown
     if is_unknown(&ty) {
         return None;
@@ -2,7 +2,6 @@ use std::fmt::Write;

 use crate::{Assist, AssistId, AssistCtx};

-use hir::Resolver;
 use hir::db::HirDatabase;
 use ra_syntax::{SmolStr, SyntaxKind, TextRange, TextUnit, TreeArc};
 use ra_syntax::ast::{self, AstNode, AstToken, FnDef, ImplItem, ImplItemKind, NameOwner};
@@ -46,9 +45,9 @@ fn add_missing_impl_members_inner(
     let trait_def = {
         let file_id = ctx.frange.file_id;
         let position = FilePosition { file_id, offset: impl_node.syntax().range().start() };
-        let resolver = hir::source_binder::resolver_for_position(ctx.db, position);
+        let analyser = hir::SourceAnalyser::new(ctx.db, position.file_id, impl_node.syntax());

-        resolve_target_trait_def(ctx.db, &resolver, impl_node)?
+        resolve_target_trait_def(ctx.db, &analyser, impl_node)?
     };

     let missing_fns: Vec<_> = {
@@ -122,14 +121,14 @@ fn add_missing_impl_members_inner(
 /// implemented) to a `ast::TraitDef`.
 fn resolve_target_trait_def(
     db: &impl HirDatabase,
-    resolver: &Resolver,
+    binder: &hir::SourceAnalyser,
     impl_block: &ast::ImplBlock,
 ) -> Option<TreeArc<ast::TraitDef>> {
-    let ast_path = impl_block.target_trait().map(AstNode::syntax).and_then(ast::PathType::cast)?;
-    let hir_path = ast_path.path().and_then(hir::Path::from_ast)?;
+    let ast_path =
+        impl_block.target_trait().map(AstNode::syntax).and_then(ast::PathType::cast)?.path()?;

-    match resolver.resolve_path(db, &hir_path).take_types() {
-        Some(hir::Resolution::Def(hir::ModuleDef::Trait(def))) => Some(def.source(db).1),
+    match binder.resolve_path(db, &ast_path) {
+        Some(hir::PathResolution::Def(hir::ModuleDef::Trait(def))) => Some(def.source(db).1),
         _ => None,
     }
 }
@@ -1,7 +1,7 @@
 use std::fmt::Write;

 use hir::{
-    AdtDef, FieldSource, source_binder,
+    AdtDef, FieldSource,
     db::HirDatabase,
 };
 use ra_syntax::ast::{self, AstNode};
@@ -20,12 +20,8 @@ pub(crate) fn fill_match_arms(mut ctx: AssistCtx<impl HirDatabase>) -> Option<As
     }

     let expr = match_expr.expr()?;
-    let function =
-        source_binder::function_from_child_node(ctx.db, ctx.frange.file_id, expr.syntax())?;
-    let infer_result = function.infer(ctx.db);
-    let source_map = function.body_source_map(ctx.db);
-    let node_expr = source_map.node_expr(expr)?;
-    let match_expr_ty = infer_result[node_expr].clone();
+    let analyzer = hir::SourceAnalyser::new(ctx.db, ctx.frange.file_id, expr.syntax());
+    let match_expr_ty = analyzer.type_of(ctx.db, expr)?;
     let enum_def = match_expr_ty.autoderef(ctx.db).find_map(|ty| match ty.as_adt() {
         Some((AdtDef::Enum(e), _)) => Some(e),
         _ => None,
@@ -1,6 +1,6 @@
 use std::fmt::Write;

-use hir::{AdtDef, db::HirDatabase, source_binder::function_from_child_node};
+use hir::{AdtDef, db::HirDatabase};

 use ra_syntax::ast::{self, AstNode};

@@ -51,15 +51,12 @@ where
     }

     fn evaluate_struct_def_fields(&mut self) -> Option<()> {
-        let function = function_from_child_node(
+        let analyzer = hir::SourceAnalyser::new(
             self.ctx.db,
             self.ctx.frange.file_id,
             self.struct_lit.syntax(),
-        )?;
-        let infer_result = function.infer(self.ctx.db);
-        let source_map = function.body_source_map(self.ctx.db);
-        let node_expr = source_map.node_expr(self.struct_lit.into())?;
-        let struct_lit_ty = infer_result[node_expr].clone();
+        );
+        let struct_lit_ty = analyzer.type_of(self.ctx.db, self.struct_lit.into())?;
         let struct_def = match struct_lit_ty.as_adt() {
             Some((AdtDef::Struct(s), _)) => s,
             _ => return None,
@@ -450,10 +450,6 @@ impl DefWithBody {
         db.infer(*self)
     }

-    pub fn body_source_map(&self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
-        db.body_with_source_map(*self).1
-    }
-
     pub fn body(&self, db: &impl HirDatabase) -> Arc<Body> {
         db.body_hir(*self)
     }
@@ -523,7 +519,7 @@ impl Function {
         self.signature(db).name.clone()
     }

-    pub fn body_source_map(&self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
+    pub(crate) fn body_source_map(&self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
         db.body_with_source_map((*self).into()).1
     }

@@ -606,7 +602,7 @@ impl Const {
         db.infer((*self).into())
     }

-    pub fn body_source_map(&self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
+    pub(crate) fn body_source_map(&self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
         db.body_with_source_map((*self).into()).1
     }

@@ -679,7 +675,7 @@ impl Static {
         db.infer((*self).into())
     }

-    pub fn body_source_map(&self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
+    pub(crate) fn body_source_map(&self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
         db.body_with_source_map((*self).into()).1
     }
 }
@@ -117,31 +117,27 @@ impl Index<PatId> for Body {
 }

 impl BodySourceMap {
-    pub fn expr_syntax(&self, expr: ExprId) -> Option<SyntaxNodePtr> {
+    pub(crate) fn expr_syntax(&self, expr: ExprId) -> Option<SyntaxNodePtr> {
         self.expr_map_back.get(expr).cloned()
     }

-    pub fn syntax_expr(&self, ptr: SyntaxNodePtr) -> Option<ExprId> {
+    pub(crate) fn syntax_expr(&self, ptr: SyntaxNodePtr) -> Option<ExprId> {
         self.expr_map.get(&ptr).cloned()
     }

-    pub fn node_expr(&self, node: &ast::Expr) -> Option<ExprId> {
+    pub(crate) fn node_expr(&self, node: &ast::Expr) -> Option<ExprId> {
         self.expr_map.get(&SyntaxNodePtr::new(node.syntax())).cloned()
     }

-    pub fn pat_syntax(&self, pat: PatId) -> Option<PatPtr> {
+    pub(crate) fn pat_syntax(&self, pat: PatId) -> Option<PatPtr> {
         self.pat_map_back.get(pat).cloned()
     }

-    pub fn syntax_pat(&self, ptr: PatPtr) -> Option<PatId> {
-        self.pat_map.get(&ptr).cloned()
-    }
-
-    pub fn node_pat(&self, node: &ast::Pat) -> Option<PatId> {
+    pub(crate) fn node_pat(&self, node: &ast::Pat) -> Option<PatId> {
         self.pat_map.get(&Either::A(AstPtr::new(node))).cloned()
     }

-    pub fn field_syntax(&self, expr: ExprId, field: usize) -> AstPtr<ast::NamedField> {
+    pub(crate) fn field_syntax(&self, expr: ExprId, field: usize) -> AstPtr<ast::NamedField> {
         self.field_map[&(expr, field)].clone()
     }
 }
@@ -109,7 +109,7 @@ impl ExprScopes {

 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct ScopesWithSourceMap {
-    pub source_map: Arc<BodySourceMap>,
+    pub(crate) source_map: Arc<BodySourceMap>,
     pub scopes: Arc<ExprScopes>,
 }

@@ -66,6 +66,7 @@ pub use self::{
     adt::AdtDef,
     expr::{ExprScopes, ScopesWithSourceMap, ScopeEntryWithSyntax},
     resolve::{Resolver, Resolution},
+    source_binder::{SourceAnalyser, PathResolution},
 };

 pub use self::code_model_api::{
@@ -5,15 +5,17 @@
 ///
 /// So, this modules should not be used during hir construction, it exists
 /// purely for "IDE needs".
+use std::sync::Arc;
+
 use ra_db::{FileId, FilePosition};
 use ra_syntax::{
-    SyntaxNode,
+    SyntaxNode, AstPtr,
     ast::{self, AstNode, NameOwner},
     algo::{find_node_at_offset, find_token_at_offset},
 };

 use crate::{
-    HirDatabase, Function, Struct, Enum,Const,Static,
+    HirDatabase, Function, Struct, Enum, Const, Static, Either,
     AsName, Module, HirFileId, Crate, Trait, Resolver,
     ids::LocationCtx,
     expr, AstId
@@ -258,3 +260,97 @@ fn try_get_resolver_for_node(
         None
     }
 }
+
+// Name is bad, don't use inside HIR
+#[derive(Debug)]
+pub struct SourceAnalyser {
+    resolver: Resolver,
+    body_source_map: Option<Arc<crate::expr::BodySourceMap>>,
+    infer: Option<Arc<crate::ty::InferenceResult>>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum PathResolution {
+    /// An item
+    Def(crate::ModuleDef),
+    /// A local binding (only value namespace)
+    LocalBinding(crate::expr::PatId),
+    /// A generic parameter
+    GenericParam(u32),
+    SelfType(crate::ImplBlock),
+    AssocItem(crate::ImplItem),
+}
+
+impl SourceAnalyser {
+    pub fn new(db: &impl HirDatabase, file_id: FileId, node: &SyntaxNode) -> SourceAnalyser {
+        let resolver = resolver_for_node(db, file_id, node);
+        let function = function_from_child_node(db, file_id, node);
+        if let Some(function) = function {
+            SourceAnalyser {
+                resolver,
+                body_source_map: Some(function.body_source_map(db)),
+                infer: Some(function.infer(db)),
+            }
+        } else {
+            SourceAnalyser { resolver, body_source_map: None, infer: None }
+        }
+    }
+
+    pub fn type_of(&self, _db: &impl HirDatabase, expr: &ast::Expr) -> Option<crate::Ty> {
+        let expr_id = self.body_source_map.as_ref()?.node_expr(expr)?;
+        Some(self.infer.as_ref()?[expr_id].clone())
+    }
+
+    pub fn type_of_pat(&self, _db: &impl HirDatabase, pat: &ast::Pat) -> Option<crate::Ty> {
+        let pat_id = self.body_source_map.as_ref()?.node_pat(pat)?;
+        Some(self.infer.as_ref()?[pat_id].clone())
+    }
+
+    pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
+        let expr_id = self.body_source_map.as_ref()?.node_expr(call.into())?;
+        self.infer.as_ref()?.method_resolution(expr_id)
+    }
+
+    pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<crate::StructField> {
+        let expr_id = self.body_source_map.as_ref()?.node_expr(field.into())?;
+        self.infer.as_ref()?.field_resolution(expr_id)
+    }
+
+    pub fn resolve_path(&self, db: &impl HirDatabase, path: &ast::Path) -> Option<PathResolution> {
+        if let Some(path_expr) = path.syntax().parent().and_then(ast::PathExpr::cast) {
+            let expr_id = self.body_source_map.as_ref()?.node_expr(path_expr.into())?;
+            if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_expr(expr_id) {
+                return Some(PathResolution::AssocItem(assoc));
+            }
+        }
+        if let Some(path_pat) = path.syntax().parent().and_then(ast::PathPat::cast) {
+            let pat_id = self.body_source_map.as_ref()?.node_pat(path_pat.into())?;
+            if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_pat(pat_id) {
+                return Some(PathResolution::AssocItem(assoc));
+            }
+        }
+        let hir_path = crate::Path::from_ast(path)?;
+        let res = self.resolver.resolve_path(db, &hir_path);
+        let res = res.clone().take_types().or_else(|| res.take_values())?;
+        Some(res.into())
+    }
+
+    pub fn pat_syntax(
+        &self,
+        _db: &impl HirDatabase,
+        pat: crate::expr::PatId,
+    ) -> Option<Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>> {
+        self.body_source_map.as_ref()?.pat_syntax(pat)
+    }
+}
+
+impl From<crate::Resolution> for PathResolution {
+    fn from(res: crate::Resolution) -> PathResolution {
+        match res {
+            crate::Resolution::Def(it) => PathResolution::Def(it),
+            crate::Resolution::LocalBinding(it) => PathResolution::LocalBinding(it),
+            crate::Resolution::GenericParam(it) => PathResolution::GenericParam(it),
+            crate::Resolution::SelfType(it) => PathResolution::SelfType(it),
+        }
+    }
+}
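The struct and its queries above are the whole new surface. A hedged sketch of how a caller is expected to drive the resolution-flavoured methods; the wrapper function below is hypothetical, while the `SourceAnalyser` methods and `hir::PathResolution` variants are the ones defined in this hunk:

```rust
use hir::db::HirDatabase;
use ra_db::FileId;
use ra_syntax::ast::{self, AstNode};

// Illustrative driver only; `reference_definition` in the goto-definition hunk
// further down is the real consumer of these calls.
fn classify_name_ref(db: &impl HirDatabase, file_id: FileId, name_ref: &ast::NameRef) {
    // One analyser per query site, anchored at the node of interest.
    let analyzer = hir::SourceAnalyser::new(db, file_id, name_ref.syntax());

    // Method calls and field accesses are answered from the cached inference result.
    if let Some(call) = name_ref.syntax().parent().and_then(ast::MethodCallExpr::cast) {
        let _func = analyzer.resolve_method_call(call);
    }
    if let Some(field_expr) = name_ref.syntax().parent().and_then(ast::FieldExpr::cast) {
        let _field = analyzer.resolve_field(field_expr);
    }

    // Paths go through resolve_path, which folds associated-item lookup and
    // ordinary name resolution into the single PathResolution enum.
    if let Some(path) = name_ref.syntax().ancestors().find_map(ast::Path::cast) {
        match analyzer.resolve_path(db, path) {
            Some(hir::PathResolution::Def(_)) => { /* a module-level item */ }
            Some(hir::PathResolution::LocalBinding(_)) => { /* a pattern in the enclosing body */ }
            Some(hir::PathResolution::AssocItem(_)) => { /* an item in an impl block */ }
            _ => { /* generic param, Self type, or unresolved */ }
        }
    }
}
```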
@@ -4,17 +4,10 @@ use crate::completion::{CompletionContext, Completions};

 /// Complete dot accesses, i.e. fields or methods (currently only fields).
 pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) {
-    let (function, receiver) = match (&ctx.function, ctx.dot_receiver) {
-        (Some(function), Some(receiver)) => (function, receiver),
-        _ => return,
-    };
-    let infer_result = function.infer(ctx.db);
-    let source_map = function.body_source_map(ctx.db);
-    let expr = match source_map.node_expr(receiver) {
-        Some(expr) => expr,
+    let receiver_ty = match ctx.dot_receiver.and_then(|it| ctx.analyzer.type_of(ctx.db, it)) {
+        Some(it) => it,
         None => return,
     };
-    let receiver_ty = infer_result[expr].clone();
     if !ctx.is_call {
         complete_fields(acc, ctx, receiver_ty.clone());
     }
@@ -4,17 +4,10 @@ use crate::completion::{CompletionContext, Completions};

 /// Complete fields in fields literals.
 pub(super) fn complete_struct_literal(acc: &mut Completions, ctx: &CompletionContext) {
-    let (function, struct_lit) = match (&ctx.function, ctx.struct_lit_syntax) {
-        (Some(function), Some(struct_lit)) => (function, struct_lit),
-        _ => return,
-    };
-    let infer_result = function.infer(ctx.db);
-    let source_map = function.body_source_map(ctx.db);
-    let expr = match source_map.node_expr(struct_lit.into()) {
-        Some(expr) => expr,
+    let ty = match ctx.struct_lit_syntax.and_then(|it| ctx.analyzer.type_of(ctx.db, it.into())) {
+        Some(it) => it,
         None => return,
     };
-    let ty = infer_result[expr].clone();
     let (adt, substs) = match ty.as_adt() {
         Some(res) => res,
         _ => return,
@@ -14,6 +14,7 @@ use crate::{db, FilePosition};
 #[derive(Debug)]
 pub(crate) struct CompletionContext<'a> {
     pub(super) db: &'a db::RootDatabase,
+    pub(super) analyzer: hir::SourceAnalyser,
     pub(super) offset: TextUnit,
     pub(super) token: SyntaxToken<'a>,
     pub(super) resolver: Resolver,
@@ -50,8 +51,10 @@ impl<'a> CompletionContext<'a> {
         let resolver = source_binder::resolver_for_position(db, position);
         let module = source_binder::module_from_position(db, position);
         let token = find_token_at_offset(original_file.syntax(), position.offset).left_biased()?;
+        let analyzer = hir::SourceAnalyser::new(db, position.file_id, token.parent());
         let mut ctx = CompletionContext {
             db,
+            analyzer,
             token,
             offset: position.offset,
             resolver,
@@ -1,11 +1,11 @@
 use ra_db::{FileId, SourceDatabase};
 use ra_syntax::{
-    SyntaxNode, SyntaxNodePtr, AstNode, SmolStr, TextRange, TreeArc,
+    SyntaxNode, AstNode, SmolStr, TextRange, TreeArc, AstPtr,
     SyntaxKind::{self, NAME},
     ast::{self, NameOwner, VisibilityOwner, TypeAscriptionOwner},
     algo::visit::{visitor, Visitor},
 };
-use hir::{ModuleSource, FieldSource, Name, ImplItem};
+use hir::{ModuleSource, FieldSource, ImplItem, Either};

 use crate::{FileSymbol, db::RootDatabase};

@@ -74,15 +74,25 @@ impl NavigationTarget {
         }
     }

-    pub(crate) fn from_scope_entry(
+    pub(crate) fn from_pat(
+        db: &RootDatabase,
         file_id: FileId,
-        name: Name,
-        ptr: SyntaxNodePtr,
+        pat: Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>,
     ) -> NavigationTarget {
+        let file = db.parse(file_id);
+        let (name, full_range) = match pat {
+            Either::A(pat) => match pat.to_node(&file).kind() {
+                ast::PatKind::BindPat(pat) => {
+                    return NavigationTarget::from_bind_pat(file_id, &pat)
+                }
+                _ => ("_".into(), pat.syntax_node_ptr().range()),
+            },
+            Either::B(slf) => ("self".into(), slf.syntax_node_ptr().range()),
+        };
         NavigationTarget {
             file_id,
-            name: name.to_string().into(),
-            full_range: ptr.range(),
+            name,
+            full_range,
             focus_range: None,
             kind: NAME,
             container_name: None,
@@ -229,6 +239,7 @@ impl NavigationTarget {

     /// Allows `NavigationTarget` to be created from a `NameOwner`
     pub(crate) fn from_named(file_id: FileId, node: &impl ast::NameOwner) -> NavigationTarget {
+        //FIXME: use `_` instead of empty string
         let name = node.name().map(|it| it.text().clone()).unwrap_or_default();
         let focus_range = node.name().map(|it| it.syntax().range());
         NavigationTarget::from_syntax(file_id, name, focus_range, node.syntax())
@@ -5,7 +5,6 @@ use ra_syntax::{
     SyntaxNode,
 };
 use test_utils::tested_by;
-use hir::Resolution;

 use crate::{FilePosition, NavigationTarget, db::RootDatabase, RangeInfo};

@@ -48,127 +47,72 @@ pub(crate) fn reference_definition(
 ) -> ReferenceResult {
     use self::ReferenceResult::*;

-    let function = hir::source_binder::function_from_child_node(db, file_id, name_ref.syntax());
-
-    if let Some(function) = function {
-        // Check if it is a method
-        if let Some(method_call) = name_ref.syntax().parent().and_then(ast::MethodCallExpr::cast) {
-            tested_by!(goto_definition_works_for_methods);
-            let infer_result = function.infer(db);
-            let source_map = function.body_source_map(db);
-            let expr = ast::Expr::cast(method_call.syntax()).unwrap();
-            if let Some(func) =
-                source_map.node_expr(expr).and_then(|it| infer_result.method_resolution(it))
-            {
-                return Exact(NavigationTarget::from_function(db, func));
-            };
-        }
-        // It could also be a field access
-        if let Some(field_expr) = name_ref.syntax().parent().and_then(ast::FieldExpr::cast) {
-            tested_by!(goto_definition_works_for_fields);
-            let infer_result = function.infer(db);
-            let source_map = function.body_source_map(db);
-            let expr = ast::Expr::cast(field_expr.syntax()).unwrap();
-            if let Some(field) =
-                source_map.node_expr(expr).and_then(|it| infer_result.field_resolution(it))
-            {
-                return Exact(NavigationTarget::from_field(db, field));
-            };
-        }
-
-        // It could also be a named field
-        if let Some(field_expr) = name_ref.syntax().parent().and_then(ast::NamedField::cast) {
-            tested_by!(goto_definition_works_for_named_fields);
-
-            let infer_result = function.infer(db);
-            let source_map = function.body_source_map(db);
-
-            let struct_lit = field_expr.syntax().ancestors().find_map(ast::StructLit::cast);
-
-            if let Some(expr) = struct_lit.and_then(|lit| source_map.node_expr(lit.into())) {
-                let ty = infer_result[expr].clone();
-                if let Some((hir::AdtDef::Struct(s), _)) = ty.as_adt() {
-                    let hir_path = hir::Path::from_name_ref(name_ref);
-                    let hir_name = hir_path.as_ident().unwrap();
-
-                    if let Some(field) = s.field(db, hir_name) {
-                        return Exact(NavigationTarget::from_field(db, field));
-                    }
-                }
-            }
-        }
-    }
-
-    // Try name resolution
-    let resolver = hir::source_binder::resolver_for_node(db, file_id, name_ref.syntax());
-    if let Some(path) =
-        name_ref.syntax().ancestors().find_map(ast::Path::cast).and_then(hir::Path::from_ast)
-    {
-        let resolved = resolver.resolve_path(db, &path);
-        match resolved.clone().take_types().or_else(|| resolved.take_values()) {
-            Some(Resolution::Def(def)) => return Exact(NavigationTarget::from_def(db, def)),
-            Some(Resolution::LocalBinding(pat)) => {
-                let body = resolver.body().expect("no body for local binding");
-                let source_map = body.owner().body_source_map(db);
-                let ptr = source_map.pat_syntax(pat).expect("pattern not found in syntax mapping");
-                let name =
-                    path.as_ident().cloned().expect("local binding from a multi-segment path");
-                let ptr = ptr.either(|it| it.into(), |it| it.into());
-                let nav = NavigationTarget::from_scope_entry(file_id, name, ptr);
-                return Exact(nav);
-            }
-            Some(Resolution::GenericParam(..)) => {
-                // FIXME: go to the generic param def
-            }
-            Some(Resolution::SelfType(impl_block)) => {
-                let ty = impl_block.target_ty(db);
-
-                if let Some((def_id, _)) = ty.as_adt() {
-                    return Exact(NavigationTarget::from_adt_def(db, def_id));
-                }
-            }
-            None => {
-                // If we failed to resolve then check associated items
-                if let Some(function) = function {
-                    // Resolve associated item for path expressions
-                    if let Some(path_expr) =
-                        name_ref.syntax().ancestors().find_map(ast::PathExpr::cast)
-                    {
-                        let infer_result = function.infer(db);
-                        let source_map = function.body_source_map(db);
-
-                        if let Some(expr) = ast::Expr::cast(path_expr.syntax()) {
-                            if let Some(res) = source_map
-                                .node_expr(expr)
-                                .and_then(|it| infer_result.assoc_resolutions_for_expr(it.into()))
-                            {
-                                return Exact(NavigationTarget::from_impl_item(db, res));
-                            }
-                        }
-                    }
-
-                    // Resolve associated item for path patterns
-                    if let Some(path_pat) =
-                        name_ref.syntax().ancestors().find_map(ast::PathPat::cast)
-                    {
-                        let infer_result = function.infer(db);
-                        let source_map = function.body_source_map(db);
-
-                        let pat: &ast::Pat = path_pat.into();
-
-                        if let Some(res) = source_map
-                            .node_pat(pat)
-                            .and_then(|it| infer_result.assoc_resolutions_for_pat(it.into()))
-                        {
-                            return Exact(NavigationTarget::from_impl_item(db, res));
-                        }
-                    }
-                }
-            }
-        }
-    }
-
-    // If that fails try the index based approach.
+    let analyzer = hir::SourceAnalyser::new(db, file_id, name_ref.syntax());
+
+    // Special cases:
+
+    // Check if it is a method
+    if let Some(method_call) = name_ref.syntax().parent().and_then(ast::MethodCallExpr::cast) {
+        tested_by!(goto_definition_works_for_methods);
+        if let Some(func) = analyzer.resolve_method_call(method_call) {
+            return Exact(NavigationTarget::from_function(db, func));
+        }
+    }
+    // It could also be a field access
+    if let Some(field_expr) = name_ref.syntax().parent().and_then(ast::FieldExpr::cast) {
+        tested_by!(goto_definition_works_for_fields);
+        if let Some(field) = analyzer.resolve_field(field_expr) {
+            return Exact(NavigationTarget::from_field(db, field));
+        };
+    }
+
+    // It could also be a named field
+    if let Some(field_expr) = name_ref.syntax().parent().and_then(ast::NamedField::cast) {
+        tested_by!(goto_definition_works_for_named_fields);
+
+        let struct_lit = field_expr.syntax().ancestors().find_map(ast::StructLit::cast);
+
+        if let Some(ty) = struct_lit.and_then(|lit| analyzer.type_of(db, lit.into())) {
+            if let Some((hir::AdtDef::Struct(s), _)) = ty.as_adt() {
+                let hir_path = hir::Path::from_name_ref(name_ref);
+                let hir_name = hir_path.as_ident().unwrap();
+
+                if let Some(field) = s.field(db, hir_name) {
+                    return Exact(NavigationTarget::from_field(db, field));
+                }
+            }
+        }
+    }
+
+    // General case, a path or a local:
+    if let Some(path) = name_ref.syntax().ancestors().find_map(ast::Path::cast) {
+        if let Some(resolved) = analyzer.resolve_path(db, path) {
+            match resolved {
+                hir::PathResolution::Def(def) => return Exact(NavigationTarget::from_def(db, def)),
+                hir::PathResolution::LocalBinding(pat) => {
+                    if let Some(pat) = analyzer.pat_syntax(db, pat) {
+                        let nav = NavigationTarget::from_pat(db, file_id, pat);
+                        return Exact(nav);
+                    }
+                }
+                hir::PathResolution::GenericParam(..) => {
+                    // FIXME: go to the generic param def
+                }
+                hir::PathResolution::SelfType(impl_block) => {
+                    let ty = impl_block.target_ty(db);
+
+                    if let Some((def_id, _)) = ty.as_adt() {
+                        return Exact(NavigationTarget::from_adt_def(db, def_id));
+                    }
+                }
+                hir::PathResolution::AssocItem(assoc) => {
+                    return Exact(NavigationTarget::from_impl_item(db, assoc))
+                }
+            }
+        }
+    }
+
+    // Fallback index based approach:
     let navs = crate::symbol_index::index_resolve(db, name_ref)
         .into_iter()
         .map(NavigationTarget::from_symbol)
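One consequence visible in the hunk above: the old `None =>` branch, which re-ran inference by hand to recover associated items after plain name resolution failed, has no counterpart in the new code, because `SourceAnalyser::resolve_path` (defined earlier in this diff) consults `assoc_resolutions_for_expr`/`assoc_resolutions_for_pat` before falling back to the resolver, so associated items now come back as `hir::PathResolution::AssocItem` like any other result.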
@@ -132,17 +132,15 @@ pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> {
         .ancestors()
         .take_while(|it| it.range() == leaf_node.range())
         .find(|&it| ast::Expr::cast(it).is_some() || ast::Pat::cast(it).is_some())?;
-    let parent_fn = node.ancestors().find_map(ast::FnDef::cast)?;
-    let function = hir::source_binder::function_from_source(db, frange.file_id, parent_fn)?;
-    let infer = function.infer(db);
-    let source_map = function.body_source_map(db);
-    if let Some(expr) = ast::Expr::cast(node).and_then(|e| source_map.node_expr(e)) {
-        Some(infer[expr].display(db).to_string())
-    } else if let Some(pat) = ast::Pat::cast(node).and_then(|p| source_map.node_pat(p)) {
-        Some(infer[pat].display(db).to_string())
+    let analyzer = hir::SourceAnalyser::new(db, frange.file_id, node);
+    let ty = if let Some(ty) = ast::Expr::cast(node).and_then(|e| analyzer.type_of(db, e)) {
+        ty
+    } else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, p)) {
+        ty
     } else {
-        None
-    }
+        return None;
+    };
+    Some(ty.display(db).to_string())
 }

 #[cfg(test)]