introduce SourceAnalyzer

parent e6e2571bdf
commit 10d66d63d7
@@ -1,7 +1,6 @@
 use hir::{
     HirDisplay, Ty,
     db::HirDatabase,
-    source_binder::function_from_child_node,
 };
 use ra_syntax::{
     SyntaxKind,
@@ -30,11 +29,8 @@ pub(crate) fn add_explicit_type(mut ctx: AssistCtx<impl HirDatabase>) -> Option<
     }
     // Infer type
     let db = ctx.db;
-    let func = function_from_child_node(db, ctx.frange.file_id, pat.syntax())?;
-    let inference_res = func.infer(db);
-    let source_map = func.body_source_map(db);
-    let expr_id = source_map.node_expr(expr.into())?;
-    let ty = inference_res[expr_id].clone();
+    let analyzer = hir::SourceAnalyser::new(db, ctx.frange.file_id, stmt.syntax());
+    let ty = analyzer.type_of(db, expr)?;
     // Assist not applicable if the type is unknown
     if is_unknown(&ty) {
         return None;
@@ -2,7 +2,6 @@ use std::fmt::Write;

 use crate::{Assist, AssistId, AssistCtx};

-use hir::Resolver;
 use hir::db::HirDatabase;
 use ra_syntax::{SmolStr, SyntaxKind, TextRange, TextUnit, TreeArc};
 use ra_syntax::ast::{self, AstNode, AstToken, FnDef, ImplItem, ImplItemKind, NameOwner};
@@ -46,9 +45,9 @@ fn add_missing_impl_members_inner(
     let trait_def = {
         let file_id = ctx.frange.file_id;
         let position = FilePosition { file_id, offset: impl_node.syntax().range().start() };
-        let resolver = hir::source_binder::resolver_for_position(ctx.db, position);
+        let analyser = hir::SourceAnalyser::new(ctx.db, position.file_id, impl_node.syntax());

-        resolve_target_trait_def(ctx.db, &resolver, impl_node)?
+        resolve_target_trait_def(ctx.db, &analyser, impl_node)?
     };

     let missing_fns: Vec<_> = {
@@ -122,14 +121,14 @@ fn add_missing_impl_members_inner(
 /// implemented) to a `ast::TraitDef`.
 fn resolve_target_trait_def(
     db: &impl HirDatabase,
-    resolver: &Resolver,
+    binder: &hir::SourceAnalyser,
     impl_block: &ast::ImplBlock,
 ) -> Option<TreeArc<ast::TraitDef>> {
-    let ast_path = impl_block.target_trait().map(AstNode::syntax).and_then(ast::PathType::cast)?;
-    let hir_path = ast_path.path().and_then(hir::Path::from_ast)?;
+    let ast_path =
+        impl_block.target_trait().map(AstNode::syntax).and_then(ast::PathType::cast)?.path()?;

-    match resolver.resolve_path(db, &hir_path).take_types() {
-        Some(hir::Resolution::Def(hir::ModuleDef::Trait(def))) => Some(def.source(db).1),
+    match binder.resolve_path(db, &ast_path) {
+        Some(hir::PathResolution::Def(hir::ModuleDef::Trait(def))) => Some(def.source(db).1),
         _ => None,
     }
 }
@@ -1,7 +1,7 @@
 use std::fmt::Write;

 use hir::{
-    AdtDef, FieldSource, source_binder,
+    AdtDef, FieldSource,
     db::HirDatabase,
 };
 use ra_syntax::ast::{self, AstNode};
@@ -20,12 +20,8 @@ pub(crate) fn fill_match_arms(mut ctx: AssistCtx<impl HirDatabase>) -> Option<As
     }

     let expr = match_expr.expr()?;
-    let function =
-        source_binder::function_from_child_node(ctx.db, ctx.frange.file_id, expr.syntax())?;
-    let infer_result = function.infer(ctx.db);
-    let source_map = function.body_source_map(ctx.db);
-    let node_expr = source_map.node_expr(expr)?;
-    let match_expr_ty = infer_result[node_expr].clone();
+    let analyzer = hir::SourceAnalyser::new(ctx.db, ctx.frange.file_id, expr.syntax());
+    let match_expr_ty = analyzer.type_of(ctx.db, expr)?;
     let enum_def = match_expr_ty.autoderef(ctx.db).find_map(|ty| match ty.as_adt() {
         Some((AdtDef::Enum(e), _)) => Some(e),
         _ => None,
@@ -1,6 +1,6 @@
 use std::fmt::Write;

-use hir::{AdtDef, db::HirDatabase, source_binder::function_from_child_node};
+use hir::{AdtDef, db::HirDatabase};

 use ra_syntax::ast::{self, AstNode};

@@ -51,15 +51,12 @@ where
     }

     fn evaluate_struct_def_fields(&mut self) -> Option<()> {
-        let function = function_from_child_node(
+        let analyzer = hir::SourceAnalyser::new(
             self.ctx.db,
             self.ctx.frange.file_id,
             self.struct_lit.syntax(),
-        )?;
-        let infer_result = function.infer(self.ctx.db);
-        let source_map = function.body_source_map(self.ctx.db);
-        let node_expr = source_map.node_expr(self.struct_lit.into())?;
-        let struct_lit_ty = infer_result[node_expr].clone();
+        );
+        let struct_lit_ty = analyzer.type_of(self.ctx.db, self.struct_lit.into())?;
         let struct_def = match struct_lit_ty.as_adt() {
             Some((AdtDef::Struct(s), _)) => s,
             _ => return None,
@@ -450,10 +450,6 @@ impl DefWithBody {
         db.infer(*self)
     }

-    pub fn body_source_map(&self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
-        db.body_with_source_map(*self).1
-    }
-
     pub fn body(&self, db: &impl HirDatabase) -> Arc<Body> {
         db.body_hir(*self)
     }
@@ -523,7 +519,7 @@ impl Function {
         self.signature(db).name.clone()
     }

-    pub fn body_source_map(&self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
+    pub(crate) fn body_source_map(&self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
         db.body_with_source_map((*self).into()).1
     }

@@ -606,7 +602,7 @@ impl Const {
         db.infer((*self).into())
     }

-    pub fn body_source_map(&self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
+    pub(crate) fn body_source_map(&self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
         db.body_with_source_map((*self).into()).1
     }

@@ -679,7 +675,7 @@ impl Static {
         db.infer((*self).into())
     }

-    pub fn body_source_map(&self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
+    pub(crate) fn body_source_map(&self, db: &impl HirDatabase) -> Arc<BodySourceMap> {
         db.body_with_source_map((*self).into()).1
     }
 }
@@ -117,31 +117,27 @@ impl Index<PatId> for Body {
 }

 impl BodySourceMap {
-    pub fn expr_syntax(&self, expr: ExprId) -> Option<SyntaxNodePtr> {
+    pub(crate) fn expr_syntax(&self, expr: ExprId) -> Option<SyntaxNodePtr> {
         self.expr_map_back.get(expr).cloned()
     }

-    pub fn syntax_expr(&self, ptr: SyntaxNodePtr) -> Option<ExprId> {
+    pub(crate) fn syntax_expr(&self, ptr: SyntaxNodePtr) -> Option<ExprId> {
         self.expr_map.get(&ptr).cloned()
     }

-    pub fn node_expr(&self, node: &ast::Expr) -> Option<ExprId> {
+    pub(crate) fn node_expr(&self, node: &ast::Expr) -> Option<ExprId> {
         self.expr_map.get(&SyntaxNodePtr::new(node.syntax())).cloned()
     }

-    pub fn pat_syntax(&self, pat: PatId) -> Option<PatPtr> {
+    pub(crate) fn pat_syntax(&self, pat: PatId) -> Option<PatPtr> {
         self.pat_map_back.get(pat).cloned()
     }

-    pub fn syntax_pat(&self, ptr: PatPtr) -> Option<PatId> {
-        self.pat_map.get(&ptr).cloned()
-    }
-
-    pub fn node_pat(&self, node: &ast::Pat) -> Option<PatId> {
+    pub(crate) fn node_pat(&self, node: &ast::Pat) -> Option<PatId> {
         self.pat_map.get(&Either::A(AstPtr::new(node))).cloned()
     }

-    pub fn field_syntax(&self, expr: ExprId, field: usize) -> AstPtr<ast::NamedField> {
+    pub(crate) fn field_syntax(&self, expr: ExprId, field: usize) -> AstPtr<ast::NamedField> {
         self.field_map[&(expr, field)].clone()
     }
 }
@@ -109,7 +109,7 @@ impl ExprScopes {

 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct ScopesWithSourceMap {
-    pub source_map: Arc<BodySourceMap>,
+    pub(crate) source_map: Arc<BodySourceMap>,
     pub scopes: Arc<ExprScopes>,
 }

@@ -66,6 +66,7 @@ pub use self::{
     adt::AdtDef,
     expr::{ExprScopes, ScopesWithSourceMap, ScopeEntryWithSyntax},
     resolve::{Resolver, Resolution},
+    source_binder::{SourceAnalyser, PathResolution},
 };

 pub use self::code_model_api::{
@@ -5,15 +5,17 @@
 ///
 /// So, this modules should not be used during hir construction, it exists
 /// purely for "IDE needs".
+use std::sync::Arc;
+
 use ra_db::{FileId, FilePosition};
 use ra_syntax::{
-    SyntaxNode,
+    SyntaxNode, AstPtr,
     ast::{self, AstNode, NameOwner},
     algo::{find_node_at_offset, find_token_at_offset},
 };

 use crate::{
-    HirDatabase, Function, Struct, Enum,Const,Static,
+    HirDatabase, Function, Struct, Enum, Const, Static, Either,
     AsName, Module, HirFileId, Crate, Trait, Resolver,
     ids::LocationCtx,
     expr, AstId
@@ -258,3 +260,97 @@ fn try_get_resolver_for_node(
         None
     }
 }
+
+// Name is bad, don't use inside HIR
+#[derive(Debug)]
+pub struct SourceAnalyser {
+    resolver: Resolver,
+    body_source_map: Option<Arc<crate::expr::BodySourceMap>>,
+    infer: Option<Arc<crate::ty::InferenceResult>>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum PathResolution {
+    /// An item
+    Def(crate::ModuleDef),
+    /// A local binding (only value namespace)
+    LocalBinding(crate::expr::PatId),
+    /// A generic parameter
+    GenericParam(u32),
+    SelfType(crate::ImplBlock),
+    AssocItem(crate::ImplItem),
+}
+
+impl SourceAnalyser {
+    pub fn new(db: &impl HirDatabase, file_id: FileId, node: &SyntaxNode) -> SourceAnalyser {
+        let resolver = resolver_for_node(db, file_id, node);
+        let function = function_from_child_node(db, file_id, node);
+        if let Some(function) = function {
+            SourceAnalyser {
+                resolver,
+                body_source_map: Some(function.body_source_map(db)),
+                infer: Some(function.infer(db)),
+            }
+        } else {
+            SourceAnalyser { resolver, body_source_map: None, infer: None }
+        }
+    }
+
+    pub fn type_of(&self, _db: &impl HirDatabase, expr: &ast::Expr) -> Option<crate::Ty> {
+        let expr_id = self.body_source_map.as_ref()?.node_expr(expr)?;
+        Some(self.infer.as_ref()?[expr_id].clone())
+    }
+
+    pub fn type_of_pat(&self, _db: &impl HirDatabase, pat: &ast::Pat) -> Option<crate::Ty> {
+        let pat_id = self.body_source_map.as_ref()?.node_pat(pat)?;
+        Some(self.infer.as_ref()?[pat_id].clone())
+    }
+
+    pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
+        let expr_id = self.body_source_map.as_ref()?.node_expr(call.into())?;
+        self.infer.as_ref()?.method_resolution(expr_id)
+    }
+
+    pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<crate::StructField> {
+        let expr_id = self.body_source_map.as_ref()?.node_expr(field.into())?;
+        self.infer.as_ref()?.field_resolution(expr_id)
+    }
+
+    pub fn resolve_path(&self, db: &impl HirDatabase, path: &ast::Path) -> Option<PathResolution> {
+        if let Some(path_expr) = path.syntax().parent().and_then(ast::PathExpr::cast) {
+            let expr_id = self.body_source_map.as_ref()?.node_expr(path_expr.into())?;
+            if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_expr(expr_id) {
+                return Some(PathResolution::AssocItem(assoc));
+            }
+        }
+        if let Some(path_pat) = path.syntax().parent().and_then(ast::PathPat::cast) {
+            let pat_id = self.body_source_map.as_ref()?.node_pat(path_pat.into())?;
+            if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_pat(pat_id) {
+                return Some(PathResolution::AssocItem(assoc));
+            }
+        }
+        let hir_path = crate::Path::from_ast(path)?;
+        let res = self.resolver.resolve_path(db, &hir_path);
+        let res = res.clone().take_types().or_else(|| res.take_values())?;
+        Some(res.into())
+    }
+
+    pub fn pat_syntax(
+        &self,
+        _db: &impl HirDatabase,
+        pat: crate::expr::PatId,
+    ) -> Option<Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>> {
+        self.body_source_map.as_ref()?.pat_syntax(pat)
+    }
+}
+
+impl From<crate::Resolution> for PathResolution {
+    fn from(res: crate::Resolution) -> PathResolution {
+        match res {
+            crate::Resolution::Def(it) => PathResolution::Def(it),
+            crate::Resolution::LocalBinding(it) => PathResolution::LocalBinding(it),
+            crate::Resolution::GenericParam(it) => PathResolution::GenericParam(it),
+            crate::Resolution::SelfType(it) => PathResolution::SelfType(it),
+        }
+    }
+}
@@ -4,17 +4,10 @@ use crate::completion::{CompletionContext, Completions};

 /// Complete dot accesses, i.e. fields or methods (currently only fields).
 pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) {
-    let (function, receiver) = match (&ctx.function, ctx.dot_receiver) {
-        (Some(function), Some(receiver)) => (function, receiver),
-        _ => return,
-    };
-    let infer_result = function.infer(ctx.db);
-    let source_map = function.body_source_map(ctx.db);
-    let expr = match source_map.node_expr(receiver) {
-        Some(expr) => expr,
+    let receiver_ty = match ctx.dot_receiver.and_then(|it| ctx.analyzer.type_of(ctx.db, it)) {
+        Some(it) => it,
         None => return,
     };
-    let receiver_ty = infer_result[expr].clone();
     if !ctx.is_call {
         complete_fields(acc, ctx, receiver_ty.clone());
     }
@@ -4,17 +4,10 @@ use crate::completion::{CompletionContext, Completions};

 /// Complete fields in fields literals.
 pub(super) fn complete_struct_literal(acc: &mut Completions, ctx: &CompletionContext) {
-    let (function, struct_lit) = match (&ctx.function, ctx.struct_lit_syntax) {
-        (Some(function), Some(struct_lit)) => (function, struct_lit),
-        _ => return,
-    };
-    let infer_result = function.infer(ctx.db);
-    let source_map = function.body_source_map(ctx.db);
-    let expr = match source_map.node_expr(struct_lit.into()) {
-        Some(expr) => expr,
+    let ty = match ctx.struct_lit_syntax.and_then(|it| ctx.analyzer.type_of(ctx.db, it.into())) {
+        Some(it) => it,
         None => return,
     };
-    let ty = infer_result[expr].clone();
     let (adt, substs) = match ty.as_adt() {
         Some(res) => res,
         _ => return,
@@ -14,6 +14,7 @@ use crate::{db, FilePosition};
 #[derive(Debug)]
 pub(crate) struct CompletionContext<'a> {
     pub(super) db: &'a db::RootDatabase,
+    pub(super) analyzer: hir::SourceAnalyser,
     pub(super) offset: TextUnit,
     pub(super) token: SyntaxToken<'a>,
     pub(super) resolver: Resolver,
@@ -50,8 +51,10 @@ impl<'a> CompletionContext<'a> {
         let resolver = source_binder::resolver_for_position(db, position);
         let module = source_binder::module_from_position(db, position);
         let token = find_token_at_offset(original_file.syntax(), position.offset).left_biased()?;
+        let analyzer = hir::SourceAnalyser::new(db, position.file_id, token.parent());
         let mut ctx = CompletionContext {
             db,
+            analyzer,
             token,
             offset: position.offset,
             resolver,
@@ -1,11 +1,11 @@
 use ra_db::{FileId, SourceDatabase};
 use ra_syntax::{
-    SyntaxNode, SyntaxNodePtr, AstNode, SmolStr, TextRange, TreeArc,
+    SyntaxNode, AstNode, SmolStr, TextRange, TreeArc, AstPtr,
     SyntaxKind::{self, NAME},
     ast::{self, NameOwner, VisibilityOwner, TypeAscriptionOwner},
     algo::visit::{visitor, Visitor},
 };
-use hir::{ModuleSource, FieldSource, Name, ImplItem};
+use hir::{ModuleSource, FieldSource, ImplItem, Either};

 use crate::{FileSymbol, db::RootDatabase};

@@ -74,15 +74,25 @@ impl NavigationTarget {
         }
     }

-    pub(crate) fn from_scope_entry(
+    pub(crate) fn from_pat(
+        db: &RootDatabase,
         file_id: FileId,
-        name: Name,
-        ptr: SyntaxNodePtr,
+        pat: Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>,
     ) -> NavigationTarget {
+        let file = db.parse(file_id);
+        let (name, full_range) = match pat {
+            Either::A(pat) => match pat.to_node(&file).kind() {
+                ast::PatKind::BindPat(pat) => {
+                    return NavigationTarget::from_bind_pat(file_id, &pat)
+                }
+                _ => ("_".into(), pat.syntax_node_ptr().range()),
+            },
+            Either::B(slf) => ("self".into(), slf.syntax_node_ptr().range()),
+        };
         NavigationTarget {
             file_id,
-            name: name.to_string().into(),
-            full_range: ptr.range(),
+            name,
+            full_range,
             focus_range: None,
             kind: NAME,
             container_name: None,
@@ -229,6 +239,7 @@ impl NavigationTarget {

     /// Allows `NavigationTarget` to be created from a `NameOwner`
     pub(crate) fn from_named(file_id: FileId, node: &impl ast::NameOwner) -> NavigationTarget {
+        //FIXME: use `_` instead of empty string
         let name = node.name().map(|it| it.text().clone()).unwrap_or_default();
         let focus_range = node.name().map(|it| it.syntax().range());
         NavigationTarget::from_syntax(file_id, name, focus_range, node.syntax())
@@ -5,7 +5,6 @@ use ra_syntax::{
     SyntaxNode,
 };
 use test_utils::tested_by;
-use hir::Resolution;

 use crate::{FilePosition, NavigationTarget, db::RootDatabase, RangeInfo};

@@ -48,30 +47,21 @@ pub(crate) fn reference_definition(
 ) -> ReferenceResult {
     use self::ReferenceResult::*;

-    let function = hir::source_binder::function_from_child_node(db, file_id, name_ref.syntax());
+    let analyzer = hir::SourceAnalyser::new(db, file_id, name_ref.syntax());

-    if let Some(function) = function {
+    // Special cases:
+
     // Check if it is a method
     if let Some(method_call) = name_ref.syntax().parent().and_then(ast::MethodCallExpr::cast) {
         tested_by!(goto_definition_works_for_methods);
-        let infer_result = function.infer(db);
-        let source_map = function.body_source_map(db);
-        let expr = ast::Expr::cast(method_call.syntax()).unwrap();
-        if let Some(func) =
-            source_map.node_expr(expr).and_then(|it| infer_result.method_resolution(it))
-        {
+        if let Some(func) = analyzer.resolve_method_call(method_call) {
             return Exact(NavigationTarget::from_function(db, func));
-        };
+        }
     }
     // It could also be a field access
     if let Some(field_expr) = name_ref.syntax().parent().and_then(ast::FieldExpr::cast) {
         tested_by!(goto_definition_works_for_fields);
-        let infer_result = function.infer(db);
-        let source_map = function.body_source_map(db);
-        let expr = ast::Expr::cast(field_expr.syntax()).unwrap();
-        if let Some(field) =
-            source_map.node_expr(expr).and_then(|it| infer_result.field_resolution(it))
-        {
+        if let Some(field) = analyzer.resolve_field(field_expr) {
             return Exact(NavigationTarget::from_field(db, field));
         };
     }
@@ -80,13 +70,9 @@ pub(crate) fn reference_definition(
     if let Some(field_expr) = name_ref.syntax().parent().and_then(ast::NamedField::cast) {
         tested_by!(goto_definition_works_for_named_fields);

-        let infer_result = function.infer(db);
-        let source_map = function.body_source_map(db);
-
         let struct_lit = field_expr.syntax().ancestors().find_map(ast::StructLit::cast);

-        if let Some(expr) = struct_lit.and_then(|lit| source_map.node_expr(lit.into())) {
-            let ty = infer_result[expr].clone();
+        if let Some(ty) = struct_lit.and_then(|lit| analyzer.type_of(db, lit.into())) {
             if let Some((hir::AdtDef::Struct(s), _)) = ty.as_adt() {
                 let hir_path = hir::Path::from_name_ref(name_ref);
                 let hir_name = hir_path.as_ident().unwrap();
@@ -97,78 +83,36 @@ pub(crate) fn reference_definition(
                 }
             }
         }
-    }

-    // Try name resolution
-    let resolver = hir::source_binder::resolver_for_node(db, file_id, name_ref.syntax());
-    if let Some(path) =
-        name_ref.syntax().ancestors().find_map(ast::Path::cast).and_then(hir::Path::from_ast)
-    {
-        let resolved = resolver.resolve_path(db, &path);
-        match resolved.clone().take_types().or_else(|| resolved.take_values()) {
-            Some(Resolution::Def(def)) => return Exact(NavigationTarget::from_def(db, def)),
-            Some(Resolution::LocalBinding(pat)) => {
-                let body = resolver.body().expect("no body for local binding");
-                let source_map = body.owner().body_source_map(db);
-                let ptr = source_map.pat_syntax(pat).expect("pattern not found in syntax mapping");
-                let name =
-                    path.as_ident().cloned().expect("local binding from a multi-segment path");
-                let ptr = ptr.either(|it| it.into(), |it| it.into());
-                let nav = NavigationTarget::from_scope_entry(file_id, name, ptr);
+    // General case, a path or a local:
+    if let Some(path) = name_ref.syntax().ancestors().find_map(ast::Path::cast) {
+        if let Some(resolved) = analyzer.resolve_path(db, path) {
+            match resolved {
+                hir::PathResolution::Def(def) => return Exact(NavigationTarget::from_def(db, def)),
+                hir::PathResolution::LocalBinding(pat) => {
+                    if let Some(pat) = analyzer.pat_syntax(db, pat) {
+                        let nav = NavigationTarget::from_pat(db, file_id, pat);
                         return Exact(nav);
                     }
-            Some(Resolution::GenericParam(..)) => {
+                }
+                hir::PathResolution::GenericParam(..) => {
                     // FIXME: go to the generic param def
                 }
-            Some(Resolution::SelfType(impl_block)) => {
+                hir::PathResolution::SelfType(impl_block) => {
                     let ty = impl_block.target_ty(db);

                     if let Some((def_id, _)) = ty.as_adt() {
                         return Exact(NavigationTarget::from_adt_def(db, def_id));
                     }
                 }
-            None => {
-                // If we failed to resolve then check associated items
-                if let Some(function) = function {
-                    // Resolve associated item for path expressions
-                    if let Some(path_expr) =
-                        name_ref.syntax().ancestors().find_map(ast::PathExpr::cast)
-                    {
-                        let infer_result = function.infer(db);
-                        let source_map = function.body_source_map(db);
-
-                        if let Some(expr) = ast::Expr::cast(path_expr.syntax()) {
-                            if let Some(res) = source_map
-                                .node_expr(expr)
-                                .and_then(|it| infer_result.assoc_resolutions_for_expr(it.into()))
-                            {
-                                return Exact(NavigationTarget::from_impl_item(db, res));
-                            }
-                        }
-                    }
-
-                    // Resolve associated item for path patterns
-                    if let Some(path_pat) =
-                        name_ref.syntax().ancestors().find_map(ast::PathPat::cast)
-                    {
-                        let infer_result = function.infer(db);
-                        let source_map = function.body_source_map(db);
-
-                        let pat: &ast::Pat = path_pat.into();
-
-                        if let Some(res) = source_map
-                            .node_pat(pat)
-                            .and_then(|it| infer_result.assoc_resolutions_for_pat(it.into()))
-                        {
-                            return Exact(NavigationTarget::from_impl_item(db, res));
-                        }
-                    }
+                hir::PathResolution::AssocItem(assoc) => {
+                    return Exact(NavigationTarget::from_impl_item(db, assoc))
                 }
             }
         }
     }

-    // If that fails try the index based approach.
+    // Fallback index based approach:
     let navs = crate::symbol_index::index_resolve(db, name_ref)
         .into_iter()
         .map(NavigationTarget::from_symbol)
@@ -132,17 +132,15 @@ pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> {
         .ancestors()
         .take_while(|it| it.range() == leaf_node.range())
         .find(|&it| ast::Expr::cast(it).is_some() || ast::Pat::cast(it).is_some())?;
-    let parent_fn = node.ancestors().find_map(ast::FnDef::cast)?;
-    let function = hir::source_binder::function_from_source(db, frange.file_id, parent_fn)?;
-    let infer = function.infer(db);
-    let source_map = function.body_source_map(db);
-    if let Some(expr) = ast::Expr::cast(node).and_then(|e| source_map.node_expr(e)) {
-        Some(infer[expr].display(db).to_string())
-    } else if let Some(pat) = ast::Pat::cast(node).and_then(|p| source_map.node_pat(p)) {
-        Some(infer[pat].display(db).to_string())
+    let analyzer = hir::SourceAnalyser::new(db, frange.file_id, node);
+    let ty = if let Some(ty) = ast::Expr::cast(node).and_then(|e| analyzer.type_of(db, e)) {
+        ty
+    } else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, p)) {
+        ty
     } else {
-        None
-    }
+        return None;
+    };
+    Some(ty.display(db).to_string())
 }

 #[cfg(test)]
|
Loading…
Reference in New Issue
Block a user
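For orientation, every call site above follows the same pattern the commit introduces: build a SourceAnalyser from a database, a file id, and any syntax node, then ask it questions, instead of hand-wiring function_from_child_node, Function::infer, and BodySourceMap. A minimal sketch of that usage, assuming the crate layout at this commit (the free function and its name are illustrative, not part of the diff):

    // Hypothetical helper, not part of the commit: returns the inferred type
    // of an expression by going through the new hir::SourceAnalyser API.
    fn type_at_expr(
        db: &impl hir::db::HirDatabase,
        file_id: ra_db::FileId,
        expr: &ra_syntax::ast::Expr,
    ) -> Option<hir::Ty> {
        use ra_syntax::ast::AstNode;
        // One constructor call replaces the old four-step dance:
        // function_from_child_node -> infer -> body_source_map -> node_expr.
        let analyzer = hir::SourceAnalyser::new(db, file_id, expr.syntax());
        analyzer.type_of(db, expr)
    }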