migrate ra_ide_api to the new rowan
commit f1abc7bdc6 (parent 0343c4a815)
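Every hunk below follows one mechanical pattern. The old rowan exposed syntax nodes as borrowed references (with TreeArc as the owned form), so AST types carried lifetimes; the new rowan makes SyntaxNode and the ast::* types owned, cheaply cloneable handles into a reference-counted tree. That is why the diff mostly drops the lifetime parameters, adds .clone()/.cloned()/.as_ref() at use sites, and deletes TreeArc. A minimal self-contained model of the ownership change (toy types, not the real rowan API):

use std::rc::Rc;

struct GreenNode {
    kind: &'static str,
    children: Vec<Rc<GreenNode>>,
}

// New-rowan style: a node is a cheap Rc handle, so it is owned and Clone.
#[derive(Clone)]
struct SyntaxNode(Rc<GreenNode>);

// Old style had to borrow:
//     fn first_child<'a>(n: &'a SyntaxNode) -> Option<&'a SyntaxNode>
// New style returns an owned handle; callers clone when they want to keep it.
fn first_child(node: &SyntaxNode) -> Option<SyntaxNode> {
    node.0.children.first().map(|c| SyntaxNode(Rc::clone(c)))
}

fn main() {
    let leaf = Rc::new(GreenNode { kind: "NAME", children: Vec::new() });
    let root = SyntaxNode(Rc::new(GreenNode { kind: "FN_DEF", children: vec![leaf] }));
    assert_eq!(first_child(&root).map(|n| n.0.kind), Some("NAME"));
}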
@@ -11,24 +11,24 @@ use crate::{db::RootDatabase, CallInfo, FilePosition, FunctionSignature};
 /// Computes parameter information for the given call expression.
 pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo> {
     let parse = db.parse(position.file_id);
-    let syntax = parse.tree().syntax();
+    let syntax = parse.tree().syntax().clone();

     // Find the calling expression and it's NameRef
-    let calling_node = FnCallNode::with_node(syntax, position.offset)?;
+    let calling_node = FnCallNode::with_node(&syntax, position.offset)?;
     let name_ref = calling_node.name_ref()?;

     let analyzer = hir::SourceAnalyzer::new(db, position.file_id, name_ref.syntax(), None);
-    let function = match calling_node {
+    let function = match &calling_node {
         FnCallNode::CallExpr(expr) => {
             //FIXME: apply subst
-            let (callable_def, _subst) = analyzer.type_of(db, expr.expr()?)?.as_callable()?;
+            let (callable_def, _subst) = analyzer.type_of(db, &expr.expr()?)?.as_callable()?;
             match callable_def {
                 hir::CallableDef::Function(it) => it,
                 //FIXME: handle other callables
                 _ => return None,
             }
         }
-        FnCallNode::MethodCallExpr(expr) => analyzer.resolve_method_call(expr)?,
+        FnCallNode::MethodCallExpr(expr) => analyzer.resolve_method_call(&expr)?,
     };

     let mut call_info = CallInfo::new(db, function);
@@ -73,13 +73,13 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal
     Some(call_info)
 }

-enum FnCallNode<'a> {
-    CallExpr(&'a ast::CallExpr),
-    MethodCallExpr(&'a ast::MethodCallExpr),
+enum FnCallNode {
+    CallExpr(ast::CallExpr),
+    MethodCallExpr(ast::MethodCallExpr),
 }

-impl<'a> FnCallNode<'a> {
-    fn with_node(syntax: &'a SyntaxNode, offset: TextUnit) -> Option<FnCallNode<'a>> {
+impl FnCallNode {
+    fn with_node(syntax: &SyntaxNode, offset: TextUnit) -> Option<FnCallNode> {
         if let Some(expr) = find_node_at_offset::<ast::CallExpr>(syntax, offset) {
             return Some(FnCallNode::CallExpr(expr));
         }
@@ -89,8 +89,8 @@ impl<'a> FnCallNode<'a> {
         None
     }

-    fn name_ref(&self) -> Option<&'a ast::NameRef> {
-        match *self {
+    fn name_ref(&self) -> Option<ast::NameRef> {
+        match self {
             FnCallNode::CallExpr(call_expr) => Some(match call_expr.expr()?.kind() {
                 ast::ExprKind::PathExpr(path_expr) => path_expr.path()?.segment()?.name_ref()?,
                 _ => return None,
@@ -102,8 +102,8 @@ impl<'a> FnCallNode<'a> {
         }
     }

-    fn arg_list(&self) -> Option<&'a ast::ArgList> {
-        match *self {
+    fn arg_list(&self) -> Option<ast::ArgList> {
+        match self {
             FnCallNode::CallExpr(expr) => expr.arg_list(),
             FnCallNode::MethodCallExpr(expr) => expr.arg_list(),
         }
@@ -5,10 +5,11 @@ use rustc_hash::FxHashSet;

 /// Complete dot accesses, i.e. fields or methods (currently only fields).
 pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) {
-    let receiver_ty = match ctx.dot_receiver.and_then(|it| ctx.analyzer.type_of(ctx.db, it)) {
-        Some(it) => it,
-        None => return,
-    };
+    let receiver_ty =
+        match ctx.dot_receiver.as_ref().and_then(|it| ctx.analyzer.type_of(ctx.db, it)) {
+            Some(it) => it,
+            None => return,
+        };
     if !ctx.is_call {
         complete_fields(acc, ctx, receiver_ty.clone());
     }
@@ -20,7 +20,7 @@ pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext)
         let _ = visitor_ctx(&mut params)
             .visit::<ast::SourceFile, _>(process)
             .visit::<ast::ItemList, _>(process)
-            .accept(node);
+            .accept(&node);
     }
     params
         .into_iter()
@@ -38,10 +38,7 @@ pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext)
             .add_to(acc)
     });

-    fn process<'a, N: ast::FnDefOwner>(
-        node: &'a N,
-        params: &mut FxHashMap<String, (u32, &'a ast::Param)>,
-    ) {
+    fn process<N: ast::FnDefOwner>(node: N, params: &mut FxHashMap<String, (u32, ast::Param)>) {
         node.functions().filter_map(|it| it.param_list()).flat_map(|it| it.params()).for_each(
             |param| {
                 let text = param.syntax().text().to_string();
@@ -52,7 +52,7 @@ pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte
         return;
     }

-    let fn_def = match ctx.function_syntax {
+    let fn_def = match &ctx.function_syntax {
         Some(it) => it,
         None => return,
     };
@@ -65,7 +65,7 @@ pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte
         acc.add(keyword(ctx, "else", "else {$0}"));
         acc.add(keyword(ctx, "else if", "else if $0 {}"));
     }
-    if is_in_loop_body(ctx.token) {
+    if is_in_loop_body(&ctx.token) {
         if ctx.can_be_stmt {
             acc.add(keyword(ctx, "continue", "continue;"));
             acc.add(keyword(ctx, "break", "break;"));
@@ -74,19 +74,19 @@ pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte
             acc.add(keyword(ctx, "break", "break"));
         }
     }
-    acc.add_all(complete_return(ctx, fn_def, ctx.can_be_stmt));
+    acc.add_all(complete_return(ctx, &fn_def, ctx.can_be_stmt));
 }

-fn is_in_loop_body(leaf: SyntaxToken) -> bool {
+fn is_in_loop_body(leaf: &SyntaxToken) -> bool {
     for node in leaf.parent().ancestors() {
         if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR {
             break;
         }
         let loop_body = visitor()
-            .visit::<ast::ForExpr, _>(LoopBodyOwner::loop_body)
-            .visit::<ast::WhileExpr, _>(LoopBodyOwner::loop_body)
-            .visit::<ast::LoopExpr, _>(LoopBodyOwner::loop_body)
-            .accept(node);
+            .visit::<ast::ForExpr, _>(|it| it.loop_body())
+            .visit::<ast::WhileExpr, _>(|it| it.loop_body())
+            .visit::<ast::LoopExpr, _>(|it| it.loop_body())
+            .accept(&node);
         if let Some(Some(body)) = loop_body {
             if leaf.range().is_subrange(&body.syntax().range()) {
                 return true;
@@ -11,7 +11,8 @@ use ra_text_edit::TextEditBuilder;

 fn postfix_snippet(ctx: &CompletionContext, label: &str, detail: &str, snippet: &str) -> Builder {
     let edit = {
-        let receiver_range = ctx.dot_receiver.expect("no receiver available").syntax().range();
+        let receiver_range =
+            ctx.dot_receiver.as_ref().expect("no receiver available").syntax().range();
         let delete_range = TextRange::from_to(receiver_range.start(), ctx.source_range().end());
         let mut builder = TextEditBuilder::default();
         builder.replace(delete_range, snippet.to_string());
@@ -38,9 +39,9 @@ fn is_bool_or_unknown(ty: Option<Ty>) -> bool {
 }

 pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
-    if let Some(dot_receiver) = ctx.dot_receiver {
+    if let Some(dot_receiver) = &ctx.dot_receiver {
         let receiver_text = dot_receiver.syntax().text().to_string();
-        let receiver_ty = ctx.analyzer.type_of(ctx.db, dot_receiver);
+        let receiver_ty = ctx.analyzer.type_of(ctx.db, &dot_receiver);
         if is_bool_or_unknown(receiver_ty) {
             postfix_snippet(ctx, "if", "if expr {}", &format!("if {} {{$0}}", receiver_text))
                 .add_to(acc);
@@ -20,8 +20,8 @@ pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) {
                 let mut builder = TextEditBuilder::default();
                 builder.replace(ctx.source_range(), name.to_string());
                 auto_import::auto_import_text_edit(
-                    ctx.token.parent(),
-                    ctx.token.parent(),
+                    &ctx.token.parent(),
+                    &ctx.token.parent(),
                     &path,
                     &mut builder,
                 );
@@ -4,8 +4,8 @@ use crate::completion::{CompletionContext, Completions};

 /// Complete fields in fields literals.
 pub(super) fn complete_struct_literal(acc: &mut Completions, ctx: &CompletionContext) {
-    let (ty, variant) = match ctx.struct_lit_syntax.and_then(|it| {
-        Some((ctx.analyzer.type_of(ctx.db, it.into())?, ctx.analyzer.resolve_variant(it)?))
+    let (ty, variant) = match ctx.struct_lit_syntax.as_ref().and_then(|it| {
+        Some((ctx.analyzer.type_of(ctx.db, &it.clone().into())?, ctx.analyzer.resolve_variant(it)?))
     }) {
         Some(it) => it,
         _ => return,
@@ -16,11 +16,11 @@ pub(crate) struct CompletionContext<'a> {
     pub(super) db: &'a db::RootDatabase,
     pub(super) analyzer: hir::SourceAnalyzer,
     pub(super) offset: TextUnit,
-    pub(super) token: SyntaxToken<'a>,
+    pub(super) token: SyntaxToken,
     pub(super) module: Option<hir::Module>,
-    pub(super) function_syntax: Option<&'a ast::FnDef>,
-    pub(super) use_item_syntax: Option<&'a ast::UseItem>,
-    pub(super) struct_lit_syntax: Option<&'a ast::StructLit>,
+    pub(super) function_syntax: Option<ast::FnDef>,
+    pub(super) use_item_syntax: Option<ast::UseItem>,
+    pub(super) struct_lit_syntax: Option<ast::StructLit>,
     pub(super) is_param: bool,
     /// If a name-binding or reference to a const in a pattern.
     /// Irrefutable patterns (like let) are excluded.
@@ -35,7 +35,7 @@ pub(crate) struct CompletionContext<'a> {
     /// Something is typed at the "top" level, in module or impl/trait.
     pub(super) is_new_item: bool,
     /// The receiver if this is a field or method access, i.e. writing something.<|>
-    pub(super) dot_receiver: Option<&'a ast::Expr>,
+    pub(super) dot_receiver: Option<ast::Expr>,
     /// If this is a call (method or function) in particular, i.e. the () are already there.
     pub(super) is_call: bool,
 }
@@ -50,7 +50,7 @@ impl<'a> CompletionContext<'a> {
         let token =
             find_token_at_offset(original_parse.tree().syntax(), position.offset).left_biased()?;
         let analyzer =
-            hir::SourceAnalyzer::new(db, position.file_id, token.parent(), Some(position.offset));
+            hir::SourceAnalyzer::new(db, position.file_id, &token.parent(), Some(position.offset));
         let mut ctx = CompletionContext {
             db,
             analyzer,
@@ -109,7 +109,7 @@ impl<'a> CompletionContext<'a> {
         if is_node::<ast::BindPat>(name.syntax()) {
             let bind_pat = name.syntax().ancestors().find_map(ast::BindPat::cast).unwrap();
             let parent = bind_pat.syntax().parent();
-            if parent.and_then(ast::MatchArm::cast).is_some()
+            if parent.clone().and_then(ast::MatchArm::cast).is_some()
                 || parent.and_then(ast::Condition::cast).is_some()
             {
                 self.is_pat_binding = true;
@@ -122,7 +122,7 @@ impl<'a> CompletionContext<'a> {
         }
     }

-    fn classify_name_ref(&mut self, original_file: &'a SourceFile, name_ref: &ast::NameRef) {
+    fn classify_name_ref(&mut self, original_file: SourceFile, name_ref: ast::NameRef) {
         let name_range = name_ref.syntax().range();
         if name_ref.syntax().parent().and_then(ast::NamedField::cast).is_some() {
             self.struct_lit_syntax = find_node_at_offset(original_file.syntax(), self.offset);
@@ -153,7 +153,7 @@ impl<'a> CompletionContext<'a> {
             None => return,
         };

-        if let Some(segment) = ast::PathSegment::cast(parent) {
+        if let Some(segment) = ast::PathSegment::cast(parent.clone()) {
             let path = segment.parent_path();
             self.is_call = path
                 .syntax()
@@ -162,7 +162,7 @@ impl<'a> CompletionContext<'a> {
                 .and_then(|it| it.syntax().parent().and_then(ast::CallExpr::cast))
                 .is_some();

-            if let Some(mut path) = hir::Path::from_ast(path) {
+            if let Some(mut path) = hir::Path::from_ast(path.clone()) {
                 if !path.is_ident() {
                     path.segments.pop().unwrap();
                     self.path_prefix = Some(path);
@@ -179,7 +179,7 @@ impl<'a> CompletionContext<'a> {
                     .syntax()
                     .ancestors()
                     .find_map(|node| {
-                        if let Some(stmt) = ast::ExprStmt::cast(node) {
+                        if let Some(stmt) = ast::ExprStmt::cast(node.clone()) {
                             return Some(stmt.syntax().range() == name_ref.syntax().range());
                         }
                         if let Some(block) = ast::Block::cast(node) {
@@ -203,7 +203,7 @@ impl<'a> CompletionContext<'a> {
                 }
             }
         }
-        if let Some(field_expr) = ast::FieldExpr::cast(parent) {
+        if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) {
             // The receiver comes before the point of insertion of the fake
             // ident, so it should have the same range in the non-modified file
             self.dot_receiver = field_expr
@@ -222,7 +222,7 @@ impl<'a> CompletionContext<'a> {
     }
 }

-fn find_node_with_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Option<&N> {
+fn find_node_with_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Option<N> {
     find_covering_element(syntax, range).ancestors().find_map(N::cast)
 }
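The CompletionContext changes show a knock-on effect of storing owned AST nodes: a method that takes &self can no longer move an Option<ast::Expr> field out of the struct, so Option::as_ref now appears before and_then/expect throughout this diff. A self-contained illustration with stand-in types (not the real rust-analyzer API):

#[derive(Clone)]
struct Expr(String); // stand-in for an owned AST node

struct Ctx {
    dot_receiver: Option<Expr>,
}

fn type_of(expr: &Expr) -> Option<String> {
    Some(format!("type of {}", expr.0))
}

fn complete(ctx: &Ctx) -> Option<String> {
    // `ctx.dot_receiver` cannot be moved out from behind `&Ctx`;
    // `as_ref()` turns `&Option<Expr>` into `Option<&Expr>` instead.
    ctx.dot_receiver.as_ref().and_then(|it| type_of(it))
}

fn main() {
    let ctx = Ctx { dot_receiver: Some(Expr("foo".into())) };
    assert_eq!(complete(&ctx).as_deref(), Some("type of foo"));
}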
@@ -35,8 +35,8 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
     }));

     for node in parse.tree().syntax().descendants() {
-        check_unnecessary_braces_in_use_statement(&mut res, file_id, node);
-        check_struct_shorthand_initialization(&mut res, file_id, node);
+        check_unnecessary_braces_in_use_statement(&mut res, file_id, &node);
+        check_struct_shorthand_initialization(&mut res, file_id, &node);
     }
     let res = RefCell::new(res);
     let mut sink = DiagnosticSink::new(|d| {
@@ -60,7 +60,7 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
         })
         .on::<hir::diagnostics::MissingFields, _>(|d| {
             let node = d.ast(db);
-            let mut ast_editor = AstEditor::new(&*node);
+            let mut ast_editor = AstEditor::new(node);
             for f in d.missed_fields.iter() {
                 ast_editor.append_field(&AstBuilder::<NamedField>::from_name(f));
             }
@@ -94,11 +94,11 @@ fn check_unnecessary_braces_in_use_statement(
     file_id: FileId,
     node: &SyntaxNode,
 ) -> Option<()> {
-    let use_tree_list = ast::UseTreeList::cast(node)?;
+    let use_tree_list = ast::UseTreeList::cast(node.clone())?;
     if let Some((single_use_tree,)) = use_tree_list.use_trees().collect_tuple() {
         let range = use_tree_list.syntax().range();
         let edit =
-            text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(single_use_tree)
+            text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(&single_use_tree)
                 .unwrap_or_else(|| {
                     let to_replace = single_use_tree.syntax().text().to_string();
                     let mut edit_builder = TextEditBuilder::default();
@@ -141,7 +141,7 @@ fn check_struct_shorthand_initialization(
     file_id: FileId,
     node: &SyntaxNode,
 ) -> Option<()> {
-    let struct_lit = ast::StructLit::cast(node)?;
+    let struct_lit = ast::StructLit::cast(node.clone())?;
     let named_field_list = struct_lit.named_field_list()?;
     for named_field in named_field_list.fields() {
         if let (Some(name_ref), Some(expr)) = (named_field.name_ref(), named_field.expr()) {
@@ -184,7 +184,7 @@ mod tests {
         let parse = SourceFile::parse(code);
         let mut diagnostics = Vec::new();
         for node in parse.tree().syntax().descendants() {
-            func(&mut diagnostics, FileId(0), node);
+            func(&mut diagnostics, FileId(0), &node);
         }
         assert!(diagnostics.is_empty());
     }
@@ -193,7 +193,7 @@ mod tests {
         let parse = SourceFile::parse(before);
         let mut diagnostics = Vec::new();
         for node in parse.tree().syntax().descendants() {
-            func(&mut diagnostics, FileId(0), node);
+            func(&mut diagnostics, FileId(0), &node);
         }
         let diagnostic =
             diagnostics.pop().unwrap_or_else(|| panic!("no diagnostics for:\n{}\n", before));
@@ -38,7 +38,7 @@ impl FunctionSignature {
     pub(crate) fn from_hir(db: &db::RootDatabase, function: hir::Function) -> Self {
         let doc = function.docs(db);
         let ast_node = function.source(db).ast;
-        FunctionSignature::from(&*ast_node).with_doc_opt(doc)
+        FunctionSignature::from(&ast_node).with_doc_opt(doc)
     }
 }
@@ -5,7 +5,7 @@ use ra_syntax::{
     ast::{self, DocCommentsOwner},
     AstNode, AstPtr, SmolStr,
     SyntaxKind::{self, NAME},
-    SyntaxNode, TextRange, TreeArc,
+    SyntaxNode, TextRange,
 };

 use super::short_label::ShortLabel;
@@ -169,7 +169,7 @@ impl NavigationTarget {
         let file_id = src.file_id.original_file(db);
         match src.ast {
             FieldSource::Named(it) => {
-                NavigationTarget::from_named(file_id, &*it, it.doc_comment_text(), it.short_label())
+                NavigationTarget::from_named(file_id, &it, it.doc_comment_text(), it.short_label())
             }
             FieldSource::Pos(it) => {
                 NavigationTarget::from_syntax(file_id, "".into(), None, it.syntax(), None, None)
@@ -179,13 +179,13 @@ impl NavigationTarget {

     pub(crate) fn from_def_source<A, D>(db: &RootDatabase, def: D) -> NavigationTarget
     where
-        D: HasSource<Ast = TreeArc<A>>,
+        D: HasSource<Ast = A>,
         A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel,
     {
         let src = def.source(db);
         NavigationTarget::from_named(
             src.file_id.original_file(db),
-            &*src.ast,
+            &src.ast,
             src.ast.doc_comment_text(),
             src.ast.short_label(),
         )
@@ -249,7 +249,7 @@ impl NavigationTarget {
         log::debug!("nav target {}", src.ast.syntax().debug_dump());
         NavigationTarget::from_named(
             src.file_id.original_file(db),
-            &*src.ast,
+            &src.ast,
             src.ast.doc_comment_text(),
             None,
         )
@@ -318,22 +318,18 @@ pub(crate) fn docs_from_symbol(db: &RootDatabase, symbol: &FileSymbol) -> Option
     let parse = db.parse(symbol.file_id);
     let node = symbol.ptr.to_node(parse.tree().syntax()).to_owned();

-    fn doc_comments<N: ast::DocCommentsOwner>(node: &N) -> Option<String> {
-        node.doc_comment_text()
-    }
-
     visitor()
-        .visit(doc_comments::<ast::FnDef>)
-        .visit(doc_comments::<ast::StructDef>)
-        .visit(doc_comments::<ast::EnumDef>)
-        .visit(doc_comments::<ast::TraitDef>)
-        .visit(doc_comments::<ast::Module>)
-        .visit(doc_comments::<ast::TypeAliasDef>)
-        .visit(doc_comments::<ast::ConstDef>)
-        .visit(doc_comments::<ast::StaticDef>)
-        .visit(doc_comments::<ast::NamedFieldDef>)
-        .visit(doc_comments::<ast::EnumVariant>)
-        .visit(doc_comments::<ast::MacroCall>)
+        .visit(|it: ast::FnDef| it.doc_comment_text())
+        .visit(|it: ast::StructDef| it.doc_comment_text())
+        .visit(|it: ast::EnumDef| it.doc_comment_text())
+        .visit(|it: ast::TraitDef| it.doc_comment_text())
+        .visit(|it: ast::Module| it.doc_comment_text())
+        .visit(|it: ast::TypeAliasDef| it.doc_comment_text())
+        .visit(|it: ast::ConstDef| it.doc_comment_text())
+        .visit(|it: ast::StaticDef| it.doc_comment_text())
+        .visit(|it: ast::NamedFieldDef| it.doc_comment_text())
+        .visit(|it: ast::EnumVariant| it.doc_comment_text())
+        .visit(|it: ast::MacroCall| it.doc_comment_text())
         .accept(&node)?
 }

@@ -345,15 +341,15 @@ pub(crate) fn description_from_symbol(db: &RootDatabase, symbol: &FileSymbol) ->
     let node = symbol.ptr.to_node(parse.tree().syntax()).to_owned();

     visitor()
-        .visit(|node: &ast::FnDef| node.short_label())
-        .visit(|node: &ast::StructDef| node.short_label())
-        .visit(|node: &ast::EnumDef| node.short_label())
-        .visit(|node: &ast::TraitDef| node.short_label())
-        .visit(|node: &ast::Module| node.short_label())
-        .visit(|node: &ast::TypeAliasDef| node.short_label())
-        .visit(|node: &ast::ConstDef| node.short_label())
-        .visit(|node: &ast::StaticDef| node.short_label())
-        .visit(|node: &ast::NamedFieldDef| node.short_label())
-        .visit(|node: &ast::EnumVariant| node.short_label())
+        .visit(|node: ast::FnDef| node.short_label())
+        .visit(|node: ast::StructDef| node.short_label())
+        .visit(|node: ast::EnumDef| node.short_label())
+        .visit(|node: ast::TraitDef| node.short_label())
+        .visit(|node: ast::Module| node.short_label())
+        .visit(|node: ast::TypeAliasDef| node.short_label())
+        .visit(|node: ast::ConstDef| node.short_label())
+        .visit(|node: ast::StaticDef| node.short_label())
+        .visit(|node: ast::NamedFieldDef| node.short_label())
+        .visit(|node: ast::EnumVariant| node.short_label())
         .accept(&node)?
 }
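TreeArc disappears from HasSource because an owned AST node now carries its own reference count, so the associated type can be the node itself (Ast = A instead of Ast = TreeArc<A>), and &*src.ast becomes a plain &src.ast. A sketch of the simplified shape, with hypothetical stand-ins for the hir types:

struct RootDatabase;
struct Source<A> {
    ast: A,
}

trait HasSource {
    type Ast;
    fn source(&self, db: &RootDatabase) -> Source<Self::Ast>;
}

// Generic consumers now bound directly on the AST type instead of
// unwrapping a TreeArc: `&src.ast` rather than `&*src.ast`.
fn label<A: std::fmt::Display, D: HasSource<Ast = A>>(db: &RootDatabase, def: D) -> String {
    let src = def.source(db);
    format!("{}", &src.ast)
}

struct FnDef(String); // stand-in AST node
impl std::fmt::Display for FnDef {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.0)
    }
}

struct Function;
impl HasSource for Function {
    type Ast = FnDef;
    fn source(&self, _db: &RootDatabase) -> Source<FnDef> {
        Source { ast: FnDef("fn foo()".into()) }
    }
}

fn main() {
    assert_eq!(label(&RootDatabase, Function), "fn foo()");
}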
@@ -24,14 +24,14 @@ pub fn file_structure(file: &SourceFile) -> Vec<StructureNode> {
     for event in file.syntax().preorder() {
         match event {
             WalkEvent::Enter(node) => {
-                if let Some(mut symbol) = structure_node(node) {
+                if let Some(mut symbol) = structure_node(&node) {
                     symbol.parent = stack.last().copied();
                     stack.push(res.len());
                     res.push(symbol);
                 }
             }
             WalkEvent::Leave(node) => {
-                if structure_node(node).is_some() {
+                if structure_node(&node).is_some() {
                     stack.pop().unwrap();
                 }
             }
@@ -41,19 +41,20 @@ pub fn file_structure(file: &SourceFile) -> Vec<StructureNode> {
 }

 fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {
-    fn decl<N: NameOwner + AttrsOwner>(node: &N) -> Option<StructureNode> {
+    fn decl<N: NameOwner + AttrsOwner>(node: N) -> Option<StructureNode> {
         decl_with_detail(node, None)
     }

     fn decl_with_ascription<N: NameOwner + AttrsOwner + TypeAscriptionOwner>(
-        node: &N,
+        node: N,
     ) -> Option<StructureNode> {
-        decl_with_type_ref(node, node.ascribed_type())
+        let ty = node.ascribed_type();
+        decl_with_type_ref(node, ty)
     }

     fn decl_with_type_ref<N: NameOwner + AttrsOwner>(
-        node: &N,
-        type_ref: Option<&ast::TypeRef>,
+        node: N,
+        type_ref: Option<ast::TypeRef>,
     ) -> Option<StructureNode> {
         let detail = type_ref.map(|type_ref| {
             let mut detail = String::new();
@@ -64,7 +65,7 @@ fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {
     }

     fn decl_with_detail<N: NameOwner + AttrsOwner>(
-        node: &N,
+        node: N,
         detail: Option<String>,
     ) -> Option<StructureNode> {
         let name = node.name()?;
@@ -82,22 +83,24 @@ fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {

     fn collapse_ws(node: &SyntaxNode, output: &mut String) {
         let mut can_insert_ws = false;
-        for line in node.text().chunks().flat_map(|chunk| chunk.lines()) {
-            let line = line.trim();
-            if line.is_empty() {
-                if can_insert_ws {
-                    output.push_str(" ");
-                    can_insert_ws = false;
-                }
-            } else {
-                output.push_str(line);
-                can_insert_ws = true;
-            }
-        }
+        for chunk in node.text().chunks() {
+            for line in chunk.lines() {
+                let line = line.trim();
+                if line.is_empty() {
+                    if can_insert_ws {
+                        output.push_str(" ");
+                        can_insert_ws = false;
+                    }
+                } else {
+                    output.push_str(line);
+                    can_insert_ws = true;
+                }
+            }
+        }
     }

     visitor()
-        .visit(|fn_def: &ast::FnDef| {
+        .visit(|fn_def: ast::FnDef| {
             let mut detail = String::from("fn");
             if let Some(type_param_list) = fn_def.type_param_list() {
                 collapse_ws(type_param_list.syntax(), &mut detail);
@@ -117,11 +120,14 @@ fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {
         .visit(decl::<ast::EnumVariant>)
         .visit(decl::<ast::TraitDef>)
         .visit(decl::<ast::Module>)
-        .visit(|td: &ast::TypeAliasDef| decl_with_type_ref(td, td.type_ref()))
+        .visit(|td: ast::TypeAliasDef| {
+            let ty = td.type_ref();
+            decl_with_type_ref(td, ty)
+        })
         .visit(decl_with_ascription::<ast::NamedFieldDef>)
         .visit(decl_with_ascription::<ast::ConstDef>)
         .visit(decl_with_ascription::<ast::StaticDef>)
-        .visit(|im: &ast::ImplBlock| {
+        .visit(|im: ast::ImplBlock| {
             let target_type = im.target_type()?;
             let target_trait = im.target_trait();
             let label = match target_trait {
@@ -142,14 +148,14 @@ fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {
             };
             Some(node)
         })
-        .visit(|mc: &ast::MacroCall| {
+        .visit(|mc: ast::MacroCall| {
             let first_token = mc.syntax().first_token().unwrap();
             if first_token.text().as_str() != "macro_rules" {
                 return None;
             }
             decl(mc)
         })
-        .accept(node)?
+        .accept(&node)?
 }

 #[cfg(test)]
@@ -42,7 +42,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
         TokenAtOffset::None => return None,
         TokenAtOffset::Single(l) => {
             if string_kinds.contains(&l.kind()) {
-                extend_single_word_in_comment_or_string(l, offset).unwrap_or_else(|| l.range())
+                extend_single_word_in_comment_or_string(&l, offset).unwrap_or_else(|| l.range())
             } else {
                 l.range()
             }
@@ -56,7 +56,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
         if token.range() != range {
             return Some(token.range());
         }
-        if let Some(comment) = ast::Comment::cast(token) {
+        if let Some(comment) = ast::Comment::cast(token.clone()) {
             if let Some(range) = extend_comments(comment) {
                 return Some(range);
             }
@@ -73,7 +73,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
     let node = node.ancestors().take_while(|n| n.range() == node.range()).last().unwrap();

     if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) {
-        if let Some(range) = extend_list_item(node) {
+        if let Some(range) = extend_list_item(&node) {
             return Some(range);
         }
     }
@@ -82,7 +82,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
 }

 fn extend_single_word_in_comment_or_string(
-    leaf: SyntaxToken,
+    leaf: &SyntaxToken,
     offset: TextUnit,
 ) -> Option<TextRange> {
     let text: &str = leaf.text();
@@ -131,9 +131,9 @@ fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextUnit) -> TextRange
     ws.range()
 }

-fn pick_best<'a>(l: SyntaxToken<'a>, r: SyntaxToken<'a>) -> SyntaxToken<'a> {
-    return if priority(r) > priority(l) { r } else { l };
-    fn priority(n: SyntaxToken) -> usize {
+fn pick_best<'a>(l: SyntaxToken, r: SyntaxToken) -> SyntaxToken {
+    return if priority(&r) > priority(&l) { r } else { l };
+    fn priority(n: &SyntaxToken) -> usize {
         match n.kind() {
             WHITESPACE => 0,
             IDENT | T![self] | T![super] | T![crate] | LIFETIME => 2,
@@ -156,7 +156,7 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
             SyntaxElement::Token(it) => is_single_line_ws(it),
         })
         .next()
-        .and_then(|it| it.as_token())
+        .and_then(|it| it.as_token().cloned())
         .filter(|node| node.kind() == T![,])
 }

@@ -167,7 +167,7 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
         // Include any following whitespace when comma if after list item.
         let final_node = comma_node
             .next_sibling_or_token()
-            .and_then(|it| it.as_token())
+            .and_then(|it| it.as_token().cloned())
             .filter(|node| is_single_line_ws(node))
             .unwrap_or(comma_node);

@@ -178,8 +178,8 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
 }

 fn extend_comments(comment: ast::Comment) -> Option<TextRange> {
-    let prev = adj_comments(comment, Direction::Prev);
-    let next = adj_comments(comment, Direction::Next);
+    let prev = adj_comments(&comment, Direction::Prev);
+    let next = adj_comments(&comment, Direction::Next);
     if prev != next {
         Some(TextRange::from_to(prev.syntax().range().start(), next.syntax().range().end()))
     } else {
@@ -187,14 +187,14 @@ fn extend_comments(comment: ast::Comment) -> Option<TextRange> {
     }
 }

-fn adj_comments(comment: ast::Comment, dir: Direction) -> ast::Comment {
-    let mut res = comment;
+fn adj_comments(comment: &ast::Comment, dir: Direction) -> ast::Comment {
+    let mut res = comment.clone();
     for element in comment.syntax().siblings_with_tokens(dir) {
         let token = match element.as_token() {
             None => break,
             Some(token) => token,
         };
-        if let Some(c) = ast::Comment::cast(token) {
+        if let Some(c) = ast::Comment::cast(token.clone()) {
             res = c
         } else if token.kind() != WHITESPACE || token.text().contains("\n\n") {
             break;
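Tokens get the same treatment as nodes in this migration: SyntaxToken loses its lifetime parameter, element accessors such as as_token() borrow, and callers append .cloned() when they need an owned token. A toy illustration (the accessor shape is an assumption inferred from the .as_token().cloned() calls above):

#[derive(Clone, Debug, PartialEq)]
struct SyntaxToken(&'static str); // owned token handle, cheap to clone

enum SyntaxElement {
    Node(&'static str),
    Token(SyntaxToken),
}

impl SyntaxElement {
    // Borrowing accessor; pair it with `.cloned()` to get an owned token.
    fn as_token(&self) -> Option<&SyntaxToken> {
        match self {
            SyntaxElement::Token(t) => Some(t),
            _ => None,
        }
    }
}

fn main() {
    let el = SyntaxElement::Token(SyntaxToken(","));
    // `.cloned()` converts Option<&SyntaxToken> into Option<SyntaxToken>.
    let owned: Option<SyntaxToken> = el.as_token().cloned();
    assert_eq!(owned, Some(SyntaxToken(",")));
}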
@@ -30,7 +30,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
     for element in file.syntax().descendants_with_tokens() {
         // Fold items that span multiple lines
         if let Some(kind) = fold_kind(element.kind()) {
-            let is_multiline = match element {
+            let is_multiline = match &element {
                 SyntaxElement::Node(node) => node.text().contains('\n'),
                 SyntaxElement::Token(token) => token.text().contains('\n'),
             };
@@ -56,7 +56,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
             SyntaxElement::Node(node) => {
                 // Fold groups of imports
                 if node.kind() == USE_ITEM && !visited_imports.contains(&node) {
-                    if let Some(range) = contiguous_range_for_group(node, &mut visited_imports) {
+                    if let Some(range) = contiguous_range_for_group(&node, &mut visited_imports) {
                         res.push(Fold { range, kind: FoldKind::Imports })
                     }
                 }
@@ -65,7 +65,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
                 if node.kind() == MODULE && !has_visibility(&node) && !visited_mods.contains(&node)
                 {
                     if let Some(range) =
-                        contiguous_range_for_group_unless(node, has_visibility, &mut visited_mods)
+                        contiguous_range_for_group_unless(&node, has_visibility, &mut visited_mods)
                     {
                         res.push(Fold { range, kind: FoldKind::Mods })
                     }
@@ -88,24 +88,24 @@ fn fold_kind(kind: SyntaxKind) -> Option<FoldKind> {
 }

 fn has_visibility(node: &SyntaxNode) -> bool {
-    ast::Module::cast(node).and_then(|m| m.visibility()).is_some()
+    ast::Module::cast(node.clone()).and_then(|m| m.visibility()).is_some()
 }

-fn contiguous_range_for_group<'a>(
-    first: &'a SyntaxNode,
-    visited: &mut FxHashSet<&'a SyntaxNode>,
+fn contiguous_range_for_group(
+    first: &SyntaxNode,
+    visited: &mut FxHashSet<SyntaxNode>,
 ) -> Option<TextRange> {
     contiguous_range_for_group_unless(first, |_| false, visited)
 }

-fn contiguous_range_for_group_unless<'a>(
-    first: &'a SyntaxNode,
-    unless: impl Fn(&'a SyntaxNode) -> bool,
-    visited: &mut FxHashSet<&'a SyntaxNode>,
+fn contiguous_range_for_group_unless(
+    first: &SyntaxNode,
+    unless: impl Fn(&SyntaxNode) -> bool,
+    visited: &mut FxHashSet<SyntaxNode>,
 ) -> Option<TextRange> {
-    visited.insert(first);
+    visited.insert(first.clone());

-    let mut last = first;
+    let mut last = first.clone();
     for element in first.siblings_with_tokens(Direction::Next) {
         let node = match element {
             SyntaxElement::Token(token) => {
@@ -123,15 +123,15 @@ fn contiguous_range_for_group_unless<'a>(
         };

         // Stop if we find a node that doesn't belong to the group
-        if node.kind() != first.kind() || unless(node) {
+        if node.kind() != first.kind() || unless(&node) {
             break;
         }

-        visited.insert(node);
+        visited.insert(node.clone());
         last = node;
     }

-    if first != last {
+    if first != &last {
         Some(TextRange::from_to(first.range().start(), last.range().end()))
     } else {
         // The group consists of only one element, therefore it cannot be folded
@@ -139,11 +139,11 @@ fn contiguous_range_for_group_unless<'a>(
     }
 }

-fn contiguous_range_for_comment<'a>(
-    first: ast::Comment<'a>,
-    visited: &mut FxHashSet<ast::Comment<'a>>,
+fn contiguous_range_for_comment(
+    first: ast::Comment,
+    visited: &mut FxHashSet<ast::Comment>,
 ) -> Option<TextRange> {
-    visited.insert(first);
+    visited.insert(first.clone());

     // Only fold comments of the same flavor
     let group_kind = first.kind();
@@ -151,11 +151,11 @@ fn contiguous_range_for_comment<'a>(
         return None;
     }

-    let mut last = first;
+    let mut last = first.clone();
     for element in first.syntax().siblings_with_tokens(Direction::Next) {
         match element {
             SyntaxElement::Token(token) => {
-                if let Some(ws) = ast::Whitespace::cast(token) {
+                if let Some(ws) = ast::Whitespace::cast(token.clone()) {
                     if !ws.spans_multiple_lines() {
                         // Ignore whitespace without blank lines
                         continue;
@@ -163,7 +163,7 @@ fn contiguous_range_for_comment<'a>(
                 }
                 if let Some(c) = ast::Comment::cast(token) {
                     if c.kind() == group_kind {
-                        visited.insert(c);
+                        visited.insert(c.clone());
                         last = c;
                         continue;
                     }
@@ -193,7 +193,7 @@ mod tests {
     fn do_check(text: &str, fold_kinds: &[FoldKind]) {
         let (ranges, text) = extract_ranges(text, "fold");
         let parse = SourceFile::parse(&text);
-        let folds = folding_ranges(parse.tree());
+        let folds = folding_ranges(&parse.tree());

         assert_eq!(
             folds.len(),
@@ -20,13 +20,13 @@ pub(crate) fn goto_definition(
     position: FilePosition,
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
     let parse = db.parse(position.file_id);
-    let syntax = parse.tree().syntax();
-    if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, position.offset) {
-        let navs = reference_definition(db, position.file_id, name_ref).to_vec();
+    let syntax = parse.tree().syntax().clone();
+    if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(&syntax, position.offset) {
+        let navs = reference_definition(db, position.file_id, &name_ref).to_vec();
         return Some(RangeInfo::new(name_ref.syntax().range(), navs.to_vec()));
     }
-    if let Some(name) = find_node_at_offset::<ast::Name>(syntax, position.offset) {
-        let navs = name_definition(db, position.file_id, name)?;
+    if let Some(name) = find_node_at_offset::<ast::Name>(&syntax, position.offset) {
+        let navs = name_definition(db, position.file_id, &name)?;
         return Some(RangeInfo::new(name.syntax().range(), navs));
     }
     None
@@ -94,7 +94,7 @@ pub(crate) fn name_definition(
 ) -> Option<Vec<NavigationTarget>> {
     let parent = name.syntax().parent()?;

-    if let Some(module) = ast::Module::cast(&parent) {
+    if let Some(module) = ast::Module::cast(parent.clone()) {
         if module.has_semi() {
             if let Some(child_module) =
                 hir::source_binder::module_from_declaration(db, file_id, module)
@@ -114,38 +114,88 @@ pub(crate) fn name_definition(

 fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget> {
     visitor()
-        .visit(|node: &ast::StructDef| {
-            NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
+        .visit(|node: ast::StructDef| {
+            NavigationTarget::from_named(
+                file_id,
+                &node,
+                node.doc_comment_text(),
+                node.short_label(),
+            )
         })
-        .visit(|node: &ast::EnumDef| {
-            NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
+        .visit(|node: ast::EnumDef| {
+            NavigationTarget::from_named(
+                file_id,
+                &node,
+                node.doc_comment_text(),
+                node.short_label(),
+            )
         })
-        .visit(|node: &ast::EnumVariant| {
-            NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
+        .visit(|node: ast::EnumVariant| {
+            NavigationTarget::from_named(
+                file_id,
+                &node,
+                node.doc_comment_text(),
+                node.short_label(),
+            )
        })
-        .visit(|node: &ast::FnDef| {
-            NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
+        .visit(|node: ast::FnDef| {
+            NavigationTarget::from_named(
+                file_id,
+                &node,
+                node.doc_comment_text(),
+                node.short_label(),
+            )
         })
-        .visit(|node: &ast::TypeAliasDef| {
-            NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
+        .visit(|node: ast::TypeAliasDef| {
+            NavigationTarget::from_named(
+                file_id,
+                &node,
+                node.doc_comment_text(),
+                node.short_label(),
+            )
         })
-        .visit(|node: &ast::ConstDef| {
-            NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
+        .visit(|node: ast::ConstDef| {
+            NavigationTarget::from_named(
+                file_id,
+                &node,
+                node.doc_comment_text(),
+                node.short_label(),
+            )
         })
-        .visit(|node: &ast::StaticDef| {
-            NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
+        .visit(|node: ast::StaticDef| {
+            NavigationTarget::from_named(
+                file_id,
+                &node,
+                node.doc_comment_text(),
+                node.short_label(),
+            )
         })
-        .visit(|node: &ast::TraitDef| {
-            NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
+        .visit(|node: ast::TraitDef| {
+            NavigationTarget::from_named(
+                file_id,
+                &node,
+                node.doc_comment_text(),
+                node.short_label(),
+            )
         })
-        .visit(|node: &ast::NamedFieldDef| {
-            NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
+        .visit(|node: ast::NamedFieldDef| {
+            NavigationTarget::from_named(
+                file_id,
+                &node,
+                node.doc_comment_text(),
+                node.short_label(),
+            )
         })
-        .visit(|node: &ast::Module| {
-            NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
+        .visit(|node: ast::Module| {
+            NavigationTarget::from_named(
+                file_id,
+                &node,
+                node.doc_comment_text(),
+                node.short_label(),
+            )
         })
-        .visit(|node: &ast::MacroCall| {
-            NavigationTarget::from_named(file_id, node, node.doc_comment_text(), None)
+        .visit(|node: ast::MacroCall| {
+            NavigationTarget::from_named(file_id, &node, node.doc_comment_text(), None)
         })
         .accept(node)
 }
@@ -13,15 +13,17 @@ pub(crate) fn goto_type_definition(
         token
             .parent()
             .ancestors()
-            .find(|n| ast::Expr::cast(*n).is_some() || ast::Pat::cast(*n).is_some())
+            .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())
     })?;

-    let analyzer = hir::SourceAnalyzer::new(db, position.file_id, node, None);
+    let analyzer = hir::SourceAnalyzer::new(db, position.file_id, &node, None);

-    let ty: hir::Ty = if let Some(ty) = ast::Expr::cast(node).and_then(|e| analyzer.type_of(db, e))
+    let ty: hir::Ty = if let Some(ty) =
+        ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e))
     {
         ty
-    } else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, p)) {
+    } else if let Some(ty) = ast::Pat::cast(node.clone()).and_then(|p| analyzer.type_of_pat(db, &p))
+    {
         ty
     } else {
         return None;
@@ -6,7 +6,7 @@ use ra_syntax::{
         visit::{visitor, Visitor},
     },
     ast::{self, DocCommentsOwner},
-    AstNode, TreeArc,
+    AstNode,
 };

 use crate::{
@@ -104,7 +104,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn

     let mut no_fallback = false;

-    match classify_name_ref(db, &analyzer, name_ref) {
+    match classify_name_ref(db, &analyzer, &name_ref) {
         Some(Method(it)) => res.extend(from_def_source(db, it)),
         Some(Macro(it)) => {
             let src = it.source(db);
@@ -163,7 +163,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn

     if res.is_empty() && !no_fallback {
         // Fallback index based approach:
-        let symbols = crate::symbol_index::index_resolve(db, name_ref);
+        let symbols = crate::symbol_index::index_resolve(db, &name_ref);
         for sym in symbols {
             let docs = docs_from_symbol(db, &sym);
             let desc = description_from_symbol(db, &sym);
@@ -177,34 +177,32 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
     } else if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), position.offset) {
         if let Some(parent) = name.syntax().parent() {
             let text = visitor()
-                .visit(|node: &ast::StructDef| {
-                    hover_text(node.doc_comment_text(), node.short_label())
-                })
-                .visit(|node: &ast::EnumDef| {
-                    hover_text(node.doc_comment_text(), node.short_label())
-                })
-                .visit(|node: &ast::EnumVariant| {
-                    hover_text(node.doc_comment_text(), node.short_label())
-                })
-                .visit(|node: &ast::FnDef| hover_text(node.doc_comment_text(), node.short_label()))
-                .visit(|node: &ast::TypeAliasDef| {
-                    hover_text(node.doc_comment_text(), node.short_label())
-                })
-                .visit(|node: &ast::ConstDef| {
-                    hover_text(node.doc_comment_text(), node.short_label())
-                })
-                .visit(|node: &ast::StaticDef| {
-                    hover_text(node.doc_comment_text(), node.short_label())
-                })
-                .visit(|node: &ast::TraitDef| {
-                    hover_text(node.doc_comment_text(), node.short_label())
-                })
-                .visit(|node: &ast::NamedFieldDef| {
-                    hover_text(node.doc_comment_text(), node.short_label())
-                })
-                .visit(|node: &ast::Module| hover_text(node.doc_comment_text(), node.short_label()))
-                .visit(|node: &ast::MacroCall| hover_text(node.doc_comment_text(), None))
-                .accept(parent);
+                .visit(|node: ast::StructDef| {
+                    hover_text(node.doc_comment_text(), node.short_label())
+                })
+                .visit(|node: ast::EnumDef| hover_text(node.doc_comment_text(), node.short_label()))
+                .visit(|node: ast::EnumVariant| {
+                    hover_text(node.doc_comment_text(), node.short_label())
+                })
+                .visit(|node: ast::FnDef| hover_text(node.doc_comment_text(), node.short_label()))
+                .visit(|node: ast::TypeAliasDef| {
+                    hover_text(node.doc_comment_text(), node.short_label())
+                })
+                .visit(|node: ast::ConstDef| {
+                    hover_text(node.doc_comment_text(), node.short_label())
+                })
+                .visit(|node: ast::StaticDef| {
+                    hover_text(node.doc_comment_text(), node.short_label())
+                })
+                .visit(|node: ast::TraitDef| {
+                    hover_text(node.doc_comment_text(), node.short_label())
+                })
+                .visit(|node: ast::NamedFieldDef| {
+                    hover_text(node.doc_comment_text(), node.short_label())
+                })
+                .visit(|node: ast::Module| hover_text(node.doc_comment_text(), node.short_label()))
+                .visit(|node: ast::MacroCall| hover_text(node.doc_comment_text(), None))
+                .accept(&parent);

             if let Some(text) = text {
                 res.extend(text);
@@ -217,8 +215,9 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
     }

     if range.is_none() {
-        let node = ancestors_at_offset(file.syntax(), position.offset)
-            .find(|n| ast::Expr::cast(*n).is_some() || ast::Pat::cast(*n).is_some())?;
+        let node = ancestors_at_offset(file.syntax(), position.offset).find(|n| {
+            ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some()
+        })?;
         let frange = FileRange { file_id: position.file_id, range: node.range() };
         res.extend(type_of(db, frange).map(rust_code_markup));
         range = Some(node.range());
@@ -233,7 +232,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn

 fn from_def_source<A, D>(db: &RootDatabase, def: D) -> Option<String>
 where
-    D: HasSource<Ast = TreeArc<A>>,
+    D: HasSource<Ast = A>,
     A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel,
 {
     let src = def.source(db);
@@ -243,17 +242,17 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn

 pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> {
     let parse = db.parse(frange.file_id);
-    let syntax = parse.tree().syntax();
-    let leaf_node = find_covering_element(syntax, frange.range);
+    let leaf_node = find_covering_element(parse.tree().syntax(), frange.range);
     // if we picked identifier, expand to pattern/expression
     let node = leaf_node
         .ancestors()
         .take_while(|it| it.range() == leaf_node.range())
-        .find(|&it| ast::Expr::cast(it).is_some() || ast::Pat::cast(it).is_some())?;
-    let analyzer = hir::SourceAnalyzer::new(db, frange.file_id, node, None);
-    let ty = if let Some(ty) = ast::Expr::cast(node).and_then(|e| analyzer.type_of(db, e)) {
+        .find(|it| ast::Expr::cast(it.clone()).is_some() || ast::Pat::cast(it.clone()).is_some())?;
+    let analyzer = hir::SourceAnalyzer::new(db, frange.file_id, &node, None);
+    let ty = if let Some(ty) = ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e))
+    {
         ty
-    } else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, p)) {
+    } else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, &p)) {
         ty
     } else {
         return None;
@@ -9,19 +9,19 @@ pub(crate) fn goto_implementation(
     position: FilePosition,
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
     let parse = db.parse(position.file_id);
-    let syntax = parse.tree().syntax();
+    let syntax = parse.tree().syntax().clone();

     let module = source_binder::module_from_position(db, position)?;

-    if let Some(nominal_def) = find_node_at_offset::<ast::NominalDef>(syntax, position.offset) {
+    if let Some(nominal_def) = find_node_at_offset::<ast::NominalDef>(&syntax, position.offset) {
         return Some(RangeInfo::new(
             nominal_def.syntax().range(),
-            impls_for_def(db, nominal_def, module)?,
+            impls_for_def(db, &nominal_def, module)?,
         ));
-    } else if let Some(trait_def) = find_node_at_offset::<ast::TraitDef>(syntax, position.offset) {
+    } else if let Some(trait_def) = find_node_at_offset::<ast::TraitDef>(&syntax, position.offset) {
         return Some(RangeInfo::new(
             trait_def.syntax().range(),
-            impls_for_trait(db, trait_def, module)?,
+            impls_for_trait(db, &trait_def, module)?,
         ));
     }
@@ -27,7 +27,7 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
         SyntaxElement::Token(token) => token.parent(),
     };
     let mut edit = TextEditBuilder::default();
-    for token in node.descendants_with_tokens().filter_map(|it| it.as_token()) {
+    for token in node.descendants_with_tokens().filter_map(|it| it.as_token().cloned()) {
         let range = match range.intersection(&token.range()) {
             Some(range) => range,
             None => continue,
@@ -37,7 +37,7 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
             let pos: TextUnit = (pos as u32).into();
             let off = token.range().start() + range.start() + pos;
             if !edit.invalidates_offset(off) {
-                remove_newline(&mut edit, token, off);
+                remove_newline(&mut edit, &token, off);
             }
         }
     }
@@ -45,7 +45,7 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
     edit.finish()
 }

-fn remove_newline(edit: &mut TextEditBuilder, token: SyntaxToken, offset: TextUnit) {
+fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextUnit) {
     if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 {
         // The node is either the first or the last in the file
         let suff = &token.text()[TextRange::from_to(
@@ -98,9 +98,10 @@ fn remove_newline(edit: &mut TextEditBuilder, token: SyntaxToken, offset: TextUn
             TextRange::from_to(prev.range().start(), token.range().end()),
             space.to_string(),
         );
-    } else if let (Some(_), Some(next)) =
-        (prev.as_token().and_then(ast::Comment::cast), next.as_token().and_then(ast::Comment::cast))
-    {
+    } else if let (Some(_), Some(next)) = (
+        prev.as_token().cloned().and_then(ast::Comment::cast),
+        next.as_token().cloned().and_then(ast::Comment::cast),
+    ) {
         // Removes: newline (incl. surrounding whitespace), start of the next comment
         edit.delete(TextRange::from_to(
             token.range().start(),
@@ -113,16 +114,16 @@ fn remove_newline(edit: &mut TextEditBuilder, token: SyntaxToken, offset: TextUn
     }
 }

 fn has_comma_after(node: &SyntaxNode) -> bool {
-    match non_trivia_sibling(node.into(), Direction::Next) {
+    match non_trivia_sibling(node.clone().into(), Direction::Next) {
         Some(n) => n.kind() == T![,],
         _ => false,
     }
 }

-fn join_single_expr_block(edit: &mut TextEditBuilder, token: SyntaxToken) -> Option<()> {
+fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> {
     let block = ast::Block::cast(token.parent())?;
     let block_expr = ast::BlockExpr::cast(block.syntax().parent()?)?;
-    let expr = extract_trivial_expression(block)?;
+    let expr = extract_trivial_expression(&block)?;

     let block_range = block_expr.syntax().range();
     let mut buf = expr.syntax().text().to_string();
@@ -139,7 +140,7 @@ fn join_single_expr_block(edit: &mut TextEditBuilder, token: SyntaxToken) -> Opt
     Some(())
 }

-fn join_single_use_tree(edit: &mut TextEditBuilder, token: SyntaxToken) -> Option<()> {
+fn join_single_use_tree(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> {
     let use_tree_list = ast::UseTreeList::cast(token.parent())?;
     let (tree,) = use_tree_list.use_trees().collect_tuple()?;
     edit.replace(use_tree_list.syntax().range(), tree.syntax().text().to_string());
@@ -504,7 +505,7 @@ fn foo() {
     fn check_join_lines_sel(before: &str, after: &str) {
         let (sel, before) = extract_range(before);
         let parse = SourceFile::parse(&before);
-        let result = join_lines(parse.tree(), sel);
+        let result = join_lines(&parse.tree(), sel);
         let actual = result.apply(&before);
         assert_eq_text!(after, &actual);
     }
@@ -50,7 +50,7 @@ use ra_db::{
     salsa::{self, ParallelDatabase},
     CheckCanceled, SourceDatabase,
 };
-use ra_syntax::{SourceFile, TextRange, TextUnit, TreeArc};
+use ra_syntax::{SourceFile, TextRange, TextUnit};
 use ra_text_edit::TextEdit;
 use relative_path::RelativePathBuf;

@@ -325,8 +325,8 @@ impl Analysis {
     }

     /// Gets the syntax tree of the file.
-    pub fn parse(&self, file_id: FileId) -> TreeArc<SourceFile> {
-        self.db.parse(file_id).tree().to_owned()
+    pub fn parse(&self, file_id: FileId) -> SourceFile {
+        self.db.parse(file_id).tree()
     }

     /// Gets the file's `LineIndex`: data structure to convert between absolute
@@ -360,7 +360,7 @@ impl Analysis {
         let parse = self.db.parse(frange.file_id);
         let file_edit = SourceFileEdit {
             file_id: frange.file_id,
-            edit: join_lines::join_lines(parse.tree(), frange.range),
+            edit: join_lines::join_lines(&parse.tree(), frange.range),
         };
         SourceChange::source_file_edit("join lines", file_edit)
     }
@@ -393,13 +393,13 @@ impl Analysis {
     /// file outline.
     pub fn file_structure(&self, file_id: FileId) -> Vec<StructureNode> {
         let parse = self.db.parse(file_id);
-        file_structure(parse.tree())
+        file_structure(&parse.tree())
     }

     /// Returns the set of folding ranges.
     pub fn folding_ranges(&self, file_id: FileId) -> Vec<Fold> {
         let parse = self.db.parse(file_id);
-        folding_ranges::folding_ranges(parse.tree())
+        folding_ranges::folding_ranges(&parse.tree())
     }

     /// Fuzzy searches for a symbol.
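The Analysis::parse change is the public face of the migration: SourceFile is now itself the cheap, clonable handle, so tree() can be returned by value and TreeArc<SourceFile> goes away. A toy model of why returning by value is cheap (assumed simplification, not the real ra_syntax types):

use std::sync::Arc;

// The tree is immutable and shared, so handing out the handle by value
// is just an Arc bump, not a deep copy of the syntax tree.
#[derive(Clone)]
struct SourceFile(Arc<String>);

struct Parse {
    tree: SourceFile,
}

impl Parse {
    fn tree(&self) -> SourceFile {
        self.tree.clone()
    }
}

fn main() {
    let parse = Parse { tree: SourceFile(Arc::new("fn main() {}".into())) };
    let a = parse.tree();
    let b = parse.tree();
    assert!(Arc::ptr_eq(&a.0, &b.0)); // same underlying tree, two handles
}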
@@ -26,7 +26,7 @@ mod tests {
     fn do_check(before: &str, after: &str) {
         let (pos, before) = extract_offset(before);
         let parse = SourceFile::parse(&before);
-        let new_pos = match matching_brace(parse.tree(), pos) {
+        let new_pos = match matching_brace(&parse.tree(), pos) {
             None => pos,
             Some(pos) => pos,
         };
@@ -26,7 +26,7 @@ pub(crate) fn classify_name_ref(
     // Check if it is a method
     if let Some(method_call) = name_ref.syntax().parent().and_then(ast::MethodCallExpr::cast) {
         tested_by!(goto_definition_works_for_methods);
-        if let Some(func) = analyzer.resolve_method_call(method_call) {
+        if let Some(func) = analyzer.resolve_method_call(&method_call) {
             return Some(Method(func));
         }
     }
@@ -40,7 +40,7 @@ pub(crate) fn classify_name_ref(
         .and_then(ast::MacroCall::cast)
     {
         tested_by!(goto_definition_works_for_macros);
-        if let Some(mac) = analyzer.resolve_macro_call(db, macro_call) {
+        if let Some(mac) = analyzer.resolve_macro_call(db, &macro_call) {
             return Some(Macro(mac));
         }
     }
@@ -48,7 +48,7 @@ pub(crate) fn classify_name_ref(
     // It could also be a field access
     if let Some(field_expr) = name_ref.syntax().parent().and_then(ast::FieldExpr::cast) {
         tested_by!(goto_definition_works_for_fields);
-        if let Some(field) = analyzer.resolve_field(field_expr) {
+        if let Some(field) = analyzer.resolve_field(&field_expr) {
             return Some(FieldAccess(field));
         };
     }
@@ -59,7 +59,7 @@ pub(crate) fn classify_name_ref(

     let struct_lit = field_expr.syntax().ancestors().find_map(ast::StructLit::cast);

-    if let Some(ty) = struct_lit.and_then(|lit| analyzer.type_of(db, lit.into())) {
+    if let Some(ty) = struct_lit.and_then(|lit| analyzer.type_of(db, &lit.into())) {
         if let Some((hir::AdtDef::Struct(s), _)) = ty.as_adt() {
             let hir_path = hir::Path::from_name_ref(name_ref);
             let hir_name = hir_path.as_ident().unwrap();
@@ -73,7 +73,7 @@ pub(crate) fn classify_name_ref(

     // General case, a path or a local:
     if let Some(path) = name_ref.syntax().ancestors().find_map(ast::Path::cast) {
-        if let Some(resolved) = analyzer.resolve_path(db, path) {
+        if let Some(resolved) = analyzer.resolve_path(db, &path) {
             return match resolved {
                 hir::PathResolution::Def(def) => Some(Def(def)),
                 hir::PathResolution::LocalBinding(Either::A(pat)) => Some(Pat(pat)),
@ -50,11 +50,11 @@ pub(crate) fn find_all_refs(
    position: FilePosition,
) -> Option<ReferenceSearchResult> {
    let parse = db.parse(position.file_id);
    let (binding, analyzer) = find_binding(db, parse.tree(), position)?;
    let declaration = NavigationTarget::from_bind_pat(position.file_id, binding);
    let (binding, analyzer) = find_binding(db, &parse.tree(), position)?;
    let declaration = NavigationTarget::from_bind_pat(position.file_id, &binding);

    let references = analyzer
        .find_all_refs(binding)
        .find_all_refs(&binding)
        .into_iter()
        .map(move |ref_desc| FileRange { file_id: position.file_id, range: ref_desc.range })
        .collect::<Vec<_>>();
@ -63,9 +63,9 @@ pub(crate) fn find_all_refs(

fn find_binding<'a>(
    db: &RootDatabase,
    source_file: &'a SourceFile,
    source_file: &SourceFile,
    position: FilePosition,
) -> Option<(&'a ast::BindPat, hir::SourceAnalyzer)> {
) -> Option<(ast::BindPat, hir::SourceAnalyzer)> {
    let syntax = source_file.syntax();
    if let Some(binding) = find_node_at_offset::<ast::BindPat>(syntax, position.offset) {
        let analyzer = hir::SourceAnalyzer::new(db, position.file_id, binding.syntax(), None);
@ -73,7 +73,7 @@ pub(crate) fn find_all_refs(
    };
    let name_ref = find_node_at_offset::<ast::NameRef>(syntax, position.offset)?;
    let analyzer = hir::SourceAnalyzer::new(db, position.file_id, name_ref.syntax(), None);
    let resolved = analyzer.resolve_local_name(name_ref)?;
    let resolved = analyzer.resolve_local_name(&name_ref)?;
    if let Either::A(ptr) = resolved.ptr() {
        if let ast::PatKind::BindPat(binding) = ptr.to_node(source_file.syntax()).kind() {
            return Some((binding, analyzer));
@ -89,10 +89,10 @@ pub(crate) fn rename(
    new_name: &str,
) -> Option<SourceChange> {
    let parse = db.parse(position.file_id);
    let syntax = parse.tree().syntax();

    if let Some((ast_name, ast_module)) = find_name_and_module_at_offset(syntax, position) {
        rename_mod(db, ast_name, ast_module, position, new_name)
    if let Some((ast_name, ast_module)) =
        find_name_and_module_at_offset(parse.tree().syntax(), position)
    {
        rename_mod(db, &ast_name, &ast_module, position, new_name)
    } else {
        rename_reference(db, position, new_name)
    }
@ -101,14 +101,10 @@ pub(crate) fn rename(
fn find_name_and_module_at_offset(
    syntax: &SyntaxNode,
    position: FilePosition,
) -> Option<(&ast::Name, &ast::Module)> {
    let ast_name = find_node_at_offset::<ast::Name>(syntax, position.offset);
    let ast_name_parent = ast::Module::cast(ast_name?.syntax().parent()?);

    if let (Some(ast_module), Some(name)) = (ast_name_parent, ast_name) {
        return Some((name, ast_module));
    }
    None
) -> Option<(ast::Name, ast::Module)> {
    let ast_name = find_node_at_offset::<ast::Name>(syntax, position.offset)?;
    let ast_module = ast::Module::cast(ast_name.syntax().parent()?)?;
    Some((ast_name, ast_module))
}

fn source_edit_from_fileid_range(
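The rewrite of find_name_and_module_at_offset above also shows a small idiom cleanup that owned nodes make possible: instead of collecting Options and pattern-matching the pair, each lookup short-circuits with the ? operator. A self-contained sketch of that refactoring, with plain integers standing in for the syntax nodes:

// Before: collect Options, then match on the tuple.
fn pair_before(a: Option<i32>, b: Option<i32>) -> Option<(i32, i32)> {
    if let (Some(x), Some(y)) = (a, b) {
        return Some((x, y));
    }
    None
}

// After: let `?` short-circuit each step; the happy path reads straight down.
fn pair_after(a: Option<i32>, b: Option<i32>) -> Option<(i32, i32)> {
    let x = a?;
    let y = b?;
    Some((x, y))
}

fn main() {
    assert_eq!(pair_before(Some(1), Some(2)), pair_after(Some(1), Some(2)));
    assert_eq!(pair_before(None, Some(2)), None);
}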
@ -135,7 +131,8 @@ fn rename_mod(
) -> Option<SourceChange> {
    let mut source_file_edits = Vec::new();
    let mut file_system_edits = Vec::new();
    if let Some(module) = source_binder::module_from_declaration(db, position.file_id, &ast_module)
    if let Some(module) =
        source_binder::module_from_declaration(db, position.file_id, ast_module.clone())
    {
        let src = module.definition_source(db);
        let file_id = src.file_id.as_original_file();
@ -26,8 +26,8 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
    parse.tree().syntax().descendants().filter_map(|i| runnable(db, file_id, i)).collect()
}

fn runnable(db: &RootDatabase, file_id: FileId, item: &SyntaxNode) -> Option<Runnable> {
    if let Some(fn_def) = ast::FnDef::cast(item) {
fn runnable(db: &RootDatabase, file_id: FileId, item: SyntaxNode) -> Option<Runnable> {
    if let Some(fn_def) = ast::FnDef::cast(item.clone()) {
        runnable_fn(fn_def)
    } else if let Some(m) = ast::Module::cast(item) {
        runnable_mod(db, file_id, m)
@ -36,8 +36,8 @@ fn runnable(db: &RootDatabase, file_id: FileId, item: &SyntaxNode) -> Option<Run
    }
}

fn runnable_fn(fn_def: &ast::FnDef) -> Option<Runnable> {
    let name = fn_def.name()?.text();
fn runnable_fn(fn_def: ast::FnDef) -> Option<Runnable> {
    let name = fn_def.name()?.text().clone();
    let kind = if name == "main" {
        RunnableKind::Bin
    } else if fn_def.has_atom_attr("test") {
@ -50,7 +50,7 @@ fn runnable_fn(fn_def: &ast::FnDef) -> Option<Runnable> {
    Some(Runnable { range: fn_def.syntax().range(), kind })
}

fn runnable_mod(db: &RootDatabase, file_id: FileId, module: &ast::Module) -> Option<Runnable> {
fn runnable_mod(db: &RootDatabase, file_id: FileId, module: ast::Module) -> Option<Runnable> {
    let has_test_function = module
        .item_list()?
        .items()
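The runnable() change above shows another recurring consequence of owned nodes: ast casts now consume the node by value, so the first cast attempt clones the handle to keep it available for the next attempt. A self-contained model of that try-cast chain, with a hypothetical Item enum standing in for SyntaxNode and the ast wrappers:

// Hypothetical stand-ins for SyntaxNode and the typed ast layer.
#[derive(Clone, Debug, PartialEq)]
enum Item {
    Fn(String),
    Module(String),
}

// Consumes the node by value, like ast::FnDef::cast in the new rowan.
fn cast_fn(item: Item) -> Option<String> {
    match item {
        Item::Fn(name) => Some(name),
        _ => None,
    }
}

fn cast_module(item: Item) -> Option<String> {
    match item {
        Item::Module(name) => Some(name),
        _ => None,
    }
}

fn describe(item: Item) -> Option<String> {
    // The first attempt clones, so the original handle is still
    // available for the second cast if this one fails.
    if let Some(f) = cast_fn(item.clone()) {
        Some(format!("fn {}", f))
    } else if let Some(m) = cast_module(item) {
        Some(format!("mod {}", m))
    } else {
        None
    }
}

fn main() {
    assert_eq!(describe(Item::Fn("main".into())), Some("fn main".to_string()));
    assert_eq!(describe(Item::Module("tests".into())), Some("mod tests".to_string()));
}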
@ -104,7 +104,7 @@ impl FromIterator<TableEntry<MacroFile, Option<Parse<SyntaxNode>>>> for SyntaxTr
        let mut res = SyntaxTreeStats::default();
        for entry in iter {
            res.total += 1;
            if let Some(tree) = entry.value.and_then(|it| it).map(|it| it.tree().to_owned()) {
            if let Some(tree) = entry.value.and_then(|it| it).map(|it| it.syntax_node()) {
                res.retained += 1;
                res.retained_size += tree.memory_size_of_subtree();
            }
@ -61,7 +61,7 @@ fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Arc<SymbolIndex>
    db.check_canceled();
    let parse = db.parse(file_id);

    let symbols = source_file_to_file_symbols(parse.tree(), file_id);
    let symbols = source_file_to_file_symbols(&parse.tree(), file_id);

    // FIXME: add macros here

@ -173,7 +173,7 @@ impl SymbolIndex {
        files: impl ParallelIterator<Item = (FileId, Parse<ast::SourceFile>)>,
    ) -> SymbolIndex {
        let symbols = files
            .flat_map(|(file_id, file)| source_file_to_file_symbols(file.tree(), file_id))
            .flat_map(|(file_id, file)| source_file_to_file_symbols(&file.tree(), file_id))
            .collect::<Vec<_>>();
        SymbolIndex::new(symbols)
    }
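The SymbolIndex hunk above builds the index by fanning out over files with rayon's ParallelIterator and flat_map, then gathering everything with collect. A self-contained sketch of the same shape (assumes the rayon crate as a dependency; the "symbols" here are just strings, not the real FileSymbol type):

use rayon::prelude::*;

// Pretend each (file_id, text) pair yields one symbol per word.
fn file_symbols(file_id: u32, text: &str) -> Vec<(u32, String)> {
    text.split_whitespace().map(|w| (file_id, w.to_string())).collect()
}

fn main() {
    let files = vec![(0u32, "fn main"), (1u32, "struct Foo")];
    // flat_map fans out per file in parallel; collect gathers the results.
    let mut symbols: Vec<(u32, String)> = files
        .par_iter()
        .flat_map(|(file_id, text)| file_symbols(*file_id, text))
        .collect();
    symbols.sort();
    assert_eq!(symbols.len(), 4);
}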
@ -249,7 +249,7 @@ fn source_file_to_file_symbols(source_file: &SourceFile, file_id: FileId) -> Vec
    for event in source_file.syntax().preorder() {
        match event {
            WalkEvent::Enter(node) => {
                if let Some(mut symbol) = to_file_symbol(node, file_id) {
                if let Some(mut symbol) = to_file_symbol(&node, file_id) {
                    symbol.container_name = stack.last().cloned();

                    stack.push(symbol.name.clone());
@ -258,7 +258,7 @@ fn source_file_to_file_symbols(source_file: &SourceFile, file_id: FileId) -> Vec
            }

            WalkEvent::Leave(node) => {
                if to_symbol(node).is_some() {
                if to_symbol(&node).is_some() {
                    stack.pop();
                }
            }
@ -269,7 +269,7 @@ fn source_file_to_file_symbols(source_file: &SourceFile, file_id: FileId) -> Vec
}

fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> {
    fn decl<N: NameOwner>(node: &N) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> {
    fn decl<N: NameOwner>(node: N) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> {
        let name = node.name()?;
        let name_range = name.syntax().range();
        let name = name.text().clone();
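The preorder walk above relies on paired Enter/Leave events to keep a stack of container names, so every symbol records the declaration it is nested in. A self-contained sketch of that pattern on a toy tree (the WalkEvent enum here is a local stand-in, not the rowan type):

// Local stand-ins for the syntax tree and its walk events.
struct Node {
    name: String,
    children: Vec<Node>,
}

enum WalkEvent<'a> {
    Enter(&'a Node),
    Leave(&'a Node),
}

fn preorder<'a>(node: &'a Node, events: &mut Vec<WalkEvent<'a>>) {
    events.push(WalkEvent::Enter(node));
    for child in &node.children {
        preorder(child, events);
    }
    events.push(WalkEvent::Leave(node));
}

fn main() {
    let tree = Node {
        name: "mod tests".into(),
        children: vec![Node { name: "fn do_check".into(), children: vec![] }],
    };
    let mut events = Vec::new();
    preorder(&tree, &mut events);

    // Enter pushes a container name; Leave pops it, so stack.last()
    // is always the innermost enclosing declaration.
    let mut stack: Vec<String> = Vec::new();
    let mut symbols: Vec<(String, Option<String>)> = Vec::new();
    for event in &events {
        match event {
            WalkEvent::Enter(n) => {
                symbols.push((n.name.clone(), stack.last().cloned()));
                stack.push(n.name.clone());
            }
            WalkEvent::Leave(_) => {
                stack.pop();
            }
        }
    }
    assert_eq!(symbols[1], ("fn do_check".to_string(), Some("mod tests".to_string())));
}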
@ -31,8 +31,8 @@ fn is_control_keyword(kind: SyntaxKind) -> bool {
    }
}

fn is_variable_mutable(db: &RootDatabase, analyzer: &hir::SourceAnalyzer, pat: &ast::Pat) -> bool {
    let ty = analyzer.type_of_pat(db, pat).unwrap_or(Ty::Unknown);
fn is_variable_mutable(db: &RootDatabase, analyzer: &hir::SourceAnalyzer, pat: ast::Pat) -> bool {
    let ty = analyzer.type_of_pat(db, &pat).unwrap_or(Ty::Unknown);
    let is_ty_mut = {
        if let Some((_, mutability)) = ty.as_reference() {
            match mutability {
@ -55,7 +55,7 @@ fn is_variable_mutable(db: &RootDatabase, analyzer: &hir::SourceAnalyzer, pat: &
pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> {
    let _p = profile("highlight");
    let parse = db.parse(file_id);
    let root = parse.tree().syntax();
    let root = parse.tree().syntax().clone();

    fn calc_binding_hash(file_id: FileId, text: &SmolStr, shadow_count: u32) -> u64 {
        fn hash<T: std::hash::Hash + std::fmt::Debug>(x: T) -> u64 {
@ -70,6 +70,7 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
    }

    // Visited nodes to handle highlighting priorities
    // FIXME: retain only ranges here
    let mut highlighted: FxHashSet<SyntaxElement> = FxHashSet::default();
    let mut bindings_shadow_count: FxHashMap<SmolStr, u32> = FxHashMap::default();
@ -84,14 +85,14 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
            STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => "string",
            ATTR => "attribute",
            NAME_REF => {
                if let Some(name_ref) = node.as_node().and_then(ast::NameRef::cast) {
                if let Some(name_ref) = node.as_node().cloned().and_then(ast::NameRef::cast) {
                    // FIXME: revisit this after #1340
                    use crate::name_ref_kind::{classify_name_ref, NameRefKind::*};
                    use hir::{ImplItem, ModuleDef};

                    // FIXME: try to reuse the SourceAnalyzers
                    let analyzer = hir::SourceAnalyzer::new(db, file_id, name_ref.syntax(), None);
                    match classify_name_ref(db, &analyzer, name_ref) {
                    match classify_name_ref(db, &analyzer, &name_ref) {
                        Some(Method(_)) => "function",
                        Some(Macro(_)) => "macro",
                        Some(FieldAccess(_)) => "field",
@ -113,13 +114,13 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
                        Some(Pat(ptr)) => {
                            binding_hash = Some({
                                let text =
                                    ptr.syntax_node_ptr().to_node(root).text().to_smol_string();
                                    ptr.syntax_node_ptr().to_node(&root).text().to_smol_string();
                                let shadow_count =
                                    bindings_shadow_count.entry(text.clone()).or_default();
                                calc_binding_hash(file_id, &text, *shadow_count)
                            });

                            if is_variable_mutable(db, &analyzer, ptr.to_node(root)) {
                            if is_variable_mutable(db, &analyzer, ptr.to_node(&root)) {
                                "variable.mut"
                            } else {
                                "variable"
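The binding hash computed here mixes the file, the identifier text, and a per-name shadow counter, so each shadowing of a binding gets a distinct (rainbow) highlight. A self-contained sketch of that scheme, using std's HashMap and DefaultHasher in place of the FxHash types:

use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};

fn calc_binding_hash(file_id: u32, text: &str, shadow_count: u32) -> u64 {
    // Hash the triple so the same name shadowed again gets a new hash.
    let mut hasher = DefaultHasher::new();
    (file_id, text, shadow_count).hash(&mut hasher);
    hasher.finish()
}

fn main() {
    let mut shadow_counts: HashMap<String, u32> = HashMap::new();

    // First `let x = ...;`
    let count = shadow_counts.entry("x".to_string()).or_default();
    let first = calc_binding_hash(0, "x", *count);

    // A shadowing `let x = ...;` bumps the counter, changing the hash.
    let count = shadow_counts.entry("x".to_string()).or_default();
    *count += 1;
    let second = calc_binding_hash(0, "x", *count);

    assert_ne!(first, second);
}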
@ -134,7 +135,7 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
                }
            }
            NAME => {
                if let Some(name) = node.as_node().and_then(ast::Name::cast) {
                if let Some(name) = node.as_node().cloned().and_then(ast::Name::cast) {
                    let analyzer = hir::SourceAnalyzer::new(db, file_id, name.syntax(), None);
                    if let Some(pat) = name.syntax().ancestors().find_map(ast::Pat::cast) {
                        binding_hash = Some({
@ -176,12 +177,11 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
            k if is_control_keyword(k) => "keyword.control",
            k if k.is_keyword() => "keyword",
            _ => {
                // let analyzer = hir::SourceAnalyzer::new(db, file_id, name_ref.syntax(), None);
                if let Some(macro_call) = node.as_node().and_then(ast::MacroCall::cast) {
                if let Some(macro_call) = node.as_node().cloned().and_then(ast::MacroCall::cast) {
                    if let Some(path) = macro_call.path() {
                        if let Some(segment) = path.segment() {
                            if let Some(name_ref) = segment.name_ref() {
                                highlighted.insert(name_ref.syntax().into());
                                highlighted.insert(name_ref.syntax().clone().into());
                                let range_start = name_ref.syntax().range().start();
                                let mut range_end = name_ref.syntax().range().end();
                                for sibling in path.syntax().siblings_with_tokens(Direction::Next) {
@ -230,7 +230,8 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo
    let mut buf = String::new();
    buf.push_str(&STYLE);
    buf.push_str("<pre><code>");
    let tokens = parse.tree().syntax().descendants_with_tokens().filter_map(|it| it.as_token());
    let tokens =
        parse.tree().syntax().descendants_with_tokens().filter_map(|it| it.as_token().cloned());
    for token in tokens {
        could_intersect.retain(|it| token.range().start() <= it.range.end());
        while let Some(r) = ranges.get(frontier) {
@ -18,7 +18,7 @@ pub(crate) fn syntax_tree(
    let node = match algo::find_covering_element(parse.tree().syntax(), text_range) {
        SyntaxElement::Node(node) => node,
        SyntaxElement::Token(token) => {
            if let Some(tree) = syntax_tree_for_string(token, text_range) {
            if let Some(tree) = syntax_tree_for_string(&token, text_range) {
                return tree;
            }
            token.parent()
@ -33,7 +33,7 @@ pub(crate) fn syntax_tree(

/// Attempts parsing the selected contents of a string literal
/// as rust syntax and returns its syntax tree
fn syntax_tree_for_string(token: SyntaxToken, text_range: TextRange) -> Option<String> {
fn syntax_tree_for_string(token: &SyntaxToken, text_range: TextRange) -> Option<String> {
    // When the range is inside a string
    // we'll attempt parsing it as rust syntax
    // to provide the syntax tree of the contents of the string
@ -43,7 +43,7 @@ fn syntax_tree_for_string(token: SyntaxToken, text_range: TextRange) -> Option<S
    }
}

fn syntax_tree_for_token(node: SyntaxToken, text_range: TextRange) -> Option<String> {
fn syntax_tree_for_token(node: &SyntaxToken, text_range: TextRange) -> Option<String> {
    // Range of the full node
    let node_range = node.range();
    let text = node.text().to_string();
@ -1,15 +1,16 @@
use crate::{db::RootDatabase, SourceChange, SourceFileEdit};
use ra_db::{FilePosition, SourceDatabase};
use ra_fmt::leading_indent;
use ra_syntax::{
    algo::{find_node_at_offset, find_token_at_offset, TokenAtOffset},
    ast::{self, AstToken},
    AstNode, SourceFile,
    AstNode, SmolStr, SourceFile,
    SyntaxKind::*,
    SyntaxToken, TextRange, TextUnit,
};
use ra_text_edit::{TextEdit, TextEditBuilder};

use crate::{db::RootDatabase, SourceChange, SourceFileEdit};

pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> {
    let parse = db.parse(position.file_id);
    let file = parse.tree();
@ -43,15 +44,15 @@ pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<Sour
    )
}

fn node_indent<'a>(file: &'a SourceFile, token: SyntaxToken) -> Option<&'a str> {
fn node_indent(file: &SourceFile, token: &SyntaxToken) -> Option<SmolStr> {
    let ws = match find_token_at_offset(file.syntax(), token.range().start()) {
        TokenAtOffset::Between(l, r) => {
            assert!(r == token);
            assert!(r == *token);
            l
        }
        TokenAtOffset::Single(n) => {
            assert!(n == token);
            return Some("");
            assert!(n == *token);
            return Some("".into());
        }
        TokenAtOffset::None => unreachable!(),
    };
@ -60,12 +61,12 @@ fn node_indent<'a>(file: &'a SourceFile, token: SyntaxToken) -> Option<&'a str>
    }
    let text = ws.text();
    let pos = text.rfind('\n').map(|it| it + 1).unwrap_or(0);
    Some(&text[pos..])
    Some(text[pos..].into())
}
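node_indent finds the current line's indentation by taking everything after the last newline in the preceding whitespace token; returning SmolStr instead of &'a str is what frees the signature from the file's lifetime. A self-contained sketch of that slicing logic on a plain string:

// Extract the indentation after the last newline of a whitespace run.
fn indent_of(ws: &str) -> String {
    let pos = ws.rfind('\n').map(|it| it + 1).unwrap_or(0);
    // Returning an owned String plays the role SmolStr plays in the patch:
    // no lifetime tied to the source text.
    ws[pos..].to_string()
}

fn main() {
    assert_eq!(indent_of("\n    "), "    ");
    assert_eq!(indent_of("  "), "  "); // no newline: the whole run is indent
    assert_eq!(indent_of("\n\t\t"), "\t\t");
}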
pub fn on_eq_typed(file: &SourceFile, eq_offset: TextUnit) -> Option<TextEdit> {
    assert_eq!(file.syntax().text().char_at(eq_offset), Some('='));
    let let_stmt: &ast::LetStmt = find_node_at_offset(file.syntax(), eq_offset)?;
    let let_stmt: ast::LetStmt = find_node_at_offset(file.syntax(), eq_offset)?;
    if let_stmt.has_semi() {
        return None;
    }
@ -141,7 +142,7 @@ mod tests {
        edit.insert(offset, "=".to_string());
        let before = edit.finish().apply(&before);
        let parse = SourceFile::parse(&before);
        if let Some(result) = on_eq_typed(parse.tree(), offset) {
        if let Some(result) = on_eq_typed(&parse.tree(), offset) {
            let actual = result.apply(&before);
            assert_eq_text!(after, &actual);
        } else {