Mirror of https://github.com/rust-lang/rust.git
Synced 2025-05-14 02:49:40 +00:00

Merge #1545

1545: migrate ra_syntax to the new rowan API r=matklad a=matklad

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>

This commit is contained in: commit f209843e31

Changed file: Cargo.lock (generated), 8 changes
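The hunks below apply one mechanical migration everywhere: rowan 0.5.6 replaces the borrowed, lifetime-parameterized syntax handles (TreeArc<N>, &'a SyntaxNode, SyntaxToken<'a>) with owned, cheaply clonable ones, so signatures drop their lifetimes and call sites gain .clone(). A minimal, self-contained sketch of that ownership model (the NodeData layout here is illustrative, not rowan's actual definition):

    use std::sync::Arc;

    // Stand-in for rowan's internal node data; the real type also
    // stores kind, text ranges, and children.
    struct NodeData {
        kind: &'static str,
    }

    // An owned handle: cloning bumps a reference count instead of
    // borrowing from a tree that something else must keep alive.
    #[derive(Clone)]
    struct SyntaxNode(Arc<NodeData>);

    impl SyntaxNode {
        fn kind(&self) -> &'static str {
            self.0.kind
        }
    }

    fn main() {
        let node = SyntaxNode(Arc::new(NodeData { kind: "FN_DEF" }));
        // Storing a node no longer ties the holder to a lifetime
        // parameter; it just clones the handle.
        let stored = node.clone();
        assert_eq!(node.kind(), stored.kind());
    }

This is why nearly every hunk below either removes a lifetime, removes TreeArc, or adds a .clone().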
@@ -1313,7 +1313,7 @@ dependencies = [
  "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "ra_parser 0.1.0",
  "ra_text_edit 0.1.0",
- "rowan 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rowan 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "smol_str 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
  "test_utils 0.1.0",
  "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1584,11 +1584,11 @@ dependencies = [

 [[package]]
 name = "rowan"
-version = "0.5.5"
+version = "0.5.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "colosseum 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "parking_lot 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "parking_lot 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "smol_str 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
  "text_unit 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2275,7 +2275,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0e7790c7f1cc73d831d28dc5a7deb316a006e7848e6a7f467cdb10a0a9e0fb1c"
 "checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e"
 "checksum ron 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "17f52a24414403f81528b67488cf8edc4eda977d3af1646bb6b106a600ead78f"
-"checksum rowan 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)" = "500ba7550373d42593a5228085bad391517378fa31ad2a84defe100dd8259fef"
+"checksum rowan 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0c433ffe99ac9b96fa9882805d05eee5d750c9202fb42d0546c556e5d70d54be"
 "checksum rustc-demangle 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "a7f4dccf6f4891ebcc0c39f9b6eb1a83b9bf5d747cb439ec6fba4f3b977038af"
 "checksum rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7540fc8b0c49f096ee9c961cda096467dce8084bec6bdca2fc83895fd9b28cb8"
 "checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
@@ -9,7 +9,7 @@ use crate::{Assist, AssistCtx, AssistId};

 pub(crate) fn add_derive(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
     let nominal = ctx.node_at_offset::<ast::NominalDef>()?;
-    let node_start = derive_insertion_offset(nominal)?;
+    let node_start = derive_insertion_offset(&nominal)?;
     ctx.add_action(AssistId("add_derive"), "add `#[derive]`", |edit| {
         let derive_attr = nominal
             .attrs()
@@ -27,7 +27,7 @@ pub(crate) fn add_explicit_type(mut ctx: AssistCtx<impl HirDatabase>) -> Option<
     // Infer type
     let db = ctx.db;
     let analyzer = hir::SourceAnalyzer::new(db, ctx.frange.file_id, stmt.syntax(), None);
-    let ty = analyzer.type_of(db, expr)?;
+    let ty = analyzer.type_of(db, &expr)?;
     // Assist not applicable if the type is unknown
     if is_unknown(&ty) {
         return None;
@@ -16,7 +16,7 @@ pub(crate) fn add_impl(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
     let start_offset = nominal.syntax().range().end();
     let mut buf = String::new();
     buf.push_str("\n\nimpl");
-    if let Some(type_params) = type_params {
+    if let Some(type_params) = &type_params {
         type_params.syntax().text().push_to(&mut buf);
     }
     buf.push_str(" ");
@@ -25,9 +25,9 @@ pub(crate) fn add_impl(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
         let lifetime_params = type_params
             .lifetime_params()
             .filter_map(|it| it.lifetime_token())
-            .map(|it| it.text());
+            .map(|it| it.text().clone());
         let type_params =
-            type_params.type_params().filter_map(|it| it.name()).map(|it| it.text());
+            type_params.type_params().filter_map(|it| it.name()).map(|it| it.text().clone());
         join(lifetime_params.chain(type_params)).surround_with("<", ">").to_buf(&mut buf);
     }
     buf.push_str(" {\n");
@@ -5,8 +5,8 @@ use crate::{

 use hir::{db::HirDatabase, HasSource};
 use ra_db::FilePosition;
-use ra_syntax::ast::{self, AstNode, ImplItem, ImplItemKind, NameOwner};
-use ra_syntax::{SmolStr, TreeArc};
+use ra_syntax::ast::{self, AstNode, ImplItemKind, NameOwner};
+use ra_syntax::SmolStr;

 #[derive(PartialEq)]
 enum AddMissingImplMembersMode {
@@ -46,16 +46,16 @@ fn add_missing_impl_members_inner(
     let position = FilePosition { file_id, offset: impl_node.syntax().range().start() };
     let analyzer = hir::SourceAnalyzer::new(ctx.db, position.file_id, impl_node.syntax(), None);

-        resolve_target_trait_def(ctx.db, &analyzer, impl_node)?
+        resolve_target_trait_def(ctx.db, &analyzer, &impl_node)?
     };

-    let def_name = |kind| -> Option<&SmolStr> {
+    let def_name = |kind| -> Option<SmolStr> {
         match kind {
-            ImplItemKind::FnDef(def) => def.name(),
-            ImplItemKind::TypeAliasDef(def) => def.name(),
-            ImplItemKind::ConstDef(def) => def.name(),
+            ast::ImplItemKind::FnDef(def) => def.name(),
+            ast::ImplItemKind::TypeAliasDef(def) => def.name(),
+            ast::ImplItemKind::ConstDef(def) => def.name(),
         }
-        .map(ast::Name::text)
+        .map(|it| it.text().clone())
     };

     let trait_items = trait_def.item_list()?.impl_items();
@@ -78,18 +78,13 @@ fn add_missing_impl_members_inner(

     ctx.add_action(AssistId(assist_id), label, |edit| {
         let n_existing_items = impl_item_list.impl_items().count();
-        let items: Vec<_> = missing_items
-            .into_iter()
-            .map(|it| match it.kind() {
-                ImplItemKind::FnDef(def) => {
-                    strip_docstring(ImplItem::cast(add_body(def).syntax()).unwrap())
-                }
-                _ => strip_docstring(it),
-            })
-            .collect();
+        let items = missing_items.into_iter().map(|it| match it.kind() {
+            ImplItemKind::FnDef(def) => strip_docstring(add_body(def).into()),
+            _ => strip_docstring(it),
+        });
         let mut ast_editor = AstEditor::new(impl_item_list);

-        ast_editor.append_items(items.iter().map(|it| &**it));
+        ast_editor.append_items(items);

         let first_new_item = ast_editor.ast().impl_items().nth(n_existing_items).unwrap();
         let cursor_position = first_new_item.syntax().range().start();
@@ -101,14 +96,14 @@ fn add_missing_impl_members_inner(
     ctx.build()
 }

-fn strip_docstring(item: &ast::ImplItem) -> TreeArc<ast::ImplItem> {
+fn strip_docstring(item: ast::ImplItem) -> ast::ImplItem {
     let mut ast_editor = AstEditor::new(item);
     ast_editor.strip_attrs_and_docs();
     ast_editor.ast().to_owned()
 }

-fn add_body(fn_def: &ast::FnDef) -> TreeArc<ast::FnDef> {
-    let mut ast_editor = AstEditor::new(fn_def);
+fn add_body(fn_def: ast::FnDef) -> ast::FnDef {
+    let mut ast_editor = AstEditor::new(fn_def.clone());
     if fn_def.body().is_none() {
         ast_editor.set_body(&AstBuilder::<ast::Block>::single_expr(
             &AstBuilder::<ast::Expr>::unimplemented(),
@@ -123,9 +118,12 @@ fn resolve_target_trait_def(
     db: &impl HirDatabase,
     analyzer: &hir::SourceAnalyzer,
     impl_block: &ast::ImplBlock,
-) -> Option<TreeArc<ast::TraitDef>> {
-    let ast_path =
-        impl_block.target_trait().map(AstNode::syntax).and_then(ast::PathType::cast)?.path()?;
+) -> Option<ast::TraitDef> {
+    let ast_path = impl_block
+        .target_trait()
+        .map(|it| it.syntax().clone())
+        .and_then(ast::PathType::cast)?
+        .path()?;

     match analyzer.resolve_path(db, &ast_path) {
         Some(hir::PathResolution::Def(hir::ModuleDef::Trait(def))) => Some(def.source(db).ast),
@@ -49,7 +49,7 @@ pub(crate) enum Assist {
 pub(crate) struct AssistCtx<'a, DB> {
     pub(crate) db: &'a DB,
     pub(crate) frange: FileRange,
-    source_file: &'a SourceFile,
+    source_file: SourceFile,
     should_compute_edit: bool,
     assist: Assist,
 }
@@ -59,7 +59,7 @@ impl<'a, DB> Clone for AssistCtx<'a, DB> {
         AssistCtx {
             db: self.db,
             frange: self.frange,
-            source_file: self.source_file,
+            source_file: self.source_file.clone(),
             should_compute_edit: self.should_compute_edit,
             assist: self.assist.clone(),
         }
@@ -104,18 +104,18 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> {
         Some(self.assist)
     }

-    pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken<'a>> {
+    pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken> {
         find_token_at_offset(self.source_file.syntax(), self.frange.range.start())
     }

-    pub(crate) fn node_at_offset<N: AstNode>(&self) -> Option<&'a N> {
+    pub(crate) fn node_at_offset<N: AstNode>(&self) -> Option<N> {
         find_node_at_offset(self.source_file.syntax(), self.frange.range.start())
     }
-    pub(crate) fn covering_element(&self) -> SyntaxElement<'a> {
+    pub(crate) fn covering_element(&self) -> SyntaxElement {
         find_covering_element(self.source_file.syntax(), self.frange.range)
     }

-    pub(crate) fn covering_node_for_range(&self, range: TextRange) -> SyntaxElement<'a> {
+    pub(crate) fn covering_node_for_range(&self, range: TextRange) -> SyntaxElement {
         find_covering_element(self.source_file.syntax(), range)
     }
 }
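With owned handles, query helpers like node_at_offset can return Option<N> instead of Option<&'a N>, so the context no longer leaks a lifetime into every caller. A runnable sketch of that query shape (SyntaxNode, AstNode, and FnDef here are simplified stand-ins, not the exact ra_syntax definitions):

    // Simplified stand-ins, for illustration only.
    #[derive(Clone)]
    struct SyntaxNode {
        kind: &'static str,
        offset: u32,
    }

    trait AstNode: Sized {
        fn cast(node: SyntaxNode) -> Option<Self>;
    }

    struct FnDef {
        node: SyntaxNode,
    }

    impl AstNode for FnDef {
        fn cast(node: SyntaxNode) -> Option<Self> {
            if node.kind == "FN_DEF" {
                Some(FnDef { node })
            } else {
                None
            }
        }
    }

    // Post-migration shape: the query hands back an owned node, so no
    // lifetime parameter escapes to the caller.
    fn node_at_offset<N: AstNode>(nodes: &[SyntaxNode], offset: u32) -> Option<N> {
        nodes.iter().find(|n| n.offset == offset).and_then(|n| N::cast(n.clone()))
    }

    fn main() {
        let nodes = vec![SyntaxNode { kind: "FN_DEF", offset: 4 }];
        let fn_def: Option<FnDef> = node_at_offset(&nodes, 4);
        assert!(fn_def.is_some());
        println!("kind: {}", fn_def.unwrap().node.kind);
    }

Note how cast also takes the node by value now, matching the new-API call sites in this diff.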
@@ -139,7 +139,7 @@ impl AssistBuilder {
     ) {
         let mut replace_with = replace_with.into();
         if let Some(indent) = leading_indent(node) {
-            replace_with = reindent(&replace_with, indent)
+            replace_with = reindent(&replace_with, &indent)
         }
         self.replace(node.range(), replace_with)
     }
@@ -4,18 +4,18 @@ use arrayvec::ArrayVec;
 use hir::Name;
 use ra_fmt::leading_indent;
 use ra_syntax::{
-    ast, AstNode, Direction, InsertPosition, SourceFile, SyntaxElement, SyntaxKind::*, TreeArc, T,
+    ast, AstNode, Direction, InsertPosition, SourceFile, SyntaxElement, SyntaxKind::*, T,
 };
 use ra_text_edit::TextEditBuilder;

 pub struct AstEditor<N: AstNode> {
-    original_ast: TreeArc<N>,
-    ast: TreeArc<N>,
+    original_ast: N,
+    ast: N,
 }

 impl<N: AstNode> AstEditor<N> {
-    pub fn new(node: &N) -> AstEditor<N> {
-        AstEditor { original_ast: node.to_owned(), ast: node.to_owned() }
+    pub fn new(node: N) -> AstEditor<N> {
+        AstEditor { original_ast: node.clone(), ast: node }
     }

     pub fn into_text_edit(self, builder: &mut TextEditBuilder) {
@@ -26,27 +26,27 @@ impl<N: AstNode> AstEditor<N> {
     }

     pub fn ast(&self) -> &N {
-        &*self.ast
+        &self.ast
     }

     #[must_use]
-    fn insert_children<'a>(
+    fn insert_children(
         &self,
-        position: InsertPosition<SyntaxElement<'_>>,
-        to_insert: impl Iterator<Item = SyntaxElement<'a>>,
-    ) -> TreeArc<N> {
+        position: InsertPosition<SyntaxElement>,
+        to_insert: impl Iterator<Item = SyntaxElement>,
+    ) -> N {
         let new_syntax = self.ast().syntax().insert_children(position, to_insert);
-        N::cast(&new_syntax).unwrap().to_owned()
+        N::cast(new_syntax).unwrap()
     }

     #[must_use]
-    fn replace_children<'a>(
+    fn replace_children(
         &self,
-        to_delete: RangeInclusive<SyntaxElement<'_>>,
-        to_insert: impl Iterator<Item = SyntaxElement<'a>>,
-    ) -> TreeArc<N> {
+        to_delete: RangeInclusive<SyntaxElement>,
+        to_insert: impl Iterator<Item = SyntaxElement>,
+    ) -> N {
         let new_syntax = self.ast().syntax().replace_children(to_delete, to_insert);
-        N::cast(&new_syntax).unwrap().to_owned()
+        N::cast(new_syntax).unwrap()
     }

     fn do_make_multiline(&mut self) {
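These editors never mutate a tree in place: insert_children and replace_children build a new tree and hand back an owned root, which the caller reassigns to self.ast; #[must_use] flags callers that drop the result. A self-contained sketch of that functional-update style (a toy token list stands in for the syntax tree):

    #[derive(Clone)]
    struct Tree {
        tokens: Vec<String>,
    }

    impl Tree {
        // Returns an updated copy; `#[must_use]` catches callers that
        // forget to keep the result, since the original is untouched.
        #[must_use]
        fn insert(&self, index: usize, token: &str) -> Tree {
            let mut tokens = self.tokens.clone();
            tokens.insert(index, token.to_string());
            Tree { tokens }
        }
    }

    fn main() {
        let mut tree = Tree { tokens: vec!["{".into(), "}".into()] };
        // The editing pattern from the diff: rebuild, then reassign.
        tree = tree.insert(1, "field");
        assert_eq!(tree.tokens, ["{", "field", "}"]);
    }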
@@ -66,16 +66,18 @@ impl<N: AstNode> AstEditor<N> {
                 if ws.text().contains('\n') {
                     return;
                 }
-                Some(ws)
+                Some(ws.clone())
             }
         };

-        let indent = leading_indent(self.ast().syntax()).unwrap_or("");
+        let indent = leading_indent(self.ast().syntax()).unwrap_or("".into());
         let ws = tokens::WsBuilder::new(&format!("\n{}", indent));
         let to_insert = iter::once(ws.ws().into());
         self.ast = match existing_ws {
             None => self.insert_children(InsertPosition::After(l_curly), to_insert),
-            Some(ws) => self.replace_children(RangeInclusive::new(ws.into(), ws.into()), to_insert),
+            Some(ws) => {
+                self.replace_children(RangeInclusive::new(ws.clone().into(), ws.into()), to_insert)
+            }
         };
     }
 }
@@ -95,7 +97,7 @@ impl AstEditor<ast::NamedFieldList> {
         let space = if is_multiline {
             ws = tokens::WsBuilder::new(&format!(
                 "\n{} ",
-                leading_indent(self.ast().syntax()).unwrap_or("")
+                leading_indent(self.ast().syntax()).unwrap_or("".into())
             ));
             ws.ws()
         } else {
@@ -104,7 +106,7 @@ impl AstEditor<ast::NamedFieldList> {

         let mut to_insert: ArrayVec<[SyntaxElement; 4]> = ArrayVec::new();
         to_insert.push(space.into());
-        to_insert.push(field.syntax().into());
+        to_insert.push(field.syntax().clone().into());
         to_insert.push(tokens::comma().into());

         macro_rules! after_l_curly {
@@ -127,7 +129,7 @@ impl AstEditor<ast::NamedFieldList> {
                     InsertPosition::After(comma)
                 } else {
                     to_insert.insert(0, tokens::comma().into());
-                    InsertPosition::After($anchor.syntax().into())
+                    InsertPosition::After($anchor.syntax().clone().into())
                 }
             };
         };
@@ -144,7 +146,9 @@ impl AstEditor<ast::NamedFieldList> {
                     None => after_l_curly!(),
                 }
             }
-            InsertPosition::Before(anchor) => InsertPosition::Before(anchor.syntax().into()),
+            InsertPosition::Before(anchor) => {
+                InsertPosition::Before(anchor.syntax().clone().into())
+            }
             InsertPosition::After(anchor) => after_field!(anchor),
         };

@@ -157,7 +161,7 @@ impl AstEditor<ast::NamedFieldList> {
 }

 impl AstEditor<ast::ItemList> {
-    pub fn append_items<'a>(&mut self, items: impl Iterator<Item = &'a ast::ImplItem>) {
+    pub fn append_items(&mut self, items: impl Iterator<Item = ast::ImplItem>) {
         let n_existing_items = self.ast().impl_items().count();
         if n_existing_items == 0 {
             self.do_make_multiline();
@@ -165,22 +169,23 @@ impl AstEditor<ast::ItemList> {
         items.for_each(|it| self.append_item(it));
     }

-    pub fn append_item(&mut self, item: &ast::ImplItem) {
+    pub fn append_item(&mut self, item: ast::ImplItem) {
         let (indent, position) = match self.ast().impl_items().last() {
             Some(it) => (
-                leading_indent(it.syntax()).unwrap_or("").to_string(),
-                InsertPosition::After(it.syntax().into()),
+                leading_indent(it.syntax()).unwrap_or_default().to_string(),
+                InsertPosition::After(it.syntax().clone().into()),
             ),
             None => match self.l_curly() {
                 Some(it) => (
-                    " ".to_string() + leading_indent(self.ast().syntax()).unwrap_or(""),
+                    " ".to_string() + &leading_indent(self.ast().syntax()).unwrap_or_default(),
                     InsertPosition::After(it),
                 ),
                 None => return,
             },
         };
         let ws = tokens::WsBuilder::new(&format!("\n{}", indent));
-        let to_insert: ArrayVec<[SyntaxElement; 2]> = [ws.ws().into(), item.syntax().into()].into();
+        let to_insert: ArrayVec<[SyntaxElement; 2]> =
+            [ws.ws().into(), item.syntax().clone().into()].into();
         self.ast = self.insert_children(position, to_insert.into_iter());
     }

@@ -197,9 +202,9 @@ impl AstEditor<ast::ImplItem> {
             .children_with_tokens()
             .find(|it| it.kind() == ATTR || it.kind() == COMMENT)
         {
-            let end = match start.next_sibling_or_token() {
-                Some(el) if el.kind() == WHITESPACE => el,
-                Some(_) | None => start,
+            let end = match &start.next_sibling_or_token() {
+                Some(el) if el.kind() == WHITESPACE => el.clone(),
+                Some(_) | None => start.clone(),
             };
             self.ast = self.replace_children(RangeInclusive::new(start, end), iter::empty());
         }
@@ -210,18 +215,18 @@ impl AstEditor<ast::FnDef> {
     pub fn set_body(&mut self, body: &ast::Block) {
         let mut to_insert: ArrayVec<[SyntaxElement; 2]> = ArrayVec::new();
         let old_body_or_semi: SyntaxElement = if let Some(old_body) = self.ast().body() {
-            old_body.syntax().into()
+            old_body.syntax().clone().into()
         } else if let Some(semi) = self.ast().semicolon_token() {
             to_insert.push(tokens::single_space().into());
             semi.into()
         } else {
             to_insert.push(tokens::single_space().into());
-            to_insert.push(body.syntax().into());
+            to_insert.push(body.syntax().clone().into());
             self.ast = self.insert_children(InsertPosition::Last, to_insert.into_iter());
             return;
         };
-        to_insert.push(body.syntax().into());
-        let replace_range = RangeInclusive::new(old_body_or_semi, old_body_or_semi);
+        to_insert.push(body.syntax().clone().into());
+        let replace_range = RangeInclusive::new(old_body_or_semi.clone(), old_body_or_semi);
         self.ast = self.replace_children(replace_range, to_insert.into_iter())
     }
 }
@@ -231,15 +236,15 @@ pub struct AstBuilder<N: AstNode> {
 }

 impl AstBuilder<ast::NamedField> {
-    pub fn from_name(name: &Name) -> TreeArc<ast::NamedField> {
+    pub fn from_name(name: &Name) -> ast::NamedField {
         ast_node_from_file_text(&format!("fn f() {{ S {{ {}: (), }} }}", name))
     }

-    fn from_text(text: &str) -> TreeArc<ast::NamedField> {
+    fn from_text(text: &str) -> ast::NamedField {
         ast_node_from_file_text(&format!("fn f() {{ S {{ {}, }} }}", text))
     }

-    pub fn from_pieces(name: &ast::NameRef, expr: Option<&ast::Expr>) -> TreeArc<ast::NamedField> {
+    pub fn from_pieces(name: &ast::NameRef, expr: Option<&ast::Expr>) -> ast::NamedField {
         match expr {
             Some(expr) => Self::from_text(&format!("{}: {}", name.syntax(), expr.syntax())),
             None => Self::from_text(&name.syntax().to_string()),
@@ -248,36 +253,36 @@ impl AstBuilder<ast::NamedField> {
 }

 impl AstBuilder<ast::Block> {
-    fn from_text(text: &str) -> TreeArc<ast::Block> {
+    fn from_text(text: &str) -> ast::Block {
         ast_node_from_file_text(&format!("fn f() {}", text))
     }

-    pub fn single_expr(e: &ast::Expr) -> TreeArc<ast::Block> {
+    pub fn single_expr(e: &ast::Expr) -> ast::Block {
         Self::from_text(&format!("{{ {} }}", e.syntax()))
     }
 }

 impl AstBuilder<ast::Expr> {
-    fn from_text(text: &str) -> TreeArc<ast::Expr> {
+    fn from_text(text: &str) -> ast::Expr {
         ast_node_from_file_text(&format!("fn f() {{ {}; }}", text))
     }

-    pub fn unit() -> TreeArc<ast::Expr> {
+    pub fn unit() -> ast::Expr {
         Self::from_text("()")
     }

-    pub fn unimplemented() -> TreeArc<ast::Expr> {
+    pub fn unimplemented() -> ast::Expr {
         Self::from_text("unimplemented!()")
     }
 }

 impl AstBuilder<ast::NameRef> {
-    pub fn new(text: &str) -> TreeArc<ast::NameRef> {
+    pub fn new(text: &str) -> ast::NameRef {
         ast_node_from_file_text(&format!("fn f() {{ {}; }}", text))
     }
 }

-fn ast_node_from_file_text<N: AstNode>(text: &str) -> TreeArc<N> {
+fn ast_node_from_file_text<N: AstNode>(text: &str) -> N {
     let parse = SourceFile::parse(text);
     let res = parse.tree().syntax().descendants().find_map(N::cast).unwrap().to_owned();
     res
@@ -285,47 +290,49 @@ fn ast_node_from_file_text<N: AstNode>(text: &str) -> TreeArc<N> {

 mod tokens {
     use once_cell::sync::Lazy;
-    use ra_syntax::{AstNode, SourceFile, SyntaxKind::*, SyntaxToken, TreeArc, T};
+    use ra_syntax::{AstNode, Parse, SourceFile, SyntaxKind::*, SyntaxToken, T};

-    static SOURCE_FILE: Lazy<TreeArc<SourceFile>> =
-        Lazy::new(|| SourceFile::parse(",\n; ;").tree().to_owned());
+    static SOURCE_FILE: Lazy<Parse<SourceFile>> = Lazy::new(|| SourceFile::parse(",\n; ;"));

-    pub(crate) fn comma() -> SyntaxToken<'static> {
+    pub(crate) fn comma() -> SyntaxToken {
         SOURCE_FILE
+            .tree()
             .syntax()
             .descendants_with_tokens()
-            .filter_map(|it| it.as_token())
+            .filter_map(|it| it.as_token().cloned())
             .find(|it| it.kind() == T![,])
             .unwrap()
     }

-    pub(crate) fn single_space() -> SyntaxToken<'static> {
+    pub(crate) fn single_space() -> SyntaxToken {
         SOURCE_FILE
+            .tree()
             .syntax()
             .descendants_with_tokens()
-            .filter_map(|it| it.as_token())
+            .filter_map(|it| it.as_token().cloned())
             .find(|it| it.kind() == WHITESPACE && it.text().as_str() == " ")
             .unwrap()
     }

     #[allow(unused)]
-    pub(crate) fn single_newline() -> SyntaxToken<'static> {
+    pub(crate) fn single_newline() -> SyntaxToken {
         SOURCE_FILE
+            .tree()
             .syntax()
             .descendants_with_tokens()
-            .filter_map(|it| it.as_token())
+            .filter_map(|it| it.as_token().cloned())
             .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n")
             .unwrap()
     }

-    pub(crate) struct WsBuilder(TreeArc<SourceFile>);
+    pub(crate) struct WsBuilder(SourceFile);

     impl WsBuilder {
         pub(crate) fn new(text: &str) -> WsBuilder {
             WsBuilder(SourceFile::parse(text).ok().unwrap())
         }
-        pub(crate) fn ws(&self) -> SyntaxToken<'_> {
-            self.0.syntax().first_child_or_token().unwrap().as_token().unwrap()
+        pub(crate) fn ws(&self) -> SyntaxToken {
+            self.0.syntax().first_child_or_token().unwrap().as_token().cloned().unwrap()
         }
     }

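The tokens module above shows the new pattern for caching a parse: keep the whole Parse result in a Lazy static and copy owned tokens out of it on demand with .cloned(). A reduced, runnable sketch of the same caching shape using once_cell (the Vec<String> payload is a stand-in for Parse<SourceFile>, which this sketch does not depend on):

    use once_cell::sync::Lazy;

    // Stand-in for Parse<SourceFile>: a parsed artifact kept alive for
    // the whole program, from which callers copy out small owned pieces.
    static SOURCE: Lazy<Vec<String>> =
        Lazy::new(|| ",\n; ;".split('\n').map(str::to_string).collect());

    // Like tokens::comma() after the migration: returns an owned value
    // cloned out of the cached parse instead of a 'static borrow.
    fn first_line() -> String {
        SOURCE.first().cloned().unwrap()
    }

    fn main() {
        assert_eq!(first_line(), ",");
    }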
@@ -12,25 +12,25 @@ use ra_syntax::{
     SyntaxNode, TextRange, T,
 };

-fn collect_path_segments_raw<'a>(
-    segments: &mut Vec<&'a ast::PathSegment>,
-    mut path: &'a ast::Path,
+fn collect_path_segments_raw(
+    segments: &mut Vec<ast::PathSegment>,
+    mut path: ast::Path,
 ) -> Option<usize> {
     let oldlen = segments.len();
     loop {
         let mut children = path.syntax().children_with_tokens();
         let (first, second, third) = (
-            children.next().map(|n| (n, n.kind())),
-            children.next().map(|n| (n, n.kind())),
-            children.next().map(|n| (n, n.kind())),
+            children.next().map(|n| (n.clone(), n.kind())),
+            children.next().map(|n| (n.clone(), n.kind())),
+            children.next().map(|n| (n.clone(), n.kind())),
         );
         match (first, second, third) {
             (Some((subpath, PATH)), Some((_, T![::])), Some((segment, PATH_SEGMENT))) => {
-                path = ast::Path::cast(subpath.as_node()?)?;
-                segments.push(ast::PathSegment::cast(segment.as_node()?)?);
+                path = ast::Path::cast(subpath.as_node()?.clone())?;
+                segments.push(ast::PathSegment::cast(segment.as_node()?.clone())?);
             }
             (Some((segment, PATH_SEGMENT)), _, _) => {
-                segments.push(ast::PathSegment::cast(segment.as_node()?)?);
+                segments.push(ast::PathSegment::cast(segment.as_node()?.clone())?);
                 break;
             }
             (_, _, _) => return None,
@@ -60,7 +60,7 @@ fn fmt_segments_raw(segments: &[SmolStr], buf: &mut String) {
 }

 // Returns the numeber of common segments.
-fn compare_path_segments(left: &[SmolStr], right: &[&ast::PathSegment]) -> usize {
+fn compare_path_segments(left: &[SmolStr], right: &[ast::PathSegment]) -> usize {
     left.iter().zip(right).filter(|(l, r)| compare_path_segment(l, r)).count()
 }

@@ -81,12 +81,12 @@ fn compare_path_segment_with_name(a: &SmolStr, b: &ast::Name) -> bool {
     a == b.text()
 }

-#[derive(Copy, Clone)]
-enum ImportAction<'a> {
+#[derive(Clone)]
+enum ImportAction {
     Nothing,
     // Add a brand new use statement.
     AddNewUse {
-        anchor: Option<&'a SyntaxNode>, // anchor node
+        anchor: Option<SyntaxNode>, // anchor node
         add_after_anchor: bool,
     },

@@ -94,9 +94,9 @@ enum ImportAction<'a> {
     AddNestedImport {
         // how may segments matched with the target path
         common_segments: usize,
-        path_to_split: &'a ast::Path,
+        path_to_split: ast::Path,
         // the first segment of path_to_split we want to add into the new nested list
-        first_segment_to_split: Option<&'a ast::PathSegment>,
+        first_segment_to_split: Option<ast::PathSegment>,
         // Wether to add 'self' in addition to the target path
         add_self: bool,
     },
@@ -104,20 +104,20 @@ enum ImportAction<'a> {
     AddInTreeList {
         common_segments: usize,
         // The UseTreeList where to add the target path
-        tree_list: &'a ast::UseTreeList,
+        tree_list: ast::UseTreeList,
         add_self: bool,
     },
 }

-impl<'a> ImportAction<'a> {
-    fn add_new_use(anchor: Option<&'a SyntaxNode>, add_after_anchor: bool) -> Self {
+impl ImportAction {
+    fn add_new_use(anchor: Option<SyntaxNode>, add_after_anchor: bool) -> Self {
         ImportAction::AddNewUse { anchor, add_after_anchor }
     }

     fn add_nested_import(
         common_segments: usize,
-        path_to_split: &'a ast::Path,
-        first_segment_to_split: Option<&'a ast::PathSegment>,
+        path_to_split: ast::Path,
+        first_segment_to_split: Option<ast::PathSegment>,
         add_self: bool,
     ) -> Self {
         ImportAction::AddNestedImport {
@@ -130,14 +130,14 @@ impl<'a> ImportAction<'a> {

     fn add_in_tree_list(
         common_segments: usize,
-        tree_list: &'a ast::UseTreeList,
+        tree_list: ast::UseTreeList,
         add_self: bool,
     ) -> Self {
         ImportAction::AddInTreeList { common_segments, tree_list, add_self }
     }

-    fn better<'b>(left: &'b ImportAction<'a>, right: &'b ImportAction<'a>) -> &'b ImportAction<'a> {
-        if left.is_better(right) {
+    fn better(left: ImportAction, right: ImportAction) -> ImportAction {
+        if left.is_better(&right) {
             left
         } else {
             right
@@ -166,12 +166,12 @@ impl<'a> ImportAction<'a> {

 // Find out the best ImportAction to import target path against current_use_tree.
 // If current_use_tree has a nested import the function gets called recursively on every UseTree inside a UseTreeList.
-fn walk_use_tree_for_best_action<'a>(
-    current_path_segments: &mut Vec<&'a ast::PathSegment>, // buffer containing path segments
-    current_parent_use_tree_list: Option<&'a ast::UseTreeList>, // will be Some value if we are in a nested import
-    current_use_tree: &'a ast::UseTree, // the use tree we are currently examinating
-    target: &[SmolStr], // the path we want to import
-) -> ImportAction<'a> {
+fn walk_use_tree_for_best_action(
+    current_path_segments: &mut Vec<ast::PathSegment>, // buffer containing path segments
+    current_parent_use_tree_list: Option<ast::UseTreeList>, // will be Some value if we are in a nested import
+    current_use_tree: ast::UseTree, // the use tree we are currently examinating
+    target: &[SmolStr], // the path we want to import
+) -> ImportAction {
     // We save the number of segments in the buffer so we can restore the correct segments
     // before returning. Recursive call will add segments so we need to delete them.
     let prev_len = current_path_segments.len();
@@ -188,32 +188,36 @@ fn walk_use_tree_for_best_action<'a>(
                     .syntax()
                     .ancestors()
                     .find_map(ast::UseItem::cast)
-                    .map(AstNode::syntax),
+                    .map(|it| it.syntax().clone()),
                 true,
             );
         }
     };

     // This can happen only if current_use_tree is a direct child of a UseItem
-    if let Some(name) = alias.and_then(ast::NameOwner::name) {
-        if compare_path_segment_with_name(&target[0], name) {
+    if let Some(name) = alias.and_then(|it| it.name()) {
+        if compare_path_segment_with_name(&target[0], &name) {
             return ImportAction::Nothing;
         }
     }

-    collect_path_segments_raw(current_path_segments, path);
+    collect_path_segments_raw(current_path_segments, path.clone());

     // We compare only the new segments added in the line just above.
     // The first prev_len segments were already compared in 'parent' recursive calls.
     let left = target.split_at(prev_len).1;
     let right = current_path_segments.split_at(prev_len).1;
-    let common = compare_path_segments(left, right);
+    let common = compare_path_segments(left, &right);
     let mut action = match common {
         0 => ImportAction::add_new_use(
             // e.g: target is std::fmt and we can have
             // use foo::bar
             // We add a brand new use statement
-            current_use_tree.syntax().ancestors().find_map(ast::UseItem::cast).map(AstNode::syntax),
+            current_use_tree
+                .syntax()
+                .ancestors()
+                .find_map(ast::UseItem::cast)
+                .map(|it| it.syntax().clone()),
             true,
         ),
         common if common == left.len() && left.len() == right.len() => {
@@ -223,9 +227,9 @@ fn walk_use_tree_for_best_action<'a>(
             if let Some(list) = tree_list {
                 // In case 2 we need to add self to the nested list
                 // unless it's already there
-                let has_self = list.use_trees().map(ast::UseTree::path).any(|p| {
-                    p.and_then(ast::Path::segment)
-                        .and_then(ast::PathSegment::kind)
+                let has_self = list.use_trees().map(|it| it.path()).any(|p| {
+                    p.and_then(|it| it.segment())
+                        .and_then(|it| it.kind())
                         .filter(|k| *k == ast::PathSegmentKind::SelfKw)
                         .is_some()
                 });
@@ -248,7 +252,7 @@ fn walk_use_tree_for_best_action<'a>(
                 ImportAction::add_nested_import(
                     prev_len + common,
                     path,
-                    Some(segments_to_split[0]),
+                    Some(segments_to_split[0].clone()),
                     false,
                 )
             }
@@ -263,14 +267,18 @@ fn walk_use_tree_for_best_action<'a>(
                     .syntax()
                     .ancestors()
                     .find_map(ast::UseItem::cast)
-                    .map(AstNode::syntax),
+                    .map(|it| it.syntax().clone()),
                 true,
             );
             if let Some(list) = tree_list {
                 // Case 2, check recursively if the path is already imported in the nested list
                 for u in list.use_trees() {
-                    let child_action =
-                        walk_use_tree_for_best_action(current_path_segments, Some(list), u, target);
+                    let child_action = walk_use_tree_for_best_action(
+                        current_path_segments,
+                        Some(list.clone()),
+                        u,
+                        target,
+                    );
                     if child_action.is_better(&better_action) {
                         better_action = child_action;
                         if let ImportAction::Nothing = better_action {
@@ -291,7 +299,7 @@ fn walk_use_tree_for_best_action<'a>(
                 ImportAction::add_nested_import(
                     prev_len + common,
                     path,
-                    Some(segments_to_split[0]),
+                    Some(segments_to_split[0].clone()),
                     true,
                 )
             }
@@ -302,7 +310,7 @@ fn walk_use_tree_for_best_action<'a>(
             ImportAction::add_nested_import(
                 prev_len + common,
                 path,
-                Some(segments_to_split[0]),
+                Some(segments_to_split[0].clone()),
                 false,
             )
         }
@@ -311,7 +319,7 @@ fn walk_use_tree_for_best_action<'a>(

     // If we are inside a UseTreeList adding a use statement become adding to the existing
     // tree list.
-    action = match (current_parent_use_tree_list, action) {
+    action = match (current_parent_use_tree_list, action.clone()) {
         (Some(use_tree_list), ImportAction::AddNewUse { .. }) => {
             ImportAction::add_in_tree_list(prev_len, use_tree_list, false)
         }
@@ -323,19 +331,20 @@ fn walk_use_tree_for_best_action<'a>(
     action
 }

-fn best_action_for_target<'b, 'a: 'b>(
-    container: &'a SyntaxNode,
-    anchor: &'a SyntaxNode,
-    target: &'b [SmolStr],
-) -> ImportAction<'a> {
+fn best_action_for_target(
+    container: SyntaxNode,
+    anchor: SyntaxNode,
+    target: &[SmolStr],
+) -> ImportAction {
     let mut storage = Vec::with_capacity(16); // this should be the only allocation
     let best_action = container
         .children()
         .filter_map(ast::UseItem::cast)
-        .filter_map(ast::UseItem::use_tree)
+        .filter_map(|it| it.use_tree())
         .map(|u| walk_use_tree_for_best_action(&mut storage, None, u, target))
-        .fold(None, |best, a| {
-            best.and_then(|best| Some(*ImportAction::better(&best, &a))).or_else(|| Some(a))
+        .fold(None, |best, a| match best {
+            Some(best) => Some(ImportAction::better(best, a)),
+            None => Some(a),
         });

     match best_action {
@@ -386,7 +395,7 @@ fn make_assist(action: &ImportAction, target: &[SmolStr], edit: &mut TextEditBui
 }

 fn make_assist_add_new_use(
-    anchor: &Option<&SyntaxNode>,
+    anchor: &Option<SyntaxNode>,
     after: bool,
     target: &[SmolStr],
     edit: &mut TextEditBuilder,
@@ -396,7 +405,7 @@ fn make_assist_add_new_use(
     let mut buf = String::new();
     if after {
         buf.push_str("\n");
-        if let Some(spaces) = indent {
+        if let Some(spaces) = &indent {
             buf.push_str(spaces);
         }
     }
@@ -405,8 +414,8 @@ fn make_assist_add_new_use(
     buf.push_str(";");
     if !after {
         buf.push_str("\n\n");
-        if let Some(spaces) = indent {
-            buf.push_str(spaces);
+        if let Some(spaces) = &indent {
+            buf.push_str(&spaces);
         }
     }
     let position = if after { anchor.range().end() } else { anchor.range().start() };
@@ -444,7 +453,7 @@ fn make_assist_add_in_tree_list(

 fn make_assist_add_nested_import(
     path: &ast::Path,
-    first_segment_to_split: &Option<&ast::PathSegment>,
+    first_segment_to_split: &Option<ast::PathSegment>,
     target: &[SmolStr],
     add_self: bool,
     edit: &mut TextEditBuilder,
@@ -482,7 +491,7 @@ fn apply_auto_import(
     target: &[SmolStr],
     edit: &mut TextEditBuilder,
 ) {
-    let action = best_action_for_target(container, path.syntax(), target);
+    let action = best_action_for_target(container.clone(), path.syntax().clone(), target);
     make_assist(&action, target, edit);
     if let Some(last) = path.segment() {
         // Here we are assuming the assist will provide a correct use statement
@@ -522,26 +531,26 @@ pub fn auto_import_text_edit(
     edit: &mut TextEditBuilder,
 ) {
     let container = position.ancestors().find_map(|n| {
-        if let Some(module) = ast::Module::cast(n) {
-            return module.item_list().map(ast::AstNode::syntax);
+        if let Some(module) = ast::Module::cast(n.clone()) {
+            return module.item_list().map(|it| it.syntax().clone());
         }
-        ast::SourceFile::cast(n).map(ast::AstNode::syntax)
+        ast::SourceFile::cast(n).map(|it| it.syntax().clone())
     });

     if let Some(container) = container {
-        let action = best_action_for_target(container, anchor, target);
+        let action = best_action_for_target(container, anchor.clone(), target);
         make_assist(&action, target, edit);
     }
 }

 pub(crate) fn auto_import(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
-    let path: &ast::Path = ctx.node_at_offset()?;
+    let path: ast::Path = ctx.node_at_offset()?;
     // We don't want to mess with use statements
     if path.syntax().ancestors().find_map(ast::UseItem::cast).is_some() {
         return None;
     }

-    let hir_path = hir::Path::from_ast(path)?;
+    let hir_path = hir::Path::from_ast(path.clone())?;
     let segments = collect_hir_path_segments(&hir_path);
     if segments.len() < 2 {
         return None;
@@ -554,7 +563,7 @@ pub(crate) fn auto_import(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist
         format!("import {} in mod {}", fmt_segments(&segments), name.text()),
         |edit| {
             let mut text_edit = TextEditBuilder::default();
-            apply_auto_import(item_list.syntax(), path, &segments, &mut text_edit);
+            apply_auto_import(item_list.syntax(), &path, &segments, &mut text_edit);
             edit.set_edit_builder(text_edit);
         },
     );
@@ -566,7 +575,7 @@ pub(crate) fn auto_import(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist
         format!("import {} in the current file", fmt_segments(&segments)),
         |edit| {
             let mut text_edit = TextEditBuilder::default();
-            apply_auto_import(current_file.syntax(), path, &segments, &mut text_edit);
+            apply_auto_import(current_file.syntax(), &path, &segments, &mut text_edit);
             edit.set_edit_builder(text_edit);
         },
     );
@@ -35,7 +35,7 @@ fn add_vis(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
         if parent.children().any(|child| child.kind() == VISIBILITY) {
             return None;
         }
-        (vis_offset(parent), keyword.range())
+        (vis_offset(&parent), keyword.range())
     } else {
         let ident = ctx.token_at_offset().find(|leaf| leaf.kind() == IDENT)?;
         let field = ident.parent().ancestors().find_map(ast::NamedFieldDef::cast)?;
@@ -65,7 +65,7 @@ fn vis_offset(node: &SyntaxNode) -> TextUnit {
         .unwrap_or_else(|| node.range().start())
 }

-fn change_vis(mut ctx: AssistCtx<impl HirDatabase>, vis: &ast::Visibility) -> Option<Assist> {
+fn change_vis(mut ctx: AssistCtx<impl HirDatabase>, vis: ast::Visibility) -> Option<Assist> {
     if vis.syntax().text() == "pub" {
         ctx.add_action(AssistId("change_visibility"), "change to pub(crate)", |edit| {
             edit.target(vis.syntax().range());
@@ -27,7 +27,7 @@ pub(crate) fn fill_match_arms(mut ctx: AssistCtx<impl HirDatabase>) -> Option<As
     let mut arm_iter = arm_list.arms();
     let first = arm_iter.next();

-    match first {
+    match &first {
         // If there arm list is empty or there is only one trivial arm, then proceed.
         Some(arm) if is_trivial_arm(arm) => {
             if arm_iter.next() != None {
@@ -44,7 +44,7 @@ pub(crate) fn fill_match_arms(mut ctx: AssistCtx<impl HirDatabase>) -> Option<As

     let expr = match_expr.expr()?;
     let analyzer = hir::SourceAnalyzer::new(ctx.db, ctx.frange.file_id, expr.syntax(), None);
-    let match_expr_ty = analyzer.type_of(ctx.db, expr)?;
+    let match_expr_ty = analyzer.type_of(ctx.db, &expr)?;
     let enum_def = analyzer.autoderef(ctx.db, match_expr_ty).find_map(|ty| match ty.as_adt() {
         Some((AdtDef::Enum(e), _)) => Some(e),
         _ => None,
@@ -6,8 +6,8 @@ use crate::{Assist, AssistCtx, AssistId};
 /// Flip binary expression assist.
 pub(crate) fn flip_binexpr(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
     let expr = ctx.node_at_offset::<BinExpr>()?;
-    let lhs = expr.lhs()?.syntax();
-    let rhs = expr.rhs()?.syntax();
+    let lhs = expr.lhs()?.syntax().clone();
+    let rhs = expr.rhs()?.syntax().clone();
     let op_range = expr.op_token()?.range();
     // The assist should be applied only if the cursor is on the operator
     let cursor_in_range = ctx.frange.range.is_subrange(&op_range);
@@ -5,8 +5,8 @@ use crate::{Assist, AssistCtx, AssistId};

 pub(crate) fn flip_comma(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
     let comma = ctx.token_at_offset().find(|leaf| leaf.kind() == T![,])?;
-    let prev = non_trivia_sibling(comma.into(), Direction::Prev)?;
-    let next = non_trivia_sibling(comma.into(), Direction::Next)?;
+    let prev = non_trivia_sibling(comma.clone().into(), Direction::Prev)?;
+    let next = non_trivia_sibling(comma.clone().into(), Direction::Next)?;
     ctx.add_action(AssistId("flip_comma"), "flip comma", |edit| {
         edit.target(comma.range());
         edit.replace(prev.range(), next.to_string());
@@ -16,18 +16,18 @@ pub(crate) fn inline_local_varialbe(mut ctx: AssistCtx<impl HirDatabase>) -> Opt
     if bind_pat.is_mutable() {
         return None;
     }
-    let initializer_expr = let_stmt.initializer();
+    let initializer_expr = let_stmt.initializer()?;
     let delete_range = if let Some(whitespace) = let_stmt
         .syntax()
         .next_sibling_or_token()
-        .and_then(|it| ast::Whitespace::cast(it.as_token()?))
+        .and_then(|it| ast::Whitespace::cast(it.as_token()?.clone()))
     {
         TextRange::from_to(let_stmt.syntax().range().start(), whitespace.syntax().range().end())
     } else {
         let_stmt.syntax().range()
     };
     let analyzer = hir::SourceAnalyzer::new(ctx.db, ctx.frange.file_id, bind_pat.syntax(), None);
-    let refs = analyzer.find_all_refs(bind_pat);
+    let refs = analyzer.find_all_refs(&bind_pat);

     let mut wrap_in_parens = vec![true; refs.len()];

@@ -45,7 +45,7 @@ pub(crate) fn inline_local_varialbe(mut ctx: AssistCtx<impl HirDatabase>) -> Opt
             }
         };

-        wrap_in_parens[i] = match (initializer_expr?.kind(), usage_parent.kind()) {
+        wrap_in_parens[i] = match (initializer_expr.kind(), usage_parent.kind()) {
             (ExprKind::CallExpr(_), _)
             | (ExprKind::IndexExpr(_), _)
             | (ExprKind::MethodCallExpr(_), _)
@@ -71,7 +71,7 @@ pub(crate) fn inline_local_varialbe(mut ctx: AssistCtx<impl HirDatabase>) -> Opt
         };
     }

-    let init_str = initializer_expr?.syntax().text().to_string();
+    let init_str = initializer_expr.syntax().text().to_string();
     let init_in_paren = format!("({})", &init_str);

     ctx.add_action(
@@ -20,8 +20,8 @@ pub(crate) fn introduce_variable(mut ctx: AssistCtx<impl HirDatabase>) -> Option
         return None;
     }
     let expr = node.ancestors().find_map(valid_target_expr)?;
-    let (anchor_stmt, wrap_in_block) = anchor_stmt(expr)?;
-    let indent = anchor_stmt.prev_sibling_or_token()?.as_token()?;
+    let (anchor_stmt, wrap_in_block) = anchor_stmt(expr.clone())?;
+    let indent = anchor_stmt.prev_sibling_or_token()?.as_token()?.clone();
     if indent.kind() != WHITESPACE {
         return None;
     }
@@ -37,9 +37,9 @@ pub(crate) fn introduce_variable(mut ctx: AssistCtx<impl HirDatabase>) -> Option
     };

     expr.syntax().text().push_to(&mut buf);
-    let full_stmt = ast::ExprStmt::cast(anchor_stmt);
-    let is_full_stmt = if let Some(expr_stmt) = full_stmt {
-        Some(expr.syntax()) == expr_stmt.expr().map(|e| e.syntax())
+    let full_stmt = ast::ExprStmt::cast(anchor_stmt.clone());
+    let is_full_stmt = if let Some(expr_stmt) = &full_stmt {
+        Some(expr.syntax().clone()) == expr_stmt.expr().map(|e| e.syntax().clone())
     } else {
         false
     };
@@ -81,7 +81,7 @@ pub(crate) fn introduce_variable(mut ctx: AssistCtx<impl HirDatabase>) -> Option

 /// Check whether the node is a valid expression which can be extracted to a variable.
 /// In general that's true for any expression, but in some cases that would produce invalid code.
-fn valid_target_expr(node: &SyntaxNode) -> Option<&ast::Expr> {
+fn valid_target_expr(node: SyntaxNode) -> Option<ast::Expr> {
     match node.kind() {
         PATH_EXPR => None,
         BREAK_EXPR => ast::BreakExpr::cast(node).and_then(|e| e.expr()),
@@ -96,14 +96,10 @@ fn valid_target_expr(node: &SyntaxNode) -> Option<&ast::Expr> {
 /// to produce correct code.
 /// It can be a statement, the last in a block expression or a wanna be block
 /// expression like a lambda or match arm.
-fn anchor_stmt(expr: &ast::Expr) -> Option<(&SyntaxNode, bool)> {
+fn anchor_stmt(expr: ast::Expr) -> Option<(SyntaxNode, bool)> {
     expr.syntax().ancestors().find_map(|node| {
-        if ast::Stmt::cast(node).is_some() {
-            return Some((node, false));
-        }
-
         if let Some(expr) = node.parent().and_then(ast::Block::cast).and_then(|it| it.expr()) {
-            if expr.syntax() == node {
+            if expr.syntax() == &node {
                 tested_by!(test_introduce_var_last_expr);
                 return Some((node, false));
             }
@@ -115,6 +111,10 @@ fn anchor_stmt(expr: &ast::Expr) -> Option<(&SyntaxNode, bool)> {
             }
         }

+        if ast::Stmt::cast(node.clone()).is_some() {
+            return Some((node, false));
+        }
+
         None
     })
 }
@@ -18,9 +18,9 @@ pub(crate) fn move_guard_to_arm_body(mut ctx: AssistCtx<impl HirDatabase>) -> Op

     ctx.add_action(AssistId("move_guard_to_arm_body"), "move guard to arm body", |edit| {
         edit.target(guard.syntax().range());
-        let offseting_amount = match space_before_guard {
+        let offseting_amount = match &space_before_guard {
             Some(SyntaxElement::Token(tok)) => {
-                if let Some(_) = ast::Whitespace::cast(tok) {
+                if let Some(_) = ast::Whitespace::cast(tok.clone()) {
                     let ele = space_before_guard.unwrap().range();
                     edit.delete(ele);
                     ele.len()
@@ -39,11 +39,11 @@ pub(crate) fn move_guard_to_arm_body(mut ctx: AssistCtx<impl HirDatabase>) -> Op
 }

 pub(crate) fn move_arm_cond_to_match_guard(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
-    let match_arm: &MatchArm = ctx.node_at_offset::<MatchArm>()?;
+    let match_arm: MatchArm = ctx.node_at_offset::<MatchArm>()?;
     let last_match_pat = match_arm.pats().last()?;

     let arm_body = match_arm.expr()?;
-    let if_expr: &IfExpr = IfExpr::cast(arm_body.syntax())?;
+    let if_expr: IfExpr = IfExpr::cast(arm_body.syntax().clone())?;
     let cond = if_expr.condition()?;
     let then_block = if_expr.then_branch()?;

@@ -65,7 +65,7 @@ pub(crate) fn move_arm_cond_to_match_guard(mut ctx: AssistCtx<impl HirDatabase>)
         edit.target(if_expr.syntax().range());
         let then_only_expr = then_block.statements().next().is_none();

-        match then_block.expr() {
+        match &then_block.expr() {
             Some(then_expr) if then_only_expr => {
                 edit.replace(if_expr.syntax().range(), then_expr.syntax().text())
             }
@@ -8,7 +8,7 @@ use ra_syntax::{
 pub(crate) fn remove_dbg(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
     let macro_call = ctx.node_at_offset::<ast::MacroCall>()?;

-    if !is_valid_macrocall(macro_call, "dbg")? {
+    if !is_valid_macrocall(&macro_call, "dbg")? {
         return None;
     }

@@ -35,7 +35,7 @@ pub(crate) fn remove_dbg(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist>
     };

     let macro_content = {
-        let macro_args = macro_call.token_tree()?.syntax();
+        let macro_args = macro_call.token_tree()?.syntax().clone();
         let range = macro_args.range();
         let start = range.start() + TextUnit::of_char('(');
         let end = range.end() - TextUnit::of_char(')');
@@ -65,7 +65,7 @@ fn is_valid_macrocall(macro_call: &ast::MacroCall, macro_name: &str) -> Option<b
         return None;
     }

-    let node = macro_call.token_tree()?.syntax();
+    let node = macro_call.token_tree()?.syntax().clone();
     let first_child = node.first_child_or_token()?;
     let last_child = node.last_child_or_token()?;

@@ -5,7 +5,7 @@ use ra_syntax::{ast, AstNode};
 use crate::{Assist, AssistCtx, AssistId};

 pub(crate) fn replace_if_let_with_match(mut ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
-    let if_expr: &ast::IfExpr = ctx.node_at_offset()?;
+    let if_expr: ast::IfExpr = ctx.node_at_offset()?;
     let cond = if_expr.condition()?;
     let pat = cond.pat()?;
     let expr = cond.expr()?;
@@ -25,16 +25,11 @@ pub(crate) fn replace_if_let_with_match(mut ctx: AssistCtx<impl HirDatabase>) ->
     ctx.build()
 }

-fn build_match_expr(
-    expr: &ast::Expr,
-    pat1: &ast::Pat,
-    arm1: &ast::Block,
-    arm2: &ast::Block,
-) -> String {
+fn build_match_expr(expr: ast::Expr, pat1: ast::Pat, arm1: ast::Block, arm2: ast::Block) -> String {
     let mut buf = String::new();
     buf.push_str(&format!("match {} {{\n", expr.syntax().text()));
-    buf.push_str(&format!(" {} => {}\n", pat1.syntax().text(), format_arm(arm1)));
-    buf.push_str(&format!(" _ => {}\n", format_arm(arm2)));
+    buf.push_str(&format!(" {} => {}\n", pat1.syntax().text(), format_arm(&arm1)));
+    buf.push_str(&format!(" _ => {}\n", format_arm(&arm2)));
     buf.push_str("}");
     buf
 }
@@ -7,7 +7,7 @@ use clap::{App, Arg, SubCommand};
 use flexi_logger::Logger;
 use ra_ide_api::{file_structure, Analysis};
 use ra_prof::profile;
-use ra_syntax::{AstNode, SourceFile, TreeArc};
+use ra_syntax::{AstNode, SourceFile};

 type Result<T> = std::result::Result<T, Box<dyn Error + Send + Sync>>;

@@ -100,9 +100,9 @@ fn main() -> Result<()> {
     Ok(())
 }

-fn file() -> Result<TreeArc<SourceFile>> {
+fn file() -> Result<SourceFile> {
     let text = read_stdin()?;
-    Ok(SourceFile::parse(&text).tree().to_owned())
+    Ok(SourceFile::parse(&text).tree())
 }

 fn read_stdin() -> Result<String> {
@@ -3,7 +3,7 @@
 use itertools::Itertools;
 use ra_syntax::{
     ast::{self, AstNode, AstToken},
-    SyntaxKind,
+    SmolStr, SyntaxKind,
     SyntaxKind::*,
     SyntaxNode, SyntaxToken, T,
 };
@@ -15,12 +15,12 @@ pub fn reindent(text: &str, indent: &str) -> String {
 }

 /// If the node is on the beginning of the line, calculate indent.
-pub fn leading_indent(node: &SyntaxNode) -> Option<&str> {
+pub fn leading_indent(node: &SyntaxNode) -> Option<SmolStr> {
     for token in prev_tokens(node.first_token()?) {
-        if let Some(ws) = ast::Whitespace::cast(token) {
+        if let Some(ws) = ast::Whitespace::cast(token.clone()) {
             let ws_text = ws.text();
             if let Some(pos) = ws_text.rfind('\n') {
-                return Some(&ws_text[pos + 1..]);
+                return Some(ws_text[pos + 1..].into());
             }
         }
         if token.text().contains('\n') {
@@ -31,17 +31,17 @@ pub fn leading_indent(node: &SyntaxNode) -> Option<&str> {
 }

 fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> {
-    successors(token.prev_token(), |&token| token.prev_token())
+    successors(token.prev_token(), |token| token.prev_token())
 }

-pub fn extract_trivial_expression(block: &ast::Block) -> Option<&ast::Expr> {
+pub fn extract_trivial_expression(block: &ast::Block) -> Option<ast::Expr> {
     let expr = block.expr()?;
     if expr.syntax().text().contains('\n') {
         return None;
     }
     let non_trivial_children = block.syntax().children().filter(|it| match it.kind() {
         WHITESPACE | T!['{'] | T!['}'] => false,
-        _ => it != &expr.syntax(),
+        _ => it != expr.syntax(),
     });
     if non_trivial_children.count() > 0 {
         return None;
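leading_indent now returns an owned SmolStr instead of a &str borrowed out of a token, because after the migration the token it would borrow from is itself a temporary handle. A runnable sketch of that signature change only (String stands in for SmolStr, and this toy indent logic is not ra_fmt's actual implementation):

    // Before: fn leading_indent(text: &str) -> Option<&str> tied the
    // result to the input's lifetime. After: return an owned copy.
    fn leading_indent(line: &str) -> Option<String> {
        let ws_len = line.len() - line.trim_start().len();
        if ws_len == 0 {
            return None;
        }
        // `.into()` mirrors the `ws_text[pos + 1..].into()` call in the
        // diff, converting a borrowed slice into an owned value.
        Some(line[..ws_len].into())
    }

    fn main() {
        assert_eq!(leading_indent("    foo").as_deref(), Some("    "));
        assert_eq!(leading_indent("bar"), None);
    }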
@@ -4,10 +4,7 @@
 use std::sync::Arc;

 use ra_arena::{impl_arena_id, Arena, RawId};
-use ra_syntax::{
-    ast::{self, NameOwner, StructKind, TypeAscriptionOwner},
-    TreeArc,
-};
+use ra_syntax::ast::{self, NameOwner, StructKind, TypeAscriptionOwner};

 use crate::{
     type_ref::TypeRef, AsName, AstDatabase, Crate, DefDatabase, Enum, EnumVariant, FieldSource,
@@ -59,11 +56,11 @@ impl StructData {
         struct_: Struct,
     ) -> Arc<StructData> {
         let src = struct_.source(db);
-        Arc::new(StructData::new(&*src.ast))
+        Arc::new(StructData::new(&src.ast))
     }
 }

-fn variants(enum_def: &ast::EnumDef) -> impl Iterator<Item = &ast::EnumVariant> {
+fn variants(enum_def: &ast::EnumDef) -> impl Iterator<Item = ast::EnumVariant> {
     enum_def.variant_list().into_iter().flat_map(|it| it.variants())
 }

@@ -71,9 +68,9 @@ impl EnumVariant {
     pub(crate) fn source_impl(
         self,
         db: &(impl DefDatabase + AstDatabase),
-    ) -> Source<TreeArc<ast::EnumVariant>> {
+    ) -> Source<ast::EnumVariant> {
         let src = self.parent.source(db);
-        let ast = variants(&*src.ast)
+        let ast = variants(&src.ast)
             .zip(db.enum_data(self.parent).variants.iter())
             .find(|(_syntax, (id, _))| *id == self.id)
             .unwrap()
@@ -96,7 +93,7 @@ impl EnumData {
     pub(crate) fn enum_data_query(db: &(impl DefDatabase + AstDatabase), e: Enum) -> Arc<EnumData> {
         let src = e.source(db);
         let name = src.ast.name().map(|n| n.as_name());
-        let variants = variants(&*src.ast)
+        let variants = variants(&src.ast)
             .map(|var| EnumVariantData {
                 name: var.name().map(|it| it.as_name()),
                 variant_data: Arc::new(VariantData::new(var.kind())),
@@ -4,10 +4,7 @@ pub(crate) mod docs;
 use std::sync::Arc;

 use ra_db::{CrateId, Edition, FileId, SourceRootId};
-use ra_syntax::{
-    ast::{self, NameOwner, TypeAscriptionOwner},
-    TreeArc,
-};
+use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner};

 use crate::{
     adt::{EnumVariantId, StructFieldId, VariantDef},
@@ -155,8 +152,8 @@ impl_froms!(
 );

 pub enum ModuleSource {
-    SourceFile(TreeArc<ast::SourceFile>),
-    Module(TreeArc<ast::Module>),
+    SourceFile(ast::SourceFile),
+    Module(ast::Module),
 }

 impl ModuleSource {
@@ -199,7 +196,7 @@ impl Module {
         self,
         db: &impl HirDatabase,
         import: ImportId,
-    ) -> Either<TreeArc<ast::UseTree>, TreeArc<ast::ExternCrateItem>> {
+    ) -> Either<ast::UseTree, ast::ExternCrateItem> {
         let src = self.definition_source(db);
         let (_, source_map) = db.raw_items_with_source_map(src.file_id);
         source_map.get(&src.ast, import)
@@ -321,8 +318,8 @@ pub struct StructField {

 #[derive(Debug)]
 pub enum FieldSource {
-    Named(TreeArc<ast::NamedFieldDef>),
-    Pos(TreeArc<ast::PosFieldDef>),
+    Named(ast::NamedFieldDef),
+    Pos(ast::PosFieldDef),
 }

 impl StructField {
@@ -736,7 +733,7 @@ impl ConstData {
         konst: Const,
     ) -> Arc<ConstData> {
         let node = konst.source(db).ast;
-        const_data_for(&*node)
+        const_data_for(&node)
     }

     pub(crate) fn static_data_query(
@@ -744,7 +741,7 @@ impl ConstData {
         konst: Static,
     ) -> Arc<ConstData> {
         let node = konst.source(db).ast;
-        const_data_for(&*node)
+        const_data_for(&node)
     }
 }

@@ -71,21 +71,21 @@ pub(crate) fn documentation_query(
     def: DocDef,
 ) -> Option<Documentation> {
     match def {
-        DocDef::Module(it) => docs_from_ast(&*it.declaration_source(db)?.ast),
+        DocDef::Module(it) => docs_from_ast(&it.declaration_source(db)?.ast),
         DocDef::StructField(it) => match it.source(db).ast {
-            FieldSource::Named(named) => docs_from_ast(&*named),
+            FieldSource::Named(named) => docs_from_ast(&named),
             FieldSource::Pos(..) => None,
         },
-        DocDef::Struct(it) => docs_from_ast(&*it.source(db).ast),
-        DocDef::Enum(it) => docs_from_ast(&*it.source(db).ast),
-        DocDef::EnumVariant(it) => docs_from_ast(&*it.source(db).ast),
-        DocDef::Static(it) => docs_from_ast(&*it.source(db).ast),
-        DocDef::Const(it) => docs_from_ast(&*it.source(db).ast),
-        DocDef::Function(it) => docs_from_ast(&*it.source(db).ast),
-        DocDef::Union(it) => docs_from_ast(&*it.source(db).ast),
-        DocDef::Trait(it) => docs_from_ast(&*it.source(db).ast),
-        DocDef::TypeAlias(it) => docs_from_ast(&*it.source(db).ast),
-        DocDef::MacroDef(it) => docs_from_ast(&*it.source(db).ast),
+        DocDef::Struct(it) => docs_from_ast(&it.source(db).ast),
+        DocDef::Enum(it) => docs_from_ast(&it.source(db).ast),
+        DocDef::EnumVariant(it) => docs_from_ast(&it.source(db).ast),
+        DocDef::Static(it) => docs_from_ast(&it.source(db).ast),
+        DocDef::Const(it) => docs_from_ast(&it.source(db).ast),
+        DocDef::Function(it) => docs_from_ast(&it.source(db).ast),
+        DocDef::Union(it) => docs_from_ast(&it.source(db).ast),
+        DocDef::Trait(it) => docs_from_ast(&it.source(db).ast),
+        DocDef::TypeAlias(it) => docs_from_ast(&it.source(db).ast),
+        DocDef::MacroDef(it) => docs_from_ast(&it.source(db).ast),
     }
 }

@ -1,4 +1,4 @@
use ra_syntax::{ast, TreeArc};
use ra_syntax::ast;

use crate::{
ids::AstItemDef, AstDatabase, Const, DefDatabase, Enum, EnumVariant, FieldSource, Function,
@ -34,7 +34,7 @@ impl Module {
pub fn declaration_source(
self,
db: &(impl DefDatabase + AstDatabase),
) -> Option<Source<TreeArc<ast::Module>>> {
) -> Option<Source<ast::Module>> {
let def_map = db.crate_def_map(self.krate);
let decl = def_map[self.module_id].declaration?;
let ast = decl.to_node(db);
@ -49,62 +49,62 @@ impl HasSource for StructField {
}
}
impl HasSource for Struct {
type Ast = TreeArc<ast::StructDef>;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::StructDef>> {
type Ast = ast::StructDef;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::StructDef> {
self.id.source(db)
}
}
impl HasSource for Union {
type Ast = TreeArc<ast::StructDef>;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::StructDef>> {
type Ast = ast::StructDef;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::StructDef> {
self.id.source(db)
}
}
impl HasSource for Enum {
type Ast = TreeArc<ast::EnumDef>;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::EnumDef>> {
type Ast = ast::EnumDef;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::EnumDef> {
self.id.source(db)
}
}
impl HasSource for EnumVariant {
type Ast = TreeArc<ast::EnumVariant>;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::EnumVariant>> {
type Ast = ast::EnumVariant;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::EnumVariant> {
self.source_impl(db)
}
}
impl HasSource for Function {
type Ast = TreeArc<ast::FnDef>;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::FnDef>> {
type Ast = ast::FnDef;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::FnDef> {
self.id.source(db)
}
}
impl HasSource for Const {
type Ast = TreeArc<ast::ConstDef>;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::ConstDef>> {
type Ast = ast::ConstDef;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::ConstDef> {
self.id.source(db)
}
}
impl HasSource for Static {
type Ast = TreeArc<ast::StaticDef>;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::StaticDef>> {
type Ast = ast::StaticDef;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::StaticDef> {
self.id.source(db)
}
}
impl HasSource for Trait {
type Ast = TreeArc<ast::TraitDef>;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::TraitDef>> {
type Ast = ast::TraitDef;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::TraitDef> {
self.id.source(db)
}
}
impl HasSource for TypeAlias {
type Ast = TreeArc<ast::TypeAliasDef>;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::TypeAliasDef>> {
type Ast = ast::TypeAliasDef;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::TypeAliasDef> {
self.id.source(db)
}
}
impl HasSource for MacroDef {
type Ast = TreeArc<ast::MacroCall>;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::MacroCall>> {
type Ast = ast::MacroCall;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::MacroCall> {
Source { file_id: self.id.0.file_id(), ast: self.id.0.to_node(db) }
}
}

@ -2,7 +2,7 @@ use std::sync::Arc;

use parking_lot::Mutex;
use ra_db::{salsa, SourceDatabase};
use ra_syntax::{ast, Parse, SmolStr, SyntaxNode, TreeArc};
use ra_syntax::{ast, Parse, SmolStr, SyntaxNode};

use crate::{
adt::{EnumData, StructData},
@ -62,11 +62,11 @@ pub trait AstDatabase: InternDatabase {

#[salsa::transparent]
#[salsa::invoke(crate::source_id::AstIdMap::file_item_query)]
fn ast_id_to_node(&self, file_id: HirFileId, ast_id: ErasedFileAstId) -> TreeArc<SyntaxNode>;
fn ast_id_to_node(&self, file_id: HirFileId, ast_id: ErasedFileAstId) -> SyntaxNode;

#[salsa::transparent]
#[salsa::invoke(crate::ids::HirFileId::parse_or_expand_query)]
fn parse_or_expand(&self, file_id: HirFileId) -> Option<TreeArc<SyntaxNode>>;
fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode>;

#[salsa::invoke(crate::ids::HirFileId::parse_macro_query)]
fn parse_macro(&self, macro_file: ids::MacroFile) -> Option<Parse<SyntaxNode>>;

@ -1,6 +1,6 @@
use std::{any::Any, fmt};

use ra_syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, TextRange, TreeArc};
use ra_syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, TextRange};
use relative_path::RelativePathBuf;

use crate::{HirDatabase, HirFileId, Name};
@ -33,9 +33,9 @@ pub trait AstDiagnostic {
}

impl dyn Diagnostic {
pub fn syntax_node(&self, db: &impl HirDatabase) -> TreeArc<SyntaxNode> {
pub fn syntax_node(&self, db: &impl HirDatabase) -> SyntaxNode {
let node = db.parse_or_expand(self.file()).unwrap();
self.syntax_node_ptr().to_node(&*node).to_owned()
self.syntax_node_ptr().to_node(&node)
}

pub fn downcast_ref<D: Diagnostic>(&self) -> Option<&D> {
@ -143,11 +143,11 @@ impl Diagnostic for MissingFields {
}

impl AstDiagnostic for MissingFields {
type AST = TreeArc<ast::NamedFieldList>;
type AST = ast::NamedFieldList;

fn ast(&self, db: &impl HirDatabase) -> Self::AST {
let root = db.parse_or_expand(self.file()).unwrap();
let node = self.syntax_node_ptr().to_node(&*root);
ast::NamedFieldList::cast(&node).unwrap().to_owned()
let node = self.syntax_node_ptr().to_node(&root);
ast::NamedFieldList::cast(node).unwrap()
}
}

@ -550,7 +550,7 @@ where
self.exprs.alloc(block)
}

fn collect_expr(&mut self, expr: &ast::Expr) -> ExprId {
fn collect_expr(&mut self, expr: ast::Expr) -> ExprId {
let syntax_ptr = SyntaxNodePtr::new(expr.syntax());
match expr.kind() {
ast::ExprKind::IfExpr(e) => {
@ -565,7 +565,8 @@ where
.map(|b| match b {
ast::ElseBranch::Block(it) => self.collect_block(it),
ast::ElseBranch::IfExpr(elif) => {
let expr: &ast::Expr = ast::Expr::cast(elif.syntax()).unwrap();
let expr: ast::Expr =
ast::Expr::cast(elif.syntax().clone()).unwrap();
self.collect_expr(expr)
}
})
@ -582,7 +583,7 @@ where
let else_branch = e.else_branch().map(|b| match b {
ast::ElseBranch::Block(it) => self.collect_block(it),
ast::ElseBranch::IfExpr(elif) => {
let expr: &ast::Expr = ast::Expr::cast(elif.syntax()).unwrap();
let expr: ast::Expr = ast::Expr::cast(elif.syntax().clone()).unwrap();
self.collect_expr(expr)
}
});
@ -689,7 +690,7 @@ where
let struct_lit = if let Some(nfl) = e.named_field_list() {
let fields = nfl
.fields()
.inspect(|field| field_ptrs.push(AstPtr::new(*field)))
.inspect(|field| field_ptrs.push(AstPtr::new(field)))
.map(|field| StructLitField {
name: field
.name_ref()
@ -699,7 +700,7 @@ where
self.collect_expr(e)
} else if let Some(nr) = field.name_ref() {
// field shorthand
let id = self.exprs.alloc(Expr::Path(Path::from_name_ref(nr)));
let id = self.exprs.alloc(Expr::Path(Path::from_name_ref(&nr)));
self.source_map
.expr_map
.insert(SyntaxNodePtr::new(nr.syntax()), id);
@ -837,7 +838,7 @@ where
let ast_id = self
.db
.ast_id_map(self.current_file_id)
.ast_id(e)
.ast_id(&e)
.with_file_id(self.current_file_id);

if let Some(path) = e.path().and_then(Path::from_ast) {
@ -845,11 +846,11 @@ where
let call_id = MacroCallLoc { def: def.id, ast_id }.id(self.db);
let file_id = call_id.as_file(MacroFileKind::Expr);
if let Some(node) = self.db.parse_or_expand(file_id) {
if let Some(expr) = ast::Expr::cast(&*node) {
if let Some(expr) = ast::Expr::cast(node) {
log::debug!("macro expansion {}", expr.syntax().debug_dump());
let old_file_id =
std::mem::replace(&mut self.current_file_id, file_id);
let id = self.collect_expr(&expr);
let id = self.collect_expr(expr);
self.current_file_id = old_file_id;
return id;
}
@ -863,7 +864,7 @@ where
}
}

fn collect_expr_opt(&mut self, expr: Option<&ast::Expr>) -> ExprId {
fn collect_expr_opt(&mut self, expr: Option<ast::Expr>) -> ExprId {
if let Some(expr) = expr {
self.collect_expr(expr)
} else {
@ -871,7 +872,7 @@ where
}
}

fn collect_block(&mut self, block: &ast::Block) -> ExprId {
fn collect_block(&mut self, block: ast::Block) -> ExprId {
let statements = block
.statements()
.map(|s| match s.kind() {
@ -890,7 +891,7 @@ where
self.alloc_expr(Expr::Block { statements, tail }, SyntaxNodePtr::new(block.syntax()))
}

fn collect_block_opt(&mut self, block: Option<&ast::Block>) -> ExprId {
fn collect_block_opt(&mut self, block: Option<ast::Block>) -> ExprId {
if let Some(block) = block {
self.collect_block(block)
} else {
@ -898,7 +899,7 @@ where
}
}

fn collect_pat(&mut self, pat: &ast::Pat) -> PatId {
fn collect_pat(&mut self, pat: ast::Pat) -> PatId {
let pattern = match pat.kind() {
ast::PatKind::BindPat(bp) => {
let name = bp.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing);
@ -932,7 +933,8 @@ where
let mut fields: Vec<_> = field_pat_list
.bind_pats()
.filter_map(|bind_pat| {
let ast_pat = ast::Pat::cast(bind_pat.syntax()).expect("bind pat is a pat");
let ast_pat =
ast::Pat::cast(bind_pat.syntax().clone()).expect("bind pat is a pat");
let pat = self.collect_pat(ast_pat);
let name = bind_pat.name()?.as_name();
Some(FieldPat { name, pat })
@ -953,11 +955,11 @@ where
ast::PatKind::LiteralPat(_) => Pat::Missing,
ast::PatKind::SlicePat(_) | ast::PatKind::RangePat(_) => Pat::Missing,
};
let ptr = AstPtr::new(pat);
let ptr = AstPtr::new(&pat);
self.alloc_pat(pattern, Either::A(ptr))
}

fn collect_pat_opt(&mut self, pat: Option<&ast::Pat>) -> PatId {
fn collect_pat_opt(&mut self, pat: Option<ast::Pat>) -> PatId {
if let Some(pat) = pat {
self.collect_pat(pat)
} else {
@ -965,20 +967,20 @@ where
}
}

fn collect_const_body(&mut self, node: &ast::ConstDef) {
fn collect_const_body(&mut self, node: ast::ConstDef) {
let body = self.collect_expr_opt(node.body());
self.body_expr = Some(body);
}

fn collect_static_body(&mut self, node: &ast::StaticDef) {
fn collect_static_body(&mut self, node: ast::StaticDef) {
let body = self.collect_expr_opt(node.body());
self.body_expr = Some(body);
}

fn collect_fn_body(&mut self, node: &ast::FnDef) {
fn collect_fn_body(&mut self, node: ast::FnDef) {
if let Some(param_list) = node.param_list() {
if let Some(self_param) = param_list.self_param() {
let ptr = AstPtr::new(self_param);
let ptr = AstPtr::new(&self_param);
let param_pat = self.alloc_pat(
Pat::Bind {
name: SELF_PARAM,
@ -1027,17 +1029,17 @@ pub(crate) fn body_with_source_map_query(
DefWithBody::Const(ref c) => {
let src = c.source(db);
collector = ExprCollector::new(def, src.file_id, def.resolver(db), db);
collector.collect_const_body(&src.ast)
collector.collect_const_body(src.ast)
}
DefWithBody::Function(ref f) => {
let src = f.source(db);
collector = ExprCollector::new(def, src.file_id, def.resolver(db), db);
collector.collect_fn_body(&src.ast)
collector.collect_fn_body(src.ast)
}
DefWithBody::Static(ref s) => {
let src = s.source(db);
collector = ExprCollector::new(def, src.file_id, def.resolver(db), db);
collector.collect_static_body(&src.ast)
collector.collect_static_body(src.ast)
}
}

@ -190,7 +190,7 @@ mod tests {

let (db, _source_root, file_id) = MockDatabase::with_single_file(&code);
let file = db.parse(file_id).ok().unwrap();
let marker: &ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap();
let marker: ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap();
let analyzer = SourceAnalyzer::new(&db, file_id, marker.syntax(), None);

let scopes = analyzer.scopes();
@ -290,10 +290,10 @@ mod tests {
let file = db.parse(file_id).ok().unwrap();
let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into())
.expect("failed to find a name at the target offset");
let name_ref: &ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap();
let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap();
let analyzer = SourceAnalyzer::new(&db, file_id, name_ref.syntax(), None);

let local_name_entry = analyzer.resolve_local_name(name_ref).unwrap();
let local_name_entry = analyzer.resolve_local_name(&name_ref).unwrap();
let local_name =
local_name_entry.ptr().either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
assert_eq!(local_name.range(), expected_name.syntax().range());

@ -79,7 +79,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
.and_then(StructLit::cast)
.and_then(|lit| lit.named_field_list())
{
let field_list_ptr = AstPtr::new(field_list_node);
let field_list_ptr = AstPtr::new(&field_list_node);
self.sink.push(MissingFields {
file: file_id,
field_list: field_list_ptr,

@ -76,17 +76,17 @@ impl GenericParams {
generics.parent_params = parent.map(|p| db.generic_params(p));
let start = generics.parent_params.as_ref().map(|p| p.params.len()).unwrap_or(0) as u32;
match def {
GenericDef::Function(it) => generics.fill(&*it.source(db).ast, start),
GenericDef::Struct(it) => generics.fill(&*it.source(db).ast, start),
GenericDef::Union(it) => generics.fill(&*it.source(db).ast, start),
GenericDef::Enum(it) => generics.fill(&*it.source(db).ast, start),
GenericDef::Function(it) => generics.fill(&it.source(db).ast, start),
GenericDef::Struct(it) => generics.fill(&it.source(db).ast, start),
GenericDef::Union(it) => generics.fill(&it.source(db).ast, start),
GenericDef::Enum(it) => generics.fill(&it.source(db).ast, start),
GenericDef::Trait(it) => {
// traits get the Self type as an implicit first type parameter
generics.params.push(GenericParam { idx: start, name: SELF_TYPE, default: None });
generics.fill(&*it.source(db).ast, start + 1);
generics.fill(&it.source(db).ast, start + 1);
}
GenericDef::TypeAlias(it) => generics.fill(&*it.source(db).ast, start),
GenericDef::ImplBlock(it) => generics.fill(&*it.source(db).ast, start),
GenericDef::TypeAlias(it) => generics.fill(&it.source(db).ast, start),
GenericDef::ImplBlock(it) => generics.fill(&it.source(db).ast, start),
GenericDef::EnumVariant(_) => {}
}

@ -102,9 +102,9 @@ impl GenericParams {
}
}

fn fill_params(&mut self, params: &ast::TypeParamList, start: u32) {
fn fill_params(&mut self, params: ast::TypeParamList, start: u32) {
for (idx, type_param) in params.type_params().enumerate() {
let name = type_param.name().map(AsName::as_name).unwrap_or_else(Name::missing);
let name = type_param.name().map_or_else(Name::missing, |it| it.as_name());
let default = type_param.default_type().and_then(|t| t.path()).and_then(Path::from_ast);

let param = GenericParam { idx: idx as u32 + start, name: name.clone(), default };
@ -121,7 +121,7 @@ impl GenericParams {
}
}

fn fill_where_predicates(&mut self, where_clause: &ast::WhereClause) {
fn fill_where_predicates(&mut self, where_clause: ast::WhereClause) {
for pred in where_clause.predicates() {
let type_ref = match pred.type_ref() {
Some(type_ref) => type_ref,
@ -134,7 +134,7 @@ impl GenericParams {
}
}

fn add_where_predicate_from_bound(&mut self, bound: &ast::TypeBound, type_ref: TypeRef) {
fn add_where_predicate_from_bound(&mut self, bound: ast::TypeBound, type_ref: TypeRef) {
let path = bound
.type_ref()
.and_then(|tr| match tr.kind() {

@ -6,7 +6,7 @@ use std::{
use mbe::MacroRules;
use ra_db::{salsa, FileId};
use ra_prof::profile;
use ra_syntax::{ast, AstNode, Parse, SyntaxNode, TreeArc};
use ra_syntax::{ast, AstNode, Parse, SyntaxNode};

use crate::{AstDatabase, AstId, DefDatabase, FileAstId, InternDatabase, Module, Source};

@ -58,11 +58,11 @@ impl HirFileId {
pub(crate) fn parse_or_expand_query(
db: &impl AstDatabase,
file_id: HirFileId,
) -> Option<TreeArc<SyntaxNode>> {
) -> Option<SyntaxNode> {
match file_id.0 {
HirFileIdRepr::File(file_id) => Some(db.parse(file_id).tree().syntax().to_owned()),
HirFileIdRepr::File(file_id) => Some(db.parse(file_id).tree().syntax().clone()),
HirFileIdRepr::Macro(macro_file) => {
db.parse_macro(macro_file).map(|it| it.tree().to_owned())
db.parse_macro(macro_file).map(|it| it.syntax_node())
}
}
}
@ -123,7 +123,7 @@ pub struct MacroDefId(pub(crate) AstId<ast::MacroCall>);
pub(crate) fn macro_def_query(db: &impl AstDatabase, id: MacroDefId) -> Option<Arc<MacroRules>> {
let macro_call = id.0.to_node(db);
let arg = macro_call.token_tree()?;
let (tt, _) = mbe::ast_to_token_tree(arg).or_else(|| {
let (tt, _) = mbe::ast_to_token_tree(&arg).or_else(|| {
log::warn!("fail on macro_def to token tree: {:#?}", arg);
None
})?;
@ -138,7 +138,7 @@ pub(crate) fn macro_arg_query(db: &impl AstDatabase, id: MacroCallId) -> Option<
let loc = id.loc(db);
let macro_call = loc.ast_id.to_node(db);
let arg = macro_call.token_tree()?;
let (tt, _) = mbe::ast_to_token_tree(arg)?;
let (tt, _) = mbe::ast_to_token_tree(&arg)?;
Some(Arc::new(tt))
}

@ -262,7 +262,7 @@ pub(crate) trait AstItemDef<N: AstNode>: salsa::InternKey + Clone {
let loc = ItemLoc { module: ctx.module, ast_id: ast_id.with_file_id(ctx.file_id) };
Self::intern(ctx.db, loc)
}
fn source(self, db: &(impl AstDatabase + DefDatabase)) -> Source<TreeArc<N>> {
fn source(self, db: &(impl AstDatabase + DefDatabase)) -> Source<N> {
let loc = self.lookup_intern(db);
let ast = loc.ast_id.to_node(db);
Source { file_id: loc.ast_id.file_id(), ast }

@ -4,7 +4,7 @@ use std::sync::Arc;
use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId};
use ra_syntax::{
ast::{self, AstNode},
AstPtr, SourceFile, TreeArc,
AstPtr, SourceFile,
};

use crate::{
@ -28,9 +28,9 @@ impl ImplSourceMap {
self.map.insert(impl_id, AstPtr::new(impl_block))
}

pub fn get(&self, source: &ModuleSource, impl_id: ImplId) -> TreeArc<ast::ImplBlock> {
pub fn get(&self, source: &ModuleSource, impl_id: ImplId) -> ast::ImplBlock {
let file = match source {
ModuleSource::SourceFile(file) => &*file,
ModuleSource::SourceFile(file) => file.clone(),
ModuleSource::Module(m) => m.syntax().ancestors().find_map(SourceFile::cast).unwrap(),
};

@ -45,8 +45,8 @@ pub struct ImplBlock {
}

impl HasSource for ImplBlock {
type Ast = TreeArc<ast::ImplBlock>;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<TreeArc<ast::ImplBlock>> {
type Ast = ast::ImplBlock;
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::ImplBlock> {
let source_map = db.impls_in_module_with_source_map(self.module).1;
let src = self.module.definition_source(db);
Source { file_id: src.file_id, ast: source_map.get(&src.ast, self.impl_id) }
@ -132,9 +132,9 @@ impl ImplData {
item_list
.impl_items()
.map(|item_node| match item_node.kind() {
ast::ImplItemKind::FnDef(it) => Function { id: ctx.to_def(it) }.into(),
ast::ImplItemKind::ConstDef(it) => Const { id: ctx.to_def(it) }.into(),
ast::ImplItemKind::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(it) }.into(),
ast::ImplItemKind::FnDef(it) => Function { id: ctx.to_def(&it) }.into(),
ast::ImplItemKind::ConstDef(it) => Const { id: ctx.to_def(&it) }.into(),
ast::ImplItemKind::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(&it) }.into(),
})
.collect()
} else {
@ -202,20 +202,20 @@ impl ModuleImplBlocks {

let src = m.module.definition_source(db);
let node = match &src.ast {
ModuleSource::SourceFile(node) => node.syntax(),
ModuleSource::SourceFile(node) => node.syntax().clone(),
ModuleSource::Module(node) => {
node.item_list().expect("inline module should have item list").syntax()
node.item_list().expect("inline module should have item list").syntax().clone()
}
};

for impl_block_ast in node.children().filter_map(ast::ImplBlock::cast) {
let impl_block = ImplData::from_ast(db, src.file_id, m.module, impl_block_ast);
let impl_block = ImplData::from_ast(db, src.file_id, m.module, &impl_block_ast);
let id = m.impls.alloc(impl_block);
for &impl_item in &m.impls[id].items {
m.impls_by_def.insert(impl_item, id);
}

source_map.insert(id, impl_block_ast);
source_map.insert(id, &impl_block_ast);
}

m

@ -1,7 +1,7 @@
use rustc_hash::FxHashMap;
use std::sync::Arc;

use ra_syntax::{ast::AttrsOwner, SmolStr, TreeArc};
use ra_syntax::{ast::AttrsOwner, SmolStr};

use crate::{
AstDatabase, Crate, DefDatabase, Enum, Function, HasSource, HirDatabase, ImplBlock, Module,
@ -95,7 +95,7 @@ impl LangItems {
// Look for impl targets
for impl_block in module.impl_blocks(db) {
let src = impl_block.source(db);
if let Some(lang_item_name) = lang_item_name(&*src.ast) {
if let Some(lang_item_name) = lang_item_name(&src.ast) {
self.items
.entry(lang_item_name)
.or_insert_with(|| LangItemTarget::ImplBlock(impl_block));
@ -137,11 +137,11 @@ impl LangItems {
item: T,
constructor: fn(T) -> LangItemTarget,
) where
T: Copy + HasSource<Ast = TreeArc<N>>,
T: Copy + HasSource<Ast = N>,
N: AttrsOwner,
{
let node = item.source(db).ast;
if let Some(lang_item_name) = lang_item_name(&*node) {
if let Some(lang_item_name) = lang_item_name(&node) {
self.items.entry(lang_item_name).or_insert_with(|| constructor(item));
}
}

@ -75,7 +75,7 @@ impl AsName for ast::Name {
}
}

impl<'a> AsName for ast::FieldKind<'a> {
impl AsName for ast::FieldKind {
fn as_name(&self) -> Name {
match self {
ast::FieldKind::Name(nr) => nr.as_name(),

@ -3,7 +3,7 @@ use std::{ops::Index, sync::Arc};
use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId};
use ra_syntax::{
ast::{self, AttrsOwner, NameOwner},
AstNode, AstPtr, SmolStr, SourceFile, TreeArc,
AstNode, AstPtr, SmolStr, SourceFile,
};
use test_utils::tested_by;

@ -32,7 +32,7 @@ pub struct ImportSourceMap {
}

type ImportSourcePtr = Either<AstPtr<ast::UseTree>, AstPtr<ast::ExternCrateItem>>;
type ImportSource = Either<TreeArc<ast::UseTree>, TreeArc<ast::ExternCrateItem>>;
type ImportSource = Either<ast::UseTree, ast::ExternCrateItem>;

impl ImportSourcePtr {
fn to_node(self, file: &SourceFile) -> ImportSource {
@ -50,11 +50,11 @@ impl ImportSourceMap {

pub(crate) fn get(&self, source: &ModuleSource, import: ImportId) -> ImportSource {
let file = match source {
ModuleSource::SourceFile(file) => &*file,
ModuleSource::SourceFile(file) => file.clone(),
ModuleSource::Module(m) => m.syntax().ancestors().find_map(SourceFile::cast).unwrap(),
};

self.map[import].to_node(file)
self.map[import].to_node(&file)
}
}

@ -76,8 +76,8 @@ impl RawItems {
source_map: ImportSourceMap::default(),
};
if let Some(node) = db.parse_or_expand(file_id) {
if let Some(source_file) = ast::SourceFile::cast(&node) {
collector.process_module(None, &*source_file);
if let Some(source_file) = ast::SourceFile::cast(node) {
collector.process_module(None, source_file);
}
}
(Arc::new(collector.raw_items), Arc::new(collector.source_map))
@ -188,7 +188,7 @@ struct RawItemsCollector {
}

impl RawItemsCollector {
fn process_module(&mut self, current_module: Option<Module>, body: &impl ast::ModuleItemOwner) {
fn process_module(&mut self, current_module: Option<Module>, body: impl ast::ModuleItemOwner) {
for item_or_macro in body.items_with_macros() {
match item_or_macro {
ast::ItemOrMacro::Macro(m) => self.add_macro(current_module, m),
@ -197,7 +197,7 @@ impl RawItemsCollector {
}
}

fn add_item(&mut self, current_module: Option<Module>, item: &ast::ModuleItem) {
fn add_item(&mut self, current_module: Option<Module>, item: ast::ModuleItem) {
let (kind, name) = match item.kind() {
ast::ModuleItemKind::Module(module) => {
self.add_module(current_module, module);
@ -216,7 +216,7 @@ impl RawItemsCollector {
return;
}
ast::ModuleItemKind::StructDef(it) => {
let id = self.source_ast_id_map.ast_id(it);
let id = self.source_ast_id_map.ast_id(&it);
let name = it.name();
if it.is_union() {
(DefKind::Union(id), name)
@ -225,22 +225,22 @@ impl RawItemsCollector {
}
}
ast::ModuleItemKind::EnumDef(it) => {
(DefKind::Enum(self.source_ast_id_map.ast_id(it)), it.name())
(DefKind::Enum(self.source_ast_id_map.ast_id(&it)), it.name())
}
ast::ModuleItemKind::FnDef(it) => {
(DefKind::Function(self.source_ast_id_map.ast_id(it)), it.name())
(DefKind::Function(self.source_ast_id_map.ast_id(&it)), it.name())
}
ast::ModuleItemKind::TraitDef(it) => {
(DefKind::Trait(self.source_ast_id_map.ast_id(it)), it.name())
(DefKind::Trait(self.source_ast_id_map.ast_id(&it)), it.name())
}
ast::ModuleItemKind::TypeAliasDef(it) => {
(DefKind::TypeAlias(self.source_ast_id_map.ast_id(it)), it.name())
(DefKind::TypeAlias(self.source_ast_id_map.ast_id(&it)), it.name())
}
ast::ModuleItemKind::ConstDef(it) => {
(DefKind::Const(self.source_ast_id_map.ast_id(it)), it.name())
(DefKind::Const(self.source_ast_id_map.ast_id(&it)), it.name())
}
ast::ModuleItemKind::StaticDef(it) => {
(DefKind::Static(self.source_ast_id_map.ast_id(it)), it.name())
(DefKind::Static(self.source_ast_id_map.ast_id(&it)), it.name())
}
};
if let Some(name) = name {
@ -250,14 +250,14 @@ impl RawItemsCollector {
}
}

fn add_module(&mut self, current_module: Option<Module>, module: &ast::Module) {
fn add_module(&mut self, current_module: Option<Module>, module: ast::Module) {
let name = match module.name() {
Some(it) => it.as_name(),
None => return,
};

let attr_path = extract_mod_path_attribute(module);
let ast_id = self.source_ast_id_map.ast_id(module);
let attr_path = extract_mod_path_attribute(&module);
let ast_id = self.source_ast_id_map.ast_id(&module);
if module.has_semi() {
let item =
self.raw_items.modules.alloc(ModuleData::Declaration { name, ast_id, attr_path });
@ -278,10 +278,10 @@ impl RawItemsCollector {
tested_by!(name_res_works_for_broken_modules);
}

fn add_use_item(&mut self, current_module: Option<Module>, use_item: &ast::UseItem) {
fn add_use_item(&mut self, current_module: Option<Module>, use_item: ast::UseItem) {
let is_prelude = use_item.has_atom_attr("prelude_import");

Path::expand_use_item(use_item, |path, use_tree, is_glob, alias| {
Path::expand_use_item(&use_item, |path, use_tree, is_glob, alias| {
let import_data =
ImportData { path, alias, is_glob, is_prelude, is_extern_crate: false };
self.push_import(current_module, import_data, Either::A(AstPtr::new(use_tree)));
@ -291,11 +291,11 @@ impl RawItemsCollector {
fn add_extern_crate_item(
&mut self,
current_module: Option<Module>,
extern_crate: &ast::ExternCrateItem,
extern_crate: ast::ExternCrateItem,
) {
if let Some(name_ref) = extern_crate.name_ref() {
let path = Path::from_name_ref(name_ref);
let alias = extern_crate.alias().and_then(|a| a.name()).map(AsName::as_name);
let path = Path::from_name_ref(&name_ref);
let alias = extern_crate.alias().and_then(|a| a.name()).map(|it| it.as_name());
let import_data = ImportData {
path,
alias,
@ -303,18 +303,18 @@ impl RawItemsCollector {
is_prelude: false,
is_extern_crate: true,
};
self.push_import(current_module, import_data, Either::B(AstPtr::new(extern_crate)));
self.push_import(current_module, import_data, Either::B(AstPtr::new(&extern_crate)));
}
}

fn add_macro(&mut self, current_module: Option<Module>, m: &ast::MacroCall) {
fn add_macro(&mut self, current_module: Option<Module>, m: ast::MacroCall) {
let path = match m.path().and_then(Path::from_ast) {
Some(it) => it,
_ => return,
};

let name = m.name().map(|it| it.as_name());
let ast_id = self.source_ast_id_map.ast_id(m);
let ast_id = self.source_ast_id_map.ast_id(&m);
let export = m.has_atom_attr("macro_export");
let m = self.raw_items.macros.alloc(MacroData { ast_id, path, name, export });
self.push_item(current_module, RawItem::Macro(m));

@ -47,9 +47,9 @@ pub enum PathKind {

impl Path {
/// Calls `cb` with all paths, represented by this use item.
pub fn expand_use_item<'a>(
item: &'a ast::UseItem,
mut cb: impl FnMut(Path, &'a ast::UseTree, bool, Option<Name>),
pub fn expand_use_item(
item: &ast::UseItem,
mut cb: impl FnMut(Path, &ast::UseTree, bool, Option<Name>),
) {
if let Some(tree) = item.use_tree() {
expand_use_tree(None, tree, &mut cb);
@ -57,7 +57,7 @@ impl Path {
}

/// Converts an `ast::Path` to `Path`. Works with use trees.
pub fn from_ast(mut path: &ast::Path) -> Option<Path> {
pub fn from_ast(mut path: ast::Path) -> Option<Path> {
let mut kind = PathKind::Plain;
let mut segments = Vec::new();
loop {
@ -87,7 +87,7 @@ impl Path {
break;
}
}
path = match qualifier(path) {
path = match qualifier(&path) {
Some(it) => it,
None => break,
};
@ -95,7 +95,7 @@ impl Path {
segments.reverse();
return Some(Path { kind, segments });

fn qualifier(path: &ast::Path) -> Option<&ast::Path> {
fn qualifier(path: &ast::Path) -> Option<ast::Path> {
if let Some(q) = path.qualifier() {
return Some(q);
}
@ -136,7 +136,7 @@ impl Path {
}

impl GenericArgs {
pub(crate) fn from_ast(node: &ast::TypeArgList) -> Option<GenericArgs> {
pub(crate) fn from_ast(node: ast::TypeArgList) -> Option<GenericArgs> {
let mut args = Vec::new();
for type_arg in node.type_args() {
let type_ref = TypeRef::from_ast_opt(type_arg.type_ref());
@ -160,10 +160,10 @@ impl From<Name> for Path {
}
}

fn expand_use_tree<'a>(
fn expand_use_tree(
prefix: Option<Path>,
tree: &'a ast::UseTree,
cb: &mut impl FnMut(Path, &'a ast::UseTree, bool, Option<Name>),
tree: ast::UseTree,
cb: &mut impl FnMut(Path, &ast::UseTree, bool, Option<Name>),
) {
if let Some(use_tree_list) = tree.use_tree_list() {
let prefix = match tree.path() {
@ -188,7 +188,7 @@ fn expand_use_tree<'a>(
if let Some(segment) = ast_path.segment() {
if segment.kind() == Some(ast::PathSegmentKind::SelfKw) {
if let Some(prefix) = prefix {
cb(prefix, tree, false, alias);
cb(prefix, &tree, false, alias);
return;
}
}
@ -196,7 +196,7 @@ fn expand_use_tree<'a>(
}
if let Some(path) = convert_path(prefix, ast_path) {
let is_glob = tree.has_star();
cb(path, tree, is_glob, alias)
cb(path, &tree, is_glob, alias)
}
// FIXME: report errors somewhere
// We get here if we do
@ -204,7 +204,7 @@ fn expand_use_tree<'a>(
}
}

fn convert_path(prefix: Option<Path>, path: &ast::Path) -> Option<Path> {
fn convert_path(prefix: Option<Path>, path: ast::Path) -> Option<Path> {
let prefix =
if let Some(qual) = path.qualifier() { Some(convert_path(prefix, qual)?) } else { prefix };
let segment = path.segment()?;

@ -37,7 +37,7 @@ pub fn module_from_file_id(db: &impl HirDatabase, file_id: FileId) -> Option<Mod
pub fn module_from_declaration(
db: &impl HirDatabase,
file_id: FileId,
decl: &ast::Module,
decl: ast::Module,
) -> Option<Module> {
let parent_module = module_from_file_id(db, file_id);
let child_name = decl.name();
@ -50,8 +50,8 @@ pub fn module_from_declaration(
/// Locates the module by position in the source code.
pub fn module_from_position(db: &impl HirDatabase, position: FilePosition) -> Option<Module> {
let parse = db.parse(position.file_id);
match find_node_at_offset::<ast::Module>(parse.tree().syntax(), position.offset) {
Some(m) if !m.has_semi() => module_from_inline(db, position.file_id, m),
match &find_node_at_offset::<ast::Module>(parse.tree().syntax(), position.offset) {
Some(m) if !m.has_semi() => module_from_inline(db, position.file_id, m.clone()),
_ => module_from_file_id(db, position.file_id),
}
}
@ -59,12 +59,12 @@ pub fn module_from_position(db: &impl HirDatabase, position: FilePosition) -> Op
fn module_from_inline(
db: &impl HirDatabase,
file_id: FileId,
module: &ast::Module,
module: ast::Module,
) -> Option<Module> {
assert!(!module.has_semi());
let file_id = file_id.into();
let ast_id_map = db.ast_id_map(file_id);
let item_id = ast_id_map.ast_id(module).with_file_id(file_id);
let item_id = ast_id_map.ast_id(&module).with_file_id(file_id);
module_from_source(db, file_id, Some(item_id))
}

@ -127,16 +127,16 @@ fn try_get_resolver_for_node(
file_id: FileId,
node: &SyntaxNode,
) -> Option<Resolver> {
if let Some(module) = ast::Module::cast(node) {
if let Some(module) = ast::Module::cast(node.clone()) {
Some(module_from_declaration(db, file_id, module)?.resolver(db))
} else if let Some(_) = ast::SourceFile::cast(node) {
} else if let Some(_) = ast::SourceFile::cast(node.clone()) {
Some(module_from_source(db, file_id.into(), None)?.resolver(db))
} else if let Some(s) = ast::StructDef::cast(node) {
} else if let Some(s) = ast::StructDef::cast(node.clone()) {
let module = module_from_child_node(db, file_id, s.syntax())?;
Some(struct_from_module(db, module, s).resolver(db))
} else if let Some(e) = ast::EnumDef::cast(node) {
Some(struct_from_module(db, module, &s).resolver(db))
} else if let Some(e) = ast::EnumDef::cast(node.clone()) {
let module = module_from_child_node(db, file_id, e.syntax())?;
Some(enum_from_module(db, module, e).resolver(db))
Some(enum_from_module(db, module, &e).resolver(db))
} else if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF {
Some(def_with_body_from_child_node(db, file_id, node)?.resolver(db))
} else {
@ -153,14 +153,14 @@ fn def_with_body_from_child_node(
let module = module_from_child_node(db, file_id, node)?;
let ctx = LocationCtx::new(db, module, file_id.into());
node.ancestors().find_map(|node| {
if let Some(def) = ast::FnDef::cast(node) {
return Some(Function { id: ctx.to_def(def) }.into());
if let Some(def) = ast::FnDef::cast(node.clone()) {
return Some(Function { id: ctx.to_def(&def) }.into());
}
if let Some(def) = ast::ConstDef::cast(node) {
return Some(Const { id: ctx.to_def(def) }.into());
if let Some(def) = ast::ConstDef::cast(node.clone()) {
return Some(Const { id: ctx.to_def(&def) }.into());
}
if let Some(def) = ast::StaticDef::cast(node) {
return Some(Static { id: ctx.to_def(def) }.into());
if let Some(def) = ast::StaticDef::cast(node.clone()) {
return Some(Static { id: ctx.to_def(&def) }.into());
}
None
})
@ -237,7 +237,7 @@ impl SourceAnalyzer {
SourceAnalyzer {
resolver: node
.ancestors()
.find_map(|node| try_get_resolver_for_node(db, file_id, node))
.find_map(|node| try_get_resolver_for_node(db, file_id, &node))
.unwrap_or_default(),
body_source_map: None,
infer: None,
@ -257,17 +257,17 @@ impl SourceAnalyzer {
}

pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
let expr_id = self.body_source_map.as_ref()?.node_expr(call.into())?;
let expr_id = self.body_source_map.as_ref()?.node_expr(&call.clone().into())?;
self.infer.as_ref()?.method_resolution(expr_id)
}

pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<crate::StructField> {
let expr_id = self.body_source_map.as_ref()?.node_expr(field.into())?;
let expr_id = self.body_source_map.as_ref()?.node_expr(&field.clone().into())?;
self.infer.as_ref()?.field_resolution(expr_id)
}

pub fn resolve_variant(&self, struct_lit: &ast::StructLit) -> Option<crate::VariantDef> {
let expr_id = self.body_source_map.as_ref()?.node_expr(struct_lit.into())?;
let expr_id = self.body_source_map.as_ref()?.node_expr(&struct_lit.clone().into())?;
self.infer.as_ref()?.variant_resolution(expr_id)
}

@ -290,18 +290,18 @@ impl SourceAnalyzer {

pub fn resolve_path(&self, db: &impl HirDatabase, path: &ast::Path) -> Option<PathResolution> {
if let Some(path_expr) = path.syntax().parent().and_then(ast::PathExpr::cast) {
let expr_id = self.body_source_map.as_ref()?.node_expr(path_expr.into())?;
let expr_id = self.body_source_map.as_ref()?.node_expr(&path_expr.into())?;
if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_expr(expr_id) {
return Some(PathResolution::AssocItem(assoc));
}
}
if let Some(path_pat) = path.syntax().parent().and_then(ast::PathPat::cast) {
let pat_id = self.body_source_map.as_ref()?.node_pat(path_pat.into())?;
let pat_id = self.body_source_map.as_ref()?.node_pat(&path_pat.into())?;
if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_pat(pat_id) {
return Some(PathResolution::AssocItem(assoc));
}
}
let hir_path = crate::Path::from_ast(path)?;
let hir_path = crate::Path::from_ast(path.clone())?;
let res = self.resolver.resolve_path_without_assoc_items(db, &hir_path);
let res = res.clone().take_types().or_else(|| res.take_values())?;
let res = match res {
@ -343,12 +343,12 @@ impl SourceAnalyzer {
// FIXME: at least, this should work with any DefWithBody, but ideally
// this should be hir-based altogether
let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap();
let ptr = Either::A(AstPtr::new(pat.into()));
let ptr = Either::A(AstPtr::new(&ast::Pat::from(pat.clone())));
fn_def
.syntax()
.descendants()
.filter_map(ast::NameRef::cast)
.filter(|name_ref| match self.resolve_local_name(*name_ref) {
.filter(|name_ref| match self.resolve_local_name(&name_ref) {
None => false,
Some(entry) => entry.ptr() == ptr,
})
@ -411,7 +411,7 @@ fn scope_for(
node: &SyntaxNode,
) -> Option<ScopeId> {
node.ancestors()
.map(SyntaxNodePtr::new)
.map(|it| SyntaxNodePtr::new(&it))
.filter_map(|ptr| source_map.syntax_expr(ptr))
.find_map(|it| scopes.scope_for(it))
}

@ -5,7 +5,7 @@ use std::{
};

use ra_arena::{impl_arena_id, Arena, RawId};
use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxNodePtr, TreeArc};
use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxNodePtr};

use crate::{AstDatabase, HirFileId};

@ -42,9 +42,9 @@ impl<N: AstNode> AstId<N> {
self.file_id
}

pub(crate) fn to_node(&self, db: &impl AstDatabase) -> TreeArc<N> {
pub(crate) fn to_node(&self, db: &impl AstDatabase) -> N {
let syntax_node = db.ast_id_to_node(self.file_id, self.file_ast_id.raw);
N::cast(&syntax_node).unwrap().to_owned()
N::cast(syntax_node).unwrap()
}
}

@ -93,7 +93,7 @@ pub struct AstIdMap {
impl AstIdMap {
pub(crate) fn ast_id_map_query(db: &impl AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
let map = if let Some(node) = db.parse_or_expand(file_id) {
AstIdMap::from_source(&*node)
AstIdMap::from_source(&node)
} else {
AstIdMap::default()
};
@ -104,9 +104,9 @@ impl AstIdMap {
db: &impl AstDatabase,
file_id: HirFileId,
ast_id: ErasedFileAstId,
) -> TreeArc<SyntaxNode> {
) -> SyntaxNode {
let node = db.parse_or_expand(file_id).unwrap();
db.ast_id_map(file_id).arena[ast_id].to_node(&*node).to_owned()
db.ast_id_map(file_id).arena[ast_id].to_node(&node)
}

pub(crate) fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> {
@ -131,7 +131,7 @@ impl AstIdMap {
// change parent's id. This means that, say, adding a new function to a
// trait does not change ids of top-level items, which helps caching.
bfs(node, |it| {
if let Some(module_item) = ast::ModuleItem::cast(it) {
if let Some(module_item) = ast::ModuleItem::cast(it.clone()) {
res.alloc(module_item.syntax());
} else if let Some(macro_call) = ast::MacroCall::cast(it) {
res.alloc(macro_call.syntax());
@ -146,8 +146,8 @@ impl AstIdMap {
}

/// Walks the subtree in bfs order, calling `f` for each node.
fn bfs(node: &SyntaxNode, mut f: impl FnMut(&SyntaxNode)) {
let mut curr_layer = vec![node];
fn bfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode)) {
let mut curr_layer = vec![node.clone()];
let mut next_layer = vec![];
while !curr_layer.is_empty() {
curr_layer.drain(..).for_each(|node| {

@ -31,9 +31,9 @@ impl TraitData {
item_list
.impl_items()
.map(|item_node| match item_node.kind() {
ast::ImplItemKind::FnDef(it) => Function { id: ctx.to_def(it) }.into(),
ast::ImplItemKind::ConstDef(it) => Const { id: ctx.to_def(it) }.into(),
ast::ImplItemKind::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(it) }.into(),
ast::ImplItemKind::FnDef(it) => Function { id: ctx.to_def(&it) }.into(),
ast::ImplItemKind::ConstDef(it) => Const { id: ctx.to_def(&it) }.into(),
ast::ImplItemKind::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(&it) }.into(),
})
.collect()
} else {

@ -3086,7 +3086,7 @@ fn type_at_pos(db: &MockDatabase, pos: FilePosition) -> String {
let file = db.parse(pos.file_id).ok().unwrap();
let expr = algo::find_node_at_offset::<ast::Expr>(file.syntax(), pos.offset).unwrap();
let analyzer = SourceAnalyzer::new(db, pos.file_id, expr.syntax(), Some(pos.offset));
let ty = analyzer.type_of(db, expr).unwrap();
let ty = analyzer.type_of(db, &expr).unwrap();
ty.display(db).to_string()
}

@ -3126,7 +3126,7 @@ fn infer(content: &str) -> String {
types.sort_by_key(|(ptr, _)| (ptr.range().start(), ptr.range().end()));
for (syntax_ptr, ty) in &types {
let node = syntax_ptr.to_node(source_file.syntax());
let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node) {
let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.clone()) {
(self_param.self_kw_token().range(), "self".to_string())
} else {
(syntax_ptr.range(), node.text().to_string().replace("\n", " "))
@ -3137,7 +3137,7 @@ fn infer(content: &str) -> String {

for node in source_file.syntax().descendants() {
if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF {
let analyzer = SourceAnalyzer::new(&db, file_id, node, None);
let analyzer = SourceAnalyzer::new(&db, file_id, &node, None);
infer_def(analyzer.inference_result(), analyzer.body_source_map());
}
}
@ -3179,7 +3179,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
let node =
algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent();
let events = db.log_executed(|| {
SourceAnalyzer::new(&db, pos.file_id, node, None);
SourceAnalyzer::new(&db, pos.file_id, &node, None);
});
assert!(format!("{:?}", events).contains("infer"))
}
@ -3200,7 +3200,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
let node =
algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent();
let events = db.log_executed(|| {
SourceAnalyzer::new(&db, pos.file_id, node, None);
SourceAnalyzer::new(&db, pos.file_id, &node, None);
});
assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events)
}

@ -56,7 +56,7 @@ pub enum TypeRef {

impl TypeRef {
/// Converts an `ast::TypeRef` to a `hir::TypeRef`.
pub(crate) fn from_ast(node: &ast::TypeRef) -> Self {
pub(crate) fn from_ast(node: ast::TypeRef) -> Self {
use ra_syntax::ast::TypeRefKind::*;
match node.kind() {
ParenType(inner) => TypeRef::from_ast_opt(inner.type_ref()),
@ -95,7 +95,7 @@ impl TypeRef {
}
}

pub(crate) fn from_ast_opt(node: Option<&ast::TypeRef>) -> Self {
pub(crate) fn from_ast_opt(node: Option<ast::TypeRef>) -> Self {
if let Some(node) = node {
TypeRef::from_ast(node)
} else {

@ -11,24 +11,24 @@ use crate::{db::RootDatabase, CallInfo, FilePosition, FunctionSignature};
/// Computes parameter information for the given call expression.
pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo> {
let parse = db.parse(position.file_id);
let syntax = parse.tree().syntax();
let syntax = parse.tree().syntax().clone();

// Find the calling expression and it's NameRef
let calling_node = FnCallNode::with_node(syntax, position.offset)?;
let calling_node = FnCallNode::with_node(&syntax, position.offset)?;
let name_ref = calling_node.name_ref()?;

let analyzer = hir::SourceAnalyzer::new(db, position.file_id, name_ref.syntax(), None);
let function = match calling_node {
let function = match &calling_node {
FnCallNode::CallExpr(expr) => {
//FIXME: apply subst
let (callable_def, _subst) = analyzer.type_of(db, expr.expr()?)?.as_callable()?;
let (callable_def, _subst) = analyzer.type_of(db, &expr.expr()?)?.as_callable()?;
match callable_def {
hir::CallableDef::Function(it) => it,
//FIXME: handle other callables
_ => return None,
}
}
FnCallNode::MethodCallExpr(expr) => analyzer.resolve_method_call(expr)?,
FnCallNode::MethodCallExpr(expr) => analyzer.resolve_method_call(&expr)?,
};

let mut call_info = CallInfo::new(db, function);
@ -73,13 +73,13 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal
Some(call_info)
}

enum FnCallNode<'a> {
CallExpr(&'a ast::CallExpr),
MethodCallExpr(&'a ast::MethodCallExpr),
enum FnCallNode {
CallExpr(ast::CallExpr),
MethodCallExpr(ast::MethodCallExpr),
}

impl<'a> FnCallNode<'a> {
fn with_node(syntax: &'a SyntaxNode, offset: TextUnit) -> Option<FnCallNode<'a>> {
impl FnCallNode {
fn with_node(syntax: &SyntaxNode, offset: TextUnit) -> Option<FnCallNode> {
if let Some(expr) = find_node_at_offset::<ast::CallExpr>(syntax, offset) {
return Some(FnCallNode::CallExpr(expr));
}
@ -89,8 +89,8 @@ impl<'a> FnCallNode<'a> {
None
}

fn name_ref(&self) -> Option<&'a ast::NameRef> {
match *self {
fn name_ref(&self) -> Option<ast::NameRef> {
match self {
FnCallNode::CallExpr(call_expr) => Some(match call_expr.expr()?.kind() {
ast::ExprKind::PathExpr(path_expr) => path_expr.path()?.segment()?.name_ref()?,
_ => return None,
@ -102,8 +102,8 @@ impl<'a> FnCallNode<'a> {
}
}

fn arg_list(&self) -> Option<&'a ast::ArgList> {
match *self {
fn arg_list(&self) -> Option<ast::ArgList> {
match self {
FnCallNode::CallExpr(expr) => expr.arg_list(),
FnCallNode::MethodCallExpr(expr) => expr.arg_list(),
}

@ -5,10 +5,11 @@ use rustc_hash::FxHashSet;

/// Complete dot accesses, i.e. fields or methods (currently only fields).
pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) {
let receiver_ty = match ctx.dot_receiver.and_then(|it| ctx.analyzer.type_of(ctx.db, it)) {
Some(it) => it,
None => return,
};
let receiver_ty =
match ctx.dot_receiver.as_ref().and_then(|it| ctx.analyzer.type_of(ctx.db, it)) {
Some(it) => it,
None => return,
};
if !ctx.is_call {
complete_fields(acc, ctx, receiver_ty.clone());
}

@ -20,7 +20,7 @@ pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext)
let _ = visitor_ctx(&mut params)
.visit::<ast::SourceFile, _>(process)
.visit::<ast::ItemList, _>(process)
.accept(node);
.accept(&node);
}
params
.into_iter()
@ -38,10 +38,7 @@ pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext)
.add_to(acc)
});

fn process<'a, N: ast::FnDefOwner>(
node: &'a N,
params: &mut FxHashMap<String, (u32, &'a ast::Param)>,
) {
fn process<N: ast::FnDefOwner>(node: N, params: &mut FxHashMap<String, (u32, ast::Param)>) {
node.functions().filter_map(|it| it.param_list()).flat_map(|it| it.params()).for_each(
|param| {
let text = param.syntax().text().to_string();

@ -52,7 +52,7 @@ pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte
return;
}

let fn_def = match ctx.function_syntax {
let fn_def = match &ctx.function_syntax {
Some(it) => it,
None => return,
};
@ -65,7 +65,7 @@ pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte
acc.add(keyword(ctx, "else", "else {$0}"));
acc.add(keyword(ctx, "else if", "else if $0 {}"));
}
if is_in_loop_body(ctx.token) {
if is_in_loop_body(&ctx.token) {
if ctx.can_be_stmt {
acc.add(keyword(ctx, "continue", "continue;"));
acc.add(keyword(ctx, "break", "break;"));
@ -74,19 +74,19 @@ pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte
acc.add(keyword(ctx, "break", "break"));
}
}
acc.add_all(complete_return(ctx, fn_def, ctx.can_be_stmt));
acc.add_all(complete_return(ctx, &fn_def, ctx.can_be_stmt));
}

fn is_in_loop_body(leaf: SyntaxToken) -> bool {
fn is_in_loop_body(leaf: &SyntaxToken) -> bool {
for node in leaf.parent().ancestors() {
if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR {
break;
}
let loop_body = visitor()
.visit::<ast::ForExpr, _>(LoopBodyOwner::loop_body)
.visit::<ast::WhileExpr, _>(LoopBodyOwner::loop_body)
.visit::<ast::LoopExpr, _>(LoopBodyOwner::loop_body)
.accept(node);
.visit::<ast::ForExpr, _>(|it| it.loop_body())
.visit::<ast::WhileExpr, _>(|it| it.loop_body())
.visit::<ast::LoopExpr, _>(|it| it.loop_body())
.accept(&node);
if let Some(Some(body)) = loop_body {
if leaf.range().is_subrange(&body.syntax().range()) {
return true;

@ -11,7 +11,8 @@ use ra_text_edit::TextEditBuilder;
|
||||
|
||||
fn postfix_snippet(ctx: &CompletionContext, label: &str, detail: &str, snippet: &str) -> Builder {
|
||||
let edit = {
|
||||
let receiver_range = ctx.dot_receiver.expect("no receiver available").syntax().range();
|
||||
let receiver_range =
|
||||
ctx.dot_receiver.as_ref().expect("no receiver available").syntax().range();
|
||||
let delete_range = TextRange::from_to(receiver_range.start(), ctx.source_range().end());
|
||||
let mut builder = TextEditBuilder::default();
|
||||
builder.replace(delete_range, snippet.to_string());
|
||||
@ -38,9 +39,9 @@ fn is_bool_or_unknown(ty: Option<Ty>) -> bool {
|
||||
}
|
||||
|
||||
pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
|
||||
if let Some(dot_receiver) = ctx.dot_receiver {
|
||||
if let Some(dot_receiver) = &ctx.dot_receiver {
|
||||
let receiver_text = dot_receiver.syntax().text().to_string();
|
||||
let receiver_ty = ctx.analyzer.type_of(ctx.db, dot_receiver);
|
||||
let receiver_ty = ctx.analyzer.type_of(ctx.db, &dot_receiver);
|
||||
if is_bool_or_unknown(receiver_ty) {
|
||||
postfix_snippet(ctx, "if", "if expr {}", &format!("if {} {{$0}}", receiver_text))
|
||||
.add_to(acc);
|
||||
|
@ -20,8 +20,8 @@ pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) {
|
||||
let mut builder = TextEditBuilder::default();
|
||||
builder.replace(ctx.source_range(), name.to_string());
|
||||
auto_import::auto_import_text_edit(
|
||||
ctx.token.parent(),
|
||||
ctx.token.parent(),
|
||||
&ctx.token.parent(),
|
||||
&ctx.token.parent(),
|
||||
&path,
|
||||
&mut builder,
|
||||
);
|
||||
|
@ -4,8 +4,8 @@ use crate::completion::{CompletionContext, Completions};
|
||||
|
||||
/// Complete fields in fields literals.
|
||||
pub(super) fn complete_struct_literal(acc: &mut Completions, ctx: &CompletionContext) {
|
||||
let (ty, variant) = match ctx.struct_lit_syntax.and_then(|it| {
|
||||
Some((ctx.analyzer.type_of(ctx.db, it.into())?, ctx.analyzer.resolve_variant(it)?))
|
||||
let (ty, variant) = match ctx.struct_lit_syntax.as_ref().and_then(|it| {
|
||||
Some((ctx.analyzer.type_of(ctx.db, &it.clone().into())?, ctx.analyzer.resolve_variant(it)?))
|
||||
}) {
|
||||
Some(it) => it,
|
||||
_ => return,
|
||||
|
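Several hunks above replace direct use of an `Option<&'a ast::T>` with an owned `Option<ast::T>` plus `as_ref()`. The borrow-instead-of-move pattern in isolation (a sketch, with `ast::Expr` standing in for any node type):

    // Borrow an optional owned node without consuming it.
    fn receiver_text(dot_receiver: &Option<ast::Expr>) -> Option<String> {
        // `as_ref()` turns `&Option<ast::Expr>` into `Option<&ast::Expr>`,
        // so the node stays in place and only a reference is handed on.
        dot_receiver.as_ref().map(|expr| expr.syntax().text().to_string())
    }
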
@ -16,11 +16,11 @@ pub(crate) struct CompletionContext<'a> {
pub(super) db: &'a db::RootDatabase,
pub(super) analyzer: hir::SourceAnalyzer,
pub(super) offset: TextUnit,
pub(super) token: SyntaxToken<'a>,
pub(super) token: SyntaxToken,
pub(super) module: Option<hir::Module>,
pub(super) function_syntax: Option<&'a ast::FnDef>,
pub(super) use_item_syntax: Option<&'a ast::UseItem>,
pub(super) struct_lit_syntax: Option<&'a ast::StructLit>,
pub(super) function_syntax: Option<ast::FnDef>,
pub(super) use_item_syntax: Option<ast::UseItem>,
pub(super) struct_lit_syntax: Option<ast::StructLit>,
pub(super) is_param: bool,
/// If a name-binding or reference to a const in a pattern.
/// Irrefutable patterns (like let) are excluded.

@ -35,7 +35,7 @@ pub(crate) struct CompletionContext<'a> {
/// Something is typed at the "top" level, in module or impl/trait.
pub(super) is_new_item: bool,
/// The receiver if this is a field or method access, i.e. writing something.<|>
pub(super) dot_receiver: Option<&'a ast::Expr>,
pub(super) dot_receiver: Option<ast::Expr>,
/// If this is a call (method or function) in particular, i.e. the () are already there.
pub(super) is_call: bool,
}

@ -50,7 +50,7 @@ impl<'a> CompletionContext<'a> {
let token =
find_token_at_offset(original_parse.tree().syntax(), position.offset).left_biased()?;
let analyzer =
hir::SourceAnalyzer::new(db, position.file_id, token.parent(), Some(position.offset));
hir::SourceAnalyzer::new(db, position.file_id, &token.parent(), Some(position.offset));
let mut ctx = CompletionContext {
db,
analyzer,

@ -109,7 +109,7 @@ impl<'a> CompletionContext<'a> {
if is_node::<ast::BindPat>(name.syntax()) {
let bind_pat = name.syntax().ancestors().find_map(ast::BindPat::cast).unwrap();
let parent = bind_pat.syntax().parent();
if parent.and_then(ast::MatchArm::cast).is_some()
if parent.clone().and_then(ast::MatchArm::cast).is_some()
|| parent.and_then(ast::Condition::cast).is_some()
{
self.is_pat_binding = true;

@ -122,7 +122,7 @@ impl<'a> CompletionContext<'a> {
}
}

fn classify_name_ref(&mut self, original_file: &'a SourceFile, name_ref: &ast::NameRef) {
fn classify_name_ref(&mut self, original_file: SourceFile, name_ref: ast::NameRef) {
let name_range = name_ref.syntax().range();
if name_ref.syntax().parent().and_then(ast::NamedField::cast).is_some() {
self.struct_lit_syntax = find_node_at_offset(original_file.syntax(), self.offset);

@ -153,7 +153,7 @@ impl<'a> CompletionContext<'a> {
None => return,
};

if let Some(segment) = ast::PathSegment::cast(parent) {
if let Some(segment) = ast::PathSegment::cast(parent.clone()) {
let path = segment.parent_path();
self.is_call = path
.syntax()

@ -162,7 +162,7 @@ impl<'a> CompletionContext<'a> {
.and_then(|it| it.syntax().parent().and_then(ast::CallExpr::cast))
.is_some();

if let Some(mut path) = hir::Path::from_ast(path) {
if let Some(mut path) = hir::Path::from_ast(path.clone()) {
if !path.is_ident() {
path.segments.pop().unwrap();
self.path_prefix = Some(path);

@ -179,7 +179,7 @@ impl<'a> CompletionContext<'a> {
.syntax()
.ancestors()
.find_map(|node| {
if let Some(stmt) = ast::ExprStmt::cast(node) {
if let Some(stmt) = ast::ExprStmt::cast(node.clone()) {
return Some(stmt.syntax().range() == name_ref.syntax().range());
}
if let Some(block) = ast::Block::cast(node) {

@ -203,7 +203,7 @@ impl<'a> CompletionContext<'a> {
}
}
}
if let Some(field_expr) = ast::FieldExpr::cast(parent) {
if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) {
// The receiver comes before the point of insertion of the fake
// ident, so it should have the same range in the non-modified file
self.dot_receiver = field_expr

@ -222,7 +222,7 @@ impl<'a> CompletionContext<'a> {
}
}

fn find_node_with_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Option<&N> {
fn find_node_with_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Option<N> {
find_covering_element(syntax, range).ancestors().find_map(N::cast)
}

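`AstNode::cast` now consumes its `SyntaxNode`, which is why the hunks above sprinkle `.clone()` before casts; a node is a cheap handle (a pointer into the shared green tree plus a position), so the clone is inexpensive. Schematically:

    // The clone-before-cast pattern used throughout this patch.
    fn is_expr_or_pat(node: &SyntaxNode) -> bool {
        // Each cast consumes its argument, so clone the cheap handle
        // whenever the same node is inspected more than once.
        ast::Expr::cast(node.clone()).is_some() || ast::Pat::cast(node.clone()).is_some()
    }
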
@ -35,8 +35,8 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
}));

for node in parse.tree().syntax().descendants() {
check_unnecessary_braces_in_use_statement(&mut res, file_id, node);
check_struct_shorthand_initialization(&mut res, file_id, node);
check_unnecessary_braces_in_use_statement(&mut res, file_id, &node);
check_struct_shorthand_initialization(&mut res, file_id, &node);
}
let res = RefCell::new(res);
let mut sink = DiagnosticSink::new(|d| {

@ -60,7 +60,7 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
})
.on::<hir::diagnostics::MissingFields, _>(|d| {
let node = d.ast(db);
let mut ast_editor = AstEditor::new(&*node);
let mut ast_editor = AstEditor::new(node);
for f in d.missed_fields.iter() {
ast_editor.append_field(&AstBuilder::<NamedField>::from_name(f));
}

@ -94,11 +94,11 @@ fn check_unnecessary_braces_in_use_statement(
file_id: FileId,
node: &SyntaxNode,
) -> Option<()> {
let use_tree_list = ast::UseTreeList::cast(node)?;
let use_tree_list = ast::UseTreeList::cast(node.clone())?;
if let Some((single_use_tree,)) = use_tree_list.use_trees().collect_tuple() {
let range = use_tree_list.syntax().range();
let edit =
text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(single_use_tree)
text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(&single_use_tree)
.unwrap_or_else(|| {
let to_replace = single_use_tree.syntax().text().to_string();
let mut edit_builder = TextEditBuilder::default();

@ -141,7 +141,7 @@ fn check_struct_shorthand_initialization(
file_id: FileId,
node: &SyntaxNode,
) -> Option<()> {
let struct_lit = ast::StructLit::cast(node)?;
let struct_lit = ast::StructLit::cast(node.clone())?;
let named_field_list = struct_lit.named_field_list()?;
for named_field in named_field_list.fields() {
if let (Some(name_ref), Some(expr)) = (named_field.name_ref(), named_field.expr()) {

@ -184,7 +184,7 @@ mod tests {
let parse = SourceFile::parse(code);
let mut diagnostics = Vec::new();
for node in parse.tree().syntax().descendants() {
func(&mut diagnostics, FileId(0), node);
func(&mut diagnostics, FileId(0), &node);
}
assert!(diagnostics.is_empty());
}

@ -193,7 +193,7 @@ mod tests {
let parse = SourceFile::parse(before);
let mut diagnostics = Vec::new();
for node in parse.tree().syntax().descendants() {
func(&mut diagnostics, FileId(0), node);
func(&mut diagnostics, FileId(0), &node);
}
let diagnostic =
diagnostics.pop().unwrap_or_else(|| panic!("no diagnostics for:\n{}\n", before));

@ -38,7 +38,7 @@ impl FunctionSignature {
pub(crate) fn from_hir(db: &db::RootDatabase, function: hir::Function) -> Self {
let doc = function.docs(db);
let ast_node = function.source(db).ast;
FunctionSignature::from(&*ast_node).with_doc_opt(doc)
FunctionSignature::from(&ast_node).with_doc_opt(doc)
}
}

@ -5,7 +5,7 @@ use ra_syntax::{
ast::{self, DocCommentsOwner},
AstNode, AstPtr, SmolStr,
SyntaxKind::{self, NAME},
SyntaxNode, TextRange, TreeArc,
SyntaxNode, TextRange,
};

use super::short_label::ShortLabel;

@ -169,7 +169,7 @@ impl NavigationTarget {
let file_id = src.file_id.original_file(db);
match src.ast {
FieldSource::Named(it) => {
NavigationTarget::from_named(file_id, &*it, it.doc_comment_text(), it.short_label())
NavigationTarget::from_named(file_id, &it, it.doc_comment_text(), it.short_label())
}
FieldSource::Pos(it) => {
NavigationTarget::from_syntax(file_id, "".into(), None, it.syntax(), None, None)

@ -179,13 +179,13 @@ impl NavigationTarget {

pub(crate) fn from_def_source<A, D>(db: &RootDatabase, def: D) -> NavigationTarget
where
D: HasSource<Ast = TreeArc<A>>,
D: HasSource<Ast = A>,
A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel,
{
let src = def.source(db);
NavigationTarget::from_named(
src.file_id.original_file(db),
&*src.ast,
&src.ast,
src.ast.doc_comment_text(),
src.ast.short_label(),
)

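`HasSource` drops the `TreeArc` wrapper here: the associated `Ast` type is now the owned node itself. Generic code bounds accordingly (trait and method names as in the hunk above; the body is only a sketch):

    // Sketch: generic code over definitions after the `TreeArc` removal.
    fn doc_of<A, D>(db: &RootDatabase, def: D) -> Option<String>
    where
        D: HasSource<Ast = A>, // previously `HasSource<Ast = TreeArc<A>>`
        A: ast::DocCommentsOwner,
    {
        let src = def.source(db);
        src.ast.doc_comment_text() // `src.ast` is the owned AST node
    }
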
@ -249,7 +249,7 @@ impl NavigationTarget {
log::debug!("nav target {}", src.ast.syntax().debug_dump());
NavigationTarget::from_named(
src.file_id.original_file(db),
&*src.ast,
&src.ast,
src.ast.doc_comment_text(),
None,
)

@ -318,22 +318,18 @@ pub(crate) fn docs_from_symbol(db: &RootDatabase, symbol: &FileSymbol) -> Option
let parse = db.parse(symbol.file_id);
let node = symbol.ptr.to_node(parse.tree().syntax()).to_owned();

fn doc_comments<N: ast::DocCommentsOwner>(node: &N) -> Option<String> {
node.doc_comment_text()
}

visitor()
.visit(doc_comments::<ast::FnDef>)
.visit(doc_comments::<ast::StructDef>)
.visit(doc_comments::<ast::EnumDef>)
.visit(doc_comments::<ast::TraitDef>)
.visit(doc_comments::<ast::Module>)
.visit(doc_comments::<ast::TypeAliasDef>)
.visit(doc_comments::<ast::ConstDef>)
.visit(doc_comments::<ast::StaticDef>)
.visit(doc_comments::<ast::NamedFieldDef>)
.visit(doc_comments::<ast::EnumVariant>)
.visit(doc_comments::<ast::MacroCall>)
.visit(|it: ast::FnDef| it.doc_comment_text())
.visit(|it: ast::StructDef| it.doc_comment_text())
.visit(|it: ast::EnumDef| it.doc_comment_text())
.visit(|it: ast::TraitDef| it.doc_comment_text())
.visit(|it: ast::Module| it.doc_comment_text())
.visit(|it: ast::TypeAliasDef| it.doc_comment_text())
.visit(|it: ast::ConstDef| it.doc_comment_text())
.visit(|it: ast::StaticDef| it.doc_comment_text())
.visit(|it: ast::NamedFieldDef| it.doc_comment_text())
.visit(|it: ast::EnumVariant| it.doc_comment_text())
.visit(|it: ast::MacroCall| it.doc_comment_text())
.accept(&node)?
}

@ -345,15 +341,15 @@ pub(crate) fn description_from_symbol(db: &RootDatabase, symbol: &FileSymbol) ->
let node = symbol.ptr.to_node(parse.tree().syntax()).to_owned();

visitor()
.visit(|node: &ast::FnDef| node.short_label())
.visit(|node: &ast::StructDef| node.short_label())
.visit(|node: &ast::EnumDef| node.short_label())
.visit(|node: &ast::TraitDef| node.short_label())
.visit(|node: &ast::Module| node.short_label())
.visit(|node: &ast::TypeAliasDef| node.short_label())
.visit(|node: &ast::ConstDef| node.short_label())
.visit(|node: &ast::StaticDef| node.short_label())
.visit(|node: &ast::NamedFieldDef| node.short_label())
.visit(|node: &ast::EnumVariant| node.short_label())
.visit(|node: ast::FnDef| node.short_label())
.visit(|node: ast::StructDef| node.short_label())
.visit(|node: ast::EnumDef| node.short_label())
.visit(|node: ast::TraitDef| node.short_label())
.visit(|node: ast::Module| node.short_label())
.visit(|node: ast::TypeAliasDef| node.short_label())
.visit(|node: ast::ConstDef| node.short_label())
.visit(|node: ast::StaticDef| node.short_label())
.visit(|node: ast::NamedFieldDef| node.short_label())
.visit(|node: ast::EnumVariant| node.short_label())
.accept(&node)?
}

@ -24,14 +24,14 @@ pub fn file_structure(file: &SourceFile) -> Vec<StructureNode> {
for event in file.syntax().preorder() {
match event {
WalkEvent::Enter(node) => {
if let Some(mut symbol) = structure_node(node) {
if let Some(mut symbol) = structure_node(&node) {
symbol.parent = stack.last().copied();
stack.push(res.len());
res.push(symbol);
}
}
WalkEvent::Leave(node) => {
if structure_node(node).is_some() {
if structure_node(&node).is_some() {
stack.pop().unwrap();
}
}

@ -41,19 +41,20 @@ pub fn file_structure(file: &SourceFile) -> Vec<StructureNode> {
}

fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {
fn decl<N: NameOwner + AttrsOwner>(node: &N) -> Option<StructureNode> {
fn decl<N: NameOwner + AttrsOwner>(node: N) -> Option<StructureNode> {
decl_with_detail(node, None)
}

fn decl_with_ascription<N: NameOwner + AttrsOwner + TypeAscriptionOwner>(
node: &N,
node: N,
) -> Option<StructureNode> {
decl_with_type_ref(node, node.ascribed_type())
let ty = node.ascribed_type();
decl_with_type_ref(node, ty)
}

fn decl_with_type_ref<N: NameOwner + AttrsOwner>(
node: &N,
type_ref: Option<&ast::TypeRef>,
node: N,
type_ref: Option<ast::TypeRef>,
) -> Option<StructureNode> {
let detail = type_ref.map(|type_ref| {
let mut detail = String::new();

@ -64,7 +65,7 @@ fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {
}

fn decl_with_detail<N: NameOwner + AttrsOwner>(
node: &N,
node: N,
detail: Option<String>,
) -> Option<StructureNode> {
let name = node.name()?;

@ -82,22 +83,24 @@ fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {

fn collapse_ws(node: &SyntaxNode, output: &mut String) {
let mut can_insert_ws = false;
for line in node.text().chunks().flat_map(|chunk| chunk.lines()) {
let line = line.trim();
if line.is_empty() {
if can_insert_ws {
output.push_str(" ");
can_insert_ws = false;
for chunk in node.text().chunks() {
for line in chunk.lines() {
let line = line.trim();
if line.is_empty() {
if can_insert_ws {
output.push_str(" ");
can_insert_ws = false;
}
} else {
output.push_str(line);
can_insert_ws = true;
}
} else {
output.push_str(line);
can_insert_ws = true;
}
}
}

visitor()
.visit(|fn_def: &ast::FnDef| {
.visit(|fn_def: ast::FnDef| {
let mut detail = String::from("fn");
if let Some(type_param_list) = fn_def.type_param_list() {
collapse_ws(type_param_list.syntax(), &mut detail);

@ -117,11 +120,14 @@ fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {
.visit(decl::<ast::EnumVariant>)
.visit(decl::<ast::TraitDef>)
.visit(decl::<ast::Module>)
.visit(|td: &ast::TypeAliasDef| decl_with_type_ref(td, td.type_ref()))
.visit(|td: ast::TypeAliasDef| {
let ty = td.type_ref();
decl_with_type_ref(td, ty)
})
.visit(decl_with_ascription::<ast::NamedFieldDef>)
.visit(decl_with_ascription::<ast::ConstDef>)
.visit(decl_with_ascription::<ast::StaticDef>)
.visit(|im: &ast::ImplBlock| {
.visit(|im: ast::ImplBlock| {
let target_type = im.target_type()?;
let target_trait = im.target_trait();
let label = match target_trait {

@ -142,14 +148,14 @@ fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {
};
Some(node)
})
.visit(|mc: &ast::MacroCall| {
.visit(|mc: ast::MacroCall| {
let first_token = mc.syntax().first_token().unwrap();
if first_token.text().as_str() != "macro_rules" {
return None;
}
decl(mc)
})
.accept(node)?
.accept(&node)?
}

#[cfg(test)]

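Both `docs_from_symbol` and `structure_node` above move from `&N`-taking helpers to closures that receive nodes by value; `accept` still borrows the `SyntaxNode` and hands an owned clone to whichever arm matches. The rewritten shape, reduced to two arms (a sketch):

    // Visitor chains after the migration: arms take owned nodes.
    fn short_doc(node: &SyntaxNode) -> Option<Option<String>> {
        visitor()
            .visit(|it: ast::FnDef| it.doc_comment_text())
            .visit(|it: ast::StructDef| it.doc_comment_text())
            .accept(node) // borrows; the matching arm gets an owned clone
    }
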
@ -42,7 +42,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
TokenAtOffset::None => return None,
TokenAtOffset::Single(l) => {
if string_kinds.contains(&l.kind()) {
extend_single_word_in_comment_or_string(l, offset).unwrap_or_else(|| l.range())
extend_single_word_in_comment_or_string(&l, offset).unwrap_or_else(|| l.range())
} else {
l.range()
}

@ -56,7 +56,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
if token.range() != range {
return Some(token.range());
}
if let Some(comment) = ast::Comment::cast(token) {
if let Some(comment) = ast::Comment::cast(token.clone()) {
if let Some(range) = extend_comments(comment) {
return Some(range);
}

@ -73,7 +73,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
let node = node.ancestors().take_while(|n| n.range() == node.range()).last().unwrap();

if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) {
if let Some(range) = extend_list_item(node) {
if let Some(range) = extend_list_item(&node) {
return Some(range);
}
}

@ -82,7 +82,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
}

fn extend_single_word_in_comment_or_string(
leaf: SyntaxToken,
leaf: &SyntaxToken,
offset: TextUnit,
) -> Option<TextRange> {
let text: &str = leaf.text();

@ -131,9 +131,9 @@ fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextUnit) -> TextRange
ws.range()
}

fn pick_best<'a>(l: SyntaxToken<'a>, r: SyntaxToken<'a>) -> SyntaxToken<'a> {
return if priority(r) > priority(l) { r } else { l };
fn priority(n: SyntaxToken) -> usize {
fn pick_best<'a>(l: SyntaxToken, r: SyntaxToken) -> SyntaxToken {
return if priority(&r) > priority(&l) { r } else { l };
fn priority(n: &SyntaxToken) -> usize {
match n.kind() {
WHITESPACE => 0,
IDENT | T![self] | T![super] | T![crate] | LIFETIME => 2,

@ -156,7 +156,7 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
SyntaxElement::Token(it) => is_single_line_ws(it),
})
.next()
.and_then(|it| it.as_token())
.and_then(|it| it.as_token().cloned())
.filter(|node| node.kind() == T![,])
}

@ -167,7 +167,7 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
// Include any following whitespace when comma if after list item.
let final_node = comma_node
.next_sibling_or_token()
.and_then(|it| it.as_token())
.and_then(|it| it.as_token().cloned())
.filter(|node| is_single_line_ws(node))
.unwrap_or(comma_node);

@ -178,8 +178,8 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
}

fn extend_comments(comment: ast::Comment) -> Option<TextRange> {
let prev = adj_comments(comment, Direction::Prev);
let next = adj_comments(comment, Direction::Next);
let prev = adj_comments(&comment, Direction::Prev);
let next = adj_comments(&comment, Direction::Next);
if prev != next {
Some(TextRange::from_to(prev.syntax().range().start(), next.syntax().range().end()))
} else {

@ -187,14 +187,14 @@ fn extend_comments(comment: ast::Comment) -> Option<TextRange> {
}
}

fn adj_comments(comment: ast::Comment, dir: Direction) -> ast::Comment {
let mut res = comment;
fn adj_comments(comment: &ast::Comment, dir: Direction) -> ast::Comment {
let mut res = comment.clone();
for element in comment.syntax().siblings_with_tokens(dir) {
let token = match element.as_token() {
None => break,
Some(token) => token,
};
if let Some(c) = ast::Comment::cast(token) {
if let Some(c) = ast::Comment::cast(token.clone()) {
res = c
} else if token.kind() != WHITESPACE || token.text().contains("\n\n") {
break;

@ -30,7 +30,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
for element in file.syntax().descendants_with_tokens() {
// Fold items that span multiple lines
if let Some(kind) = fold_kind(element.kind()) {
let is_multiline = match element {
let is_multiline = match &element {
SyntaxElement::Node(node) => node.text().contains('\n'),
SyntaxElement::Token(token) => token.text().contains('\n'),
};

@ -56,7 +56,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
SyntaxElement::Node(node) => {
// Fold groups of imports
if node.kind() == USE_ITEM && !visited_imports.contains(&node) {
if let Some(range) = contiguous_range_for_group(node, &mut visited_imports) {
if let Some(range) = contiguous_range_for_group(&node, &mut visited_imports) {
res.push(Fold { range, kind: FoldKind::Imports })
}
}

@ -65,7 +65,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
if node.kind() == MODULE && !has_visibility(&node) && !visited_mods.contains(&node)
{
if let Some(range) =
contiguous_range_for_group_unless(node, has_visibility, &mut visited_mods)
contiguous_range_for_group_unless(&node, has_visibility, &mut visited_mods)
{
res.push(Fold { range, kind: FoldKind::Mods })
}

@ -88,24 +88,24 @@ fn fold_kind(kind: SyntaxKind) -> Option<FoldKind> {
}

fn has_visibility(node: &SyntaxNode) -> bool {
ast::Module::cast(node).and_then(|m| m.visibility()).is_some()
ast::Module::cast(node.clone()).and_then(|m| m.visibility()).is_some()
}

fn contiguous_range_for_group<'a>(
first: &'a SyntaxNode,
visited: &mut FxHashSet<&'a SyntaxNode>,
fn contiguous_range_for_group(
first: &SyntaxNode,
visited: &mut FxHashSet<SyntaxNode>,
) -> Option<TextRange> {
contiguous_range_for_group_unless(first, |_| false, visited)
}

fn contiguous_range_for_group_unless<'a>(
first: &'a SyntaxNode,
unless: impl Fn(&'a SyntaxNode) -> bool,
visited: &mut FxHashSet<&'a SyntaxNode>,
fn contiguous_range_for_group_unless(
first: &SyntaxNode,
unless: impl Fn(&SyntaxNode) -> bool,
visited: &mut FxHashSet<SyntaxNode>,
) -> Option<TextRange> {
visited.insert(first);
visited.insert(first.clone());

let mut last = first;
let mut last = first.clone();
for element in first.siblings_with_tokens(Direction::Next) {
let node = match element {
SyntaxElement::Token(token) => {

@ -123,15 +123,15 @@ fn contiguous_range_for_group_unless<'a>(
};

// Stop if we find a node that doesn't belong to the group
if node.kind() != first.kind() || unless(node) {
if node.kind() != first.kind() || unless(&node) {
break;
}

visited.insert(node);
visited.insert(node.clone());
last = node;
}

if first != last {
if first != &last {
Some(TextRange::from_to(first.range().start(), last.range().end()))
} else {
// The group consists of only one element, therefore it cannot be folded

@ -139,11 +139,11 @@ fn contiguous_range_for_group_unless<'a>(
}
}

fn contiguous_range_for_comment<'a>(
first: ast::Comment<'a>,
visited: &mut FxHashSet<ast::Comment<'a>>,
fn contiguous_range_for_comment(
first: ast::Comment,
visited: &mut FxHashSet<ast::Comment>,
) -> Option<TextRange> {
visited.insert(first);
visited.insert(first.clone());

// Only fold comments of the same flavor
let group_kind = first.kind();

@ -151,11 +151,11 @@ fn contiguous_range_for_comment<'a>(
return None;
}

let mut last = first;
let mut last = first.clone();
for element in first.syntax().siblings_with_tokens(Direction::Next) {
match element {
SyntaxElement::Token(token) => {
if let Some(ws) = ast::Whitespace::cast(token) {
if let Some(ws) = ast::Whitespace::cast(token.clone()) {
if !ws.spans_multiple_lines() {
// Ignore whitespace without blank lines
continue;

@ -163,7 +163,7 @@ fn contiguous_range_for_comment<'a>(
}
if let Some(c) = ast::Comment::cast(token) {
if c.kind() == group_kind {
visited.insert(c);
visited.insert(c.clone());
last = c;
continue;
}

@ -193,7 +193,7 @@ mod tests {
fn do_check(text: &str, fold_kinds: &[FoldKind]) {
let (ranges, text) = extract_ranges(text, "fold");
let parse = SourceFile::parse(&text);
let folds = folding_ranges(parse.tree());
let folds = folding_ranges(&parse.tree());

assert_eq!(
folds.len(),

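In `folding_ranges`, `FxHashSet<&'a SyntaxNode>` becomes `FxHashSet<SyntaxNode>`: the set stores clones, which is what lets the `<'a>` lifetime parameters disappear from the `contiguous_range_for_group*` helpers. In miniature:

    // Track visited nodes by value rather than by `&'a` reference.
    fn mark_visited(visited: &mut FxHashSet<SyntaxNode>, node: &SyntaxNode) -> bool {
        // `insert` stores a cheap clone; node equality is by tree position,
        // so the same group is never folded twice.
        visited.insert(node.clone())
    }
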
@ -20,13 +20,13 @@ pub(crate) fn goto_definition(
position: FilePosition,
) -> Option<RangeInfo<Vec<NavigationTarget>>> {
let parse = db.parse(position.file_id);
let syntax = parse.tree().syntax();
if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, position.offset) {
let navs = reference_definition(db, position.file_id, name_ref).to_vec();
let syntax = parse.tree().syntax().clone();
if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(&syntax, position.offset) {
let navs = reference_definition(db, position.file_id, &name_ref).to_vec();
return Some(RangeInfo::new(name_ref.syntax().range(), navs.to_vec()));
}
if let Some(name) = find_node_at_offset::<ast::Name>(syntax, position.offset) {
let navs = name_definition(db, position.file_id, name)?;
if let Some(name) = find_node_at_offset::<ast::Name>(&syntax, position.offset) {
let navs = name_definition(db, position.file_id, &name)?;
return Some(RangeInfo::new(name.syntax().range(), navs));
}
None

@ -94,7 +94,7 @@ pub(crate) fn name_definition(
) -> Option<Vec<NavigationTarget>> {
let parent = name.syntax().parent()?;

if let Some(module) = ast::Module::cast(&parent) {
if let Some(module) = ast::Module::cast(parent.clone()) {
if module.has_semi() {
if let Some(child_module) =
hir::source_binder::module_from_declaration(db, file_id, module)

@ -114,38 +114,88 @@ pub(crate) fn name_definition(

fn named_target(file_id: FileId, node: &SyntaxNode) -> Option<NavigationTarget> {
visitor()
.visit(|node: &ast::StructDef| {
NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
.visit(|node: ast::StructDef| {
NavigationTarget::from_named(
file_id,
&node,
node.doc_comment_text(),
node.short_label(),
)
})
.visit(|node: &ast::EnumDef| {
NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
.visit(|node: ast::EnumDef| {
NavigationTarget::from_named(
file_id,
&node,
node.doc_comment_text(),
node.short_label(),
)
})
.visit(|node: &ast::EnumVariant| {
NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
.visit(|node: ast::EnumVariant| {
NavigationTarget::from_named(
file_id,
&node,
node.doc_comment_text(),
node.short_label(),
)
})
.visit(|node: &ast::FnDef| {
NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
.visit(|node: ast::FnDef| {
NavigationTarget::from_named(
file_id,
&node,
node.doc_comment_text(),
node.short_label(),
)
})
.visit(|node: &ast::TypeAliasDef| {
NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
.visit(|node: ast::TypeAliasDef| {
NavigationTarget::from_named(
file_id,
&node,
node.doc_comment_text(),
node.short_label(),
)
})
.visit(|node: &ast::ConstDef| {
NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
.visit(|node: ast::ConstDef| {
NavigationTarget::from_named(
file_id,
&node,
node.doc_comment_text(),
node.short_label(),
)
})
.visit(|node: &ast::StaticDef| {
NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
.visit(|node: ast::StaticDef| {
NavigationTarget::from_named(
file_id,
&node,
node.doc_comment_text(),
node.short_label(),
)
})
.visit(|node: &ast::TraitDef| {
NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
.visit(|node: ast::TraitDef| {
NavigationTarget::from_named(
file_id,
&node,
node.doc_comment_text(),
node.short_label(),
)
})
.visit(|node: &ast::NamedFieldDef| {
NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
.visit(|node: ast::NamedFieldDef| {
NavigationTarget::from_named(
file_id,
&node,
node.doc_comment_text(),
node.short_label(),
)
})
.visit(|node: &ast::Module| {
NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label())
.visit(|node: ast::Module| {
NavigationTarget::from_named(
file_id,
&node,
node.doc_comment_text(),
node.short_label(),
)
})
.visit(|node: &ast::MacroCall| {
NavigationTarget::from_named(file_id, node, node.doc_comment_text(), None)
.visit(|node: ast::MacroCall| {
NavigationTarget::from_named(file_id, &node, node.doc_comment_text(), None)
})
.accept(node)
}

@ -13,15 +13,17 @@ pub(crate) fn goto_type_definition(
token
.parent()
.ancestors()
.find(|n| ast::Expr::cast(*n).is_some() || ast::Pat::cast(*n).is_some())
.find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())
})?;

let analyzer = hir::SourceAnalyzer::new(db, position.file_id, node, None);
let analyzer = hir::SourceAnalyzer::new(db, position.file_id, &node, None);

let ty: hir::Ty = if let Some(ty) = ast::Expr::cast(node).and_then(|e| analyzer.type_of(db, e))
let ty: hir::Ty = if let Some(ty) =
ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e))
{
ty
} else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, p)) {
} else if let Some(ty) = ast::Pat::cast(node.clone()).and_then(|p| analyzer.type_of_pat(db, &p))
{
ty
} else {
return None;

@ -6,7 +6,7 @@ use ra_syntax::{
visit::{visitor, Visitor},
},
ast::{self, DocCommentsOwner},
AstNode, TreeArc,
AstNode,
};

use crate::{

@ -104,7 +104,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn

let mut no_fallback = false;

match classify_name_ref(db, &analyzer, name_ref) {
match classify_name_ref(db, &analyzer, &name_ref) {
Some(Method(it)) => res.extend(from_def_source(db, it)),
Some(Macro(it)) => {
let src = it.source(db);

@ -163,7 +163,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn

if res.is_empty() && !no_fallback {
// Fallback index based approach:
let symbols = crate::symbol_index::index_resolve(db, name_ref);
let symbols = crate::symbol_index::index_resolve(db, &name_ref);
for sym in symbols {
let docs = docs_from_symbol(db, &sym);
let desc = description_from_symbol(db, &sym);

@ -177,34 +177,32 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
} else if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), position.offset) {
if let Some(parent) = name.syntax().parent() {
let text = visitor()
.visit(|node: &ast::StructDef| {
.visit(|node: ast::StructDef| {
hover_text(node.doc_comment_text(), node.short_label())
})
.visit(|node: &ast::EnumDef| {
.visit(|node: ast::EnumDef| hover_text(node.doc_comment_text(), node.short_label()))
.visit(|node: ast::EnumVariant| {
hover_text(node.doc_comment_text(), node.short_label())
})
.visit(|node: &ast::EnumVariant| {
.visit(|node: ast::FnDef| hover_text(node.doc_comment_text(), node.short_label()))
.visit(|node: ast::TypeAliasDef| {
hover_text(node.doc_comment_text(), node.short_label())
})
.visit(|node: &ast::FnDef| hover_text(node.doc_comment_text(), node.short_label()))
.visit(|node: &ast::TypeAliasDef| {
.visit(|node: ast::ConstDef| {
hover_text(node.doc_comment_text(), node.short_label())
})
.visit(|node: &ast::ConstDef| {
.visit(|node: ast::StaticDef| {
hover_text(node.doc_comment_text(), node.short_label())
})
.visit(|node: &ast::StaticDef| {
.visit(|node: ast::TraitDef| {
hover_text(node.doc_comment_text(), node.short_label())
})
.visit(|node: &ast::TraitDef| {
.visit(|node: ast::NamedFieldDef| {
hover_text(node.doc_comment_text(), node.short_label())
})
.visit(|node: &ast::NamedFieldDef| {
hover_text(node.doc_comment_text(), node.short_label())
})
.visit(|node: &ast::Module| hover_text(node.doc_comment_text(), node.short_label()))
.visit(|node: &ast::MacroCall| hover_text(node.doc_comment_text(), None))
.accept(parent);
.visit(|node: ast::Module| hover_text(node.doc_comment_text(), node.short_label()))
.visit(|node: ast::MacroCall| hover_text(node.doc_comment_text(), None))
.accept(&parent);

if let Some(text) = text {
res.extend(text);

@ -217,8 +215,9 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
}

if range.is_none() {
let node = ancestors_at_offset(file.syntax(), position.offset)
.find(|n| ast::Expr::cast(*n).is_some() || ast::Pat::cast(*n).is_some())?;
let node = ancestors_at_offset(file.syntax(), position.offset).find(|n| {
ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some()
})?;
let frange = FileRange { file_id: position.file_id, range: node.range() };
res.extend(type_of(db, frange).map(rust_code_markup));
range = Some(node.range());

@ -233,7 +232,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn

fn from_def_source<A, D>(db: &RootDatabase, def: D) -> Option<String>
where
D: HasSource<Ast = TreeArc<A>>,
D: HasSource<Ast = A>,
A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel,
{
let src = def.source(db);

@ -243,17 +242,17 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn

pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> {
let parse = db.parse(frange.file_id);
let syntax = parse.tree().syntax();
let leaf_node = find_covering_element(syntax, frange.range);
let leaf_node = find_covering_element(parse.tree().syntax(), frange.range);
// if we picked identifier, expand to pattern/expression
let node = leaf_node
.ancestors()
.take_while(|it| it.range() == leaf_node.range())
.find(|&it| ast::Expr::cast(it).is_some() || ast::Pat::cast(it).is_some())?;
let analyzer = hir::SourceAnalyzer::new(db, frange.file_id, node, None);
let ty = if let Some(ty) = ast::Expr::cast(node).and_then(|e| analyzer.type_of(db, e)) {
.find(|it| ast::Expr::cast(it.clone()).is_some() || ast::Pat::cast(it.clone()).is_some())?;
let analyzer = hir::SourceAnalyzer::new(db, frange.file_id, &node, None);
let ty = if let Some(ty) = ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e))
{
ty
} else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, p)) {
} else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, &p)) {
ty
} else {
return None;

@ -9,19 +9,19 @@ pub(crate) fn goto_implementation(
position: FilePosition,
) -> Option<RangeInfo<Vec<NavigationTarget>>> {
let parse = db.parse(position.file_id);
let syntax = parse.tree().syntax();
let syntax = parse.tree().syntax().clone();

let module = source_binder::module_from_position(db, position)?;

if let Some(nominal_def) = find_node_at_offset::<ast::NominalDef>(syntax, position.offset) {
if let Some(nominal_def) = find_node_at_offset::<ast::NominalDef>(&syntax, position.offset) {
return Some(RangeInfo::new(
nominal_def.syntax().range(),
impls_for_def(db, nominal_def, module)?,
impls_for_def(db, &nominal_def, module)?,
));
} else if let Some(trait_def) = find_node_at_offset::<ast::TraitDef>(syntax, position.offset) {
} else if let Some(trait_def) = find_node_at_offset::<ast::TraitDef>(&syntax, position.offset) {
return Some(RangeInfo::new(
trait_def.syntax().range(),
impls_for_trait(db, trait_def, module)?,
impls_for_trait(db, &trait_def, module)?,
));
}

@ -27,7 +27,7 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
SyntaxElement::Token(token) => token.parent(),
};
let mut edit = TextEditBuilder::default();
for token in node.descendants_with_tokens().filter_map(|it| it.as_token()) {
for token in node.descendants_with_tokens().filter_map(|it| it.as_token().cloned()) {
let range = match range.intersection(&token.range()) {
Some(range) => range,
None => continue,

@ -37,7 +37,7 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
let pos: TextUnit = (pos as u32).into();
let off = token.range().start() + range.start() + pos;
if !edit.invalidates_offset(off) {
remove_newline(&mut edit, token, off);
remove_newline(&mut edit, &token, off);
}
}
}

@ -45,7 +45,7 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
edit.finish()
}

fn remove_newline(edit: &mut TextEditBuilder, token: SyntaxToken, offset: TextUnit) {
fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextUnit) {
if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 {
// The node is either the first or the last in the file
let suff = &token.text()[TextRange::from_to(

@ -98,9 +98,10 @@ fn remove_newline(edit: &mut TextEditBuilder, token: SyntaxToken, offset: TextUn
TextRange::from_to(prev.range().start(), token.range().end()),
space.to_string(),
);
} else if let (Some(_), Some(next)) =
(prev.as_token().and_then(ast::Comment::cast), next.as_token().and_then(ast::Comment::cast))
{
} else if let (Some(_), Some(next)) = (
prev.as_token().cloned().and_then(ast::Comment::cast),
next.as_token().cloned().and_then(ast::Comment::cast),
) {
// Removes: newline (incl. surrounding whitespace), start of the next comment
edit.delete(TextRange::from_to(
token.range().start(),

@ -113,16 +114,16 @@ fn remove_newline(edit: &mut TextEditBuilder, token: SyntaxToken, offset: TextUn
}

fn has_comma_after(node: &SyntaxNode) -> bool {
match non_trivia_sibling(node.into(), Direction::Next) {
match non_trivia_sibling(node.clone().into(), Direction::Next) {
Some(n) => n.kind() == T![,],
_ => false,
}
}

fn join_single_expr_block(edit: &mut TextEditBuilder, token: SyntaxToken) -> Option<()> {
fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> {
let block = ast::Block::cast(token.parent())?;
let block_expr = ast::BlockExpr::cast(block.syntax().parent()?)?;
let expr = extract_trivial_expression(block)?;
let expr = extract_trivial_expression(&block)?;

let block_range = block_expr.syntax().range();
let mut buf = expr.syntax().text().to_string();

@ -139,7 +140,7 @@ fn join_single_expr_block(edit: &mut TextEditBuilder, token: SyntaxToken) -> Opt
Some(())
}

fn join_single_use_tree(edit: &mut TextEditBuilder, token: SyntaxToken) -> Option<()> {
fn join_single_use_tree(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> {
let use_tree_list = ast::UseTreeList::cast(token.parent())?;
let (tree,) = use_tree_list.use_trees().collect_tuple()?;
edit.replace(use_tree_list.syntax().range(), tree.syntax().text().to_string());

@ -504,7 +505,7 @@ fn foo() {
fn check_join_lines_sel(before: &str, after: &str) {
let (sel, before) = extract_range(before);
let parse = SourceFile::parse(&before);
let result = join_lines(parse.tree(), sel);
let result = join_lines(&parse.tree(), sel);
let actual = result.apply(&before);
assert_eq_text!(after, &actual);
}

@ -50,7 +50,7 @@ use ra_db::{
salsa::{self, ParallelDatabase},
CheckCanceled, SourceDatabase,
};
use ra_syntax::{SourceFile, TextRange, TextUnit, TreeArc};
use ra_syntax::{SourceFile, TextRange, TextUnit};
use ra_text_edit::TextEdit;
use relative_path::RelativePathBuf;

@ -325,8 +325,8 @@ impl Analysis {
}

/// Gets the syntax tree of the file.
pub fn parse(&self, file_id: FileId) -> TreeArc<SourceFile> {
self.db.parse(file_id).tree().to_owned()
pub fn parse(&self, file_id: FileId) -> SourceFile {
self.db.parse(file_id).tree()
}

/// Gets the file's `LineIndex`: data structure to convert between absolute

@ -360,7 +360,7 @@ impl Analysis {
let parse = self.db.parse(frange.file_id);
let file_edit = SourceFileEdit {
file_id: frange.file_id,
edit: join_lines::join_lines(parse.tree(), frange.range),
edit: join_lines::join_lines(&parse.tree(), frange.range),
};
SourceChange::source_file_edit("join lines", file_edit)
}

@ -393,13 +393,13 @@ impl Analysis {
/// file outline.
pub fn file_structure(&self, file_id: FileId) -> Vec<StructureNode> {
let parse = self.db.parse(file_id);
file_structure(parse.tree())
file_structure(&parse.tree())
}

/// Returns the set of folding ranges.
pub fn folding_ranges(&self, file_id: FileId) -> Vec<Fold> {
let parse = self.db.parse(file_id);
folding_ranges::folding_ranges(parse.tree())
folding_ranges::folding_ranges(&parse.tree())
}

/// Fuzzy searches for a symbol.

@ -26,7 +26,7 @@ mod tests {
fn do_check(before: &str, after: &str) {
let (pos, before) = extract_offset(before);
let parse = SourceFile::parse(&before);
let new_pos = match matching_brace(parse.tree(), pos) {
let new_pos = match matching_brace(&parse.tree(), pos) {
None => pos,
Some(pos) => pos,
};

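On the public API side, `Analysis::parse` now hands out the `SourceFile` directly instead of `TreeArc<SourceFile>`. A hypothetical client of the new signature:

    // Sketch of a caller; `analysis` and `file_id` are assumed to exist.
    fn print_file(analysis: &Analysis, file_id: FileId) {
        let file: SourceFile = analysis.parse(file_id); // owned handle, no TreeArc
        println!("{}", file.syntax().text()); // traverse or print as before
    }
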
@ -26,7 +26,7 @@ pub(crate) fn classify_name_ref(
// Check if it is a method
if let Some(method_call) = name_ref.syntax().parent().and_then(ast::MethodCallExpr::cast) {
tested_by!(goto_definition_works_for_methods);
if let Some(func) = analyzer.resolve_method_call(method_call) {
if let Some(func) = analyzer.resolve_method_call(&method_call) {
return Some(Method(func));
}
}

@ -40,7 +40,7 @@ pub(crate) fn classify_name_ref(
.and_then(ast::MacroCall::cast)
{
tested_by!(goto_definition_works_for_macros);
if let Some(mac) = analyzer.resolve_macro_call(db, macro_call) {
if let Some(mac) = analyzer.resolve_macro_call(db, &macro_call) {
return Some(Macro(mac));
}
}

@ -48,7 +48,7 @@ pub(crate) fn classify_name_ref(
// It could also be a field access
if let Some(field_expr) = name_ref.syntax().parent().and_then(ast::FieldExpr::cast) {
tested_by!(goto_definition_works_for_fields);
if let Some(field) = analyzer.resolve_field(field_expr) {
if let Some(field) = analyzer.resolve_field(&field_expr) {
return Some(FieldAccess(field));
};
}

@ -59,7 +59,7 @@ pub(crate) fn classify_name_ref(

let struct_lit = field_expr.syntax().ancestors().find_map(ast::StructLit::cast);

if let Some(ty) = struct_lit.and_then(|lit| analyzer.type_of(db, lit.into())) {
if let Some(ty) = struct_lit.and_then(|lit| analyzer.type_of(db, &lit.into())) {
if let Some((hir::AdtDef::Struct(s), _)) = ty.as_adt() {
let hir_path = hir::Path::from_name_ref(name_ref);
let hir_name = hir_path.as_ident().unwrap();

@ -73,7 +73,7 @@ pub(crate) fn classify_name_ref(

// General case, a path or a local:
if let Some(path) = name_ref.syntax().ancestors().find_map(ast::Path::cast) {
if let Some(resolved) = analyzer.resolve_path(db, path) {
if let Some(resolved) = analyzer.resolve_path(db, &path) {
return match resolved {
hir::PathResolution::Def(def) => Some(Def(def)),
hir::PathResolution::LocalBinding(Either::A(pat)) => Some(Pat(pat)),

@ -50,11 +50,11 @@ pub(crate) fn find_all_refs(
|
||||
position: FilePosition,
|
||||
) -> Option<ReferenceSearchResult> {
|
||||
let parse = db.parse(position.file_id);
|
||||
let (binding, analyzer) = find_binding(db, parse.tree(), position)?;
|
||||
let declaration = NavigationTarget::from_bind_pat(position.file_id, binding);
|
||||
let (binding, analyzer) = find_binding(db, &parse.tree(), position)?;
|
||||
let declaration = NavigationTarget::from_bind_pat(position.file_id, &binding);
|
||||
|
||||
let references = analyzer
|
||||
.find_all_refs(binding)
|
||||
.find_all_refs(&binding)
|
||||
.into_iter()
|
||||
.map(move |ref_desc| FileRange { file_id: position.file_id, range: ref_desc.range })
|
||||
.collect::<Vec<_>>();
|
||||
@ -63,9 +63,9 @@ pub(crate) fn find_all_refs(
|
||||
|
||||
fn find_binding<'a>(
|
||||
db: &RootDatabase,
|
||||
source_file: &'a SourceFile,
|
||||
source_file: &SourceFile,
|
||||
position: FilePosition,
|
||||
) -> Option<(&'a ast::BindPat, hir::SourceAnalyzer)> {
|
||||
) -> Option<(ast::BindPat, hir::SourceAnalyzer)> {
|
||||
let syntax = source_file.syntax();
|
||||
if let Some(binding) = find_node_at_offset::<ast::BindPat>(syntax, position.offset) {
|
||||
let analyzer = hir::SourceAnalyzer::new(db, position.file_id, binding.syntax(), None);
|
||||
@ -73,7 +73,7 @@ pub(crate) fn find_all_refs(
|
||||
};
|
||||
let name_ref = find_node_at_offset::<ast::NameRef>(syntax, position.offset)?;
|
||||
let analyzer = hir::SourceAnalyzer::new(db, position.file_id, name_ref.syntax(), None);
|
||||
let resolved = analyzer.resolve_local_name(name_ref)?;
|
||||
let resolved = analyzer.resolve_local_name(&name_ref)?;
|
||||
if let Either::A(ptr) = resolved.ptr() {
|
||||
if let ast::PatKind::BindPat(binding) = ptr.to_node(source_file.syntax()).kind() {
|
||||
return Some((binding, analyzer));
|
||||
@ -89,10 +89,10 @@ pub(crate) fn rename(
|
||||
new_name: &str,
|
||||
) -> Option<SourceChange> {
|
||||
let parse = db.parse(position.file_id);
|
||||
let syntax = parse.tree().syntax();
|
||||
|
||||
if let Some((ast_name, ast_module)) = find_name_and_module_at_offset(syntax, position) {
|
||||
rename_mod(db, ast_name, ast_module, position, new_name)
|
||||
if let Some((ast_name, ast_module)) =
|
||||
find_name_and_module_at_offset(parse.tree().syntax(), position)
|
||||
{
|
||||
rename_mod(db, &ast_name, &ast_module, position, new_name)
|
||||
} else {
|
||||
rename_reference(db, position, new_name)
|
||||
}
|
||||
@ -101,14 +101,10 @@ pub(crate) fn rename(
|
||||
fn find_name_and_module_at_offset(
|
||||
syntax: &SyntaxNode,
|
||||
position: FilePosition,
|
||||
) -> Option<(&ast::Name, &ast::Module)> {
|
||||
let ast_name = find_node_at_offset::<ast::Name>(syntax, position.offset);
|
||||
let ast_name_parent = ast::Module::cast(ast_name?.syntax().parent()?);
|
||||
|
||||
if let (Some(ast_module), Some(name)) = (ast_name_parent, ast_name) {
|
||||
return Some((name, ast_module));
|
||||
}
|
||||
None
|
||||
) -> Option<(ast::Name, ast::Module)> {
|
||||
let ast_name = find_node_at_offset::<ast::Name>(syntax, position.offset)?;
|
||||
let ast_module = ast::Module::cast(ast_name.syntax().parent()?)?;
|
||||
Some((ast_name, ast_module))
|
||||
}
|
||||
|
||||
fn source_edit_from_fileid_range(
|
||||
@ -135,7 +131,8 @@ fn rename_mod(
|
||||
) -> Option<SourceChange> {
|
||||
let mut source_file_edits = Vec::new();
|
||||
let mut file_system_edits = Vec::new();
|
||||
if let Some(module) = source_binder::module_from_declaration(db, position.file_id, &ast_module)
|
||||
if let Some(module) =
|
||||
source_binder::module_from_declaration(db, position.file_id, ast_module.clone())
|
||||
{
|
||||
let src = module.definition_source(db);
|
||||
let file_id = src.file_id.as_original_file();
|
||||
|
@ -26,8 +26,8 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
|
||||
parse.tree().syntax().descendants().filter_map(|i| runnable(db, file_id, i)).collect()
|
||||
}
|
||||
|
||||
fn runnable(db: &RootDatabase, file_id: FileId, item: &SyntaxNode) -> Option<Runnable> {
|
||||
if let Some(fn_def) = ast::FnDef::cast(item) {
|
||||
fn runnable(db: &RootDatabase, file_id: FileId, item: SyntaxNode) -> Option<Runnable> {
|
||||
if let Some(fn_def) = ast::FnDef::cast(item.clone()) {
|
||||
runnable_fn(fn_def)
|
||||
} else if let Some(m) = ast::Module::cast(item) {
|
||||
runnable_mod(db, file_id, m)
|
||||
@ -36,8 +36,8 @@ fn runnable(db: &RootDatabase, file_id: FileId, item: &SyntaxNode) -> Option<Run
|
||||
}
|
||||
}
|
||||
|
||||
fn runnable_fn(fn_def: &ast::FnDef) -> Option<Runnable> {
|
||||
let name = fn_def.name()?.text();
|
||||
fn runnable_fn(fn_def: ast::FnDef) -> Option<Runnable> {
|
||||
let name = fn_def.name()?.text().clone();
|
||||
let kind = if name == "main" {
|
||||
RunnableKind::Bin
|
||||
} else if fn_def.has_atom_attr("test") {
|
||||
@ -50,7 +50,7 @@ fn runnable_fn(fn_def: &ast::FnDef) -> Option<Runnable> {
|
||||
Some(Runnable { range: fn_def.syntax().range(), kind })
|
||||
}
|
||||
|
||||
fn runnable_mod(db: &RootDatabase, file_id: FileId, module: &ast::Module) -> Option<Runnable> {
|
||||
fn runnable_mod(db: &RootDatabase, file_id: FileId, module: ast::Module) -> Option<Runnable> {
|
||||
let has_test_function = module
|
||||
.item_list()?
|
||||
.items()
|
||||
|
@ -104,7 +104,7 @@ impl FromIterator<TableEntry<MacroFile, Option<Parse<SyntaxNode>>>> for SyntaxTr
let mut res = SyntaxTreeStats::default();
for entry in iter {
res.total += 1;
if let Some(tree) = entry.value.and_then(|it| it).map(|it| it.tree().to_owned()) {
if let Some(tree) = entry.value.and_then(|it| it).map(|it| it.syntax_node()) {
res.retained += 1;
res.retained_size += tree.memory_size_of_subtree();
}

@ -61,7 +61,7 @@ fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Arc<SymbolIndex>
db.check_canceled();
let parse = db.parse(file_id);

let symbols = source_file_to_file_symbols(parse.tree(), file_id);
let symbols = source_file_to_file_symbols(&parse.tree(), file_id);

// FIXME: add macros here

@ -173,7 +173,7 @@ impl SymbolIndex {
files: impl ParallelIterator<Item = (FileId, Parse<ast::SourceFile>)>,
) -> SymbolIndex {
let symbols = files
.flat_map(|(file_id, file)| source_file_to_file_symbols(file.tree(), file_id))
.flat_map(|(file_id, file)| source_file_to_file_symbols(&file.tree(), file_id))
.collect::<Vec<_>>();
SymbolIndex::new(symbols)
}

@ -249,7 +249,7 @@ fn source_file_to_file_symbols(source_file: &SourceFile, file_id: FileId) -> Vec
for event in source_file.syntax().preorder() {
match event {
WalkEvent::Enter(node) => {
if let Some(mut symbol) = to_file_symbol(node, file_id) {
if let Some(mut symbol) = to_file_symbol(&node, file_id) {
symbol.container_name = stack.last().cloned();

stack.push(symbol.name.clone());

@ -258,7 +258,7 @@ fn source_file_to_file_symbols(source_file: &SourceFile, file_id: FileId) -> Vec
}

WalkEvent::Leave(node) => {
if to_symbol(node).is_some() {
if to_symbol(&node).is_some() {
stack.pop();
}
}

@ -269,7 +269,7 @@ fn source_file_to_file_symbols(source_file: &SourceFile, file_id: FileId) -> Vec
}

fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> {
fn decl<N: NameOwner>(node: &N) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> {
fn decl<N: NameOwner>(node: N) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> {
let name = node.name()?;
let name_range = name.syntax().range();
let name = name.text().clone();
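The `container_name` bookkeeping above rides on the preorder Enter/Leave events: entering a symbol pushes its name, leaving pops it, so the top of the stack is always the enclosing container. A hedged sketch of that traversal pattern with stand-in types:

enum WalkEvent<T> { Enter(T), Leave(T) }

fn main() {
    // a fake preorder stream: module `m` containing fn `f`
    let events = vec![
        WalkEvent::Enter("m"), WalkEvent::Enter("f"),
        WalkEvent::Leave("f"), WalkEvent::Leave("m"),
    ];
    let mut stack: Vec<&str> = Vec::new();
    for ev in events {
        match ev {
            WalkEvent::Enter(name) => {
                // the container of `name` is whatever is on top of the stack
                println!("symbol {} in {:?}", name, stack.last());
                stack.push(name);
            }
            WalkEvent::Leave(_) => { stack.pop(); }
        }
    }
}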
@ -31,8 +31,8 @@ fn is_control_keyword(kind: SyntaxKind) -> bool {
}
}

fn is_variable_mutable(db: &RootDatabase, analyzer: &hir::SourceAnalyzer, pat: &ast::Pat) -> bool {
let ty = analyzer.type_of_pat(db, pat).unwrap_or(Ty::Unknown);
fn is_variable_mutable(db: &RootDatabase, analyzer: &hir::SourceAnalyzer, pat: ast::Pat) -> bool {
let ty = analyzer.type_of_pat(db, &pat).unwrap_or(Ty::Unknown);
let is_ty_mut = {
if let Some((_, mutability)) = ty.as_reference() {
match mutability {

@ -55,7 +55,7 @@ fn is_variable_mutable(db: &RootDatabase, analyzer: &hir::SourceAnalyzer, pat: &
pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> {
let _p = profile("highlight");
let parse = db.parse(file_id);
let root = parse.tree().syntax();
let root = parse.tree().syntax().clone();

fn calc_binding_hash(file_id: FileId, text: &SmolStr, shadow_count: u32) -> u64 {
fn hash<T: std::hash::Hash + std::fmt::Debug>(x: T) -> u64 {

@ -70,6 +70,7 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
}

// Visited nodes to handle highlighting priorities
// FIXME: retain only ranges here
let mut highlighted: FxHashSet<SyntaxElement> = FxHashSet::default();
let mut bindings_shadow_count: FxHashMap<SmolStr, u32> = FxHashMap::default();

@ -84,14 +85,14 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => "string",
ATTR => "attribute",
NAME_REF => {
if let Some(name_ref) = node.as_node().and_then(ast::NameRef::cast) {
if let Some(name_ref) = node.as_node().cloned().and_then(ast::NameRef::cast) {
// FIXME: revisit this after #1340
use crate::name_ref_kind::{classify_name_ref, NameRefKind::*};
use hir::{ImplItem, ModuleDef};

// FIXME: try to reuse the SourceAnalyzers
let analyzer = hir::SourceAnalyzer::new(db, file_id, name_ref.syntax(), None);
match classify_name_ref(db, &analyzer, name_ref) {
match classify_name_ref(db, &analyzer, &name_ref) {
Some(Method(_)) => "function",
Some(Macro(_)) => "macro",
Some(FieldAccess(_)) => "field",

@ -113,13 +114,13 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
Some(Pat(ptr)) => {
binding_hash = Some({
let text =
ptr.syntax_node_ptr().to_node(root).text().to_smol_string();
ptr.syntax_node_ptr().to_node(&root).text().to_smol_string();
let shadow_count =
bindings_shadow_count.entry(text.clone()).or_default();
calc_binding_hash(file_id, &text, *shadow_count)
});

if is_variable_mutable(db, &analyzer, ptr.to_node(root)) {
if is_variable_mutable(db, &analyzer, ptr.to_node(&root)) {
"variable.mut"
} else {
"variable"

@ -134,7 +135,7 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
}
}
NAME => {
if let Some(name) = node.as_node().and_then(ast::Name::cast) {
if let Some(name) = node.as_node().cloned().and_then(ast::Name::cast) {
let analyzer = hir::SourceAnalyzer::new(db, file_id, name.syntax(), None);
if let Some(pat) = name.syntax().ancestors().find_map(ast::Pat::cast) {
binding_hash = Some({

@ -176,12 +177,11 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
k if is_control_keyword(k) => "keyword.control",
k if k.is_keyword() => "keyword",
_ => {
// let analyzer = hir::SourceAnalyzer::new(db, file_id, name_ref.syntax(), None);
if let Some(macro_call) = node.as_node().and_then(ast::MacroCall::cast) {
if let Some(macro_call) = node.as_node().cloned().and_then(ast::MacroCall::cast) {
if let Some(path) = macro_call.path() {
if let Some(segment) = path.segment() {
if let Some(name_ref) = segment.name_ref() {
highlighted.insert(name_ref.syntax().into());
highlighted.insert(name_ref.syntax().clone().into());
let range_start = name_ref.syntax().range().start();
let mut range_end = name_ref.syntax().range().end();
for sibling in path.syntax().siblings_with_tokens(Direction::Next) {

@ -230,7 +230,8 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo
let mut buf = String::new();
buf.push_str(&STYLE);
buf.push_str("<pre><code>");
let tokens = parse.tree().syntax().descendants_with_tokens().filter_map(|it| it.as_token());
let tokens =
parse.tree().syntax().descendants_with_tokens().filter_map(|it| it.as_token().cloned());
for token in tokens {
could_intersect.retain(|it| token.range().start() <= it.range.end());
while let Some(r) = ranges.get(frontier) {
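The `calc_binding_hash` calls above derive a rainbow color from (file, binding name, shadow count), so two bindings named `x` that shadow each other still get distinct colors. A runnable sketch of that idea, using std hashing and stand-in types rather than the real `FileId`/`SmolStr`:

use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};

fn calc_binding_hash(file_id: u32, text: &str, shadow_count: u32) -> u64 {
    let mut h = DefaultHasher::new();
    (file_id, text, shadow_count).hash(&mut h);
    h.finish()
}

fn main() {
    let mut shadows: HashMap<&str, u32> = HashMap::new();
    for name in ["x", "x"] {
        let n = shadows.entry(name).or_default();
        // same name, different shadow count -> different hash/color
        println!("{}#{} -> {:x}", name, n, calc_binding_hash(1, name, *n));
        *n += 1;
    }
}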
@ -18,7 +18,7 @@ pub(crate) fn syntax_tree(
let node = match algo::find_covering_element(parse.tree().syntax(), text_range) {
SyntaxElement::Node(node) => node,
SyntaxElement::Token(token) => {
if let Some(tree) = syntax_tree_for_string(token, text_range) {
if let Some(tree) = syntax_tree_for_string(&token, text_range) {
return tree;
}
token.parent()

@ -33,7 +33,7 @@ pub(crate) fn syntax_tree(

/// Attempts parsing the selected contents of a string literal
/// as rust syntax and returns its syntax tree
fn syntax_tree_for_string(token: SyntaxToken, text_range: TextRange) -> Option<String> {
fn syntax_tree_for_string(token: &SyntaxToken, text_range: TextRange) -> Option<String> {
// When the range is inside a string
// we'll attempt parsing it as rust syntax
// to provide the syntax tree of the contents of the string

@ -43,7 +43,7 @@ fn syntax_tree_for_string(token: SyntaxToken, text_range: TextRange) -> Option<S
}
}

fn syntax_tree_for_token(node: SyntaxToken, text_range: TextRange) -> Option<String> {
fn syntax_tree_for_token(node: &SyntaxToken, text_range: TextRange) -> Option<String> {
// Range of the full node
let node_range = node.range();
let text = node.text().to_string();
@ -1,15 +1,16 @@
use crate::{db::RootDatabase, SourceChange, SourceFileEdit};
use ra_db::{FilePosition, SourceDatabase};
use ra_fmt::leading_indent;
use ra_syntax::{
algo::{find_node_at_offset, find_token_at_offset, TokenAtOffset},
ast::{self, AstToken},
AstNode, SourceFile,
AstNode, SmolStr, SourceFile,
SyntaxKind::*,
SyntaxToken, TextRange, TextUnit,
};
use ra_text_edit::{TextEdit, TextEditBuilder};

use crate::{db::RootDatabase, SourceChange, SourceFileEdit};

pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> {
let parse = db.parse(position.file_id);
let file = parse.tree();

@ -43,15 +44,15 @@ pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<Sour
)
}

fn node_indent<'a>(file: &'a SourceFile, token: SyntaxToken) -> Option<&'a str> {
fn node_indent(file: &SourceFile, token: &SyntaxToken) -> Option<SmolStr> {
let ws = match find_token_at_offset(file.syntax(), token.range().start()) {
TokenAtOffset::Between(l, r) => {
assert!(r == token);
assert!(r == *token);
l
}
TokenAtOffset::Single(n) => {
assert!(n == token);
return Some("");
assert!(n == *token);
return Some("".into());
}
TokenAtOffset::None => unreachable!(),
};

@ -60,12 +61,12 @@ fn node_indent<'a>(file: &'a SourceFile, token: SyntaxToken) -> Option<&'a str>
}
let text = ws.text();
let pos = text.rfind('\n').map(|it| it + 1).unwrap_or(0);
Some(&text[pos..])
Some(text[pos..].into())
}

pub fn on_eq_typed(file: &SourceFile, eq_offset: TextUnit) -> Option<TextEdit> {
assert_eq!(file.syntax().text().char_at(eq_offset), Some('='));
let let_stmt: &ast::LetStmt = find_node_at_offset(file.syntax(), eq_offset)?;
let let_stmt: ast::LetStmt = find_node_at_offset(file.syntax(), eq_offset)?;
if let_stmt.has_semi() {
return None;
}

@ -141,7 +142,7 @@ mod tests {
edit.insert(offset, "=".to_string());
let before = edit.finish().apply(&before);
let parse = SourceFile::parse(&before);
if let Some(result) = on_eq_typed(parse.tree(), offset) {
if let Some(result) = on_eq_typed(&parse.tree(), offset) {
let actual = result.apply(&before);
assert_eq_text!(after, &actual);
} else {
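`node_indent` now returns an owned `SmolStr` instead of `&'a str` because, under the new API, tokens no longer borrow from the file, so an indent slice tied to the file's lifetime is no longer expressible; the characters are copied out instead. A small sketch of just the indent extraction, with plain `String` standing in for `SmolStr`:

fn indent_of(ws_text: &str) -> String {
    // take everything after the last newline in the whitespace token
    let pos = ws_text.rfind('\n').map(|it| it + 1).unwrap_or(0);
    ws_text[pos..].to_string() // the real code builds a SmolStr here
}

fn main() {
    assert_eq!(indent_of("\n    "), "    ");
    assert_eq!(indent_of("  "), "  ");
    println!("ok");
}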
@ -599,7 +599,8 @@ mod tests {
let macro_definition =
source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();

let (definition_tt, _) = ast_to_token_tree(macro_definition.token_tree().unwrap()).unwrap();
let (definition_tt, _) =
ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap();
crate::MacroRules::parse(&definition_tt).unwrap()
}

@ -611,7 +612,8 @@ mod tests {
let macro_invocation =
source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();

let (invocation_tt, _) = ast_to_token_tree(macro_invocation.token_tree().unwrap()).unwrap();
let (invocation_tt, _) =
ast_to_token_tree(&macro_invocation.token_tree().unwrap()).unwrap();

expand_rule(&rules.rules[0], &invocation_tt)
}

@ -179,7 +179,8 @@ mod tests {
let macro_definition =
source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();

let (definition_tt, _) = ast_to_token_tree(macro_definition.token_tree().unwrap()).unwrap();
let (definition_tt, _) =
ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap();
parse(&definition_tt)
}
@ -2,7 +2,7 @@ use crate::subtree_source::SubtreeTokenSource;
use crate::ExpandError;
use ra_parser::{ParseError, TreeSink};
use ra_syntax::{
ast, AstNode, Parse, SmolStr, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxNode,
ast, AstNode, AstToken, Parse, SmolStr, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxNode,
SyntaxTreeBuilder, TextRange, TextUnit, T,
};
use tt::buffer::{Cursor, TokenBuffer};

@ -116,8 +116,6 @@ impl TokenMap {
/// and strips the ending `*/`
/// And then quote the string, which is needed to convert to `tt::Literal`
fn doc_comment_text(comment: &ast::Comment) -> SmolStr {
use ast::AstToken;

let prefix_len = comment.prefix().len();
let mut text = &comment.text()[prefix_len..];

@ -132,9 +130,8 @@ fn doc_comment_text(comment: &ast::Comment) -> SmolStr {
text.into()
}

fn convert_doc_comment<'a>(token: &ra_syntax::SyntaxToken<'a>) -> Option<Vec<tt::TokenTree>> {
use ast::AstToken;
let comment = ast::Comment::cast(*token)?;
fn convert_doc_comment(token: &ra_syntax::SyntaxToken) -> Option<Vec<tt::TokenTree>> {
let comment = ast::Comment::cast(token.clone())?;
let doc = comment.kind().doc?;

// Make `doc="\" Comments\""

@ -245,7 +242,7 @@ fn convert_tt(
}
}
SyntaxElement::Node(node) => {
let child = convert_tt(token_map, global_offset, node)?.into();
let child = convert_tt(token_map, global_offset, &node)?.into();
token_trees.push(child);
}
};
@ -37,8 +37,8 @@ impl_froms!(TokenTree: Leaf, Subtree);
let macro_invocation =
source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();

let (definition_tt, _) = ast_to_token_tree(macro_definition.token_tree().unwrap()).unwrap();
let (invocation_tt, _) = ast_to_token_tree(macro_invocation.token_tree().unwrap()).unwrap();
let (definition_tt, _) = ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap();
let (invocation_tt, _) = ast_to_token_tree(&macro_invocation.token_tree().unwrap()).unwrap();
let rules = crate::MacroRules::parse(&definition_tt).unwrap();
let expansion = rules.expand(&invocation_tt).unwrap();
assert_eq!(

@ -53,7 +53,7 @@ pub(crate) fn create_rules(macro_definition: &str) -> MacroRules {
let macro_definition =
source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();

let (definition_tt, _) = ast_to_token_tree(macro_definition.token_tree().unwrap()).unwrap();
let (definition_tt, _) = ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap();
crate::MacroRules::parse(&definition_tt).unwrap()
}

@ -62,34 +62,25 @@ pub(crate) fn expand(rules: &MacroRules, invocation: &str) -> tt::Subtree {
let macro_invocation =
source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();

let (invocation_tt, _) = ast_to_token_tree(macro_invocation.token_tree().unwrap()).unwrap();
let (invocation_tt, _) = ast_to_token_tree(&macro_invocation.token_tree().unwrap()).unwrap();

rules.expand(&invocation_tt).unwrap()
}

pub(crate) fn expand_to_items(
rules: &MacroRules,
invocation: &str,
) -> ra_syntax::TreeArc<ast::MacroItems> {
pub(crate) fn expand_to_items(rules: &MacroRules, invocation: &str) -> ast::MacroItems {
let expanded = expand(rules, invocation);
token_tree_to_macro_items(&expanded).unwrap().tree().to_owned()
token_tree_to_macro_items(&expanded).unwrap().tree()
}

#[allow(unused)]
pub(crate) fn expand_to_stmts(
rules: &MacroRules,
invocation: &str,
) -> ra_syntax::TreeArc<ast::MacroStmts> {
pub(crate) fn expand_to_stmts(rules: &MacroRules, invocation: &str) -> ast::MacroStmts {
let expanded = expand(rules, invocation);
token_tree_to_macro_stmts(&expanded).unwrap().tree().to_owned()
token_tree_to_macro_stmts(&expanded).unwrap().tree()
}

pub(crate) fn expand_to_expr(
rules: &MacroRules,
invocation: &str,
) -> ra_syntax::TreeArc<ast::Expr> {
pub(crate) fn expand_to_expr(rules: &MacroRules, invocation: &str) -> ast::Expr {
let expanded = expand(rules, invocation);
token_tree_to_expr(&expanded).unwrap().tree().to_owned()
token_tree_to_expr(&expanded).unwrap().tree()
}

pub(crate) fn text_to_tokentree(text: &str) -> tt::Subtree {

@ -97,7 +88,7 @@ pub(crate) fn text_to_tokentree(text: &str) -> tt::Subtree {
let wrapped = format!("wrap_macro!( {} )", text);
let wrapped = ast::SourceFile::parse(&wrapped);
let wrapped = wrapped.tree().syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let mut wrapped = ast_to_token_tree(wrapped).unwrap().0;
let mut wrapped = ast_to_token_tree(&wrapped).unwrap().0;
wrapped.delimiter = tt::Delimiter::None;

wrapped

@ -164,8 +155,8 @@ pub(crate) fn assert_expansion(

let (expanded_tree, expected_tree) = match kind {
MacroKind::Items => {
let expanded_tree = token_tree_to_macro_items(&expanded).unwrap().tree().to_owned();
let expected_tree = token_tree_to_macro_items(&expected).unwrap().tree().to_owned();
let expanded_tree = token_tree_to_macro_items(&expanded).unwrap().tree();
let expected_tree = token_tree_to_macro_items(&expected).unwrap().tree();

(
debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(),

@ -174,8 +165,8 @@ pub(crate) fn assert_expansion(
}

MacroKind::Stmts => {
let expanded_tree = token_tree_to_macro_stmts(&expanded).unwrap().tree().to_owned();
let expected_tree = token_tree_to_macro_stmts(&expected).unwrap().tree().to_owned();
let expanded_tree = token_tree_to_macro_stmts(&expanded).unwrap().tree();
let expected_tree = token_tree_to_macro_stmts(&expected).unwrap().tree();

(
debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(),

@ -419,7 +410,7 @@ fn test_expand_to_item_list() {
",
);
let expansion = expand(&rules, "structs!(Foo, Bar);");
let tree = token_tree_to_macro_items(&expansion).unwrap().tree().to_owned();
let tree = token_tree_to_macro_items(&expansion).unwrap().tree();
assert_eq!(
tree.syntax().debug_dump().trim(),
r#"

@ -537,7 +528,7 @@ fn test_tt_to_stmts() {
);

let expanded = expand(&rules, "foo!{}");
let stmts = token_tree_to_macro_stmts(&expanded).unwrap().tree().to_owned();
let stmts = token_tree_to_macro_stmts(&expanded).unwrap().tree();

assert_eq!(
stmts.syntax().debug_dump().trim(),

@ -10,7 +10,7 @@ repository = "https://github.com/rust-analyzer/rust-analyzer"
[dependencies]
unicode-xid = "0.1.0"
itertools = "0.8.0"
rowan = "0.5.0"
rowan = "0.5.6"

# ideally, `serde` should be enabled by `ra_lsp_server`, but we enable it here
# to reduce number of compilations
@ -9,8 +9,8 @@ pub use rowan::TokenAtOffset;
pub fn find_token_at_offset(node: &SyntaxNode, offset: TextUnit) -> TokenAtOffset<SyntaxToken> {
match node.0.token_at_offset(offset) {
TokenAtOffset::None => TokenAtOffset::None,
TokenAtOffset::Single(n) => TokenAtOffset::Single(n.into()),
TokenAtOffset::Between(l, r) => TokenAtOffset::Between(l.into(), r.into()),
TokenAtOffset::Single(n) => TokenAtOffset::Single(SyntaxToken(n)),
TokenAtOffset::Between(l, r) => TokenAtOffset::Between(SyntaxToken(l), SyntaxToken(r)),
}
}

@ -22,7 +22,7 @@ pub fn find_token_at_offset(node: &SyntaxNode, offset: TextUnit) -> TokenAtOffse
pub fn ancestors_at_offset(
node: &SyntaxNode,
offset: TextUnit,
) -> impl Iterator<Item = &SyntaxNode> {
) -> impl Iterator<Item = SyntaxNode> {
find_token_at_offset(node, offset)
.map(|token| token.parent().ancestors())
.kmerge_by(|node1, node2| node1.range().len() < node2.range().len())

@ -37,7 +37,7 @@ pub fn ancestors_at_offset(
/// ```
///
/// then the shorter node will be silently preferred.
pub fn find_node_at_offset<N: AstNode>(syntax: &SyntaxNode, offset: TextUnit) -> Option<&N> {
pub fn find_node_at_offset<N: AstNode>(syntax: &SyntaxNode, offset: TextUnit) -> Option<N> {
ancestors_at_offset(syntax, offset).find_map(N::cast)
}

@ -59,5 +59,5 @@ pub fn non_trivia_sibling(element: SyntaxElement, direction: Direction) -> Optio
}

pub fn find_covering_element(root: &SyntaxNode, range: TextRange) -> SyntaxElement {
root.0.covering_node(range).into()
SyntaxElement::new(root.0.covering_node(range))
}
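The `find_node_at_offset` change above is representative of the whole migration: the generic cast now consumes a node and hands back an owned `N` instead of a `&N` borrowed from the tree. A self-contained sketch of that shape, with stand-in types rather than the rowan-backed ones:

#[derive(Clone, Debug)]
struct SyntaxNode { kind: &'static str }

trait AstNode: Sized {
    fn cast(syntax: SyntaxNode) -> Option<Self>;
}

#[derive(Debug)]
struct FnDef { syntax: SyntaxNode }

impl AstNode for FnDef {
    fn cast(syntax: SyntaxNode) -> Option<Self> {
        if syntax.kind == "FN_DEF" { Some(FnDef { syntax }) } else { None }
    }
}

// stand-in for ancestors_at_offset(..).find_map(N::cast)
fn find_node_at_offset<N: AstNode>(ancestors: Vec<SyntaxNode>) -> Option<N> {
    ancestors.into_iter().find_map(N::cast)
}

fn main() {
    let chain = vec![SyntaxNode { kind: "NAME" }, SyntaxNode { kind: "FN_DEF" }];
    let f: Option<FnDef> = find_node_at_offset(chain);
    println!("{:?}", f.map(|it| it.syntax.kind));
}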
@ -16,7 +16,7 @@ pub trait Visitor<'a>: Sized {
fn visit<N, F>(self, f: F) -> Vis<Self, N, F>
where
N: AstNode + 'a,
F: FnOnce(&'a N) -> Self::Output,
F: FnOnce(N) -> Self::Output,
{
Vis { inner: self, f, ph: PhantomData }
}

@ -29,7 +29,7 @@ pub trait VisitorCtx<'a>: Sized {
fn visit<N, F>(self, f: F) -> VisCtx<Self, N, F>
where
N: AstNode + 'a,
F: FnOnce(&'a N, Self::Ctx) -> Self::Output,
F: FnOnce(N, Self::Ctx) -> Self::Output,
{
VisCtx { inner: self, f, ph: PhantomData }
}

@ -74,13 +74,13 @@ impl<'a, V, N, F> Visitor<'a> for Vis<V, N, F>
where
V: Visitor<'a>,
N: AstNode + 'a,
F: FnOnce(&'a N) -> <V as Visitor<'a>>::Output,
F: FnOnce(N) -> <V as Visitor<'a>>::Output,
{
type Output = <V as Visitor<'a>>::Output;

fn accept(self, node: &'a SyntaxNode) -> Option<Self::Output> {
let Vis { inner, f, .. } = self;
inner.accept(node).or_else(|| N::cast(node).map(f))
inner.accept(node).or_else(|| N::cast(node.clone()).map(f))
}
}

@ -95,14 +95,14 @@ impl<'a, V, N, F> VisitorCtx<'a> for VisCtx<V, N, F>
where
V: VisitorCtx<'a>,
N: AstNode + 'a,
F: FnOnce(&'a N, <V as VisitorCtx<'a>>::Ctx) -> <V as VisitorCtx<'a>>::Output,
F: FnOnce(N, <V as VisitorCtx<'a>>::Ctx) -> <V as VisitorCtx<'a>>::Output,
{
type Output = <V as VisitorCtx<'a>>::Output;
type Ctx = <V as VisitorCtx<'a>>::Ctx;

fn accept(self, node: &'a SyntaxNode) -> Result<Self::Output, Self::Ctx> {
let VisCtx { inner, f, .. } = self;
inner.accept(node).or_else(|ctx| match N::cast(node) {
inner.accept(node).or_else(|ctx| match N::cast(node.clone()) {
None => Err(ctx),
Some(node) => Ok(f(node, ctx)),
})
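The visitor closures now take owned nodes (`FnOnce(N)`), so each attempted cast first clones the cheap node handle. A minimal sketch of that accept path, assuming stand-in types:

#[derive(Clone, Debug)]
struct SyntaxNode(u32);

fn try_visit<N, F, R>(node: &SyntaxNode, cast: fn(SyntaxNode) -> Option<N>, f: F) -> Option<R>
where
    F: FnOnce(N) -> R,
{
    // clone before casting: the cast consumes its argument
    cast(node.clone()).map(f)
}

fn main() {
    let n = SyntaxNode(7);
    let out = try_visit(&n, |s| if s.0 > 5 { Some(s.0) } else { None }, |v| v * 2);
    println!("{:?}", out);
}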
@ -9,7 +9,7 @@ mod expr_extensions;
use std::marker::PhantomData;

use crate::{
syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken, TreeArc},
syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken},
SmolStr,
};

@ -25,51 +25,49 @@ pub use self::{
/// conversion itself has zero runtime cost: ast and syntax nodes have exactly
/// the same representation: a pointer to the tree root and a pointer to the
/// node itself.
pub trait AstNode:
rowan::TransparentNewType<Repr = rowan::SyntaxNode> + ToOwned<Owned = TreeArc<Self>>
{
fn cast(syntax: &SyntaxNode) -> Option<&Self>
pub trait AstNode: Clone {
fn cast(syntax: SyntaxNode) -> Option<Self>
where
Self: Sized;
fn syntax(&self) -> &SyntaxNode;
}

/// Like `AstNode`, but wraps tokens rather than interior nodes.
pub trait AstToken<'a> {
fn cast(token: SyntaxToken<'a>) -> Option<Self>
pub trait AstToken {
fn cast(token: SyntaxToken) -> Option<Self>
where
Self: Sized;
fn syntax(&self) -> SyntaxToken<'a>;
fn text(&self) -> &'a SmolStr {
fn syntax(&self) -> &SyntaxToken;
fn text(&self) -> &SmolStr {
self.syntax().text()
}
}

/// An iterator over `SyntaxNode` children of a particular AST type.
#[derive(Debug)]
pub struct AstChildren<'a, N> {
inner: SyntaxNodeChildren<'a>,
pub struct AstChildren<N> {
inner: SyntaxNodeChildren,
ph: PhantomData<N>,
}

impl<'a, N> AstChildren<'a, N> {
fn new(parent: &'a SyntaxNode) -> Self {
impl<N> AstChildren<N> {
fn new(parent: &SyntaxNode) -> Self {
AstChildren { inner: parent.children(), ph: PhantomData }
}
}

impl<'a, N: AstNode + 'a> Iterator for AstChildren<'a, N> {
type Item = &'a N;
fn next(&mut self) -> Option<&'a N> {
impl<N: AstNode> Iterator for AstChildren<N> {
type Item = N;
fn next(&mut self) -> Option<N> {
self.inner.by_ref().find_map(N::cast)
}
}

fn child_opt<P: AstNode, C: AstNode>(parent: &P) -> Option<&C> {
fn child_opt<P: AstNode + ?Sized, C: AstNode>(parent: &P) -> Option<C> {
children(parent).next()
}

fn children<P: AstNode, C: AstNode>(parent: &P) -> AstChildren<C> {
fn children<P: AstNode + ?Sized, C: AstNode>(parent: &P) -> AstChildren<C> {
AstChildren::new(parent.syntax())
}

@ -123,7 +121,7 @@ fn test_doc_comment_preserves_indents() {

#[test]
fn test_where_predicates() {
fn assert_bound(text: &str, bound: Option<&TypeBound>) {
fn assert_bound(text: &str, bound: Option<TypeBound>) {
assert_eq!(text, bound.unwrap().syntax().text().to_string());
}
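The new `AstNode: Clone` contract above replaces the old transparent-newtype-over-`&SyntaxNode` scheme: a node type now stores the owned node handle and `cast` works by value. A minimal stand-in showing what implementing that contract looks like (illustrative names, not the generated ra_syntax code):

#[derive(Clone, Debug, PartialEq, Eq, Hash)]
struct SyntaxNode { kind: &'static str }

trait AstNode: Clone {
    fn cast(syntax: SyntaxNode) -> Option<Self> where Self: Sized;
    fn syntax(&self) -> &SyntaxNode;
}

#[derive(Clone, Debug)]
struct StructDef { syntax: SyntaxNode }

impl AstNode for StructDef {
    fn cast(syntax: SyntaxNode) -> Option<Self> {
        // keep the node if the kind matches, otherwise give nothing back
        if syntax.kind == "STRUCT_DEF" { Some(StructDef { syntax }) } else { None }
    }
    fn syntax(&self) -> &SyntaxNode { &self.syntax }
}

fn main() {
    let node = SyntaxNode { kind: "STRUCT_DEF" };
    let def = StructDef::cast(node).unwrap();
    println!("{}", def.syntax().kind);
}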
@ -8,20 +8,20 @@ use crate::{
};

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ElseBranch<'a> {
Block(&'a ast::Block),
IfExpr(&'a ast::IfExpr),
pub enum ElseBranch {
Block(ast::Block),
IfExpr(ast::IfExpr),
}

impl ast::IfExpr {
pub fn then_branch(&self) -> Option<&ast::Block> {
pub fn then_branch(&self) -> Option<ast::Block> {
self.blocks().nth(0)
}
pub fn else_branch(&self) -> Option<ElseBranch> {
let res = match self.blocks().nth(1) {
Some(block) => ElseBranch::Block(block),
None => {
let elif: &ast::IfExpr = child_opt(self)?;
let elif: ast::IfExpr = child_opt(self)?;
ElseBranch::IfExpr(elif)
}
};

@ -60,7 +60,7 @@ impl ast::PrefixExpr {
}

pub fn op_token(&self) -> Option<SyntaxToken> {
self.syntax().first_child_or_token()?.as_token()
self.syntax().first_child_or_token()?.as_token().cloned()
}
}

@ -132,7 +132,7 @@ pub enum BinOp {

impl ast::BinExpr {
fn op_details(&self) -> Option<(SyntaxToken, BinOp)> {
self.syntax().children_with_tokens().filter_map(|it| it.as_token()).find_map(|c| {
self.syntax().children_with_tokens().filter_map(|it| it.as_token().cloned()).find_map(|c| {
match c.kind() {
T![||] => Some((c, BinOp::BooleanOr)),
T![&&] => Some((c, BinOp::BooleanAnd)),

@ -178,15 +178,15 @@ impl ast::BinExpr {
self.op_details().map(|t| t.0)
}

pub fn lhs(&self) -> Option<&ast::Expr> {
pub fn lhs(&self) -> Option<ast::Expr> {
children(self).nth(0)
}

pub fn rhs(&self) -> Option<&ast::Expr> {
pub fn rhs(&self) -> Option<ast::Expr> {
children(self).nth(1)
}

pub fn sub_exprs(&self) -> (Option<&ast::Expr>, Option<&ast::Expr>) {
pub fn sub_exprs(&self) -> (Option<ast::Expr>, Option<ast::Expr>) {
let mut children = children(self);
let first = children.next();
let second = children.next();

@ -194,9 +194,9 @@ impl ast::BinExpr {
}
}

pub enum ArrayExprKind<'a> {
Repeat { initializer: Option<&'a ast::Expr>, repeat: Option<&'a ast::Expr> },
ElementList(AstChildren<'a, ast::Expr>),
pub enum ArrayExprKind {
Repeat { initializer: Option<ast::Expr>, repeat: Option<ast::Expr> },
ElementList(AstChildren<ast::Expr>),
}

impl ast::ArrayExpr {

@ -275,12 +275,12 @@ impl ast::Literal {
#[test]
fn test_literal_with_attr() {
let parse = ast::SourceFile::parse(r#"const _: &str = { #[attr] "Hello" };"#);
let lit = parse.tree.syntax().descendants().find_map(ast::Literal::cast).unwrap();
let lit = parse.tree().syntax().descendants().find_map(ast::Literal::cast).unwrap();
assert_eq!(lit.token().text(), r#""Hello""#);
}

impl ast::NamedField {
pub fn parent_struct_lit(&self) -> &ast::StructLit {
pub fn parent_struct_lit(&self) -> ast::StructLit {
self.syntax().ancestors().find_map(ast::StructLit::cast).unwrap()
}
}
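Enums such as `ElseBranch` now carry owned node values rather than `&'a` references, so callers can move the variant contents out freely. A tiny usage sketch with stand-in types:

#[derive(Debug, Clone)] struct Block(String);
#[derive(Debug, Clone)] struct IfExpr(String);

#[derive(Debug, Clone)]
enum ElseBranch { Block(Block), IfExpr(IfExpr) }

fn describe(e: ElseBranch) -> String {
    // the match moves the owned node out of the enum; no lifetimes involved
    match e {
        ElseBranch::Block(b) => format!("else {}", b.0),
        ElseBranch::IfExpr(i) => format!("else if {}", i.0),
    }
}

fn main() {
    println!("{}", describe(ElseBranch::Block(Block("{ }".into()))));
    println!("{}", describe(ElseBranch::IfExpr(IfExpr("cond { }".into()))));
}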
@ -4,7 +4,7 @@
use itertools::Itertools;

use crate::{
ast::{self, child_opt, children, AstNode},
ast::{self, child_opt, children, AstNode, SyntaxNode},
SmolStr, SyntaxElement,
SyntaxKind::*,
SyntaxToken, T,

@ -13,15 +13,20 @@ use ra_parser::SyntaxKind;

impl ast::Name {
pub fn text(&self) -> &SmolStr {
let ident = self.syntax().first_child_or_token().unwrap().as_token().unwrap();
ident.text()
text_of_first_token(self.syntax())
}
}

impl ast::NameRef {
pub fn text(&self) -> &SmolStr {
let ident = self.syntax().first_child_or_token().unwrap().as_token().unwrap();
ident.text()
text_of_first_token(self.syntax())
}
}

fn text_of_first_token(node: &SyntaxNode) -> &SmolStr {
match node.0.green().children().first() {
Some(rowan::GreenElement::Token(it)) => it.text(),
_ => panic!(),
}
}

@ -50,10 +55,10 @@ impl ast::Attr {
}
}

pub fn as_call(&self) -> Option<(SmolStr, &ast::TokenTree)> {
pub fn as_call(&self) -> Option<(SmolStr, ast::TokenTree)> {
let tt = self.value()?;
let (_bra, attr, args, _ket) = tt.syntax().children_with_tokens().collect_tuple()?;
let args = ast::TokenTree::cast(args.as_node()?)?;
let args = ast::TokenTree::cast(args.as_node()?.clone())?;
if attr.kind() == IDENT {
Some((attr.as_token()?.text().clone(), args))
} else {

@ -86,16 +91,16 @@ impl ast::Attr {
}
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum PathSegmentKind<'a> {
Name(&'a ast::NameRef),
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum PathSegmentKind {
Name(ast::NameRef),
SelfKw,
SuperKw,
CrateKw,
}

impl ast::PathSegment {
pub fn parent_path(&self) -> &ast::Path {
pub fn parent_path(&self) -> ast::Path {
self.syntax()
.parent()
.and_then(ast::Path::cast)

@ -125,7 +130,7 @@ impl ast::PathSegment {
}

impl ast::Path {
pub fn parent_path(&self) -> Option<&ast::Path> {
pub fn parent_path(&self) -> Option<ast::Path> {
self.syntax().parent().and_then(ast::Path::cast)
}
}

@ -146,7 +151,7 @@ impl ast::UseTree {
}

impl ast::UseTreeList {
pub fn parent_use_tree(&self) -> &ast::UseTree {
pub fn parent_use_tree(&self) -> ast::UseTree {
self.syntax()
.parent()
.and_then(ast::UseTree::cast)

@ -155,21 +160,21 @@ impl ast::UseTreeList {
}

impl ast::ImplBlock {
pub fn target_type(&self) -> Option<&ast::TypeRef> {
pub fn target_type(&self) -> Option<ast::TypeRef> {
match self.target() {
(Some(t), None) | (_, Some(t)) => Some(t),
_ => None,
}
}

pub fn target_trait(&self) -> Option<&ast::TypeRef> {
pub fn target_trait(&self) -> Option<ast::TypeRef> {
match self.target() {
(Some(t), Some(_)) => Some(t),
_ => None,
}
}

fn target(&self) -> (Option<&ast::TypeRef>, Option<&ast::TypeRef>) {
fn target(&self) -> (Option<ast::TypeRef>, Option<ast::TypeRef>) {
let mut types = children(self);
let first = types.next();
let second = types.next();

@ -182,13 +187,13 @@ impl ast::ImplBlock {
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum StructKind<'a> {
Tuple(&'a ast::PosFieldDefList),
Named(&'a ast::NamedFieldDefList),
pub enum StructKind {
Tuple(ast::PosFieldDefList),
Named(ast::NamedFieldDefList),
Unit,
}

impl StructKind<'_> {
impl StructKind {
fn from_node<N: AstNode>(node: &N) -> StructKind {
if let Some(nfdl) = child_opt::<_, ast::NamedFieldDefList>(node) {
StructKind::Named(nfdl)

@ -218,7 +223,7 @@ impl ast::StructDef {
}

impl ast::EnumVariant {
pub fn parent_enum(&self) -> &ast::EnumDef {
pub fn parent_enum(&self) -> ast::EnumDef {
self.syntax()
.parent()
.and_then(|it| it.parent())

@ -231,10 +236,10 @@ impl ast::EnumVariant {
}

impl ast::FnDef {
pub fn semicolon_token(&self) -> Option<SyntaxToken<'_>> {
pub fn semicolon_token(&self) -> Option<SyntaxToken> {
self.syntax()
.last_child_or_token()
.and_then(|it| it.as_token())
.and_then(|it| it.as_token().cloned())
.filter(|it| it.kind() == T![;])
}
}

@ -258,9 +263,9 @@ impl ast::ExprStmt {
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum FieldKind<'a> {
Name(&'a ast::NameRef),
Index(SyntaxToken<'a>),
pub enum FieldKind {
Name(ast::NameRef),
Index(SyntaxToken),
}

impl ast::FieldExpr {

@ -271,6 +276,7 @@ impl ast::FieldExpr {
.find(|c| c.kind() == SyntaxKind::INT_NUMBER || c.kind() == SyntaxKind::FLOAT_NUMBER)
.as_ref()
.and_then(SyntaxElement::as_token)
.cloned()
}

pub fn field_access(&self) -> Option<FieldKind> {

@ -326,7 +332,7 @@ impl ast::SelfParam {
pub fn self_kw_token(&self) -> SyntaxToken {
self.syntax()
.children_with_tokens()
.filter_map(|it| it.as_token())
.filter_map(|it| it.as_token().cloned())
.find(|it| it.kind() == T![self])
.expect("invalid tree: self param must have self")
}

@ -355,7 +361,7 @@ impl ast::LifetimeParam {
pub fn lifetime_token(&self) -> Option<SyntaxToken> {
self.syntax()
.children_with_tokens()
.filter_map(|it| it.as_token())
.filter_map(|it| it.as_token().cloned())
.find(|it| it.kind() == LIFETIME)
}
}

@ -364,7 +370,7 @@ impl ast::WherePred {
pub fn lifetime_token(&self) -> Option<SyntaxToken> {
self.syntax()
.children_with_tokens()
.filter_map(|it| it.as_token())
.filter_map(|it| it.as_token().cloned())
.find(|it| it.kind() == LIFETIME)
}
}
File diff suppressed because it is too large

@ -11,94 +11,73 @@ the below applies to the result of this template

#![cfg_attr(rustfmt, rustfmt_skip)]

use rowan::TransparentNewType;

use crate::{
SyntaxNode, SyntaxKind::*,
syntax_node::{TreeArc},
ast::{self, AstNode},
};
{% for node, methods in ast %}
// {{ node }}

{%- if methods.enum %}
#[derive(Debug, PartialEq, Eq, Hash)]
#[repr(transparent)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct {{ node }} {
pub(crate) syntax: SyntaxNode,
}
unsafe impl TransparentNewType for {{ node }} {
type Repr = rowan::SyntaxNode;
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum {{ node }}Kind<'a> {
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum {{ node }}Kind {
{%- for kind in methods.enum %}
{{ kind }}(&'a {{ kind }}),
{{ kind }}({{ kind }}),
{%- endfor %}
}

{%- for kind in methods.enum %}
impl<'a> From<&'a {{ kind }}> for &'a {{ node }} {
fn from(n: &'a {{ kind }}) -> &'a {{ node }} {
{{ node }}::cast(&n.syntax).unwrap()
impl From<{{ kind }}> for {{ node }} {
fn from(n: {{ kind }}) -> {{ node }} {
{{ node }}::cast(n.syntax).unwrap()
}
}
{%- endfor %}

impl AstNode for {{ node }} {
fn cast(syntax: &SyntaxNode) -> Option<&Self> {
fn cast(syntax: SyntaxNode) -> Option<Self> {
match syntax.kind() {
{%- for kind in methods.enum %}
| {{ kind | SCREAM }}
{%- endfor %} => Some({{ node }}::from_repr(syntax.into_repr())),
{%- endfor %} => Some({{ node }} { syntax }),
_ => None,
}
}
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}

impl ToOwned for {{ node }} {
type Owned = TreeArc<{{ node }}>;
fn to_owned(&self) -> TreeArc<{{ node }}> { TreeArc::cast(self.syntax.to_owned()) }
}

impl {{ node }} {
pub fn kind(&self) -> {{ node }}Kind {
match self.syntax.kind() {
{%- for kind in methods.enum %}
{{ kind | SCREAM }} => {{ node }}Kind::{{ kind }}({{ kind }}::cast(&self.syntax).unwrap()),
{{ kind | SCREAM }} => {{ node }}Kind::{{ kind }}({{ kind }}::cast(self.syntax.clone()).unwrap()),
{%- endfor %}
_ => unreachable!(),
}
}
}
{% else %}
#[derive(Debug, PartialEq, Eq, Hash)]
#[repr(transparent)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct {{ node }} {
pub(crate) syntax: SyntaxNode,
}
unsafe impl TransparentNewType for {{ node }} {
type Repr = rowan::SyntaxNode;
}

impl AstNode for {{ node }} {
fn cast(syntax: &SyntaxNode) -> Option<&Self> {
fn cast(syntax: SyntaxNode) -> Option<Self> {
match syntax.kind() {
{{ node | SCREAM }} => Some({{ node }}::from_repr(syntax.into_repr())),
{{ node | SCREAM }} => Some({{ node }} { syntax }),
_ => None,
}
}
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}

impl ToOwned for {{ node }} {
type Owned = TreeArc<{{ node }}>;
fn to_owned(&self) -> TreeArc<{{ node }}> { TreeArc::cast(self.syntax.to_owned()) }
}

{% endif %}
{% if methods.traits -%}

@ -113,7 +92,7 @@ impl {{ node }} {
{%- for m in methods.collections -%}
{%- set method_name = m.0 -%}
{%- set ChildName = m.1 %}
pub fn {{ method_name }}(&self) -> impl Iterator<Item = &{{ ChildName }}> {
pub fn {{ method_name }}(&self) -> impl Iterator<Item = {{ ChildName }}> {
super::children(self)
}
{% endfor -%}

@ -129,7 +108,7 @@ impl {{ node }} {
{%- set method_name = m.0 -%}
{%- set ChildName = m.1 %}
{%- endif %}
pub fn {{ method_name }}(&self) -> Option<&{{ ChildName }}> {
pub fn {{ method_name }}(&self) -> Option<{{ ChildName }}> {
super::child_opt(self)
}
{% endfor -%}
@ -6,23 +6,23 @@ use crate::{
SyntaxToken,
};

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Comment<'a>(SyntaxToken<'a>);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Comment(SyntaxToken);

impl<'a> AstToken<'a> for Comment<'a> {
fn cast(token: SyntaxToken<'a>) -> Option<Self> {
impl AstToken for Comment {
fn cast(token: SyntaxToken) -> Option<Self> {
if token.kind() == COMMENT {
Some(Comment(token))
} else {
None
}
}
fn syntax(&self) -> SyntaxToken<'a> {
self.0
fn syntax(&self) -> &SyntaxToken {
&self.0
}
}

impl<'a> Comment<'a> {
impl Comment {
pub fn kind(&self) -> CommentKind {
kind_by_prefix(self.text())
}

@ -90,22 +90,22 @@ fn prefix_by_kind(kind: CommentKind) -> &'static str {
unreachable!()
}

pub struct Whitespace<'a>(SyntaxToken<'a>);
pub struct Whitespace(SyntaxToken);

impl<'a> AstToken<'a> for Whitespace<'a> {
fn cast(token: SyntaxToken<'a>) -> Option<Self> {
impl AstToken for Whitespace {
fn cast(token: SyntaxToken) -> Option<Self> {
if token.kind() == WHITESPACE {
Some(Whitespace(token))
} else {
None
}
}
fn syntax(&self) -> SyntaxToken<'a> {
self.0
fn syntax(&self) -> &SyntaxToken {
&self.0
}
}

impl<'a> Whitespace<'a> {
impl Whitespace {
pub fn spans_multiple_lines(&self) -> bool {
let text = self.text();
text.find('\n').map_or(false, |idx| text[idx + 1..].contains('\n'))
@ -10,37 +10,37 @@ use crate::{
};

pub trait TypeAscriptionOwner: AstNode {
fn ascribed_type(&self) -> Option<&ast::TypeRef> {
fn ascribed_type(&self) -> Option<ast::TypeRef> {
child_opt(self)
}
}

pub trait NameOwner: AstNode {
fn name(&self) -> Option<&ast::Name> {
fn name(&self) -> Option<ast::Name> {
child_opt(self)
}
}

pub trait VisibilityOwner: AstNode {
fn visibility(&self) -> Option<&ast::Visibility> {
fn visibility(&self) -> Option<ast::Visibility> {
child_opt(self)
}
}

pub trait LoopBodyOwner: AstNode {
fn loop_body(&self) -> Option<&ast::Block> {
fn loop_body(&self) -> Option<ast::Block> {
child_opt(self)
}
}

pub trait TryBlockBodyOwner: AstNode {
fn try_body(&self) -> Option<&ast::Block> {
fn try_body(&self) -> Option<ast::Block> {
child_opt(self)
}
}

pub trait ArgListOwner: AstNode {
fn arg_list(&self) -> Option<&ast::ArgList> {
fn arg_list(&self) -> Option<ast::ArgList> {
child_opt(self)
}
}

@ -51,10 +51,10 @@ pub trait FnDefOwner: AstNode {
}
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ItemOrMacro<'a> {
Item(&'a ast::ModuleItem),
Macro(&'a ast::MacroCall),
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ItemOrMacro {
Item(ast::ModuleItem),
Macro(ast::MacroCall),
}

pub trait ModuleItemOwner: AstNode {

@ -67,14 +67,14 @@ pub trait ModuleItemOwner: AstNode {
}

#[derive(Debug)]
pub struct ItemOrMacroIter<'a>(SyntaxNodeChildren<'a>);
pub struct ItemOrMacroIter(SyntaxNodeChildren);

impl<'a> Iterator for ItemOrMacroIter<'a> {
type Item = ItemOrMacro<'a>;
fn next(&mut self) -> Option<ItemOrMacro<'a>> {
impl Iterator for ItemOrMacroIter {
type Item = ItemOrMacro;
fn next(&mut self) -> Option<ItemOrMacro> {
loop {
let n = self.0.next()?;
if let Some(item) = ast::ModuleItem::cast(n) {
if let Some(item) = ast::ModuleItem::cast(n.clone()) {
return Some(ItemOrMacro::Item(item));
}
if let Some(call) = ast::MacroCall::cast(n) {

@ -85,17 +85,17 @@ impl<'a> Iterator for ItemOrMacroIter<'a> {
}

pub trait TypeParamsOwner: AstNode {
fn type_param_list(&self) -> Option<&ast::TypeParamList> {
fn type_param_list(&self) -> Option<ast::TypeParamList> {
child_opt(self)
}

fn where_clause(&self) -> Option<&ast::WhereClause> {
fn where_clause(&self) -> Option<ast::WhereClause> {
child_opt(self)
}
}

pub trait TypeBoundsOwner: AstNode {
fn type_bound_list(&self) -> Option<&ast::TypeBoundList> {
fn type_bound_list(&self) -> Option<ast::TypeBoundList> {
child_opt(self)
}
}

@ -148,19 +148,19 @@ pub trait DocCommentsOwner: AstNode {
}
}

pub struct CommentIter<'a> {
iter: SyntaxElementChildren<'a>,
pub struct CommentIter {
iter: SyntaxElementChildren,
}

impl<'a> Iterator for CommentIter<'a> {
type Item = ast::Comment<'a>;
fn next(&mut self) -> Option<ast::Comment<'a>> {
self.iter.by_ref().find_map(|el| el.as_token().and_then(ast::Comment::cast))
impl Iterator for CommentIter {
type Item = ast::Comment;
fn next(&mut self) -> Option<ast::Comment> {
self.iter.by_ref().find_map(|el| el.as_token().cloned().and_then(ast::Comment::cast))
}
}

pub trait DefaultTypeParamOwner: AstNode {
fn default_type(&self) -> Option<&ast::PathType> {
fn default_type(&self) -> Option<ast::PathType> {
child_opt(self)
}
}
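The owner traits above all follow one pattern: a default method that returns the first child of the wanted type, now by value. A hedged sketch of that pattern with stand-in types (the real code goes through `child_opt` and typed `AstChildren`):

#[derive(Clone, Debug)]
struct Name(String);

trait NameOwner {
    fn children(&self) -> Vec<Name>;
    // default method: first child of the wanted type, owned
    fn name(&self) -> Option<Name> {
        self.children().into_iter().next() // `child_opt` in the real code
    }
}

struct FnDef { kids: Vec<Name> }
impl NameOwner for FnDef {
    fn children(&self) -> Vec<Name> { self.kids.clone() }
}

fn main() {
    let f = FnDef { kids: vec![Name("foo".into())] };
    println!("{:?}", f.name());
}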
@ -9,7 +9,7 @@ fn check_file_invariants(file: &SourceFile) {

pub fn check_parser(text: &str) {
let file = SourceFile::parse(text);
check_file_invariants(&file.tree);
check_file_invariants(&file.tree());
}

#[derive(Debug, Clone)]

@ -45,16 +45,16 @@ impl CheckReparse {
pub fn run(&self) {
let parse = SourceFile::parse(&self.text);
let new_parse = parse.reparse(&self.edit);
check_file_invariants(&new_parse.tree);
assert_eq!(&new_parse.tree.syntax().text().to_string(), &self.edited_text);
check_file_invariants(&new_parse.tree());
assert_eq!(&new_parse.tree().syntax().text().to_string(), &self.edited_text);
let full_reparse = SourceFile::parse(&self.edited_text);
for (a, b) in
new_parse.tree.syntax().descendants().zip(full_reparse.tree.syntax().descendants())
new_parse.tree().syntax().descendants().zip(full_reparse.tree().syntax().descendants())
{
if (a.kind(), a.range()) != (b.kind(), b.range()) {
eprint!("original:\n{}", parse.tree.syntax().debug_dump());
eprint!("reparsed:\n{}", new_parse.tree.syntax().debug_dump());
eprint!("full reparse:\n{}", full_reparse.tree.syntax().debug_dump());
eprint!("original:\n{}", parse.tree().syntax().debug_dump());
eprint!("reparsed:\n{}", new_parse.tree().syntax().debug_dump());
eprint!("full reparse:\n{}", full_reparse.tree().syntax().debug_dump());
assert_eq!(
format!("{:?}", a),
format!("{:?}", b),
@ -31,7 +31,7 @@ pub mod ast;
#[doc(hidden)]
pub mod fuzz;

use std::{fmt::Write, sync::Arc};
use std::{fmt::Write, marker::PhantomData, sync::Arc};

use ra_text_edit::AtomTextEdit;

@ -43,8 +43,8 @@ pub use crate::{
ptr::{AstPtr, SyntaxNodePtr},
syntax_error::{Location, SyntaxError, SyntaxErrorKind},
syntax_node::{
Direction, InsertPosition, SyntaxElement, SyntaxNode, SyntaxNodeWrapper, SyntaxToken,
SyntaxTreeBuilder, TreeArc, WalkEvent,
Direction, InsertPosition, SyntaxElement, SyntaxNode, SyntaxToken, SyntaxTreeBuilder,
WalkEvent,
},
syntax_text::SyntaxText,
};

@ -58,48 +58,63 @@ pub use rowan::{SmolStr, TextRange, TextUnit};
/// Note that we always produce a syntax tree, even for completely invalid
/// files.
#[derive(Debug, PartialEq, Eq)]
pub struct Parse<T: SyntaxNodeWrapper> {
tree: TreeArc<T>,
pub struct Parse<T> {
green: GreenNode,
errors: Arc<Vec<SyntaxError>>,
_ty: PhantomData<fn() -> T>,
}

impl<T: SyntaxNodeWrapper> Clone for Parse<T> {
impl<T> Clone for Parse<T> {
fn clone(&self) -> Parse<T> {
Parse { tree: self.tree.clone(), errors: self.errors.clone() }
Parse { green: self.green.clone(), errors: self.errors.clone(), _ty: PhantomData }
}
}

impl<T: SyntaxNodeWrapper> Parse<T> {
fn new(tree: TreeArc<T>, errors: Vec<SyntaxError>) -> Parse<T> {
Parse { tree, errors: Arc::new(errors) }
impl<T> Parse<T> {
fn new(green: GreenNode, errors: Vec<SyntaxError>) -> Parse<T> {
Parse { green, errors: Arc::new(errors), _ty: PhantomData }
}

pub fn tree(&self) -> &T {
&*self.tree
pub fn syntax_node(&self) -> SyntaxNode {
SyntaxNode::new(self.green.clone())
}
}

impl<T: AstNode> Parse<T> {
pub fn to_syntax(self) -> Parse<SyntaxNode> {
Parse { green: self.green, errors: self.errors, _ty: PhantomData }
}

pub fn tree(&self) -> T {
T::cast(self.syntax_node()).unwrap()
}

pub fn errors(&self) -> &[SyntaxError] {
&*self.errors
}

pub fn ok(self) -> Result<TreeArc<T>, Arc<Vec<SyntaxError>>> {
pub fn ok(self) -> Result<T, Arc<Vec<SyntaxError>>> {
if self.errors.is_empty() {
Ok(self.tree)
Ok(self.tree())
} else {
Err(self.errors)
}
}
}

impl<T: AstNode> Parse<T> {
pub fn to_syntax(this: Self) -> Parse<SyntaxNode> {
Parse { tree: this.tree().syntax().to_owned(), errors: this.errors }
impl Parse<SyntaxNode> {
pub fn cast<N: AstNode>(self) -> Option<Parse<N>> {
if N::cast(self.syntax_node()).is_some() {
Some(Parse { green: self.green, errors: self.errors, _ty: PhantomData })
} else {
None
}
}
}

impl Parse<SourceFile> {
pub fn debug_dump(&self) -> String {
let mut buf = self.tree.syntax().debug_dump();
let mut buf = self.tree().syntax().debug_dump();
for err in self.errors.iter() {
writeln!(buf, "error {:?}: {}", err.location(), err.kind()).unwrap();
}

@ -112,45 +127,38 @@ impl Parse<SourceFile> {

fn incremental_reparse(&self, edit: &AtomTextEdit) -> Option<Parse<SourceFile>> {
// FIXME: validation errors are not handled here
parsing::incremental_reparse(self.tree.syntax(), edit, self.errors.to_vec()).map(
parsing::incremental_reparse(self.tree().syntax(), edit, self.errors.to_vec()).map(
|(green_node, errors, _reparsed_range)| Parse {
tree: SourceFile::new(green_node),
green: green_node,
errors: Arc::new(errors),
_ty: PhantomData,
},
)
}

fn full_reparse(&self, edit: &AtomTextEdit) -> Parse<SourceFile> {
let text = edit.apply(self.tree.syntax().text().to_string());
let text = edit.apply(self.tree().syntax().text().to_string());
SourceFile::parse(&text)
}
}

impl Parse<SyntaxNode> {
pub fn cast<T: AstNode>(self) -> Option<Parse<T>> {
let node = T::cast(&self.tree)?;
Some(Parse { tree: node.to_owned(), errors: self.errors })
}
}

/// `SourceFile` represents a parse tree for a single Rust file.
pub use crate::ast::SourceFile;

impl SourceFile {
fn new(green: GreenNode) -> TreeArc<SourceFile> {
fn new(green: GreenNode) -> SourceFile {
let root = SyntaxNode::new(green);
if cfg!(debug_assertions) {
validation::validate_block_structure(&root);
}
assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
TreeArc::cast(root)
SourceFile::cast(root).unwrap()
}

pub fn parse(text: &str) -> Parse<SourceFile> {
let (green, mut errors) = parsing::parse_text(text);
let tree = SourceFile::new(green);
errors.extend(validation::validate(&tree));
Parse { tree, errors: Arc::new(errors) }
errors.extend(validation::validate(&SourceFile::new(green.clone())));
Parse { green, errors: Arc::new(errors), _ty: PhantomData }
}
}
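The `Parse<T>` redesign above is the heart of the migration: instead of holding an owned typed tree behind `TreeArc`, it stores the immutable green tree plus errors, keeps `T` as a phantom type, and re-creates the typed root on demand in `tree()`. A self-contained sketch of that shape (stand-in `GreenNode` and `SourceFile`, not the rowan types):

use std::marker::PhantomData;
use std::sync::Arc;

#[derive(Clone, Debug)]
struct GreenNode(Arc<String>); // stand-in for rowan's green tree

struct Parse<T> {
    green: GreenNode,
    errors: Arc<Vec<String>>,
    // fn() -> T keeps Parse independent of T's ownership/lifetime
    _ty: PhantomData<fn() -> T>,
}

#[derive(Debug)]
struct SourceFile(String);

impl Parse<SourceFile> {
    fn tree(&self) -> SourceFile {
        // the real code wraps the green node in a fresh SyntaxNode and casts;
        // here we just rebuild the stand-in root
        SourceFile(self.green.0.as_ref().clone())
    }
}

fn main() {
    let parse = Parse { green: GreenNode(Arc::new("fn f() {}".into())),
                        errors: Arc::new(vec![]), _ty: PhantomData };
    println!("{:?}, {} errors", parse.tree(), parse.errors.len());
}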
@ -170,14 +178,14 @@ fn api_walkthrough() {
|
||||
// The `parse` method returns a `Parse` -- a pair of syntax tree and a list
|
||||
// of errors. That is, syntax tree is constructed even in presence of errors.
|
||||
let parse = SourceFile::parse(source_code);
|
||||
assert!(parse.errors.is_empty());
|
||||
assert!(parse.errors().is_empty());
|
||||
|
||||
// Due to the way ownership is set up, owned syntax Nodes always live behind
|
||||
// a `TreeArc` smart pointer. `TreeArc` is roughly an `std::sync::Arc` which
|
||||
// points to the whole file instead of an individual node.
|
||||
let file: TreeArc<SourceFile> = parse.tree;
|
||||
// The `tree` method returns an owned syntax node of type `SourceFile`.
|
||||
// Owned nodes are cheap: inside, they are `Rc` handles to the underling data.
|
||||
let file: SourceFile = parse.tree();
|
||||
|
||||
// `SourceFile` is the root of the syntax tree. We can iterate file's items:
|
||||
// `SourceFile` is the root of the syntax tree. We can iterate file's items.
|
||||
// Let's fetch the `foo` function.
|
||||
let mut func = None;
|
||||
for item in file.items() {
|
||||
match item.kind() {
|
||||
@ -185,31 +193,26 @@ fn api_walkthrough() {
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
// The returned items are always references.
|
||||
let func: &ast::FnDef = func.unwrap();
|
||||
|
||||
// All nodes implement `ToOwned` trait, with `Owned = TreeArc<Self>`.
|
||||
// `to_owned` is a cheap operation: atomic increment.
|
||||
let _owned_func: TreeArc<ast::FnDef> = func.to_owned();
|
||||
let func: ast::FnDef = func.unwrap();
|
||||
|
||||
    // Each AST node has a bunch of getters for children. All getters return
    // `Option`s though, to account for incomplete code. Some getters are common
    // for several kinds of node. In this case, a trait like `ast::NameOwner`
    // usually exists. By convention, all ast types should be used with `ast::`
    // qualifier.
-    let name: Option<&ast::Name> = func.name();
+    let name: Option<ast::Name> = func.name();
     let name = name.unwrap();
     assert_eq!(name.text(), "foo");

     // Let's get the `1 + 1` expression!
-    let block: &ast::Block = func.body().unwrap();
-    let expr: &ast::Expr = block.expr().unwrap();
+    let block: ast::Block = func.body().unwrap();
+    let expr: ast::Expr = block.expr().unwrap();

     // "Enum"-like nodes are represented using the "kind" pattern. It allows us
     // to match exhaustively against all flavors of nodes, while maintaining
     // internal representation flexibility. The drawback is that one can't write
     // nested matches as one pattern.
-    let bin_expr: &ast::BinExpr = match expr.kind() {
+    let bin_expr: ast::BinExpr = match expr.kind() {
         ast::ExprKind::BinExpr(e) => e,
         _ => unreachable!(),
     };
@@ -219,23 +222,14 @@ fn api_walkthrough() {
     let expr_syntax: &SyntaxNode = expr.syntax();

     // Note how `expr` and `bin_expr` are in fact the same node underneath:
-    assert!(std::ptr::eq(expr_syntax, bin_expr.syntax()));
+    assert!(expr_syntax == bin_expr.syntax());

     // To go from CST to AST, `AstNode::cast` function is used:
-    let expr = match ast::Expr::cast(expr_syntax) {
+    let _expr: ast::Expr = match ast::Expr::cast(expr_syntax.clone()) {
         Some(e) => e,
         None => unreachable!(),
     };

-    // Note how expr is also a reference!
-    let expr: &ast::Expr = expr;
-
-    // This is possible because the underlying representation is the same:
-    assert_eq!(
-        expr as *const ast::Expr as *const u8,
-        expr_syntax as *const SyntaxNode as *const u8
-    );

     // The two properties each syntax node has are a `SyntaxKind`:
     assert_eq!(expr_syntax.kind(), SyntaxKind::BIN_EXPR);

@@ -248,7 +242,7 @@ fn api_walkthrough() {
     assert_eq!(text.to_string(), "1 + 1");

     // There's a bunch of traversal methods on `SyntaxNode`:
-    assert_eq!(expr_syntax.parent(), Some(block.syntax()));
+    assert_eq!(expr_syntax.parent().as_ref(), Some(block.syntax()));
     assert_eq!(block.syntax().first_child_or_token().map(|it| it.kind()), Some(T!['{']));
     assert_eq!(
         expr_syntax.next_sibling_or_token().map(|it| it.kind()),
@@ -257,7 +251,7 @@ fn api_walkthrough() {

     // As well as some iterator helpers:
     let f = expr_syntax.ancestors().find_map(ast::FnDef::cast);
-    assert_eq!(f, Some(&*func));
+    assert_eq!(f, Some(func));
     assert!(expr_syntax.siblings_with_tokens(Direction::Next).any(|it| it.kind() == T!['}']));
     assert_eq!(
         expr_syntax.descendants_with_tokens().count(),
@@ -272,7 +266,7 @@ fn api_walkthrough() {
     for event in expr_syntax.preorder_with_tokens() {
         match event {
             WalkEvent::Enter(node) => {
-                let text = match node {
+                let text = match &node {
                     SyntaxElement::Node(it) => it.text().to_string(),
                     SyntaxElement::Token(it) => it.text().to_string(),
                 };
@@ -319,7 +313,7 @@ fn api_walkthrough() {
     let mut exprs_visit = Vec::new();
     for node in file.syntax().descendants() {
         if let Some(result) =
-            visitor().visit::<ast::Expr, _>(|expr| expr.syntax().text().to_string()).accept(node)
+            visitor().visit::<ast::Expr, _>(|expr| expr.syntax().text().to_string()).accept(&node)
         {
             exprs_visit.push(result);
         }
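In the new API, nodes are owned, cheaply clonable handles rather than `&`-references tied to the tree. A minimal sketch (not part of the diff) of what this looks like from the caller's side, assuming `SourceFile::parse` and `Parse::tree` as they are used in this commit's tests; the input snippet and function are illustrative:

    use ra_syntax::{ast, AstNode, SourceFile};

    fn first_fn_name(text: &str) -> Option<String> {
        let parse = SourceFile::parse(text);
        // descendants() now yields owned SyntaxNodes and `cast` takes the
        // node by value, so nothing borrows from `parse` after this line.
        let func = parse.tree().syntax().descendants().find_map(ast::FnDef::cast)?;
        Some(func.name()?.text().to_string())
    }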
@@ -41,7 +41,7 @@ fn reparse_token<'node>(
     root: &'node SyntaxNode,
     edit: &AtomTextEdit,
 ) -> Option<(GreenNode, TextRange)> {
-    let token = algo::find_covering_element(root, edit.delete).as_token()?;
+    let token = algo::find_covering_element(root, edit.delete).as_token()?.clone();
     match token.kind() {
         WHITESPACE | COMMENT | IDENT | STRING | RAW_STRING => {
             if token.kind() == WHITESPACE || token.kind() == COMMENT {
@@ -51,7 +51,7 @@ fn reparse_token<'node>(
                 }
             }

-            let text = get_text_after_edit(token.into(), &edit);
+            let text = get_text_after_edit(token.clone().into(), &edit);
             let lex_tokens = tokenize(&text);
             let lex_token = match lex_tokens[..] {
                 [lex_token] if lex_token.kind == token.kind() => lex_token,
@@ -81,7 +81,7 @@ fn reparse_block<'node>(
     edit: &AtomTextEdit,
 ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
     let (node, reparser) = find_reparsable_node(root, edit.delete)?;
-    let text = get_text_after_edit(node.into(), &edit);
+    let text = get_text_after_edit(node.clone().into(), &edit);
     let tokens = tokenize(&text);
     if !is_balanced(&tokens) {
         return None;
@@ -109,7 +109,7 @@ fn is_contextual_kw(text: &str) -> bool {
     }
 }

-fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(&SyntaxNode, Reparser)> {
+fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(SyntaxNode, Reparser)> {
     let node = algo::find_covering_element(node, range);
     let mut ancestors = match node {
         SyntaxElement::Token(it) => it.parent().ancestors(),
@@ -167,8 +167,6 @@ fn merge_errors(

 #[cfg(test)]
 mod tests {
-    use std::sync::Arc;
-
     use test_utils::{assert_eq_text, extract_range};

     use super::*;
@@ -180,18 +178,18 @@ mod tests {
         let after = edit.apply(before.clone());

         let fully_reparsed = SourceFile::parse(&after);
-        let incrementally_reparsed = {
+        let incrementally_reparsed: Parse<SourceFile> = {
             let f = SourceFile::parse(&before);
             let edit = AtomTextEdit { delete: range, insert: replace_with.to_string() };
             let (green, new_errors, range) =
-                incremental_reparse(f.tree.syntax(), &edit, f.errors.to_vec()).unwrap();
+                incremental_reparse(f.tree().syntax(), &edit, f.errors.to_vec()).unwrap();
             assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length");
-            Parse { tree: SourceFile::new(green), errors: Arc::new(new_errors) }
+            Parse::new(green, new_errors)
         };

         assert_eq_text!(
-            &fully_reparsed.tree.syntax().debug_dump(),
-            &incrementally_reparsed.tree.syntax().debug_dump(),
+            &fully_reparsed.tree().syntax().debug_dump(),
+            &incrementally_reparsed.tree().syntax().debug_dump(),
         );
     }

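The test above is essentially the whole incremental-reparsing workflow in miniature. A condensed sketch of the same flow, reusing `incremental_reparse`, `AtomTextEdit`, and `Parse::new` exactly as they appear in this diff (`incremental_reparse` is crate-internal, which is why only the module's own tests drive it directly; the edit offsets and `TextRange::offset_len` from the `text_unit` crate are assumptions):

    let f = SourceFile::parse("fn foo() { 1 + 1 }");
    // Replace the second `1` (at offset 15, length 1) with `2`.
    let range = TextRange::offset_len(15.into(), 1.into());
    let edit = AtomTextEdit { delete: range, insert: "2".to_string() };
    let (green, new_errors, _reparsed_range) =
        incremental_reparse(f.tree().syntax(), &edit, f.errors.to_vec()).unwrap();
    let reparsed: Parse<SourceFile> = Parse::new(green, new_errors);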
@@ -1,6 +1,7 @@
-use crate::{AstNode, SyntaxKind, SyntaxNode, TextRange};
 use std::{iter::successors, marker::PhantomData};

+use crate::{AstNode, SyntaxKind, SyntaxNode, TextRange};
+
 /// A pointer to a syntax node inside a file. It can be used to remember a
 /// specific node across reparses of the same file.
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -14,9 +15,9 @@ impl SyntaxNodePtr {
         SyntaxNodePtr { range: node.range(), kind: node.kind() }
     }

-    pub fn to_node(self, root: &SyntaxNode) -> &SyntaxNode {
+    pub fn to_node(self, root: &SyntaxNode) -> SyntaxNode {
         assert!(root.parent().is_none());
-        successors(Some(root), |&node| {
+        successors(Some(root.clone()), |node| {
             node.children().find(|it| self.range.is_subrange(&it.range()))
         })
         .find(|it| it.range() == self.range && it.kind() == self.kind)
@@ -51,7 +52,7 @@ impl<N: AstNode> AstPtr<N> {
         AstPtr { raw: SyntaxNodePtr::new(node.syntax()), _ty: PhantomData }
     }

-    pub fn to_node(self, root: &SyntaxNode) -> &N {
+    pub fn to_node(self, root: &SyntaxNode) -> N {
         let syntax_node = self.raw.to_node(root);
         N::cast(syntax_node).unwrap()
     }
@@ -75,5 +76,5 @@ fn test_local_syntax_ptr() {
     let field = file.syntax().descendants().find_map(ast::NamedFieldDef::cast).unwrap();
     let ptr = SyntaxNodePtr::new(field.syntax());
     let field_syntax = ptr.to_node(file.syntax());
-    assert_eq!(field.syntax(), &*field_syntax);
+    assert_eq!(field.syntax(), &field_syntax);
 }
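The signature change is the point of this file: `to_node` now hands the resolved node back by value, so a pointer can be stored away without borrowing any tree. A small sketch of the round-trip, mirroring the test above and assuming `AstPtr::new` takes its node by reference, as the constructor body suggests:

    let field = file.syntax().descendants().find_map(ast::NamedFieldDef::cast).unwrap();
    let ptr: AstPtr<ast::NamedFieldDef> = AstPtr::new(&field);
    // The ptr stores only a range and a kind; resolve it again later,
    // possibly against a re-parsed tree with the same shape.
    let field_again: ast::NamedFieldDef = ptr.to_node(file.syntax());
    assert_eq!(field.syntax(), field_again.syntax());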
@@ -7,14 +7,13 @@
 //! modules just wraps its API.

 use std::{
-    borrow::Borrow,
     fmt::{self, Write},
     iter::successors,
     ops::RangeInclusive,
 };

 use ra_parser::ParseError;
-use rowan::{GreenNodeBuilder, TransparentNewType};
+use rowan::GreenNodeBuilder;

 use crate::{
     syntax_error::{SyntaxError, SyntaxErrorKind},
@@ -33,86 +32,8 @@ pub enum InsertPosition<T> {
     After(T),
 }

-/// Marker trait for CST and AST nodes
-pub trait SyntaxNodeWrapper: TransparentNewType<Repr = rowan::SyntaxNode> {}
-impl<T: TransparentNewType<Repr = rowan::SyntaxNode>> SyntaxNodeWrapper for T {}
-
-/// An owning smart pointer for CST or AST node.
-#[derive(PartialEq, Eq, Hash)]
-pub struct TreeArc<T: SyntaxNodeWrapper>(pub(crate) rowan::TreeArc<T>);
-
-impl<T: SyntaxNodeWrapper> Borrow<T> for TreeArc<T> {
-    fn borrow(&self) -> &T {
-        &*self
-    }
-}
-
-impl<T> TreeArc<T>
-where
-    T: SyntaxNodeWrapper,
-{
-    pub(crate) fn cast<U>(this: TreeArc<T>) -> TreeArc<U>
-    where
-        U: SyntaxNodeWrapper,
-    {
-        TreeArc(rowan::TreeArc::cast(this.0))
-    }
-}
-
-impl<T> std::ops::Deref for TreeArc<T>
-where
-    T: SyntaxNodeWrapper,
-{
-    type Target = T;
-    fn deref(&self) -> &T {
-        self.0.deref()
-    }
-}
-
-impl<T> PartialEq<T> for TreeArc<T>
-where
-    T: SyntaxNodeWrapper,
-    T: PartialEq<T>,
-{
-    fn eq(&self, other: &T) -> bool {
-        let t: &T = self;
-        t == other
-    }
-}
-
-impl<T> Clone for TreeArc<T>
-where
-    T: SyntaxNodeWrapper,
-{
-    fn clone(&self) -> TreeArc<T> {
-        TreeArc(self.0.clone())
-    }
-}
-
-impl<T> fmt::Debug for TreeArc<T>
-where
-    T: SyntaxNodeWrapper,
-    T: fmt::Debug,
-{
-    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Debug::fmt(&self.0, fmt)
-    }
-}
-
-#[derive(PartialEq, Eq, Hash)]
-#[repr(transparent)]
-pub struct SyntaxNode(pub(crate) rowan::SyntaxNode);
-unsafe impl TransparentNewType for SyntaxNode {
-    type Repr = rowan::SyntaxNode;
-}
-
-impl ToOwned for SyntaxNode {
-    type Owned = TreeArc<SyntaxNode>;
-    fn to_owned(&self) -> TreeArc<SyntaxNode> {
-        let ptr = TreeArc(self.0.to_owned());
-        TreeArc::cast(ptr)
-    }
-}
+#[derive(PartialEq, Eq, Hash, Clone)]
+pub struct SyntaxNode(pub(crate) rowan::cursor::SyntaxNode);

 impl fmt::Debug for SyntaxNode {
     fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
@@ -133,9 +54,9 @@ pub enum Direction {
 }

 impl SyntaxNode {
-    pub(crate) fn new(green: GreenNode) -> TreeArc<SyntaxNode> {
-        let ptr = TreeArc(rowan::SyntaxNode::new(green, None));
-        TreeArc::cast(ptr)
+    pub(crate) fn new(green: GreenNode) -> SyntaxNode {
+        let inner = rowan::cursor::SyntaxNode::new_root(green);
+        SyntaxNode(inner)
     }

     pub fn kind(&self) -> SyntaxKind {
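All of the deleted machinery existed to provide an owning pointer (`TreeArc`) distinct from a borrowed node. With rowan's cursor API the node itself is the owning handle: cloning it is cheap and keeps the underlying green tree alive, so `node.clone()` replaces `node.to_owned()`. A hypothetical sketch of what storing a node now looks like (the `Highlight` type is illustrative, not from this diff):

    struct Highlight {
        node: SyntaxNode, // owned handle; keeps the tree alive
    }

    fn remember(node: &SyntaxNode) -> Highlight {
        Highlight { node: node.clone() }
    }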
@@ -143,47 +64,47 @@ impl SyntaxNode {
     }

     pub fn range(&self) -> TextRange {
-        self.0.range()
+        self.0.text_range()
     }

     pub fn text(&self) -> SyntaxText {
         SyntaxText::new(self)
     }

-    pub fn parent(&self) -> Option<&SyntaxNode> {
-        self.0.parent().map(SyntaxNode::from_repr)
+    pub fn parent(&self) -> Option<SyntaxNode> {
+        self.0.parent().map(SyntaxNode)
     }

-    pub fn first_child(&self) -> Option<&SyntaxNode> {
-        self.0.first_child().map(SyntaxNode::from_repr)
+    pub fn first_child(&self) -> Option<SyntaxNode> {
+        self.0.first_child().map(SyntaxNode)
     }

     pub fn first_child_or_token(&self) -> Option<SyntaxElement> {
-        self.0.first_child_or_token().map(SyntaxElement::from)
+        self.0.first_child_or_token().map(SyntaxElement::new)
     }

-    pub fn last_child(&self) -> Option<&SyntaxNode> {
-        self.0.last_child().map(SyntaxNode::from_repr)
+    pub fn last_child(&self) -> Option<SyntaxNode> {
+        self.0.last_child().map(SyntaxNode)
     }

     pub fn last_child_or_token(&self) -> Option<SyntaxElement> {
-        self.0.last_child_or_token().map(SyntaxElement::from)
+        self.0.last_child_or_token().map(SyntaxElement::new)
     }

-    pub fn next_sibling(&self) -> Option<&SyntaxNode> {
-        self.0.next_sibling().map(SyntaxNode::from_repr)
+    pub fn next_sibling(&self) -> Option<SyntaxNode> {
+        self.0.next_sibling().map(SyntaxNode)
     }

     pub fn next_sibling_or_token(&self) -> Option<SyntaxElement> {
-        self.0.next_sibling_or_token().map(SyntaxElement::from)
+        self.0.next_sibling_or_token().map(SyntaxElement::new)
     }

-    pub fn prev_sibling(&self) -> Option<&SyntaxNode> {
-        self.0.prev_sibling().map(SyntaxNode::from_repr)
+    pub fn prev_sibling(&self) -> Option<SyntaxNode> {
+        self.0.prev_sibling().map(SyntaxNode)
     }

     pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement> {
-        self.0.prev_sibling_or_token().map(SyntaxElement::from)
+        self.0.prev_sibling_or_token().map(SyntaxElement::new)
     }

     pub fn children(&self) -> SyntaxNodeChildren {
@@ -195,18 +116,18 @@ impl SyntaxNode {
     }

     pub fn first_token(&self) -> Option<SyntaxToken> {
-        self.0.first_token().map(SyntaxToken::from)
+        self.0.first_token().map(SyntaxToken)
     }

     pub fn last_token(&self) -> Option<SyntaxToken> {
-        self.0.last_token().map(SyntaxToken::from)
+        self.0.last_token().map(SyntaxToken)
     }

-    pub fn ancestors(&self) -> impl Iterator<Item = &SyntaxNode> {
-        successors(Some(self), |&node| node.parent())
+    pub fn ancestors(&self) -> impl Iterator<Item = SyntaxNode> {
+        successors(Some(self.clone()), |node| node.parent())
     }

-    pub fn descendants(&self) -> impl Iterator<Item = &SyntaxNode> {
+    pub fn descendants(&self) -> impl Iterator<Item = SyntaxNode> {
         self.preorder().filter_map(|event| match event {
             WalkEvent::Enter(node) => Some(node),
             WalkEvent::Leave(_) => None,
@@ -220,8 +141,8 @@ impl SyntaxNode {
         })
     }

-    pub fn siblings(&self, direction: Direction) -> impl Iterator<Item = &SyntaxNode> {
-        successors(Some(self), move |&node| match direction {
+    pub fn siblings(&self, direction: Direction) -> impl Iterator<Item = SyntaxNode> {
+        successors(Some(self.clone()), move |node| match direction {
             Direction::Next => node.next_sibling(),
             Direction::Prev => node.prev_sibling(),
         })
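`ancestors` and `siblings` are two instances of one pattern: seed `std::iter::successors` with an owned clone and let the closure produce the next owned node. A self-contained illustration of the same pattern on plain data, with no ra_syntax types involved:

    use std::iter::successors;

    fn main() {
        // Walking `n / 2` stands in for walking `node.parent()`.
        let chain: Vec<u32> =
            successors(Some(64u32), |n| if *n > 1 { Some(n / 2) } else { None }).collect();
        assert_eq!(chain, vec![64, 32, 16, 8, 4, 2, 1]);
    }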
@@ -231,29 +152,29 @@ impl SyntaxNode {
         &self,
         direction: Direction,
     ) -> impl Iterator<Item = SyntaxElement> {
-        let me: SyntaxElement = self.into();
+        let me: SyntaxElement = self.clone().into();
         successors(Some(me), move |el| match direction {
             Direction::Next => el.next_sibling_or_token(),
             Direction::Prev => el.prev_sibling_or_token(),
         })
     }

-    pub fn preorder(&self) -> impl Iterator<Item = WalkEvent<&SyntaxNode>> {
+    pub fn preorder(&self) -> impl Iterator<Item = WalkEvent<SyntaxNode>> {
         self.0.preorder().map(|event| match event {
-            WalkEvent::Enter(n) => WalkEvent::Enter(SyntaxNode::from_repr(n)),
-            WalkEvent::Leave(n) => WalkEvent::Leave(SyntaxNode::from_repr(n)),
+            WalkEvent::Enter(n) => WalkEvent::Enter(SyntaxNode(n)),
+            WalkEvent::Leave(n) => WalkEvent::Leave(SyntaxNode(n)),
         })
     }

     pub fn preorder_with_tokens(&self) -> impl Iterator<Item = WalkEvent<SyntaxElement>> {
         self.0.preorder_with_tokens().map(|event| match event {
-            WalkEvent::Enter(n) => WalkEvent::Enter(n.into()),
-            WalkEvent::Leave(n) => WalkEvent::Leave(n.into()),
+            WalkEvent::Enter(n) => WalkEvent::Enter(SyntaxElement::new(n)),
+            WalkEvent::Leave(n) => WalkEvent::Leave(SyntaxElement::new(n)),
         })
     }

     pub fn memory_size_of_subtree(&self) -> usize {
-        self.0.memory_size_of_subtree()
+        0
     }

     pub fn debug_dump(&self) -> String {
@@ -290,11 +211,11 @@ impl SyntaxNode {
     ///
     /// This is a type-unsafe low-level editing API, if you need to use it,
     /// prefer to create a type-safe abstraction on top of it instead.
-    pub fn insert_children<'a>(
+    pub fn insert_children(
         &self,
-        position: InsertPosition<SyntaxElement<'_>>,
-        to_insert: impl Iterator<Item = SyntaxElement<'a>>,
-    ) -> TreeArc<SyntaxNode> {
+        position: InsertPosition<SyntaxElement>,
+        to_insert: impl Iterator<Item = SyntaxElement>,
+    ) -> SyntaxNode {
         let mut delta = TextUnit::default();
         let to_insert = to_insert.map(|element| {
             delta += element.text_len();
@@ -303,7 +224,7 @@ impl SyntaxNode {

         let old_children = self.0.green().children();

-        let new_children = match position {
+        let new_children = match &position {
             InsertPosition::First => {
                 to_insert.chain(old_children.iter().cloned()).collect::<Box<[_]>>()
             }
@@ -312,7 +233,7 @@ impl SyntaxNode {
             }
             InsertPosition::Before(anchor) | InsertPosition::After(anchor) => {
                 let take_anchor = if let InsertPosition::After(_) = position { 1 } else { 0 };
-                let split_at = self.position_of_child(anchor) + take_anchor;
+                let split_at = self.position_of_child(anchor.clone()) + take_anchor;
                 let (before, after) = old_children.split_at(split_at);
                 before
                     .iter()
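`insert_children` is still non-destructive: it builds new green children and returns a fresh root, now as a plain `SyntaxNode` instead of a `TreeArc`. A usage sketch, where `struct_node`, `last_field`, and `new_field` are illustrative stand-ins for nodes obtained elsewhere:

    let anchor: SyntaxElement = last_field.syntax().clone().into();
    let to_insert: SyntaxElement = new_field.syntax().clone().into();
    let updated: SyntaxNode =
        struct_node.insert_children(InsertPosition::After(anchor), std::iter::once(to_insert));
    // `struct_node` is untouched; `updated` is a new tree with the splice applied.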
@@ -330,13 +251,13 @@ impl SyntaxNode {
     ///
     /// This is a type-unsafe low-level editing API, if you need to use it,
     /// prefer to create a type-safe abstraction on top of it instead.
-    pub fn replace_children<'a>(
+    pub fn replace_children(
         &self,
-        to_delete: RangeInclusive<SyntaxElement<'_>>,
-        to_insert: impl Iterator<Item = SyntaxElement<'a>>,
-    ) -> TreeArc<SyntaxNode> {
-        let start = self.position_of_child(*to_delete.start());
-        let end = self.position_of_child(*to_delete.end());
+        to_delete: RangeInclusive<SyntaxElement>,
+        to_insert: impl Iterator<Item = SyntaxElement>,
+    ) -> SyntaxNode {
+        let start = self.position_of_child(to_delete.start().clone());
+        let end = self.position_of_child(to_delete.end().clone());
         let old_children = self.0.green().children();

         let new_children = old_children[..start]
@@ -348,7 +269,7 @@ impl SyntaxNode {
         self.with_children(new_children)
     }

-    fn with_children(&self, new_children: Box<[rowan::GreenElement]>) -> TreeArc<SyntaxNode> {
+    fn with_children(&self, new_children: Box<[rowan::GreenElement]>) -> SyntaxNode {
         let len = new_children.iter().map(|it| it.text_len()).sum::<TextUnit>();
         let new_node = GreenNode::new(rowan::SyntaxKind(self.kind() as u16), new_children);
         let new_file_node = self.replace_with(new_node);
@@ -364,7 +285,7 @@ impl SyntaxNode {
     fn position_of_child(&self, child: SyntaxElement) -> usize {
         self.children_with_tokens()
             .position(|it| it == child)
-            .expect("elemetn is not a child of current element")
+            .expect("element is not a child of current element")
     }
 }

@@ -377,11 +298,11 @@ fn to_green_element(element: SyntaxElement) -> rowan::GreenElement {
     }
 }

-#[derive(Clone, Copy, PartialEq, Eq, Hash)]
-pub struct SyntaxToken<'a>(pub(crate) rowan::SyntaxToken<'a>);
+#[derive(Clone, PartialEq, Eq, Hash)]
+pub struct SyntaxToken(pub(crate) rowan::cursor::SyntaxToken);

 //FIXME: always output text
-impl<'a> fmt::Debug for SyntaxToken<'a> {
+impl fmt::Debug for SyntaxToken {
     fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
         write!(fmt, "{:?}@{:?}", self.kind(), self.range())?;
         if self.text().len() < 25 {
@@ -398,60 +319,54 @@ impl<'a> fmt::Debug for SyntaxToken<'a> {
     }
 }

-impl<'a> fmt::Display for SyntaxToken<'a> {
+impl fmt::Display for SyntaxToken {
     fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
         fmt::Display::fmt(self.text(), fmt)
     }
 }

-impl<'a> From<rowan::SyntaxToken<'a>> for SyntaxToken<'a> {
-    fn from(t: rowan::SyntaxToken<'a>) -> Self {
-        SyntaxToken(t)
-    }
-}
-
-impl<'a> SyntaxToken<'a> {
+impl SyntaxToken {
     pub fn kind(&self) -> SyntaxKind {
         self.0.kind().0.into()
     }

-    pub fn text(&self) -> &'a SmolStr {
+    pub fn text(&self) -> &SmolStr {
         self.0.text()
     }

     pub fn range(&self) -> TextRange {
-        self.0.range()
+        self.0.text_range()
     }

-    pub fn parent(&self) -> &'a SyntaxNode {
-        SyntaxNode::from_repr(self.0.parent())
+    pub fn parent(&self) -> SyntaxNode {
+        SyntaxNode(self.0.parent())
     }

-    pub fn next_sibling_or_token(&self) -> Option<SyntaxElement<'a>> {
-        self.0.next_sibling_or_token().map(SyntaxElement::from)
+    pub fn next_sibling_or_token(&self) -> Option<SyntaxElement> {
+        self.0.next_sibling_or_token().map(SyntaxElement::new)
     }

-    pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement<'a>> {
-        self.0.prev_sibling_or_token().map(SyntaxElement::from)
+    pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement> {
+        self.0.prev_sibling_or_token().map(SyntaxElement::new)
     }

     pub fn siblings_with_tokens(
         &self,
         direction: Direction,
-    ) -> impl Iterator<Item = SyntaxElement<'a>> {
-        let me: SyntaxElement = (*self).into();
+    ) -> impl Iterator<Item = SyntaxElement> {
+        let me: SyntaxElement = self.clone().into();
         successors(Some(me), move |el| match direction {
             Direction::Next => el.next_sibling_or_token(),
             Direction::Prev => el.prev_sibling_or_token(),
         })
     }

-    pub fn next_token(&self) -> Option<SyntaxToken<'a>> {
-        self.0.next_token().map(SyntaxToken::from)
+    pub fn next_token(&self) -> Option<SyntaxToken> {
+        self.0.next_token().map(SyntaxToken)
     }

-    pub fn prev_token(&self) -> Option<SyntaxToken<'a>> {
-        self.0.prev_token().map(SyntaxToken::from)
+    pub fn prev_token(&self) -> Option<SyntaxToken> {
+        self.0.prev_token().map(SyntaxToken)
     }

     pub(crate) fn replace_with(&self, new_token: GreenToken) -> GreenNode {
@@ -459,13 +374,25 @@ impl<'a> SyntaxToken<'a> {
     }
 }

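Token navigation mirrors the node API: owned values come back, and the old `Copy` copies become explicit clones. A short sketch, assuming `token` is some `SyntaxToken` from a parsed tree:

    let parent: SyntaxNode = token.parent();
    if let Some(next) = token.next_token() {
        // Tokens of a file are laid out contiguously, so the next token
        // cannot start before this one ends.
        assert!(token.range().end() <= next.range().start());
        assert_eq!(parent.range().start(), parent.range().start());
    }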
-#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]
-pub enum SyntaxElement<'a> {
-    Node(&'a SyntaxNode),
-    Token(SyntaxToken<'a>),
+#[derive(Debug, PartialEq, Eq, Hash, Clone)]
+pub enum SyntaxElement {
+    Node(SyntaxNode),
+    Token(SyntaxToken),
 }

-impl<'a> fmt::Display for SyntaxElement<'a> {
+impl From<SyntaxNode> for SyntaxElement {
+    fn from(node: SyntaxNode) -> Self {
+        SyntaxElement::Node(node)
+    }
+}
+
+impl From<SyntaxToken> for SyntaxElement {
+    fn from(token: SyntaxToken) -> Self {
+        SyntaxElement::Token(token)
+    }
+}
+
+impl fmt::Display for SyntaxElement {
     fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
         match self {
             SyntaxElement::Node(it) => fmt::Display::fmt(it, fmt),
@@ -474,7 +401,14 @@ impl<'a> fmt::Display for SyntaxElement<'a> {
     }
 }

-impl<'a> SyntaxElement<'a> {
+impl SyntaxElement {
+    pub(crate) fn new(el: rowan::cursor::SyntaxElement) -> Self {
+        match el {
+            rowan::cursor::SyntaxElement::Node(it) => SyntaxElement::Node(SyntaxNode(it)),
+            rowan::cursor::SyntaxElement::Token(it) => SyntaxElement::Token(SyntaxToken(it)),
+        }
+    }
+
     pub fn kind(&self) -> SyntaxKind {
         match self {
             SyntaxElement::Node(it) => it.kind(),
@@ -482,42 +416,49 @@ impl<'a> SyntaxElement<'a> {
         }
     }

-    pub fn as_node(&self) -> Option<&'a SyntaxNode> {
+    pub fn as_node(&self) -> Option<&SyntaxNode> {
         match self {
-            SyntaxElement::Node(node) => Some(*node),
+            SyntaxElement::Node(node) => Some(node),
             SyntaxElement::Token(_) => None,
         }
     }

-    pub fn as_token(&self) -> Option<SyntaxToken<'a>> {
+    pub fn as_token(&self) -> Option<&SyntaxToken> {
         match self {
             SyntaxElement::Node(_) => None,
-            SyntaxElement::Token(token) => Some(*token),
+            SyntaxElement::Token(token) => Some(token),
         }
     }

-    pub fn next_sibling_or_token(&self) -> Option<SyntaxElement<'a>> {
+    pub fn next_sibling_or_token(&self) -> Option<SyntaxElement> {
         match self {
             SyntaxElement::Node(it) => it.next_sibling_or_token(),
             SyntaxElement::Token(it) => it.next_sibling_or_token(),
         }
     }

-    pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement<'a>> {
+    pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement> {
         match self {
             SyntaxElement::Node(it) => it.prev_sibling_or_token(),
             SyntaxElement::Token(it) => it.prev_sibling_or_token(),
         }
     }

-    pub fn ancestors(&self) -> impl Iterator<Item = &'a SyntaxNode> {
+    pub fn ancestors(&self) -> impl Iterator<Item = SyntaxNode> {
         match self {
-            SyntaxElement::Node(it) => it,
+            SyntaxElement::Node(it) => it.clone(),
             SyntaxElement::Token(it) => it.parent(),
         }
         .ancestors()
     }

+    pub fn range(&self) -> TextRange {
+        match self {
+            SyntaxElement::Node(it) => it.range(),
+            SyntaxElement::Token(it) => it.range(),
+        }
+    }
+
     fn text_len(&self) -> TextUnit {
         match self {
             SyntaxElement::Node(node) => node.0.green().text_len(),
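With the `From` impls above, building a `SyntaxElement` is an explicit conversion from an owned node or token, and the `as_*` accessors now borrow instead of copying. A small sketch, assuming `node: &SyntaxNode`:

    let el: SyntaxElement = node.clone().into();
    match (el.as_node(), el.as_token()) {
        (Some(n), None) => println!("node: {:?}", n.kind()),
        (None, Some(t)) => println!("token: {:?}", t.kind()),
        _ => unreachable!("an element is exactly one of node or token"),
    }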
@@ -526,55 +467,23 @@ impl<'a> SyntaxElement<'a> {
     }
 }

-impl<'a> From<rowan::SyntaxElement<'a>> for SyntaxElement<'a> {
-    fn from(el: rowan::SyntaxElement<'a>) -> Self {
-        match el {
-            rowan::SyntaxElement::Node(n) => SyntaxElement::Node(SyntaxNode::from_repr(n)),
-            rowan::SyntaxElement::Token(t) => SyntaxElement::Token(t.into()),
-        }
-    }
-}
-
-impl<'a> From<&'a SyntaxNode> for SyntaxElement<'a> {
-    fn from(node: &'a SyntaxNode) -> SyntaxElement<'a> {
-        SyntaxElement::Node(node)
-    }
-}
-
-impl<'a> From<SyntaxToken<'a>> for SyntaxElement<'a> {
-    fn from(token: SyntaxToken<'a>) -> SyntaxElement<'a> {
-        SyntaxElement::Token(token)
-    }
-}
-
-impl<'a> SyntaxElement<'a> {
-    pub fn range(&self) -> TextRange {
-        match self {
-            SyntaxElement::Node(it) => it.range(),
-            SyntaxElement::Token(it) => it.range(),
-        }
-    }
-}
-
-#[derive(Debug)]
-pub struct SyntaxNodeChildren<'a>(rowan::SyntaxNodeChildren<'a>);
-
-impl<'a> Iterator for SyntaxNodeChildren<'a> {
-    type Item = &'a SyntaxNode;
-
-    fn next(&mut self) -> Option<&'a SyntaxNode> {
-        self.0.next().map(SyntaxNode::from_repr)
-    }
-}
-
-#[derive(Debug)]
-pub struct SyntaxElementChildren<'a>(rowan::SyntaxElementChildren<'a>);
-
-impl<'a> Iterator for SyntaxElementChildren<'a> {
-    type Item = SyntaxElement<'a>;
-
-    fn next(&mut self) -> Option<SyntaxElement<'a>> {
-        self.0.next().map(SyntaxElement::from)
-    }
-}
+#[derive(Clone, Debug)]
+pub struct SyntaxNodeChildren(rowan::cursor::SyntaxNodeChildren);
+
+impl Iterator for SyntaxNodeChildren {
+    type Item = SyntaxNode;
+    fn next(&mut self) -> Option<SyntaxNode> {
+        self.0.next().map(SyntaxNode)
+    }
+}
+
+#[derive(Clone, Debug)]
+pub struct SyntaxElementChildren(rowan::cursor::SyntaxElementChildren);
+
+impl Iterator for SyntaxElementChildren {
+    type Item = SyntaxElement;
+    fn next(&mut self) -> Option<SyntaxElement> {
+        self.0.next().map(SyntaxElement::new)
+    }
+}
@@ -601,7 +510,7 @@ impl SyntaxTreeBuilder {
         if cfg!(debug_assertions) {
             crate::validation::validate_block_structure(&node);
         }
-        Parse::new(node, errors)
+        Parse::new(node.0.green().clone(), errors)
     }

     pub fn token(&mut self, kind: SyntaxKind, text: SmolStr) {

@@ -16,29 +16,36 @@ impl<'a> SyntaxText<'a> {
         SyntaxText { node, range: node.range() }
     }

-    pub fn chunks(&self) -> impl Iterator<Item = &'a str> {
+    pub fn chunks(&self) -> impl Iterator<Item = SmolStr> {
         let range = self.range;
         self.node.descendants_with_tokens().filter_map(move |el| match el {
             SyntaxElement::Token(t) => {
                 let text = t.text();
                 let range = range.intersection(&t.range())?;
-                let range = range - t.range().start();
-                Some(&text[range])
+                let res = if range == t.range() {
+                    t.text().clone()
+                } else {
+                    let range = range - t.range().start();
+                    text[range].into()
+                };
+                Some(res)
             }
             SyntaxElement::Node(_) => None,
         })
     }

     pub fn push_to(&self, buf: &mut String) {
-        self.chunks().for_each(|it| buf.push_str(it));
+        self.chunks().for_each(|it| buf.push_str(it.as_str()));
     }

     pub fn to_string(&self) -> String {
-        self.chunks().collect()
+        let mut buf = String::new();
+        self.push_to(&mut buf);
+        buf
     }

     pub fn to_smol_string(&self) -> SmolStr {
-        self.chunks().collect()
+        self.to_string().into()
     }

     pub fn contains(&self, c: char) -> bool {
@@ -52,7 +59,7 @@ impl<'a> SyntaxText<'a> {
                 let pos: TextUnit = (pos as u32).into();
                 return Some(acc + pos);
             }
-            acc += TextUnit::of_str(chunk);
+            acc += TextUnit::of_str(chunk.as_str());
         }
         None
     }
@@ -97,7 +104,7 @@ impl<'a> SyntaxText<'a> {
         let mut start: TextUnit = 0.into();
         let offset = offset.into();
         for chunk in self.chunks() {
-            let end = start + TextUnit::of_str(chunk);
+            let end = start + TextUnit::of_str(chunk.as_str());
             if start <= offset && offset < end {
                 let off: usize = u32::from(offset - start) as usize;
                 return Some(chunk[off..].chars().next().unwrap());
@@ -129,7 +136,7 @@ impl From<SyntaxText<'_>> for String {
 impl PartialEq<str> for SyntaxText<'_> {
     fn eq(&self, mut rhs: &str) -> bool {
         for chunk in self.chunks() {
-            if !rhs.starts_with(chunk) {
+            if !rhs.starts_with(chunk.as_str()) {
                 return false;
             }
             rhs = &rhs[chunk.len()..];
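The practical consequence of `chunks()` yielding owned `SmolStr`s is visible in `push_to`: callers go through `as_str()` where they previously received a `&str` directly. A sketch, assuming `text` is a `SyntaxText`:

    let mut buf = String::new();
    for chunk in text.chunks() {
        buf.push_str(chunk.as_str());
    }
    assert_eq!(buf, text.to_string());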
@@ -19,13 +19,13 @@ pub(crate) fn validate(file: &SourceFile) -> Vec<SyntaxError> {
             .visit::<ast::Literal, _>(validate_literal)
             .visit::<ast::Block, _>(block::validate_block_node)
             .visit::<ast::FieldExpr, _>(field_expr::validate_field_expr_node)
-            .accept(node);
+            .accept(&node);
     }
     errors
 }

 // FIXME: kill duplication
-fn validate_literal(literal: &ast::Literal, acc: &mut Vec<SyntaxError>) {
+fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
     let token = literal.token();
     let text = token.text().as_str();
     match token.kind() {

@@ -5,7 +5,7 @@ use crate::{
     SyntaxKind::*,
 };

-pub(crate) fn validate_block_node(node: &ast::Block, errors: &mut Vec<SyntaxError>) {
+pub(crate) fn validate_block_node(node: ast::Block, errors: &mut Vec<SyntaxError>) {
     if let Some(parent) = node.syntax().parent() {
         match parent.kind() {
             FN_DEF => return,

@@ -4,7 +4,7 @@ use crate::{
     SyntaxErrorKind::*,
 };

-pub(crate) fn validate_field_expr_node(node: &ast::FieldExpr, errors: &mut Vec<SyntaxError>) {
+pub(crate) fn validate_field_expr_node(node: ast::FieldExpr, errors: &mut Vec<SyntaxError>) {
     if let Some(FieldKind::Index(idx)) = node.field_access() {
         if idx.text().chars().any(|c| c < '0' || c > '9') {
             errors.push(SyntaxError::new(InvalidTupleIndexFormat, idx.range()));
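Validators now take their AST node by value, consistent with the owned-node API; nodes are cheap handles, so there is no borrow to thread through the visitor. A hypothetical extra validator following the same signature shape (not part of this diff; the check itself is illustrative, though `SyntaxText::contains` is defined earlier in this commit):

    fn validate_no_tabs(node: ast::FieldExpr, errors: &mut Vec<SyntaxError>) {
        if node.syntax().text().contains('\t') {
            // push a SyntaxError describing the offending range here
        }
    }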